Switch config to JSON

This commit is contained in:
Thales Maciel 2026-02-06 18:53:02 -03:00
parent 123dc0160b
commit b296491703
6 changed files with 127 additions and 36 deletions

View file

@@ -1,32 +1,40 @@
package config
import (
"encoding/json"
"errors"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/BurntSushi/toml"
)
type Config struct {
Hotkey string `toml:"hotkey"`
FfmpegInput string `toml:"ffmpeg_input"`
WhisperModel string `toml:"whisper_model"`
WhisperLang string `toml:"whisper_lang"`
WhisperDevice string `toml:"whisper_device"`
WhisperExtraArgs string `toml:"whisper_extra_args"`
RecordTimeoutSec int `toml:"record_timeout_sec"`
WhisperTimeoutSec int `toml:"whisper_timeout_sec"`
SegmentSec int `toml:"segment_sec"`
Streaming bool `toml:"streaming"`
InjectionBackend string `toml:"injection_backend"`
Hotkey string `json:"hotkey"`
FfmpegInput string `json:"ffmpeg_input"`
WhisperModel string `json:"whisper_model"`
WhisperLang string `json:"whisper_lang"`
WhisperDevice string `json:"whisper_device"`
WhisperExtraArgs string `json:"whisper_extra_args"`
RecordTimeoutSec int `json:"record_timeout_sec"`
WhisperTimeoutSec int `json:"whisper_timeout_sec"`
SegmentSec int `json:"segment_sec"`
Streaming bool `json:"streaming"`
InjectionBackend string `json:"injection_backend"`
AIEnabled bool `json:"ai_enabled"`
AIProvider string `json:"ai_provider"`
AIModel string `json:"ai_model"`
AITemperature float64 `json:"ai_temperature"`
AISystemPromptFile string `json:"ai_system_prompt_file"`
AIBaseURL string `json:"ai_base_url"`
AIAPIKey string `json:"ai_api_key"`
AITimeoutSec int `json:"ai_timeout_sec"`
}
func DefaultPath() string {
home, _ := os.UserHomeDir()
return filepath.Join(home, ".config", "lel", "config.toml")
return filepath.Join(home, ".config", "lel", "config.json")
}
// Defaults returns the built-in configuration used before the config
// file and environment overrides are applied.
// NOTE(review): this is a diff fragment — the hunk header below elides
// the defaults for the earlier (non-AI) fields; only the tail of the
// literal is visible here.
func Defaults() Config {
@ -42,6 +50,15 @@ func Defaults() Config {
SegmentSec: 5,
Streaming: false,
InjectionBackend: "clipboard",
// AI post-processing is disabled by default and, when enabled, targets
// a local Ollama instance (provider "ollama" at localhost:11434).
AIEnabled: false,
AIProvider: "ollama",
AIModel: "llama3.2:3b",
AITemperature: 0.0,
AISystemPromptFile: "",
AIBaseURL: "http://localhost:11434",
AIAPIKey: "",
AITimeoutSec: 20,
}
}
@ -53,7 +70,11 @@ func Load(path string) (Config, error) {
}
// If the config file exists, decode it over the defaults already in
// cfg; a missing file is not an error (defaults and env apply instead).
if _, err := os.Stat(path); err == nil {
// NOTE(review): the toml.DecodeFile line below is the REMOVED side of
// this diff; the os.ReadFile + json.Unmarshal lines that follow are its
// JSON replacement. As rendered (both present) this would not compile.
if _, err := toml.DecodeFile(path, &cfg); err != nil {
data, err := os.ReadFile(path)
if err != nil {
return cfg, err
}
if err := json.Unmarshal(data, &cfg); err != nil {
return cfg, err
}
}
@ -113,6 +134,34 @@ func applyEnv(cfg *Config) {
if v := os.Getenv("LEL_INJECTION_BACKEND"); v != "" {
cfg.InjectionBackend = v
}
// AI overrides added in this commit: each LEL_AI_* variable, when set
// to a non-empty value, replaces the corresponding config field.
if v := os.Getenv("LEL_AI_ENABLED"); v != "" {
cfg.AIEnabled = parseBool(v)
}
if v := os.Getenv("LEL_AI_PROVIDER"); v != "" {
cfg.AIProvider = v
}
if v := os.Getenv("LEL_AI_MODEL"); v != "" {
cfg.AIModel = v
}
// Numeric overrides apply only when the value parses; a malformed value
// is silently ignored and the existing setting is kept.
if v := os.Getenv("LEL_AI_TEMPERATURE"); v != "" {
if n, err := strconv.ParseFloat(v, 64); err == nil {
cfg.AITemperature = n
}
}
if v := os.Getenv("LEL_AI_SYSTEM_PROMPT_FILE"); v != "" {
cfg.AISystemPromptFile = v
}
if v := os.Getenv("LEL_AI_BASE_URL"); v != "" {
cfg.AIBaseURL = v
}
if v := os.Getenv("LEL_AI_API_KEY"); v != "" {
cfg.AIAPIKey = v
}
if v := os.Getenv("LEL_AI_TIMEOUT_SEC"); v != "" {
if n, err := strconv.Atoi(v); err == nil {
cfg.AITimeoutSec = n
}
}
}
func parseBool(v string) bool {