Switch config to JSON

This commit is contained in:
Thales Maciel 2026-02-06 18:53:02 -03:00
parent 123dc0160b
commit b296491703
6 changed files with 127 additions and 36 deletions

16
Makefile Normal file
View file

@@ -0,0 +1,16 @@
# Makefile for lel: builds the daemon (leld) and its control CLI (lelctl).
# Note: recipe lines below are indented with real tabs, as required by make.

BIN_DIR := .
LELD := $(BIN_DIR)/leld
LELCTL := $(BIN_DIR)/lelctl
# Default config path; matches DefaultPath() in the config package.
CONFIG := $(HOME)/.config/lel/config.json

.PHONY: build run clean

# Build both binaries into BIN_DIR.
build:
	go build -o $(LELD) ./cmd/leld
	go build -o $(LELCTL) ./cmd/lelctl

# Run the daemon against the default config (assumes `make build` was run).
run:
	$(LELD) --config $(CONFIG)

# Remove the built binaries.
clean:
	rm -f $(LELD) $(LELCTL)

View file

@@ -1,6 +1,6 @@
# lel # lel
X11 transcription daemon that records audio and runs Whisper, logging the transcript. X11 transcription daemon that records audio, runs Whisper, logs the transcript, and can optionally run AI post-processing before injecting text.
## Requirements ## Requirements
@@ -13,26 +13,37 @@ X11 transcription daemon that records audio and runs Whisper, logging the transc
## Build ## Build
```bash ```bash
go build -o leld ./cmd/leld make build
go build -o lelctl ./cmd/lelctl
``` ```
## Config ## Config
Create `~/.config/lel/config.toml`: Create `~/.config/lel/config.json`:
```toml ```json
hotkey = "Cmd+m" {
ffmpeg_input = "pulse:default" "hotkey": "Cmd+m",
whisper_model = "base" "ffmpeg_input": "pulse:default",
whisper_lang = "en" "ffmpeg_path": "",
whisper_device = "cpu" "whisper_model": "base",
whisper_extra_args = "" "whisper_lang": "en",
record_timeout_sec = 120 "whisper_device": "cpu",
whisper_timeout_sec = 300 "whisper_extra_args": "",
segment_sec = 5 "record_timeout_sec": 120,
streaming = false "whisper_timeout_sec": 300,
injection_backend = "clipboard" "segment_sec": 5,
"streaming": false,
"injection_backend": "clipboard",
"ai_enabled": true,
"ai_provider": "ollama",
"ai_model": "llama3.2:3b",
"ai_temperature": 0.0,
"ai_system_prompt_file": "",
"ai_base_url": "http://localhost:11434",
"ai_api_key": "",
"ai_timeout_sec": 20
}
``` ```
Env overrides: Env overrides:
@@ -41,11 +52,14 @@ Env overrides:
- `WHISPER_FFMPEG_IN` - `WHISPER_FFMPEG_IN`
- `WHISPER_STREAM`, `WHISPER_SEGMENT_SEC`, `WHISPER_TIMEOUT_SEC` - `WHISPER_STREAM`, `WHISPER_SEGMENT_SEC`, `WHISPER_TIMEOUT_SEC`
- `LEL_RECORD_TIMEOUT_SEC`, `LEL_HOTKEY`, `LEL_INJECTION_BACKEND` - `LEL_RECORD_TIMEOUT_SEC`, `LEL_HOTKEY`, `LEL_INJECTION_BACKEND`
- `LEL_FFMPEG_PATH`
- `LEL_AI_ENABLED`, `LEL_AI_PROVIDER`, `LEL_AI_MODEL`, `LEL_AI_TEMPERATURE`, `LEL_AI_SYSTEM_PROMPT_FILE`
- `LEL_AI_BASE_URL`, `LEL_AI_API_KEY`, `LEL_AI_TIMEOUT_SEC`
## Run manually ## Run manually
```bash ```bash
./leld --config ~/.config/lel/config.toml ./leld --config ~/.config/lel/config.json
``` ```
Disable the tray icon: Disable the tray icon:
@@ -70,11 +84,26 @@ systemctl --user enable --now lel
- Press it again to stop and transcribe. - Press it again to stop and transcribe.
- The transcript is logged to stderr. - The transcript is logged to stderr.
Execution flow (single in-flight state machine):
- `recording` -> `transcribing` -> `processing` (optional) -> `outputting` -> `idle`
Injection backends: Injection backends:
- `clipboard`: copy to clipboard and inject via Ctrl+V (requires `xclip` + `xdotool`) - `clipboard`: copy to clipboard and inject via Ctrl+V (requires `xclip` + `xdotool`)
- `injection`: type the text with simulated keypresses (requires `xdotool`) - `injection`: type the text with simulated keypresses (requires `xdotool`)
AI providers:
- `ollama`: calls the local Ollama HTTP API (`/api/generate`)
- `openai_compat`: calls a chat-completions compatible API (`/v1/chat/completions`)
Dependency checks:
- Recording requires `ffmpeg` (or set `ffmpeg_path`)
- Transcribing uses the `whisper` CLI
- Outputting requires `xclip` (and `xdotool` for injection backends)
Control: Control:
```bash ```bash

1
go.mod
View file

@@ -3,7 +3,6 @@ module lel
go 1.25.5 go 1.25.5
require ( require (
github.com/BurntSushi/toml v1.6.0
github.com/BurntSushi/xgb v0.0.0-20210121224620-deaf085860bc github.com/BurntSushi/xgb v0.0.0-20210121224620-deaf085860bc
github.com/getlantern/systray v1.2.2 github.com/getlantern/systray v1.2.2
) )

2
go.sum
View file

@@ -1,5 +1,3 @@
github.com/BurntSushi/toml v1.6.0 h1:dRaEfpa2VI55EwlIW72hMRHdWouJeRF7TPYhI+AUQjk=
github.com/BurntSushi/toml v1.6.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
github.com/BurntSushi/xgb v0.0.0-20210121224620-deaf085860bc h1:7D+Bh06CRPCJO3gr2F7h1sriovOZ8BMhca2Rg85c2nk= github.com/BurntSushi/xgb v0.0.0-20210121224620-deaf085860bc h1:7D+Bh06CRPCJO3gr2F7h1sriovOZ8BMhca2Rg85c2nk=
github.com/BurntSushi/xgb v0.0.0-20210121224620-deaf085860bc/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/BurntSushi/xgb v0.0.0-20210121224620-deaf085860bc/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=

View file

@@ -1,32 +1,40 @@
package config package config
import ( import (
"encoding/json"
"errors" "errors"
"os" "os"
"path/filepath" "path/filepath"
"strconv" "strconv"
"strings" "strings"
"github.com/BurntSushi/toml"
) )
type Config struct { type Config struct {
Hotkey string `toml:"hotkey"` Hotkey string `json:"hotkey"`
FfmpegInput string `toml:"ffmpeg_input"` FfmpegInput string `json:"ffmpeg_input"`
WhisperModel string `toml:"whisper_model"` WhisperModel string `json:"whisper_model"`
WhisperLang string `toml:"whisper_lang"` WhisperLang string `json:"whisper_lang"`
WhisperDevice string `toml:"whisper_device"` WhisperDevice string `json:"whisper_device"`
WhisperExtraArgs string `toml:"whisper_extra_args"` WhisperExtraArgs string `json:"whisper_extra_args"`
RecordTimeoutSec int `toml:"record_timeout_sec"` RecordTimeoutSec int `json:"record_timeout_sec"`
WhisperTimeoutSec int `toml:"whisper_timeout_sec"` WhisperTimeoutSec int `json:"whisper_timeout_sec"`
SegmentSec int `toml:"segment_sec"` SegmentSec int `json:"segment_sec"`
Streaming bool `toml:"streaming"` Streaming bool `json:"streaming"`
InjectionBackend string `toml:"injection_backend"` InjectionBackend string `json:"injection_backend"`
AIEnabled bool `json:"ai_enabled"`
AIProvider string `json:"ai_provider"`
AIModel string `json:"ai_model"`
AITemperature float64 `json:"ai_temperature"`
AISystemPromptFile string `json:"ai_system_prompt_file"`
AIBaseURL string `json:"ai_base_url"`
AIAPIKey string `json:"ai_api_key"`
AITimeoutSec int `json:"ai_timeout_sec"`
} }
func DefaultPath() string { func DefaultPath() string {
home, _ := os.UserHomeDir() home, _ := os.UserHomeDir()
return filepath.Join(home, ".config", "lel", "config.toml") return filepath.Join(home, ".config", "lel", "config.json")
} }
func Defaults() Config { func Defaults() Config {
@@ -42,6 +50,15 @@ func Defaults() Config {
SegmentSec: 5, SegmentSec: 5,
Streaming: false, Streaming: false,
InjectionBackend: "clipboard", InjectionBackend: "clipboard",
AIEnabled: false,
AIProvider: "ollama",
AIModel: "llama3.2:3b",
AITemperature: 0.0,
AISystemPromptFile: "",
AIBaseURL: "http://localhost:11434",
AIAPIKey: "",
AITimeoutSec: 20,
} }
} }
@@ -53,7 +70,11 @@ func Load(path string) (Config, error) {
} }
if _, err := os.Stat(path); err == nil { if _, err := os.Stat(path); err == nil {
if _, err := toml.DecodeFile(path, &cfg); err != nil { data, err := os.ReadFile(path)
if err != nil {
return cfg, err
}
if err := json.Unmarshal(data, &cfg); err != nil {
return cfg, err return cfg, err
} }
} }
@@ -113,6 +134,34 @@ func applyEnv(cfg *Config) {
if v := os.Getenv("LEL_INJECTION_BACKEND"); v != "" { if v := os.Getenv("LEL_INJECTION_BACKEND"); v != "" {
cfg.InjectionBackend = v cfg.InjectionBackend = v
} }
if v := os.Getenv("LEL_AI_ENABLED"); v != "" {
cfg.AIEnabled = parseBool(v)
}
if v := os.Getenv("LEL_AI_PROVIDER"); v != "" {
cfg.AIProvider = v
}
if v := os.Getenv("LEL_AI_MODEL"); v != "" {
cfg.AIModel = v
}
if v := os.Getenv("LEL_AI_TEMPERATURE"); v != "" {
if n, err := strconv.ParseFloat(v, 64); err == nil {
cfg.AITemperature = n
}
}
if v := os.Getenv("LEL_AI_SYSTEM_PROMPT_FILE"); v != "" {
cfg.AISystemPromptFile = v
}
if v := os.Getenv("LEL_AI_BASE_URL"); v != "" {
cfg.AIBaseURL = v
}
if v := os.Getenv("LEL_AI_API_KEY"); v != "" {
cfg.AIAPIKey = v
}
if v := os.Getenv("LEL_AI_TIMEOUT_SEC"); v != "" {
if n, err := strconv.Atoi(v); err == nil {
cfg.AITimeoutSec = n
}
}
} }
func parseBool(v string) bool { func parseBool(v string) bool {

View file

@@ -4,7 +4,7 @@ After=default.target
[Service] [Service]
Type=simple Type=simple
ExecStart=%h/.local/bin/leld --config %h/.config/lel/config.toml ExecStart=%h/.local/bin/leld --config %h/.config/lel/config.json
Restart=on-failure Restart=on-failure
RestartSec=2 RestartSec=2