Migrate to Python daemon
This commit is contained in:
parent
49ef349d48
commit
d81f3dbffe
42 changed files with 660 additions and 1816 deletions
46
src/aiprocess.py
Normal file
46
src/aiprocess.py
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
import ollama
|
||||
|
||||
|
||||
def load_system_prompt(path: str | None) -> str:
    """Return the system prompt text, stripped of surrounding whitespace.

    Reads from *path* when it is truthy; otherwise falls back to the
    ``system_prompt.txt`` file bundled next to this module.
    """
    # Select the prompt file first, then share a single read-and-strip path.
    prompt_file = Path(path) if path else Path(__file__).parent / "system_prompt.txt"
    return prompt_file.read_text(encoding="utf-8").strip()
|
||||
|
||||
|
||||
@dataclass
class AIConfig:
    """Configuration for an AI text-processing backend."""

    # Backend name; only "ollama" is accepted by build_processor.
    provider: str
    # Model identifier forwarded to the backend's generate call.
    model: str
    # Sampling temperature forwarded in the generate options.
    temperature: float
    # Path to a system prompt file; falsy means use the bundled default
    # (see load_system_prompt).
    system_prompt_file: str
    # Host URL handed to ollama.Client.
    base_url: str
    # NOTE(review): unused by the visible code — presumably for non-ollama
    # providers; confirm before removing.
    api_key: str
    # NOTE(review): unused by the visible code — looks intended as the
    # client/request timeout; confirm it is wired up somewhere.
    timeout_sec: int
|
||||
|
||||
|
||||
class OllamaProcessor:
    """Processes text through an Ollama server using a fixed system prompt."""

    def __init__(self, cfg: AIConfig):
        """Build the client and eagerly load the system prompt.

        Raises whatever ``Path.read_text`` raises if the prompt file is
        missing or unreadable.
        """
        self.cfg = cfg
        # Falsy system_prompt_file falls back to the bundled system_prompt.txt.
        self.system = load_system_prompt(cfg.system_prompt_file)
        # Fix: cfg.timeout_sec was declared but never used; ollama.Client
        # forwards extra kwargs (including timeout) to its HTTP client.
        self.client = ollama.Client(host=cfg.base_url, timeout=cfg.timeout_sec)

    def process(self, text: str) -> str:
        """Return the model's reply to *text*, stripped; "" when absent."""
        resp = self.client.generate(
            model=self.cfg.model,
            prompt=text,
            system=self.system,
            options={"temperature": self.cfg.temperature},
        )
        # The response mapping may lack "response" or carry None; normalize
        # both to an empty string before stripping.
        return (resp.get("response") or "").strip()
|
||||
|
||||
|
||||
def build_processor(cfg: AIConfig) -> OllamaProcessor:
    """Return a processor for *cfg*.

    Only the "ollama" provider (case- and whitespace-insensitive) is
    supported; anything else raises ``ValueError``.
    """
    if cfg.provider.strip().lower() == "ollama":
        return OllamaProcessor(cfg)
    raise ValueError(f"unsupported ai provider: {cfg.provider}")
|
||||
Loading…
Add table
Add a link
Reference in a new issue