Add pipeline engine and remove legacy compatibility paths

This commit is contained in:
Thales Maciel 2026-02-25 22:40:03 -03:00
parent 3bc473262d
commit e221d49020
18 changed files with 1523 additions and 399 deletions

View file

@ -76,18 +76,41 @@ class LlamaProcessor:
if cleaned_dictionary:
request_payload["dictionary"] = cleaned_dictionary
content = self.chat(
system_prompt=SYSTEM_PROMPT,
user_prompt=json.dumps(request_payload, ensure_ascii=False),
llm_opts={"temperature": 0.0, "response_format": {"type": "json_object"}},
)
return _extract_cleaned_text_from_raw(content)
def chat(
    self,
    *,
    system_prompt: str,
    user_prompt: str,
    llm_opts: dict[str, Any] | None = None,
) -> str:
    """Send one system/user prompt pair to the llama client and return the raw reply text.

    Parameters
    ----------
    system_prompt:
        Content of the ``system`` message.
    user_prompt:
        Content of the ``user`` message.
    llm_opts:
        Optional generation options. Recognized keys: ``temperature``
        (default ``0.0``) and ``response_format``. Any other key is an error.

    Returns
    -------
    str
        The assistant message text extracted from the completion payload.

    Raises
    ------
    ValueError
        If ``llm_opts`` contains keys other than ``temperature`` and
        ``response_format``.
    """
    # Copy so pops don't mutate the caller's dict.
    opts = dict(llm_opts or {})
    temperature = float(opts.pop("temperature", 0.0))
    response_format = opts.pop("response_format", None)
    if opts:
        # Fail loudly on unrecognized options rather than silently ignoring them.
        unknown = ", ".join(sorted(opts.keys()))
        raise ValueError(f"unsupported llm options: {unknown}")
    kwargs: dict[str, Any] = {
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
        "temperature": temperature,
    }
    # Forward response_format only when requested AND the client's
    # create_chat_completion accepts it (capability-probed via
    # _supports_response_format).
    if response_format is not None and _supports_response_format(
        self.client.create_chat_completion
    ):
        kwargs["response_format"] = response_format
    response = self.client.create_chat_completion(**kwargs)
    return _extract_chat_text(response)
def ensure_model():
@ -148,6 +171,10 @@ def _extract_chat_text(payload: Any) -> str:
def _extract_cleaned_text(payload: Any) -> str:
    """Pull the chat reply text out of *payload* and parse it into cleaned text.

    Composes the two extraction steps: first the assistant message text is
    taken from the completion payload, then the cleaned text is decoded from
    that raw string.
    """
    return _extract_cleaned_text_from_raw(_extract_chat_text(payload))
def _extract_cleaned_text_from_raw(raw: str) -> str:
try:
parsed = json.loads(raw)
except json.JSONDecodeError as exc: