Harden runtime diagnostics for milestone 3
Make the milestone 3 runtime story predictable instead of treating doctor, self-check, and startup failures as loosely related surfaces. Split doctor and self-check into distinct read-only flows, add a tri-state diagnostic status with stable IDs and next steps, and reuse that wording in CLI output, service logs, and tray-triggered diagnostics. Add non-mutating config/model probes, a `make runtime-check` gate, and public recovery/validation docs for the X11 GA roadmap. Validation: `make runtime-check`; `PYTHONPATH=src python3 -m unittest discover -s tests -p 'test_*.py'`; `python3 -m py_compile src/*.py tests/*.py`; `PYTHONPATH=src python3 -m aman doctor --help`; `PYTHONPATH=src python3 -m aman self-check --help`. Leave milestone 3 open in the roadmap until the manual X11 validation rows are filled.
This commit is contained in:
parent
a3368056ff
commit
ed1b59240b
16 changed files with 1298 additions and 248 deletions
|
|
@@ -1,7 +1,9 @@
|
|||
import json
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[1]
|
||||
|
|
@@ -10,7 +12,13 @@ if str(SRC) not in sys.path:
|
|||
sys.path.insert(0, str(SRC))
|
||||
|
||||
from config import Config
|
||||
from diagnostics import DiagnosticCheck, DiagnosticReport, run_diagnostics
|
||||
from diagnostics import (
|
||||
DiagnosticCheck,
|
||||
DiagnosticReport,
|
||||
run_doctor,
|
||||
run_diagnostics,
|
||||
run_self_check,
|
||||
)
|
||||
|
||||
|
||||
class _FakeDesktop:
|
||||
|
|
@@ -18,59 +26,207 @@ class _FakeDesktop:
|
|||
return
|
||||
|
||||
|
||||
class DiagnosticsTests(unittest.TestCase):
|
||||
def test_run_diagnostics_all_checks_pass(self):
|
||||
cfg = Config()
|
||||
with patch("diagnostics.load", return_value=cfg), patch(
|
||||
"diagnostics.resolve_input_device", return_value=1
|
||||
), patch("diagnostics.get_desktop_adapter", return_value=_FakeDesktop()), patch(
|
||||
"diagnostics.ensure_model", return_value=Path("/tmp/model.gguf")
|
||||
):
|
||||
report = run_diagnostics("/tmp/config.json")
|
||||
class _Result:
|
||||
def __init__(self, *, returncode: int = 0, stdout: str = "", stderr: str = ""):
|
||||
self.returncode = returncode
|
||||
self.stdout = stdout
|
||||
self.stderr = stderr
|
||||
|
||||
|
||||
def _systemctl_side_effect(*results: _Result):
    """Build a mock ``side_effect`` callable that replays *results* in order.

    Each invocation of the returned runner consumes the next canned
    result, so a test can script several sequential systemctl calls.
    Raises ``StopIteration`` once the prepared results are exhausted.
    """
    pending = iter(results)

    def _runner(_args):
        # The patched helper's argument list is deliberately ignored:
        # call ordering alone decides which result a call receives.
        return next(pending)

    return _runner
|
||||
|
||||
|
||||
class DiagnosticsTests(unittest.TestCase):
|
||||
def test_run_doctor_all_checks_pass(self):
|
||||
cfg = Config()
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
config_path = Path(td) / "config.json"
|
||||
config_path.write_text('{"config_version":1}\n', encoding="utf-8")
|
||||
with patch.dict("os.environ", {"DISPLAY": ":0"}, clear=False), patch(
|
||||
"diagnostics.load_existing", return_value=cfg
|
||||
), patch("diagnostics.list_input_devices", return_value=[{"index": 1, "name": "Mic"}]), patch(
|
||||
"diagnostics.resolve_input_device", return_value=1
|
||||
), patch(
|
||||
"diagnostics.get_desktop_adapter", return_value=_FakeDesktop()
|
||||
), patch(
|
||||
"diagnostics._run_systemctl_user",
|
||||
return_value=_Result(returncode=0, stdout="running\n"),
|
||||
), patch("diagnostics.probe_managed_model") as probe_model:
|
||||
report = run_doctor(str(config_path))
|
||||
|
||||
self.assertEqual(report.status, "ok")
|
||||
self.assertTrue(report.ok)
|
||||
ids = [check.id for check in report.checks]
|
||||
self.assertEqual(
|
||||
ids,
|
||||
[check.id for check in report.checks],
|
||||
[
|
||||
"config.load",
|
||||
"session.x11",
|
||||
"runtime.audio",
|
||||
"audio.input",
|
||||
"hotkey.parse",
|
||||
"injection.backend",
|
||||
"provider.runtime",
|
||||
"model.cache",
|
||||
"service.prereq",
|
||||
],
|
||||
)
|
||||
self.assertTrue(all(check.ok for check in report.checks))
|
||||
self.assertTrue(all(check.status == "ok" for check in report.checks))
|
||||
probe_model.assert_not_called()
|
||||
|
||||
def test_run_diagnostics_marks_config_fail_and_skips_dependent_checks(self):
|
||||
with patch("diagnostics.load", side_effect=ValueError("broken config")), patch(
|
||||
"diagnostics.ensure_model", return_value=Path("/tmp/model.gguf")
|
||||
):
|
||||
report = run_diagnostics("/tmp/config.json")
|
||||
def test_run_doctor_missing_config_warns_without_writing(self):
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
config_path = Path(td) / "config.json"
|
||||
with patch.dict("os.environ", {"DISPLAY": ":0"}, clear=False), patch(
|
||||
"diagnostics.list_input_devices", return_value=[]
|
||||
), patch(
|
||||
"diagnostics._run_systemctl_user",
|
||||
return_value=_Result(returncode=0, stdout="running\n"),
|
||||
):
|
||||
report = run_doctor(str(config_path))
|
||||
|
||||
self.assertFalse(report.ok)
|
||||
self.assertEqual(report.status, "warn")
|
||||
results = {check.id: check for check in report.checks}
|
||||
self.assertFalse(results["config.load"].ok)
|
||||
self.assertFalse(results["audio.input"].ok)
|
||||
self.assertFalse(results["hotkey.parse"].ok)
|
||||
self.assertFalse(results["injection.backend"].ok)
|
||||
self.assertFalse(results["provider.runtime"].ok)
|
||||
self.assertFalse(results["model.cache"].ok)
|
||||
self.assertEqual(results["config.load"].status, "warn")
|
||||
self.assertEqual(results["runtime.audio"].status, "warn")
|
||||
self.assertEqual(results["audio.input"].status, "warn")
|
||||
self.assertIn("open Settings", results["config.load"].next_step)
|
||||
self.assertFalse(config_path.exists())
|
||||
|
||||
def test_report_json_schema(self):
|
||||
def test_run_self_check_adds_deeper_readiness_checks(self):
|
||||
cfg = Config()
|
||||
model_path = Path("/tmp/model.gguf")
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
config_path = Path(td) / "config.json"
|
||||
config_path.write_text('{"config_version":1}\n', encoding="utf-8")
|
||||
with patch.dict("os.environ", {"DISPLAY": ":0"}, clear=False), patch(
|
||||
"diagnostics.load_existing", return_value=cfg
|
||||
), patch("diagnostics.list_input_devices", return_value=[{"index": 1, "name": "Mic"}]), patch(
|
||||
"diagnostics.resolve_input_device", return_value=1
|
||||
), patch(
|
||||
"diagnostics.get_desktop_adapter", return_value=_FakeDesktop()
|
||||
), patch(
|
||||
"diagnostics._run_systemctl_user",
|
||||
side_effect=_systemctl_side_effect(
|
||||
_Result(returncode=0, stdout="running\n"),
|
||||
_Result(returncode=0, stdout="/home/test/.config/systemd/user/aman.service\n"),
|
||||
_Result(returncode=0, stdout="enabled\n"),
|
||||
_Result(returncode=0, stdout="active\n"),
|
||||
),
|
||||
), patch(
|
||||
"diagnostics.probe_managed_model",
|
||||
return_value=SimpleNamespace(
|
||||
status="ready",
|
||||
path=model_path,
|
||||
message=f"managed editor model is ready at {model_path}",
|
||||
),
|
||||
), patch(
|
||||
"diagnostics.MODEL_DIR", model_path.parent
|
||||
), patch(
|
||||
"diagnostics.os.access", return_value=True
|
||||
), patch(
|
||||
"diagnostics._load_llama_bindings", return_value=(object(), object())
|
||||
), patch.dict(
|
||||
"sys.modules", {"faster_whisper": SimpleNamespace(WhisperModel=object())}
|
||||
):
|
||||
report = run_self_check(str(config_path))
|
||||
|
||||
self.assertEqual(report.status, "ok")
|
||||
self.assertEqual(
|
||||
[check.id for check in report.checks[-5:]],
|
||||
[
|
||||
"model.cache",
|
||||
"cache.writable",
|
||||
"service.unit",
|
||||
"service.state",
|
||||
"startup.readiness",
|
||||
],
|
||||
)
|
||||
self.assertTrue(all(check.status == "ok" for check in report.checks))
|
||||
|
||||
def test_run_self_check_missing_model_warns_without_downloading(self):
|
||||
cfg = Config()
|
||||
model_path = Path("/tmp/model.gguf")
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
config_path = Path(td) / "config.json"
|
||||
config_path.write_text('{"config_version":1}\n', encoding="utf-8")
|
||||
with patch.dict("os.environ", {"DISPLAY": ":0"}, clear=False), patch(
|
||||
"diagnostics.load_existing", return_value=cfg
|
||||
), patch("diagnostics.list_input_devices", return_value=[{"index": 1, "name": "Mic"}]), patch(
|
||||
"diagnostics.resolve_input_device", return_value=1
|
||||
), patch(
|
||||
"diagnostics.get_desktop_adapter", return_value=_FakeDesktop()
|
||||
), patch(
|
||||
"diagnostics._run_systemctl_user",
|
||||
side_effect=_systemctl_side_effect(
|
||||
_Result(returncode=0, stdout="running\n"),
|
||||
_Result(returncode=0, stdout="/home/test/.config/systemd/user/aman.service\n"),
|
||||
_Result(returncode=0, stdout="enabled\n"),
|
||||
_Result(returncode=0, stdout="active\n"),
|
||||
),
|
||||
), patch(
|
||||
"diagnostics.probe_managed_model",
|
||||
return_value=SimpleNamespace(
|
||||
status="missing",
|
||||
path=model_path,
|
||||
message=f"managed editor model is not cached at {model_path}",
|
||||
),
|
||||
) as probe_model, patch(
|
||||
"diagnostics.MODEL_DIR", model_path.parent
|
||||
), patch(
|
||||
"diagnostics.os.access", return_value=True
|
||||
), patch(
|
||||
"diagnostics._load_llama_bindings", return_value=(object(), object())
|
||||
), patch.dict(
|
||||
"sys.modules", {"faster_whisper": SimpleNamespace(WhisperModel=object())}
|
||||
):
|
||||
report = run_self_check(str(config_path))
|
||||
|
||||
self.assertEqual(report.status, "warn")
|
||||
results = {check.id: check for check in report.checks}
|
||||
self.assertEqual(results["model.cache"].status, "warn")
|
||||
self.assertEqual(results["startup.readiness"].status, "warn")
|
||||
self.assertIn("networked connection", results["model.cache"].next_step)
|
||||
probe_model.assert_called_once()
|
||||
|
||||
def test_run_diagnostics_alias_matches_doctor(self):
|
||||
cfg = Config()
|
||||
with tempfile.TemporaryDirectory() as td:
|
||||
config_path = Path(td) / "config.json"
|
||||
config_path.write_text('{"config_version":1}\n', encoding="utf-8")
|
||||
with patch.dict("os.environ", {"DISPLAY": ":0"}, clear=False), patch(
|
||||
"diagnostics.load_existing", return_value=cfg
|
||||
), patch("diagnostics.list_input_devices", return_value=[{"index": 1, "name": "Mic"}]), patch(
|
||||
"diagnostics.resolve_input_device", return_value=1
|
||||
), patch(
|
||||
"diagnostics.get_desktop_adapter", return_value=_FakeDesktop()
|
||||
), patch(
|
||||
"diagnostics._run_systemctl_user",
|
||||
return_value=_Result(returncode=0, stdout="running\n"),
|
||||
):
|
||||
report = run_diagnostics(str(config_path))
|
||||
|
||||
self.assertEqual(report.status, "ok")
|
||||
self.assertEqual(len(report.checks), 7)
|
||||
|
||||
def test_report_json_schema_includes_status_and_next_step(self):
|
||||
report = DiagnosticReport(
|
||||
checks=[
|
||||
DiagnosticCheck(id="config.load", ok=True, message="ok", hint=""),
|
||||
DiagnosticCheck(id="model.cache", ok=False, message="nope", hint="fix"),
|
||||
DiagnosticCheck(id="config.load", status="warn", message="missing", next_step="open settings"),
|
||||
DiagnosticCheck(id="service.prereq", status="fail", message="broken", next_step="fix systemd"),
|
||||
]
|
||||
)
|
||||
|
||||
payload = json.loads(report.to_json())
|
||||
|
||||
self.assertEqual(payload["status"], "fail")
|
||||
self.assertFalse(payload["ok"])
|
||||
self.assertEqual(payload["checks"][0]["id"], "config.load")
|
||||
self.assertEqual(payload["checks"][1]["hint"], "fix")
|
||||
self.assertEqual(payload["checks"][0]["status"], "warn")
|
||||
self.assertEqual(payload["checks"][0]["next_step"], "open settings")
|
||||
self.assertEqual(payload["checks"][1]["hint"], "fix systemd")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue