Add task sync push milestone

Tasks could start from host content in 2.2.0, but there was still no post-create path to update a live workspace from the host. This change adds the next host-to-task step so repeated fix or review loops do not require recreating the task for every local change.

Add task sync push across the CLI, Python SDK, and MCP server, reusing the existing safe archive import path from seeded task creation instead of introducing a second transfer stack. The implementation keeps sync separate from workspace_seed metadata, validates destinations under /workspace, and documents the current non-atomic recovery path as delete-and-recreate.

Validation:
- uv lock
- UV_CACHE_DIR=.uv-cache uv run pytest --no-cov tests/test_cli.py tests/test_vm_manager.py tests/test_api.py tests/test_server.py tests/test_public_contract.py
- UV_CACHE_DIR=.uv-cache make check
- UV_CACHE_DIR=.uv-cache make dist-check
- real guest-backed smoke: task create --source-path, task sync push, task exec to verify both files, task delete
This commit is contained in:
Thales Maciel 2026-03-11 22:20:55 -03:00
parent aa886b346e
commit 9e11dcf9ab
19 changed files with 461 additions and 41 deletions

View file

@@ -33,6 +33,7 @@ def test_create_server_registers_vm_tools(tmp_path: Path) -> None:
assert "vm_status" in tool_names
assert "task_create" in tool_names
assert "task_logs" in tool_names
assert "task_sync_push" in tool_names
def test_vm_run_round_trip(tmp_path: Path) -> None:
@@ -183,7 +184,13 @@ def test_task_tools_round_trip(tmp_path: Path) -> None:
raise TypeError("expected structured dictionary result")
return cast(dict[str, Any], structured)
async def _run() -> tuple[dict[str, Any], dict[str, Any], dict[str, Any], dict[str, Any]]:
async def _run() -> tuple[
dict[str, Any],
dict[str, Any],
dict[str, Any],
dict[str, Any],
dict[str, Any],
]:
server = create_server(manager=manager)
created = _extract_structured(
await server.call_tool(
@@ -196,22 +203,36 @@ def test_task_tools_round_trip(tmp_path: Path) -> None:
)
)
task_id = str(created["task_id"])
update_dir = tmp_path / "update"
update_dir.mkdir()
(update_dir / "more.txt").write_text("more\n", encoding="utf-8")
synced = _extract_structured(
await server.call_tool(
"task_sync_push",
{
"task_id": task_id,
"source_path": str(update_dir),
"dest": "subdir",
},
)
)
executed = _extract_structured(
await server.call_tool(
"task_exec",
{
"task_id": task_id,
"command": "cat note.txt",
"command": "cat subdir/more.txt",
},
)
)
logs = _extract_structured(await server.call_tool("task_logs", {"task_id": task_id}))
deleted = _extract_structured(await server.call_tool("task_delete", {"task_id": task_id}))
return created, executed, logs, deleted
return created, synced, executed, logs, deleted
created, executed, logs, deleted = asyncio.run(_run())
created, synced, executed, logs, deleted = asyncio.run(_run())
assert created["state"] == "started"
assert created["workspace_seed"]["mode"] == "directory"
assert executed["stdout"] == "ok\n"
assert synced["workspace_sync"]["destination"] == "/workspace/subdir"
assert executed["stdout"] == "more\n"
assert logs["count"] == 1
assert deleted["deleted"] is True