pyro-mcp/examples/openai_responses_workspace_core.py
Thales Maciel c00c699a9f Make workspace-core the default MCP profile
Flip bare pyro mcp serve, create_server(), and Pyro.create_server() to default to workspace-core in 4.0.0 while keeping workspace-full as the explicit advanced opt-in surface.

Rewrite the MCP-facing docs and host-specific examples around the bare default command, update package and catalog compatibility to 4.x, and move the public-contract wording from 3.x compatibility guidance to the new stable default.

Adjust the server, API, and contract tests so bare server creation now asserts the workspace-core tool set, while explicit workspace-full coverage continues to prove shells, services, snapshots, and disk tools remain available.

Validation: uv lock; .venv/bin/pytest --no-cov tests/test_cli.py tests/test_api.py tests/test_server.py tests/test_public_contract.py; UV_CACHE_DIR=.uv-cache make check; UV_CACHE_DIR=.uv-cache make dist-check; real guest-backed smoke for bare Pyro.create_server() plus explicit profile="workspace-full".
2026-03-13 14:14:15 -03:00

90 lines
2.8 KiB
Python

"""Canonical OpenAI Responses API integration centered on workspace-core.
Requirements:
- `pip install openai` or `uv add openai`
- `OPENAI_API_KEY`
This is the recommended persistent-chat example. In 4.x the default MCP server
profile is already `workspace-core`, so it derives tool schemas from
`Pyro.create_server()` and dispatches tool calls back through that same
default-profile server.
"""
from __future__ import annotations
import asyncio
import json
import os
from typing import Any, cast
from pyro_mcp import Pyro
DEFAULT_MODEL = "gpt-5"
def _tool_to_openai(tool: Any) -> dict[str, Any]:
return {
"type": "function",
"name": str(tool.name),
"description": str(getattr(tool, "description", "") or ""),
"strict": True,
"parameters": dict(tool.inputSchema),
}
def _extract_structured(raw_result: object) -> dict[str, Any]:
if not isinstance(raw_result, tuple) or len(raw_result) != 2:
raise TypeError("unexpected call_tool result shape")
_, structured = raw_result
if not isinstance(structured, dict):
raise TypeError("expected structured dictionary result")
return cast(dict[str, Any], structured)
async def run_openai_workspace_core_example(*, prompt: str, model: str = DEFAULT_MODEL) -> str:
    """Drive one OpenAI Responses conversation backed by workspace-core tools.

    Derives the tool schemas from a bare ``Pyro.create_server()`` (which,
    per the module docstring, defaults to the workspace-core profile in 4.x),
    then loops: send the conversation, dispatch every requested tool call
    back through that same server, and feed the outputs back in — until the
    model replies with no further tool calls.

    Args:
        prompt: The user's request, sent as the first conversation item.
        model: Responses API model name; defaults to ``DEFAULT_MODEL``.

    Returns:
        The model's final text output once no tool calls remain.
    """
    # Imported lazily so merely importing this module does not require `openai`.
    from openai import OpenAI  # type: ignore[import-not-found]

    pyro = Pyro()
    # Bare create_server() — no explicit profile — is the workspace-core default.
    server = pyro.create_server()
    tools = [_tool_to_openai(tool) for tool in await server.list_tools()]
    client = OpenAI()
    # NOTE(review): SDK output items (not plain dicts) are appended below;
    # the Responses API accepts them as input items — confirm against SDK docs.
    input_items: list[dict[str, Any]] = [{"role": "user", "content": prompt}]
    while True:
        response = client.responses.create(
            model=model,
            input=input_items,
            tools=tools,
        )
        # Echo the model's full output into the transcript BEFORE checking for
        # tool calls, so each function_call item precedes its output item.
        input_items.extend(response.output)
        tool_calls = [item for item in response.output if item.type == "function_call"]
        if not tool_calls:
            # No pending tool work: the model's text is the final answer.
            return str(response.output_text)
        for tool_call in tool_calls:
            # Dispatch through the same server the schemas came from.
            result = _extract_structured(
                await server.call_tool(tool_call.name, json.loads(tool_call.arguments))
            )
            input_items.append(
                {
                    "type": "function_call_output",
                    "call_id": tool_call.call_id,
                    # sort_keys keeps the serialized output deterministic.
                    "output": json.dumps(result, sort_keys=True),
                }
            )
def main() -> None:
    """CLI entry point: resolve the model from the environment and run the demo."""
    # OPENAI_MODEL overrides the default model when set.
    chosen_model = os.environ.get("OPENAI_MODEL", DEFAULT_MODEL)
    task = (
        "Use the workspace-core tools to create a Debian 12 workspace named "
        "`chat-fix`, write `app.py` with `print(\"fixed\")`, run it with "
        "`python3 app.py`, export the file to `./app.py`, then delete the workspace. "
        "Do not use one-shot vm_run for this request."
    )
    answer = asyncio.run(run_openai_workspace_core_example(prompt=task, model=chosen_model))
    print(answer)


if __name__ == "__main__":
    main()