"""Canonical OpenAI Responses API integration centered on workspace-core. Requirements: - `pip install openai` or `uv add openai` - `OPENAI_API_KEY` This is the recommended persistent-chat example. In 4.x the default MCP server profile is already `workspace-core`, so it derives tool schemas from `Pyro.create_server()` and dispatches tool calls back through that same default-profile server. """ from __future__ import annotations import asyncio import json import os from typing import Any, cast from pyro_mcp import Pyro DEFAULT_MODEL = "gpt-5" def _tool_to_openai(tool: Any) -> dict[str, Any]: return { "type": "function", "name": str(tool.name), "description": str(getattr(tool, "description", "") or ""), "strict": True, "parameters": dict(tool.inputSchema), } def _extract_structured(raw_result: object) -> dict[str, Any]: if not isinstance(raw_result, tuple) or len(raw_result) != 2: raise TypeError("unexpected call_tool result shape") _, structured = raw_result if not isinstance(structured, dict): raise TypeError("expected structured dictionary result") return cast(dict[str, Any], structured) async def run_openai_workspace_core_example(*, prompt: str, model: str = DEFAULT_MODEL) -> str: from openai import OpenAI # type: ignore[import-not-found] pyro = Pyro() server = pyro.create_server() tools = [_tool_to_openai(tool) for tool in await server.list_tools()] client = OpenAI() input_items: list[dict[str, Any]] = [{"role": "user", "content": prompt}] while True: response = client.responses.create( model=model, input=input_items, tools=tools, ) input_items.extend(response.output) tool_calls = [item for item in response.output if item.type == "function_call"] if not tool_calls: return str(response.output_text) for tool_call in tool_calls: result = _extract_structured( await server.call_tool(tool_call.name, json.loads(tool_call.arguments)) ) input_items.append( { "type": "function_call_output", "call_id": tool_call.call_id, "output": json.dumps(result, sort_keys=True), } ) 
def main() -> None:
    """CLI entry point: run the example with `OPENAI_MODEL` (or the default)."""
    chosen_model = os.environ.get("OPENAI_MODEL", DEFAULT_MODEL)
    request = (
        "Use the workspace-core tools to create a Debian 12 workspace named "
        "`chat-fix`, write `app.py` with `print(\"fixed\")`, run it with "
        "`python3 app.py`, export the file to `./app.py`, then delete the workspace. "
        "Do not use one-shot vm_run for this request."
    )
    result = asyncio.run(run_openai_workspace_core_example(prompt=request, model=chosen_model))
    print(result)


if __name__ == "__main__":
    main()