Add real runtime materialization pipeline and bundle artifacts
This commit is contained in:
parent
cbf212bb7b
commit
c43c718c83
32 changed files with 1456 additions and 27 deletions
|
|
@ -24,6 +24,7 @@ class RuntimePaths:
|
|||
manifest_path: Path
|
||||
firecracker_bin: Path
|
||||
jailer_bin: Path
|
||||
guest_agent_path: Path | None
|
||||
artifacts_dir: Path
|
||||
notice_path: Path
|
||||
manifest: dict[str, Any]
|
||||
|
|
@ -91,9 +92,21 @@ def resolve_runtime_paths(
|
|||
|
||||
firecracker_bin = bundle_root / str(firecracker_entry.get("path", ""))
|
||||
jailer_bin = bundle_root / str(jailer_entry.get("path", ""))
|
||||
guest_agent_path: Path | None = None
|
||||
guest = manifest.get("guest")
|
||||
if isinstance(guest, dict):
|
||||
agent_entry = guest.get("agent")
|
||||
if isinstance(agent_entry, dict):
|
||||
raw_agent_path = agent_entry.get("path")
|
||||
if isinstance(raw_agent_path, str):
|
||||
guest_agent_path = bundle_root / raw_agent_path
|
||||
artifacts_dir = bundle_root / "profiles"
|
||||
|
||||
for path in (firecracker_bin, jailer_bin, artifacts_dir):
|
||||
required_paths = [firecracker_bin, jailer_bin, artifacts_dir]
|
||||
if guest_agent_path is not None:
|
||||
required_paths.append(guest_agent_path)
|
||||
|
||||
for path in required_paths:
|
||||
if not path.exists():
|
||||
raise RuntimeError(f"runtime asset missing: {path}")
|
||||
|
||||
|
|
@ -112,6 +125,20 @@ def resolve_runtime_paths(
|
|||
raise RuntimeError(
|
||||
f"runtime checksum mismatch for {full_path}; expected {raw_hash}, got {actual}"
|
||||
)
|
||||
if isinstance(guest, dict):
|
||||
agent_entry = guest.get("agent")
|
||||
if isinstance(agent_entry, dict):
|
||||
raw_path = agent_entry.get("path")
|
||||
raw_hash = agent_entry.get("sha256")
|
||||
if not isinstance(raw_path, str) or not isinstance(raw_hash, str):
|
||||
raise RuntimeError("runtime guest agent manifest entry is malformed")
|
||||
full_path = bundle_root / raw_path
|
||||
actual = _sha256(full_path)
|
||||
if actual != raw_hash:
|
||||
raise RuntimeError(
|
||||
f"runtime checksum mismatch for {full_path}; "
|
||||
f"expected {raw_hash}, got {actual}"
|
||||
)
|
||||
profiles = manifest.get("profiles")
|
||||
if not isinstance(profiles, dict):
|
||||
raise RuntimeError("runtime manifest is missing `profiles`")
|
||||
|
|
@ -141,6 +168,7 @@ def resolve_runtime_paths(
|
|||
manifest_path=manifest_path,
|
||||
firecracker_bin=firecracker_bin,
|
||||
jailer_bin=jailer_bin,
|
||||
guest_agent_path=guest_agent_path,
|
||||
artifacts_dir=artifacts_dir,
|
||||
notice_path=notice_path,
|
||||
manifest=manifest,
|
||||
|
|
@ -222,9 +250,11 @@ def doctor_report(*, platform: str = DEFAULT_PLATFORM) -> dict[str, Any]:
|
|||
"manifest_path": str(paths.manifest_path),
|
||||
"firecracker_bin": str(paths.firecracker_bin),
|
||||
"jailer_bin": str(paths.jailer_bin),
|
||||
"guest_agent_path": str(paths.guest_agent_path) if paths.guest_agent_path else None,
|
||||
"artifacts_dir": str(paths.artifacts_dir),
|
||||
"notice_path": str(paths.notice_path),
|
||||
"bundle_version": paths.manifest.get("bundle_version"),
|
||||
"component_versions": paths.manifest.get("component_versions", {}),
|
||||
"profiles": profile_names,
|
||||
"capabilities": {
|
||||
"supports_vm_boot": capabilities.supports_vm_boot,
|
||||
|
|
|
|||
538
src/pyro_mcp/runtime_build.py
Normal file
538
src/pyro_mcp/runtime_build.py
Normal file
|
|
@ -0,0 +1,538 @@
|
|||
"""Local build pipeline for packaged Firecracker runtime bundles."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import json
|
||||
import shutil
|
||||
import subprocess
|
||||
import tarfile
|
||||
import urllib.request
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from pyro_mcp.runtime import DEFAULT_PLATFORM
|
||||
|
||||
# Checked-in source tree containing per-platform lock files and build scripts.
DEFAULT_RUNTIME_SOURCE_DIR = Path("runtime_sources")
# Staging directory where a bundle is assembled before syncing.
DEFAULT_RUNTIME_BUILD_DIR = Path("build/runtime_bundle")
# Packaged bundle location inside the package tree.
DEFAULT_RUNTIME_BUNDLE_DIR = Path("src/pyro_mcp/runtime_bundle")
# Root for locally materialized (downloaded/built) artifacts.
DEFAULT_RUNTIME_MATERIALIZED_DIR = Path("build/runtime_sources")
# Chunk size (1 MiB) used for streaming downloads and hashing.
DOWNLOAD_CHUNK_SIZE = 1024 * 1024
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class RuntimeBuildLock:
    """Pinned inputs for one platform's runtime bundle, parsed from ``runtime.lock.json``."""

    # Version string copied into the generated manifest.
    bundle_version: str
    # Target platform identifier; must match the requested build platform.
    platform: str
    # Component name -> version string, copied into the manifest.
    component_versions: dict[str, str]
    # Capability flag name -> whether the bundle advertises it.
    capabilities: dict[str, bool]
    # Binary name ("firecracker"/"jailer") -> bundle-relative path.
    binaries: dict[str, str]
    # Guest artifact name -> entry mapping with at least a "path" key.
    guest: dict[str, dict[str, str]]
    # Profile name -> mapping with "kernel", "rootfs", and "description" keys.
    profiles: dict[str, dict[str, str]]
    # Upstream fetch/build configuration (release archive, build scripts).
    upstream: dict[str, Any]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class RuntimeBuildPaths:
    """Directory layout used by the bundle build pipeline."""

    # Checked-in source tree root (holds the NOTICE file).
    source_root: Path
    # Per-platform source subdirectory (holds runtime.lock.json and scripts).
    source_platform_root: Path
    # Staging root where the bundle is assembled (receives NOTICE).
    build_root: Path
    # Per-platform staging directory (receives binaries, profiles, manifest.json).
    build_platform_root: Path
    # Packaged bundle directory the staged tree is synced into.
    bundle_dir: Path
    # Root for materialized artifacts (download cache, build workdirs).
    materialized_root: Path
    # Per-platform materialized artifacts; preferred over the source copies.
    materialized_platform_root: Path
    # Target platform identifier (e.g. "linux-x86_64").
    platform: str
|
||||
|
||||
|
||||
def _sha256(path: Path) -> str:
    """Return the hex-encoded SHA-256 digest of *path*, read in streaming chunks."""
    hasher = hashlib.sha256()
    with path.open("rb") as stream:
        while chunk := stream.read(DOWNLOAD_CHUNK_SIZE):
            hasher.update(chunk)
    return hasher.hexdigest()
|
||||
|
||||
|
||||
def _cache_filename(url: str) -> str:
|
||||
digest = hashlib.sha256(url.encode("utf-8")).hexdigest()[:12]
|
||||
return f"{digest}-{Path(url).name}"
|
||||
|
||||
|
||||
def _load_lock(paths: RuntimeBuildPaths) -> RuntimeBuildLock:
    """Parse and normalize ``runtime.lock.json`` from the platform source tree.

    Raises:
        RuntimeError: if the top-level JSON payload is not an object.
        KeyError: if a required lock section is absent.
    """
    lock_path = paths.source_platform_root / "runtime.lock.json"
    payload = json.loads(lock_path.read_text(encoding="utf-8"))
    if not isinstance(payload, dict):
        raise RuntimeError(f"invalid runtime lock file: {lock_path}")

    def _str_map(raw: Any) -> dict[str, str]:
        # Coerce keys and values to str so downstream code sees a uniform type.
        return {str(key): str(value) for key, value in dict(raw).items()}

    def _nested_str_map(raw: Any) -> dict[str, dict[str, str]]:
        return {str(key): _str_map(value) for key, value in dict(raw).items()}

    return RuntimeBuildLock(
        bundle_version=str(payload["bundle_version"]),
        platform=str(payload["platform"]),
        component_versions=_str_map(payload["component_versions"]),
        capabilities={str(key): bool(value) for key, value in dict(payload["capabilities"]).items()},
        binaries=_str_map(payload["binaries"]),
        guest=_nested_str_map(payload["guest"]),
        profiles=_nested_str_map(payload["profiles"]),
        upstream={str(key): value for key, value in dict(payload.get("upstream", {})).items()},
    )
|
||||
|
||||
|
||||
def _copy_file(source: Path, dest: Path) -> None:
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
shutil.copy2(source, dest)
|
||||
|
||||
|
||||
def _copy_notice(paths: RuntimeBuildPaths) -> None:
    """Stage the top-level NOTICE file into the build root."""
    notice_source = paths.source_root / "NOTICE"
    notice_dest = paths.build_root / "NOTICE"
    _copy_file(notice_source, notice_dest)
|
||||
|
||||
|
||||
def _resolved_source_path(paths: RuntimeBuildPaths, relative_path: str) -> Path:
|
||||
materialized = paths.materialized_platform_root / relative_path
|
||||
if materialized.exists():
|
||||
return materialized
|
||||
return paths.source_platform_root / relative_path
|
||||
|
||||
|
||||
def _download(url: str, dest: Path) -> None:  # pragma: no cover - integration helper
    """Stream *url* to *dest*, creating parent directories first."""
    dest.parent.mkdir(parents=True, exist_ok=True)
    with urllib.request.urlopen(url) as response, dest.open("wb") as sink:  # noqa: S310
        while chunk := response.read(DOWNLOAD_CHUNK_SIZE):
            sink.write(chunk)
|
||||
|
||||
|
||||
def _run(command: list[str]) -> None: # pragma: no cover - integration helper
|
||||
completed = subprocess.run(command, text=True, capture_output=True, check=False)
|
||||
if completed.returncode != 0:
|
||||
stderr = completed.stderr.strip() or completed.stdout.strip()
|
||||
raise RuntimeError(f"command {' '.join(command)!r} failed: {stderr}")
|
||||
|
||||
|
||||
def validate_sources(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> None:
    """Reject locks that advertise guest capabilities over shim/placeholder inputs.

    Artifacts are read as text with undecodable bytes ignored so the marker
    strings can be searched in both placeholder files and real binaries.

    Raises:
        RuntimeError: if any capability is enabled while a binary is still a
            shim or any profile kernel/rootfs is still a placeholder.
    """

    def _artifact_text(relative_path: str) -> str:
        return _resolved_source_path(paths, relative_path).read_text(
            encoding="utf-8", errors="ignore"
        )

    firecracker_text = _artifact_text(lock.binaries["firecracker"])
    jailer_text = _artifact_text(lock.binaries["jailer"])
    shim_detected = (
        "bundled firecracker shim" in firecracker_text
        or "bundled jailer shim" in jailer_text
    )

    # Stops reading further files as soon as one placeholder is found.
    placeholder_detected = any(
        "placeholder-" in _artifact_text(profile[kind])
        for profile in lock.profiles.values()
        for kind in ("kernel", "rootfs")
    )

    if any(lock.capabilities.values()) and (shim_detected or placeholder_detected):
        raise RuntimeError(
            "runtime lock advertises guest-capable features while source artifacts are still "
            "shim/placeholder inputs"
        )
|
||||
|
||||
|
||||
def materialize_binaries(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> None:
    """Download the pinned Firecracker release archive and extract its binaries.

    The archive is cached under ``<materialized_root>/_downloads`` keyed by a
    hash of its URL, its SHA-256 is verified on every call (so a corrupt or
    stale cache entry is caught), and the ``firecracker``/``jailer`` members
    are written into the materialized tree with execute bits set.

    Raises:
        RuntimeError: if ``upstream.firecracker_release`` is missing or
            incomplete, the archive checksum mismatches, or an expected
            member is absent from the archive.
    """
    release = lock.upstream.get("firecracker_release")
    if not isinstance(release, dict):
        raise RuntimeError("runtime lock is missing upstream.firecracker_release configuration")
    archive_url = release.get("archive_url")
    archive_sha256 = release.get("archive_sha256")
    firecracker_member = release.get("firecracker_member")
    jailer_member = release.get("jailer_member")
    if not all(
        isinstance(value, str)
        for value in (
            archive_url,
            archive_sha256,
            firecracker_member,
            jailer_member,
        )
    ):
        raise RuntimeError("upstream.firecracker_release is incomplete")
    # str() casts narrow the values for type-checkers after the isinstance gate.
    archive_url = str(archive_url)
    archive_sha256 = str(archive_sha256)
    firecracker_member = str(firecracker_member)
    jailer_member = str(jailer_member)

    cache_dir = paths.materialized_root / "_downloads"
    archive_path = cache_dir / _cache_filename(archive_url)
    if not archive_path.exists():
        _download(archive_url, archive_path)
    actual_archive_sha256 = _sha256(archive_path)
    if actual_archive_sha256 != archive_sha256:
        raise RuntimeError(
            "firecracker release archive checksum mismatch: expected "
            f"{archive_sha256}, got {actual_archive_sha256}"
        )

    targets = {
        firecracker_member: paths.materialized_platform_root / lock.binaries["firecracker"],
        jailer_member: paths.materialized_platform_root / lock.binaries["jailer"],
    }
    # "r:*" auto-detects compression instead of assuming gzip, so the pinned
    # URL may point at a .tar.gz, .tar.xz, .tar.bz2, or plain tarball.
    with tarfile.open(archive_path, "r:*") as archive:
        members = {member.name: member for member in archive.getmembers()}
        for member_name, dest in targets.items():
            member = members.get(member_name)
            if member is None:
                raise RuntimeError(f"release archive is missing {member_name}")
            extracted = archive.extractfile(member)
            if extracted is None:
                raise RuntimeError(f"unable to extract {member_name}")
            dest.parent.mkdir(parents=True, exist_ok=True)
            dest.write_bytes(extracted.read())
            # Preserve existing mode bits; add execute for user/group/other.
            dest.chmod(dest.stat().st_mode | 0o111)
|
||||
|
||||
|
||||
def materialize_kernel(
    paths: RuntimeBuildPaths, lock: RuntimeBuildLock
) -> None:  # pragma: no cover - integration helper
    """Build the guest kernel via the pinned build script and copy it per profile.

    Reads ``upstream.kernel_build`` from the lock, runs the referenced build
    script once into a shared output location, then copies that single vmlinux
    into every profile's kernel path under the materialized tree.

    Raises:
        RuntimeError: if the upstream config is missing or incomplete, the
            build script does not exist, or the script exits non-zero.
    """
    kernel_build = lock.upstream.get("kernel_build")
    if not isinstance(kernel_build, dict):
        raise RuntimeError("runtime lock is missing upstream.kernel_build configuration")
    script = kernel_build.get("script")
    linux_version = kernel_build.get("linux_version")
    source_url = kernel_build.get("source_url")
    config_url = kernel_build.get("config_url")
    builder_image = kernel_build.get("builder_image")
    if not all(
        isinstance(value, str)
        for value in (
            script,
            linux_version,
            source_url,
            config_url,
            builder_image,
        )
    ):
        raise RuntimeError("upstream.kernel_build is incomplete")
    # str() casts narrow the values for type-checkers after the isinstance gate.
    script = str(script)
    linux_version = str(linux_version)
    source_url = str(source_url)
    config_url = str(config_url)
    builder_image = str(builder_image)

    script_path = paths.source_platform_root / script
    if not script_path.exists():
        raise RuntimeError(f"kernel build script not found: {script_path}")

    # The kernel is built once into a shared location; every profile receives
    # a copy of the same vmlinux below.
    shared_output = paths.materialized_platform_root / "profiles/_shared/vmlinux"
    shared_output.parent.mkdir(parents=True, exist_ok=True)
    workdir = paths.materialized_root / "_kernel_work"
    workdir.mkdir(parents=True, exist_ok=True)
    _run(
        [
            str(script_path),
            "--builder-image",
            builder_image,
            "--linux-version",
            linux_version,
            "--source-url",
            source_url,
            "--config-url",
            config_url,
            "--workdir",
            str(workdir),
            "--output",
            str(shared_output),
        ]
    )

    for profile in lock.profiles.values():
        dest = paths.materialized_platform_root / profile["kernel"]
        _copy_file(shared_output, dest)
|
||||
|
||||
|
||||
def materialize_rootfs(
    paths: RuntimeBuildPaths, lock: RuntimeBuildLock
) -> None:  # pragma: no cover - integration helper
    """Build one rootfs image per profile via the pinned build script.

    Reads ``upstream.rootfs_build`` from the lock and invokes the referenced
    script once per profile with that profile's package list, passing the
    guest agent, init, and agent service files to be baked into each image.

    Raises:
        RuntimeError: if the upstream config is missing or incomplete, the
            build script is absent, a profile has no package file, or the
            script exits non-zero.
    """
    rootfs_build = lock.upstream.get("rootfs_build")
    if not isinstance(rootfs_build, dict):
        raise RuntimeError("runtime lock is missing upstream.rootfs_build configuration")
    script = rootfs_build.get("script")
    builder_image = rootfs_build.get("builder_image")
    debian_release = rootfs_build.get("debian_release")
    debian_snapshot = rootfs_build.get("debian_snapshot")
    package_files = rootfs_build.get("package_files")
    guest_init = rootfs_build.get("guest_init")
    agent_service = rootfs_build.get("agent_service")
    if not isinstance(package_files, dict):
        raise RuntimeError("upstream.rootfs_build.package_files must be a mapping")
    if not all(
        isinstance(value, str)
        for value in (
            script,
            builder_image,
            debian_release,
            debian_snapshot,
            guest_init,
            agent_service,
        )
    ):
        raise RuntimeError("upstream.rootfs_build is incomplete")
    # str() casts narrow the values for type-checkers after the isinstance gate.
    script = str(script)
    builder_image = str(builder_image)
    debian_release = str(debian_release)
    debian_snapshot = str(debian_snapshot)
    guest_init = str(guest_init)
    agent_service = str(agent_service)

    script_path = paths.source_platform_root / script
    # Guest inputs come from the checked-in source tree, not the materialized one.
    guest_agent_path = paths.source_platform_root / lock.guest["agent"]["path"]
    guest_init_path = paths.source_platform_root / guest_init
    service_path = paths.source_platform_root / agent_service
    if not script_path.exists():
        raise RuntimeError(f"rootfs build script not found: {script_path}")

    workdir = paths.materialized_root / "_rootfs_work"
    workdir.mkdir(parents=True, exist_ok=True)
    for profile_name, profile in lock.profiles.items():
        raw_packages_path = package_files.get(profile_name)
        if not isinstance(raw_packages_path, str):
            raise RuntimeError(f"missing package file for profile {profile_name!r}")
        packages_path = paths.source_platform_root / raw_packages_path
        output_path = paths.materialized_platform_root / profile["rootfs"]
        output_path.parent.mkdir(parents=True, exist_ok=True)
        # Each profile builds in its own subdirectory of the shared workdir.
        _run(
            [
                str(script_path),
                "--builder-image",
                builder_image,
                "--debian-release",
                debian_release,
                "--debian-snapshot",
                debian_snapshot,
                "--packages-file",
                str(packages_path),
                "--guest-agent",
                str(guest_agent_path),
                "--guest-init",
                str(guest_init_path),
                "--agent-service",
                str(service_path),
                "--workdir",
                str(workdir / profile_name),
                "--output",
                str(output_path),
            ]
        )
|
||||
|
||||
|
||||
def stage_binaries(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> None:
    """Copy the firecracker/jailer binaries into the build tree as executables.

    Materialized artifacts are preferred over checked-in source copies via
    ``_resolved_source_path``.
    """
    # Only the relative paths are needed, so iterate values() directly instead
    # of the original items()-plus-`del name` workaround (ruff PERF102).
    for relative_path in lock.binaries.values():
        source = _resolved_source_path(paths, relative_path)
        dest = paths.build_platform_root / relative_path
        _copy_file(source, dest)
        # Preserve existing mode bits; add execute for user/group/other.
        dest.chmod(dest.stat().st_mode | 0o111)
|
||||
|
||||
|
||||
def stage_kernel(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> None:
    """Copy every profile's kernel image into the per-platform build tree."""
    for profile in lock.profiles.values():
        relative = profile["kernel"]
        staged = paths.build_platform_root / relative
        _copy_file(_resolved_source_path(paths, relative), staged)
|
||||
|
||||
|
||||
def stage_rootfs(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> None:
    """Copy every profile's rootfs image into the per-platform build tree."""
    for profile in lock.profiles.values():
        relative = profile["rootfs"]
        staged = paths.build_platform_root / relative
        _copy_file(_resolved_source_path(paths, relative), staged)
|
||||
|
||||
|
||||
def stage_agent(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> None:
    """Copy every guest artifact into the build tree and mark it executable."""
    for artifact in lock.guest.values():
        relative = artifact["path"]
        staged = paths.build_platform_root / relative
        _copy_file(_resolved_source_path(paths, relative), staged)
        # Preserve existing mode bits; add execute for user/group/other.
        staged.chmod(staged.stat().st_mode | 0o111)
|
||||
|
||||
|
||||
def generate_manifest(paths: RuntimeBuildPaths, lock: RuntimeBuildLock) -> dict[str, Any]:
    """Hash every staged artifact and write ``manifest.json`` into the build tree.

    Returns the manifest mapping that was written.
    """

    def _entry(relative_path: str) -> dict[str, str]:
        # Each manifest entry pins a bundle-relative path to its staged checksum.
        return {
            "path": relative_path,
            "sha256": _sha256(paths.build_platform_root / relative_path),
        }

    manifest: dict[str, Any] = {
        "bundle_version": lock.bundle_version,
        "platform": lock.platform,
        "component_versions": lock.component_versions,
        "capabilities": lock.capabilities,
        "binaries": {name: _entry(relative) for name, relative in lock.binaries.items()},
        "guest": {name: _entry(artifact["path"]) for name, artifact in lock.guest.items()},
        "profiles": {
            name: {
                "description": profile["description"],
                "kernel": _entry(profile["kernel"]),
                "rootfs": _entry(profile["rootfs"]),
            }
            for name, profile in lock.profiles.items()
        },
    }
    serialized = json.dumps(manifest, indent=2, sort_keys=True) + "\n"
    (paths.build_platform_root / "manifest.json").write_text(serialized, encoding="utf-8")
    return manifest
|
||||
|
||||
|
||||
def sync_bundle(paths: RuntimeBuildPaths) -> None:
    """Replace the packaged bundle's platform directory with the staged build tree."""
    destination = paths.bundle_dir / paths.platform
    # Remove any previous sync so stale artifacts never linger in the package.
    if destination.exists():
        shutil.rmtree(destination)
    destination.parent.mkdir(parents=True, exist_ok=True)
    shutil.copytree(paths.build_platform_root, destination)
    _copy_file(paths.build_root / "NOTICE", paths.bundle_dir / "NOTICE")
|
||||
|
||||
|
||||
def build_bundle(paths: RuntimeBuildPaths, *, sync: bool) -> dict[str, Any]:
    """Validate sources, stage every artifact, and emit the bundle manifest.

    When *sync* is true the staged tree is also copied into the packaged
    bundle directory. Returns the generated manifest mapping.

    Raises:
        RuntimeError: if the lock's platform does not match the requested one
            or source validation fails.
    """
    lock = _load_lock(paths)
    if lock.platform != paths.platform:
        raise RuntimeError(
            "runtime lock platform "
            f"{lock.platform!r} does not match requested platform {paths.platform!r}"
        )
    validate_sources(paths, lock)
    # Start from a clean build tree so stale artifacts never leak into bundles.
    if paths.build_root.exists():
        shutil.rmtree(paths.build_root)
    paths.build_platform_root.mkdir(parents=True, exist_ok=True)
    _copy_notice(paths)
    for stage in (stage_binaries, stage_kernel, stage_rootfs, stage_agent):
        stage(paths, lock)
    manifest = generate_manifest(paths, lock)
    if sync:
        sync_bundle(paths)
    return manifest
|
||||
|
||||
|
||||
def materialize_sources(paths: RuntimeBuildPaths) -> None:
    """Fetch binaries and build the kernel and rootfs artifacts for the lock."""
    lock = _load_lock(paths)
    for step in (materialize_binaries, materialize_kernel, materialize_rootfs):
        step(paths, lock)
|
||||
|
||||
|
||||
def _build_paths(
    *,
    source_dir: Path,
    build_dir: Path,
    bundle_dir: Path,
    platform: str,
    materialized_dir: Path = DEFAULT_RUNTIME_MATERIALIZED_DIR,
) -> RuntimeBuildPaths:
    """Expand the CLI directory options into a RuntimeBuildPaths value."""

    def _platformed(root: Path) -> Path:
        # Every tree keeps its per-platform artifacts in a subdirectory.
        return root / platform

    return RuntimeBuildPaths(
        source_root=source_dir,
        source_platform_root=_platformed(source_dir),
        build_root=build_dir,
        build_platform_root=_platformed(build_dir),
        bundle_dir=bundle_dir,
        materialized_root=materialized_dir,
        materialized_platform_root=_platformed(materialized_dir),
        platform=platform,
    )
|
||||
|
||||
|
||||
def _build_parser() -> argparse.ArgumentParser:  # pragma: no cover - CLI wiring
    """Construct the argument parser for the runtime bundle build CLI."""
    commands = (
        "fetch-binaries",
        "build-kernel",
        "build-rootfs",
        "materialize",
        "stage-binaries",
        "stage-kernel",
        "stage-rootfs",
        "stage-agent",
        "validate",
        "manifest",
        "sync",
        "bundle",
    )
    parser = argparse.ArgumentParser(description="Build packaged runtime bundles for pyro-mcp.")
    parser.add_argument("command", choices=list(commands))
    for flag, default in (
        ("--platform", DEFAULT_PLATFORM),
        ("--source-dir", str(DEFAULT_RUNTIME_SOURCE_DIR)),
        ("--build-dir", str(DEFAULT_RUNTIME_BUILD_DIR)),
        ("--bundle-dir", str(DEFAULT_RUNTIME_BUNDLE_DIR)),
        ("--materialized-dir", str(DEFAULT_RUNTIME_MATERIALIZED_DIR)),
    ):
        parser.add_argument(flag, default=default)
    return parser
|
||||
|
||||
|
||||
def main() -> None:  # pragma: no cover - CLI wiring
    """CLI entry point: dispatch a single bundle-pipeline command."""
    args = _build_parser().parse_args()
    paths = _build_paths(
        source_dir=Path(args.source_dir),
        build_dir=Path(args.build_dir),
        bundle_dir=Path(args.bundle_dir),
        materialized_dir=Path(args.materialized_dir),
        platform=args.platform,
    )
    # The lock is loaded unconditionally so every command fails fast on a
    # missing or malformed lock file.
    lock = _load_lock(paths)

    def _staged(stage_fn: Any, *, with_notice: bool = False) -> Any:
        # Staging commands need the build tree to exist before copying into it.
        def runner() -> None:
            paths.build_platform_root.mkdir(parents=True, exist_ok=True)
            if with_notice:
                _copy_notice(paths)
            stage_fn(paths, lock)

        return runner

    handlers: dict[str, Any] = {
        "fetch-binaries": lambda: materialize_binaries(paths, lock),
        "build-kernel": lambda: materialize_kernel(paths, lock),
        "build-rootfs": lambda: materialize_rootfs(paths, lock),
        "materialize": lambda: materialize_sources(paths),
        "bundle": lambda: build_bundle(paths, sync=True),
        "stage-binaries": _staged(stage_binaries, with_notice=True),
        "stage-kernel": _staged(stage_kernel),
        "stage-rootfs": _staged(stage_rootfs),
        "stage-agent": _staged(stage_agent),
        "validate": lambda: validate_sources(paths, lock),
        "manifest": lambda: generate_manifest(paths, lock),
        "sync": lambda: sync_bundle(paths),
    }
    handler = handlers.get(args.command)
    if handler is None:
        # argparse `choices` should make this unreachable; kept as a guard.
        raise RuntimeError(f"unknown command: {args.command}")
    handler()
|
||||
Binary file not shown.
Binary file not shown.
74
src/pyro_mcp/runtime_bundle/linux-x86_64/guest/pyro_guest_agent.py
Executable file
74
src/pyro_mcp/runtime_bundle/linux-x86_64/guest/pyro_guest_agent.py
Executable file
|
|
@ -0,0 +1,74 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Minimal guest-side exec agent for pyro runtime bundles."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import socket
|
||||
import subprocess
|
||||
import time
|
||||
from typing import Any
|
||||
|
||||
# Fixed vsock port the server binds to.
PORT = 5005
# Maximum bytes pulled per recv() call.
BUFFER_SIZE = 65536
|
||||
|
||||
|
||||
def _read_request(conn: socket.socket) -> dict[str, Any]:
    """Read one newline-terminated JSON object from *conn*.

    Reading stops at EOF or at the first chunk containing a newline.

    Raises:
        RuntimeError: if the decoded payload is not a JSON object.
    """
    received = bytearray()
    while True:
        part = conn.recv(BUFFER_SIZE)
        if not part:
            break
        received.extend(part)
        # A newline marks the end of the single-line JSON request.
        if b"\n" in part:
            break
    payload = json.loads(bytes(received).decode("utf-8").strip())
    if not isinstance(payload, dict):
        raise RuntimeError("request must be a JSON object")
    return payload
|
||||
|
||||
|
||||
def _run_command(command: str, timeout_seconds: int) -> dict[str, Any]:
|
||||
started = time.monotonic()
|
||||
try:
|
||||
proc = subprocess.run(
|
||||
["/bin/sh", "-lc", command],
|
||||
text=True,
|
||||
capture_output=True,
|
||||
timeout=timeout_seconds,
|
||||
check=False,
|
||||
)
|
||||
return {
|
||||
"stdout": proc.stdout,
|
||||
"stderr": proc.stderr,
|
||||
"exit_code": proc.returncode,
|
||||
"duration_ms": int((time.monotonic() - started) * 1000),
|
||||
}
|
||||
except subprocess.TimeoutExpired:
|
||||
return {
|
||||
"stdout": "",
|
||||
"stderr": f"command timed out after {timeout_seconds}s",
|
||||
"exit_code": 124,
|
||||
"duration_ms": int((time.monotonic() - started) * 1000),
|
||||
}
|
||||
|
||||
|
||||
def main() -> None:
    """Serve exec requests over vsock forever.

    Binds VMADDR_CID_ANY on the fixed PORT, reads one newline-terminated JSON
    request per connection, executes it via ``_run_command``, and replies with
    a single JSON line. Per-connection failures (malformed JSON, peer resets)
    are answered with an error payload where possible and never tear down the
    accept loop — the original version crashed the whole agent on the first
    bad request.

    Raises:
        SystemExit: if the platform has no AF_VSOCK support.
    """
    family = getattr(socket, "AF_VSOCK", None)
    if family is None:
        raise SystemExit("AF_VSOCK is unavailable")
    with socket.socket(family, socket.SOCK_STREAM) as server:
        server.bind((socket.VMADDR_CID_ANY, PORT))
        server.listen(1)
        while True:
            conn, _ = server.accept()
            with conn:
                try:
                    request = _read_request(conn)
                    command = str(request.get("command", ""))
                    timeout_seconds = int(request.get("timeout_seconds", 30))
                    response = _run_command(command, timeout_seconds)
                except Exception as exc:  # noqa: BLE001 - agent must outlive bad requests
                    response = {
                        "stdout": "",
                        "stderr": f"agent error: {exc}",
                        "exit_code": 125,
                    }
                try:
                    conn.sendall((json.dumps(response) + "\n").encode("utf-8"))
                except OSError:
                    # Peer vanished mid-reply; keep serving.
                    pass
|
||||
|
||||
|
||||
# Run the long-lived guest agent when executed directly (e.g. by the init service).
if __name__ == "__main__":
    main()
|
||||
|
|
@ -1,48 +1,66 @@
|
|||
{
|
||||
"bundle_version": "0.1.0",
|
||||
"platform": "linux-x86_64",
|
||||
"binaries": {
|
||||
"firecracker": {
|
||||
"path": "bin/firecracker",
|
||||
"sha256": "2ff2d53551abcbf7ddebd921077214bff31910d4dfd894cc6fe66511d9f188e7"
|
||||
"sha256": "b99ea49b8d8b7bfa307d3845585d6a97f7642aa17a985749900370070d8ca930"
|
||||
},
|
||||
"jailer": {
|
||||
"path": "bin/jailer",
|
||||
"sha256": "d79e972b3ede34b1c3eb9d54c9f1853a62a8525f78c39c8dab4d5d79a6783fe9"
|
||||
"sha256": "86622337f91df329cca72bb21cd1324fb8b6fa47931601d65ee4b2c72ef2cae5"
|
||||
}
|
||||
},
|
||||
"bundle_version": "0.1.0",
|
||||
"capabilities": {
|
||||
"guest_exec": false,
|
||||
"guest_network": false,
|
||||
"vm_boot": false
|
||||
},
|
||||
"component_versions": {
|
||||
"base_distro": "debian-bookworm-20250210",
|
||||
"firecracker": "1.12.1",
|
||||
"guest_agent": "0.1.0-dev",
|
||||
"jailer": "1.12.1",
|
||||
"kernel": "5.10.210"
|
||||
},
|
||||
"guest": {
|
||||
"agent": {
|
||||
"path": "guest/pyro_guest_agent.py",
|
||||
"sha256": "65bf8a9a57ffd7321463537e598c4b30f0a13046cbd4538f1b65bc351da5d3c0"
|
||||
}
|
||||
},
|
||||
"platform": "linux-x86_64",
|
||||
"profiles": {
|
||||
"debian-base": {
|
||||
"description": "Minimal Debian userspace for shell and core Unix tooling.",
|
||||
"kernel": {
|
||||
"path": "profiles/debian-base/vmlinux",
|
||||
"sha256": "a0bd6422be1061bb3b70a7895e82f66c25c59022d1e8a72b6fc9cdee4136f108"
|
||||
"sha256": "15bcea4fa224131951888408978ff22fc2173f2782365c0617a900fe029bd8fb"
|
||||
},
|
||||
"rootfs": {
|
||||
"path": "profiles/debian-base/rootfs.ext4",
|
||||
"sha256": "2794a4bdc232b6a6267cfc1eaaa696f0efccd2f8f2e130f3ade736637de89dcd"
|
||||
}
|
||||
},
|
||||
"debian-git": {
|
||||
"description": "Debian base environment with Git preinstalled.",
|
||||
"kernel": {
|
||||
"path": "profiles/debian-git/vmlinux",
|
||||
"sha256": "eaf871c952bf6476f0299b1f501eddc302105e53c99c86161fa815e90cf5bc9f"
|
||||
},
|
||||
"rootfs": {
|
||||
"path": "profiles/debian-git/rootfs.ext4",
|
||||
"sha256": "17863bd1496a9a08d89d6e4c73bd619d39bbe7f6089f1903837525629557c076"
|
||||
"sha256": "46247e10fe9b223b15c4ccc672710c2f3013bf562ed9cf9b48af1f092d966494"
|
||||
}
|
||||
},
|
||||
"debian-build": {
|
||||
"description": "Debian Git environment with common build tools for source builds.",
|
||||
"kernel": {
|
||||
"path": "profiles/debian-build/vmlinux",
|
||||
"sha256": "c33994b1da43cf2f11ac9d437c034eaa71496b566a45028a9ae6f657105dc2b6"
|
||||
"sha256": "15bcea4fa224131951888408978ff22fc2173f2782365c0617a900fe029bd8fb"
|
||||
},
|
||||
"rootfs": {
|
||||
"path": "profiles/debian-build/rootfs.ext4",
|
||||
"sha256": "ac148235c86a51c87228e17a8cf2c9452921886c094de42b470d5f42dab70226"
|
||||
"sha256": "a0e9ec968b0fc6826f94a678164abc8c9b661adf87984184bd08abd1da15d7b6"
|
||||
}
|
||||
},
|
||||
"debian-git": {
|
||||
"description": "Debian base environment with Git preinstalled.",
|
||||
"kernel": {
|
||||
"path": "profiles/debian-git/vmlinux",
|
||||
"sha256": "15bcea4fa224131951888408978ff22fc2173f2782365c0617a900fe029bd8fb"
|
||||
},
|
||||
"rootfs": {
|
||||
"path": "profiles/debian-git/rootfs.ext4",
|
||||
"sha256": "e28ba2e3fa9ed37bcc9fc04a9b4414f0b29d8c7378508e10be78049a38c25894"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
from __future__ import annotations
|
||||
|
||||
import ipaddress
|
||||
import json
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
|
@ -45,10 +46,29 @@ def build_launch_plan(instance: VmInstanceLike) -> FirecrackerLaunchPlan:
|
|||
guest_network_path = instance.workdir / "guest-network.json"
|
||||
guest_exec_path = instance.workdir / "guest-exec.json"
|
||||
|
||||
boot_args = [
|
||||
"console=ttyS0",
|
||||
"reboot=k",
|
||||
"panic=1",
|
||||
"pci=off",
|
||||
"init=/opt/pyro/bin/pyro-init",
|
||||
]
|
||||
if instance.network is not None:
|
||||
network = ipaddress.ip_network(instance.network.subnet_cidr, strict=False)
|
||||
prefixlen = network.prefixlen
|
||||
boot_args.extend(
|
||||
[
|
||||
f"pyro.guest_ip={instance.network.guest_ip}",
|
||||
f"pyro.gateway_ip={instance.network.gateway_ip}",
|
||||
f"pyro.netmask={prefixlen}",
|
||||
f"pyro.dns={','.join(instance.network.dns_servers)}",
|
||||
]
|
||||
)
|
||||
|
||||
config: dict[str, Any] = {
|
||||
"boot-source": {
|
||||
"kernel_image_path": instance.metadata["kernel_image"],
|
||||
"boot_args": "console=ttyS0 reboot=k panic=1 pci=off",
|
||||
"boot_args": " ".join(boot_args),
|
||||
},
|
||||
"drives": [
|
||||
{
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue