feat: add gitea agentic runtime control plane
This commit is contained in:
5
engine/devops_agent/__init__.py
Normal file
5
engine/devops_agent/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
"""Runtime package for agentic DevOps workflow execution."""
|
||||
|
||||
__all__ = ["__version__"]
|
||||
|
||||
__version__ = "0.1.0"
|
||||
BIN
engine/devops_agent/__pycache__/__init__.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/__init__.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/cli.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/cli.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/compiler.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/compiler.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/evidence.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/evidence.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/policies.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/policies.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/runtime.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/runtime.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/spec.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/spec.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/__pycache__/validator.cpython-314.pyc
Normal file
BIN
engine/devops_agent/__pycache__/validator.cpython-314.pyc
Normal file
Binary file not shown.
130
engine/devops_agent/cli.py
Normal file
130
engine/devops_agent/cli.py
Normal file
@@ -0,0 +1,130 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
from collections.abc import Sequence
|
||||
from pathlib import Path
|
||||
|
||||
from engine.devops_agent.compiler import compile_workflow
|
||||
from engine.devops_agent.providers.gitea import GiteaProvider
|
||||
from engine.devops_agent.runtime import run_issue_comment_workflow
|
||||
from engine.devops_agent.spec import load_workflow_spec
|
||||
from engine.devops_agent.validator import validate_workflow_spec
|
||||
|
||||
|
||||
def build_parser() -> argparse.ArgumentParser:
    """Construct the argument parser for the devops-agent CLI.

    Subcommands: ``compile``, ``validate``, ``run``, ``acceptance``.
    A bare ``--version`` flag prints the runtime version instead of
    dispatching to a subcommand.
    """
    parser = argparse.ArgumentParser(
        prog="devops-agent",
        description="CLI for the agentic DevOps runtime.",
    )
    parser.add_argument(
        "--version",
        action="store_true",
        help="Print the runtime version and exit.",
    )
    subparsers = parser.add_subparsers(dest="command")

    compile_cmd = subparsers.add_parser("compile")
    compile_cmd.add_argument("spec_path")
    compile_cmd.add_argument("--output", required=True)

    validate_cmd = subparsers.add_parser("validate")
    validate_cmd.add_argument("spec_path")

    run_cmd = subparsers.add_parser("run")
    run_cmd.add_argument("spec_path")
    # All run-time connection/location options are mandatory.
    for flag in ("--event-payload", "--output-dir", "--base-url", "--token"):
        run_cmd.add_argument(flag, required=True)

    acceptance_cmd = subparsers.add_parser("acceptance")
    acceptance_cmd.add_argument("spec_path")
    for flag in ("--base-url", "--repo", "--token", "--issue-number", "--output-dir"):
        acceptance_cmd.add_argument(flag, required=True)
    acceptance_cmd.add_argument(
        "--comment-body",
        default="@devops-agent acceptance run",
    )
    return parser
|
||||
|
||||
def _load_compile_and_validate(spec_path: str) -> tuple[dict[str, object], list[str]]:
    """Load a workflow spec, compile it, and collect validation errors.

    Returns the compiled lock document together with the (possibly empty)
    list of validation error messages; callers decide how to react to
    errors. Note the spec is compiled even when validation fails.
    """
    spec = load_workflow_spec(spec_path)
    errors = validate_workflow_spec(spec)
    return compile_workflow(spec), errors
|
||||
|
||||
def _emit_validation_errors(errors: list[str]) -> None:
    """Print validation errors as a machine-readable JSON document."""
    print(json.dumps({"errors": errors}, ensure_ascii=False, indent=2))


def main(argv: Sequence[str] | None = None) -> int:
    """CLI entry point.

    Returns a process exit code: 0 on success (including help/version),
    1 when the spec fails validation. An unknown command exits via
    ``parser.error`` with status 2.
    """
    parser = build_parser()
    args = parser.parse_args(argv)

    if args.version:
        # Imported lazily so --version does not depend on the rest of
        # the runtime package importing cleanly.
        from engine.devops_agent import __version__

        print(__version__)
        return 0

    if not getattr(args, "command", None):
        parser.print_help()
        return 0

    # Every subcommand begins from a compiled, validated spec, so the
    # load/validate/report sequence is hoisted out of the branches
    # (previously duplicated in all four of them).
    lock, errors = _load_compile_and_validate(args.spec_path)
    if errors:
        _emit_validation_errors(errors)
        return 1

    if args.command == "compile":
        output_path = Path(args.output)
        output_path.parent.mkdir(parents=True, exist_ok=True)
        output_path.write_text(json.dumps(lock, ensure_ascii=False, indent=2), encoding="utf-8")
        return 0

    if args.command == "validate":
        print("workflow is valid")
        return 0

    if args.command == "run":
        provider = GiteaProvider(base_url=args.base_url, token=args.token)
        payload = json.loads(Path(args.event_payload).read_text(encoding="utf-8"))
        run_issue_comment_workflow(
            lock=lock,
            provider=provider,
            event_payload=payload,
            output_dir=args.output_dir,
        )
        return 0

    if args.command == "acceptance":
        provider = GiteaProvider(base_url=args.base_url, token=args.token)
        # Synthesize the issue_comment event payload a webhook would send.
        payload = {
            "repository": {"full_name": args.repo},
            "issue": {"number": int(args.issue_number)},
            "comment": {"body": args.comment_body},
        }
        run_issue_comment_workflow(
            lock=lock,
            provider=provider,
            event_payload=payload,
            output_dir=args.output_dir,
        )
        return 0

    parser.error(f"unsupported command: {args.command}")
    return 2  # unreachable: parser.error raises SystemExit(2)


if __name__ == "__main__":
    raise SystemExit(main())
42
engine/devops_agent/compiler.py
Normal file
42
engine/devops_agent/compiler.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from engine.devops_agent.spec import WorkflowSpec
|
||||
|
||||
|
||||
def _compile_triggers(frontmatter: dict[str, Any]) -> list[dict[str, Any]]:
|
||||
triggers = frontmatter.get("on") or {}
|
||||
if not isinstance(triggers, dict):
|
||||
return []
|
||||
|
||||
compiled: list[dict[str, Any]] = []
|
||||
for event_name, event_config in triggers.items():
|
||||
normalized = {
|
||||
"event": str(event_name),
|
||||
}
|
||||
if isinstance(event_config, dict):
|
||||
normalized.update(event_config)
|
||||
compiled.append(normalized)
|
||||
return compiled
|
||||
|
||||
|
||||
def compile_workflow(spec: WorkflowSpec) -> dict[str, Any]:
    """Compile a parsed spec into the executable lock document.

    The lock is a plain-dict snapshot of everything the runtime needs:
    triggers, safe outputs, required evidence, policy flags, and the
    free-text instructions from the spec body.
    """
    frontmatter = spec.frontmatter
    policy_cfg = frontmatter.get("policy") or {}
    evidence_cfg = frontmatter.get("evidence") or {}

    compiled_policy = {
        # Humans merge by default unless the spec explicitly opts out.
        "require_human_merge": bool(policy_cfg.get("require_human_merge", True)),
        "require_fixed_issue": bool(policy_cfg.get("require_fixed_issue", False)),
        "path_scope": policy_cfg.get("path_scope") or [],
    }

    return {
        "version": 1,
        "workflow_name": spec.name,
        "provider": spec.provider,
        "source": str(spec.source_path.as_posix()),
        "triggers": _compile_triggers(frontmatter),
        "safe_outputs": frontmatter.get("safe_outputs") or {},
        "required_evidence": evidence_cfg.get("required") or [],
        "policy": compiled_policy,
        "instructions": spec.body,
    }
||||
16
engine/devops_agent/evidence.py
Normal file
16
engine/devops_agent/evidence.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def write_run_artifact(output_dir: str | Path, artifact: dict[str, Any]) -> Path:
    """Persist *artifact* as pretty-printed JSON under *output_dir*.

    Creates the directory (and parents) if needed and writes the file
    as ``run-artifact.json``. Returns the path of the written file.
    """
    target_dir = Path(output_dir)
    target_dir.mkdir(parents=True, exist_ok=True)
    target = target_dir / "run-artifact.json"
    serialized = json.dumps(artifact, ensure_ascii=False, indent=2)
    target.write_text(serialized, encoding="utf-8")
    return target
||||
43
engine/devops_agent/policies.py
Normal file
43
engine/devops_agent/policies.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
|
||||
class PolicyViolation(PermissionError):
    """Raised when runtime behavior violates declared workflow policy.

    Subclasses PermissionError so callers may catch either the specific
    policy failure or the broader permission-error category.
    """
||||
|
||||
|
||||
def _normalize_path(path: str) -> str:
|
||||
return path.replace("\\", "/").lstrip("./")
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class RuntimePolicy:
|
||||
safe_outputs: dict[str, dict[str, int | str | bool]]
|
||||
path_scope: list[str]
|
||||
_operation_counts: dict[str, int] = field(default_factory=dict)
|
||||
|
||||
def assert_operation_allowed(self, action: str) -> None:
|
||||
config = self.safe_outputs.get(action)
|
||||
if config is None:
|
||||
raise PolicyViolation(f"write action '{action}' is not declared in safe_outputs")
|
||||
|
||||
current_count = self._operation_counts.get(action, 0) + 1
|
||||
max_count = int(config.get("max", current_count))
|
||||
if current_count > max_count:
|
||||
raise PolicyViolation(f"write action '{action}' exceeded max count {max_count}")
|
||||
|
||||
self._operation_counts[action] = current_count
|
||||
|
||||
def assert_path_allowed(self, path: str) -> None:
|
||||
normalized = _normalize_path(path)
|
||||
if not self.path_scope:
|
||||
raise PolicyViolation("file writes are not allowed without an explicit path scope")
|
||||
|
||||
for allowed_prefix in self.path_scope:
|
||||
if normalized.startswith(_normalize_path(allowed_prefix)):
|
||||
return
|
||||
|
||||
raise PolicyViolation(
|
||||
f"path '{normalized}' is outside allowed path scope {self.path_scope}"
|
||||
)
|
||||
4
engine/devops_agent/providers/__init__.py
Normal file
4
engine/devops_agent/providers/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Provider integrations exposed by the devops agent runtime."""

from engine.devops_agent.providers.base import IssueProvider
from engine.devops_agent.providers.gitea import GiteaProvider

__all__ = ["IssueProvider", "GiteaProvider"]
||||
Binary file not shown.
BIN
engine/devops_agent/providers/__pycache__/base.cpython-314.pyc
Normal file
BIN
engine/devops_agent/providers/__pycache__/base.cpython-314.pyc
Normal file
Binary file not shown.
BIN
engine/devops_agent/providers/__pycache__/gitea.cpython-314.pyc
Normal file
BIN
engine/devops_agent/providers/__pycache__/gitea.cpython-314.pyc
Normal file
Binary file not shown.
16
engine/devops_agent/providers/base.py
Normal file
16
engine/devops_agent/providers/base.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Protocol
|
||||
|
||||
|
||||
class IssueProvider(Protocol):
    """Structural interface an issue-tracker backend must satisfy.

    Any object with these three methods is accepted (typing.Protocol);
    GiteaProvider is the concrete implementation in this package.
    """

    def get_issue(self, repo: str, issue_number: int) -> dict[str, Any]:
        """Fetch one issue from *repo* by number, as a plain dict."""
        ...

    def post_issue_comment(
        self,
        repo: str,
        issue_number: int,
        body: str,
    ) -> dict[str, Any]:
        """Create a comment on an issue and return the provider response."""
        ...

    def parse_issue_comment_event(self, payload: dict[str, Any]) -> dict[str, Any]:
        """Extract repo/issue/comment fields from a webhook event payload."""
        ...
||||
73
engine/devops_agent/providers/gitea.py
Normal file
73
engine/devops_agent/providers/gitea.py
Normal file
@@ -0,0 +1,73 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Any, Callable
|
||||
from urllib.request import Request, urlopen
|
||||
|
||||
|
||||
# Injectable HTTP transport: invoked with method/url/headers/body keyword
# arguments and expected to return the decoded JSON response. When set,
# it replaces the urllib-based request path entirely.
Transport = Callable[..., dict[str, Any]]


class GiteaProvider:
    """Minimal Gitea REST client for reading issues and posting comments."""

    def __init__(
        self,
        *,
        base_url: str,
        token: str,
        transport: Transport | None = None,
    ) -> None:
        # Trailing slashes are stripped so path joining stays predictable.
        self.base_url = base_url.rstrip("/")
        self.token = token
        self.transport = transport

    def _request(
        self,
        *,
        method: str,
        path: str,
        body: dict[str, object] | None = None,
    ) -> dict[str, Any]:
        """Perform one API call, via the injected transport when present."""
        url = f"{self.base_url}{path}"
        headers = {
            "Authorization": f"token {self.token}",
            "Accept": "application/json",
            "Content-Type": "application/json",
        }

        if self.transport is not None:
            return self.transport(method=method, url=url, headers=headers, body=body)

        encoded = json.dumps(body).encode("utf-8") if body is not None else None
        http_request = Request(url, method=method, headers=headers, data=encoded)
        with urlopen(http_request, timeout=30) as response:
            text = response.read().decode("utf-8")
        # An empty response body decodes to an empty dict.
        return json.loads(text) if text else {}

    def get_issue(self, repo: str, issue_number: int) -> dict[str, Any]:
        """Fetch issue *issue_number* from *repo* ("owner/name")."""
        return self._request(
            method="GET",
            path=f"/api/v1/repos/{repo}/issues/{issue_number}",
        )

    def post_issue_comment(
        self,
        repo: str,
        issue_number: int,
        body: str,
    ) -> dict[str, Any]:
        """Post *body* as a new comment on the given issue."""
        return self._request(
            method="POST",
            path=f"/api/v1/repos/{repo}/issues/{issue_number}/comments",
            body={"body": body},
        )

    def parse_issue_comment_event(self, payload: dict[str, Any]) -> dict[str, Any]:
        """Normalize an issue_comment webhook payload into a flat record.

        Missing sections fall back to empty-string / zero defaults rather
        than raising.
        """
        repo_info = payload.get("repository") or {}
        issue_info = payload.get("issue") or {}
        comment_info = payload.get("comment") or {}
        return {
            "repo": repo_info.get("full_name", ""),
            "issue_number": int(issue_info.get("number", 0)),
            "comment_body": str(comment_info.get("body", "")),
        }
||||
65
engine/devops_agent/runtime.py
Normal file
65
engine/devops_agent/runtime.py
Normal file
@@ -0,0 +1,65 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from engine.devops_agent.evidence import write_run_artifact
|
||||
from engine.devops_agent.policies import RuntimePolicy
|
||||
|
||||
|
||||
def run_issue_comment_workflow(
    *,
    lock: dict[str, Any],
    provider: Any,
    event_payload: dict[str, Any],
    output_dir: str | Path,
) -> dict[str, Any]:
    """Execute the issue_comment workflow described by *lock*.

    Parses the webhook payload via *provider*, enforces the declared
    safe-output policy, posts a summary comment on the issue, and writes
    a JSON run artifact under *output_dir*.

    Returns the artifact dict (including an "artifact_path" key).
    Raises PolicyViolation (from RuntimePolicy) when "add_comment" is
    not declared in the lock's safe_outputs or exceeds its quota.
    """
    # Proper local import; this was previously an inline
    # __import__("json") hack at the final write_text call.
    import json

    event = provider.parse_issue_comment_event(event_payload)
    repo = str(event["repo"])
    issue_number = int(event["issue_number"])
    issue = provider.get_issue(repo, issue_number)

    policy = RuntimePolicy(
        safe_outputs=lock.get("safe_outputs") or {},
        path_scope=lock.get("policy", {}).get("path_scope") or [],
    )
    # The single write this workflow performs is one issue comment.
    policy.assert_operation_allowed("add_comment")

    verification_summary = (
        f"Workflow `{lock['workflow_name']}` processed issue #{issue_number} "
        f"and prepared evidence for review."
    )
    comment_response = provider.post_issue_comment(
        repo,
        issue_number,
        verification_summary,
    )

    artifact: dict[str, Any] = {
        "run_id": f"{lock['workflow_name']}-issue-{issue_number}",
        "workflow_name": lock["workflow_name"],
        "provider": lock["provider"],
        "event": event,
        "plan_state": {
            "status": "pending_review",
            "repo": repo,
            "issue_number": issue_number,
            "issue_title": issue.get("title", ""),
        },
        "operations": [
            {
                "action": "add_comment",
                "issue_number": issue_number,
                "repo": repo,
            }
        ],
        "evidence": {
            "issue_comment": comment_response,
            "verification_summary": verification_summary,
        },
        "result": "success",
    }
    # First write establishes the artifact's location; the second records
    # that location inside the artifact so the file is self-describing.
    artifact_path = write_run_artifact(output_dir, artifact)
    artifact["artifact_path"] = str(artifact_path.as_posix())
    artifact_path.write_text(
        json.dumps(artifact, ensure_ascii=False, indent=2),
        encoding="utf-8",
    )
    return artifact
||||
60
engine/devops_agent/spec.py
Normal file
60
engine/devops_agent/spec.py
Normal file
@@ -0,0 +1,60 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
class WorkflowSpecError(ValueError):
    """Raised when a workflow spec cannot be parsed or is incomplete.

    Subclasses ValueError so generic input-validation handlers also apply.
    """
||||
|
||||
|
||||
@dataclass(slots=True)
class WorkflowSpec:
    """Parsed workflow spec: YAML frontmatter plus free-text body."""

    # Workflow name taken from the frontmatter "name" field (required).
    name: str
    # Target provider identifier from the frontmatter, e.g. "gitea" (required).
    provider: str
    # Full parsed frontmatter mapping.
    frontmatter: dict[str, Any]
    # Text that follows the frontmatter (workflow instructions).
    body: str
    # Filesystem location the spec was loaded from.
    source_path: Path
||||
|
||||
|
||||
def _split_frontmatter(raw_text: str) -> tuple[str, str]:
|
||||
if not raw_text.startswith("---"):
|
||||
raise WorkflowSpecError("workflow spec must start with frontmatter")
|
||||
|
||||
parts = raw_text.split("\n---", 1)
|
||||
if len(parts) != 2:
|
||||
raise WorkflowSpecError("workflow spec frontmatter is not terminated")
|
||||
|
||||
frontmatter_text = parts[0][4:]
|
||||
body = parts[1].lstrip("\r\n")
|
||||
return frontmatter_text, body
|
||||
|
||||
|
||||
def load_workflow_spec(path: str | Path) -> WorkflowSpec:
    """Read, parse, and minimally validate a workflow spec file.

    Raises WorkflowSpecError when the frontmatter is missing, is not a
    mapping, or lacks the required "name"/"provider" fields.
    """
    source_path = Path(path)
    raw_text = source_path.read_text(encoding="utf-8")
    frontmatter_text, body = _split_frontmatter(raw_text)

    parsed = yaml.safe_load(frontmatter_text) or {}
    if not isinstance(parsed, dict):
        raise WorkflowSpecError("workflow spec frontmatter must be a mapping")
    # YAML 1.1 parses a bare `on:` key as the boolean True; restore the
    # intended "on" key when that happened.
    if True in parsed and "on" not in parsed:
        parsed["on"] = parsed.pop(True)

    name = str(parsed.get("name") or "").strip()
    provider = str(parsed.get("provider") or "").strip()
    if not name:
        raise WorkflowSpecError("workflow spec is missing required field: name")
    if not provider:
        raise WorkflowSpecError("workflow spec is missing required field: provider")

    return WorkflowSpec(
        name=name,
        provider=provider,
        frontmatter=parsed,
        body=body,
        source_path=source_path,
    )
||||
49
engine/devops_agent/validator.py
Normal file
49
engine/devops_agent/validator.py
Normal file
@@ -0,0 +1,49 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from engine.devops_agent.spec import WorkflowSpec
|
||||
|
||||
# Permission categories that grant write access when their value is "write".
WRITE_PERMISSIONS = {"issues", "pull_requests", "contents"}
||||
|
||||
|
||||
def _is_write_permission(value: Any) -> bool:
|
||||
return str(value).strip().lower() == "write"
|
||||
|
||||
|
||||
def validate_workflow_spec(spec: WorkflowSpec) -> list[str]:
    """Collect validation error messages for *spec*; an empty list means valid."""
    errors: list[str] = []

    if spec.provider not in {"gitea"}:
        errors.append(f"unsupported provider: {spec.provider}")

    triggers = spec.frontmatter.get("on")
    if not isinstance(triggers, dict) or not triggers:
        errors.append("workflow spec must declare at least one trigger in 'on'")

    permissions = spec.frontmatter.get("permissions") or {}
    safe_outputs = spec.frontmatter.get("safe_outputs") or {}

    if not isinstance(permissions, dict):
        errors.append("'permissions' must be a mapping")
    if not isinstance(safe_outputs, dict):
        errors.append("'safe_outputs' must be a mapping")
    if isinstance(permissions, dict):
        # Write-capable workflows must declare their safe outputs up front.
        grants_write = any(
            name in WRITE_PERMISSIONS and _is_write_permission(value)
            for name, value in permissions.items()
        )
        if grants_write and not safe_outputs:
            errors.append("write permissions require declared safe_outputs")

    policy = spec.frontmatter.get("policy") or {}
    if policy and not isinstance(policy, dict):
        errors.append("'policy' must be a mapping")
    elif isinstance(policy, dict) and "path_scope" in policy:
        scope = policy["path_scope"]
        scope_is_valid = isinstance(scope, list) and all(
            isinstance(item, str) and item.strip() for item in scope
        )
        if not scope_is_valid:
            errors.append("policy.path_scope must be a list of non-empty path prefixes")

    return errors
|
||||
Reference in New Issue
Block a user