feat: add repository mesh reconciliation round

This commit is contained in:
2026-04-30 10:50:07 -03:00
parent 3d2748adf5
commit b79fdce99d
113 changed files with 81555 additions and 22807 deletions

View File

@@ -8,6 +8,7 @@ from pathlib import Path
from .models import as_plain_data
from .matrix import build_global_recommendations, build_matrix, build_platform_reports
from .mcp_contract import build_mcp_contract_report, mcp_provider_compact_json, mcp_provider_payload
from .operational_dossier import build_execution_round_dossier
from .governance_engine import build_governance_portfolio, compact_governance_payload
from .human_rulebook import evaluate_rulebook, rulebook_compact_json
@@ -15,6 +16,20 @@ from .human_readiness_registry import build_readiness_registry
from .runtime_budget import build_round_line_budget
from .orders import build_exit_orders
from .reports import generate
from .repository_mesh import mesh_summary_payload, run_repository_mesh
from .repository_mesh_reconciliation import apply_reconciliation_to_report, reconciliation_payload
from .repository_mesh_runtime import (
acquire_lock,
build_runtime_cycle,
cron_scheduler_spec,
release_lock,
scheduler_payload,
windows_scheduler_spec,
write_runtime_artifacts,
)
from .repository_mesh_semantic import write_repository_mesh_semantic_state
from .repository_mesh_readiness import build_mesh_readiness_report, write_readiness_artifacts
from .repository_mesh_gitea import build_gitea_mesh_plan, write_gitea_plan_artifacts
from .scanner import environment_summary, scan_ecosystem
from .storage import table_counts
@@ -45,6 +60,16 @@ def build_parser() -> argparse.ArgumentParser:
rulebook = sub.add_parser("rulebook", help="Print compact human-operational rulebook JSON.")
rulebook.add_argument("--ecosystem-root", default="G:/_codex-git")
rulebook.add_argument("--limit", type=int, default=0)
mcp_provider = sub.add_parser("mcp-provider", help="Print the compact Mais Humana MCP provider payload.")
mcp_provider.add_argument("--ecosystem-root", default="G:/_codex-git")
mcp_provider.add_argument("--limit", type=int, default=80)
mcp_provider.add_argument("--envelope", action="store_true")
repo_mesh = sub.add_parser("repo-mesh", help="Inventory repository mirrors and write safe synchronization artifacts.")
repo_mesh.add_argument("--ecosystem-root", default="G:/_codex-git")
repo_mesh.add_argument("--project-root", default="G:/_codex-git/tudo-para-ia-mais-humana-plataform")
repo_mesh.add_argument("--central-platform-folder", default="")
repo_mesh.add_argument("--fetch", action="store_true")
repo_mesh.add_argument("--plugin-auth-attempt", default="")
return parser
@@ -146,6 +171,137 @@ def command_rulebook(args: argparse.Namespace) -> int:
return 0
def command_mcp_provider(args: argparse.Namespace) -> int:
    """Print the compact Mais Humana MCP provider payload as indented JSON.

    Scans the ecosystem root, derives platform reports and rulebook results,
    builds the MCP contract report, and prints either the full envelope or the
    compact payload depending on ``--envelope``. Always returns 0.
    """
    ecosystem_scans = scan_ecosystem(Path(args.ecosystem_root))
    matrix_cells = build_matrix(ecosystem_scans)
    platform_reports = build_platform_reports(ecosystem_scans, matrix_cells)
    rulebook_result = evaluate_rulebook(platform_reports)
    contract_report = build_mcp_contract_report(rulebook_result)
    if args.envelope:
        payload = mcp_provider_payload(contract_report, limit=args.limit)
    else:
        payload = mcp_provider_compact_json(contract_report, limit=args.limit)
    print(json.dumps(payload, ensure_ascii=False, indent=2))
    return 0
def _write_semantic_state_with_fallback(central, project_root, *, report, plan, cycle, specs):
    """Write the semantic SQLite state, preferring the central folder.

    Tries ``central / "controle-semantico.sqlite"`` first (when a central
    folder was requested); on any failure, falls back to the project-root
    copy and reports the error. Returns ``(counts, path_used, error)`` where
    ``error`` is ``""`` on success.
    """
    fallback_path = project_root / "controle-semantico.sqlite"
    if central is not None:
        central_path = central / "controle-semantico.sqlite"
        try:
            counts = write_repository_mesh_semantic_state(
                central_path,
                report=report,
                plan=plan,
                cycle=cycle,
                schedulers=specs,
            )
            return counts, str(central_path), ""
        except Exception as exc:  # best-effort: record and fall back locally
            error = f"{type(exc).__name__}: {exc}"
            counts = write_repository_mesh_semantic_state(
                fallback_path,
                report=report,
                plan=plan,
                cycle=cycle,
                schedulers=specs,
            )
            return counts, str(fallback_path), error
    counts = write_repository_mesh_semantic_state(
        fallback_path,
        report=report,
        plan=plan,
        cycle=cycle,
        schedulers=specs,
    )
    return counts, str(fallback_path), ""


def command_repo_mesh(args: argparse.Namespace) -> int:
    """Run one repository-mesh round and print its JSON summary payload.

    Inventories mirrors, applies reconciliation, writes runtime/readiness/
    Gitea artifacts (and a semantic SQLite state), then prints a combined
    summary. Best-effort behavior: when the central platform folder is not
    writable, artifacts fall back to the project root and the error is
    surfaced in the payload instead of raising. Always returns 0.
    """
    central = Path(args.central_platform_folder) if args.central_platform_folder else None
    central_write_error = ""
    central_for_write = central
    if central_for_write is not None:
        try:
            for folder_name in ("reports", "indexes", "audit", "status"):
                (central_for_write / folder_name).mkdir(parents=True, exist_ok=True)
        except OSError as exc:
            # Central folder unusable for structured artifacts: record the
            # error and route writes to the project root only.
            central_write_error = f"{type(exc).__name__}: {exc}"
            central_for_write = None
    report, records = run_repository_mesh(
        ecosystem_root=Path(args.ecosystem_root),
        project_root=Path(args.project_root),
        central_platform_folder=central_for_write,
        fetch=bool(args.fetch),
        plugin_auth_attempt=args.plugin_auth_attempt,
    )
    plan, reconciliation_records = apply_reconciliation_to_report(
        report,
        Path(args.project_root),
        central_platform_folder=central_for_write,
    )
    project_root = Path(args.project_root)
    lock = acquire_lock(project_root / "dados" / "repository-mesh.lock.json", owner="mais_humana.cli.repo-mesh")
    try:
        cycle = build_runtime_cycle(report, plan, lock=lock, execute=False)
        specs = (
            windows_scheduler_spec(
                python_exe="C:\\Users\\Ami\\.cache\\codex-runtimes\\codex-primary-runtime\\dependencies\\python\\python.exe",
                project_root=project_root,
                ecosystem_root=Path(args.ecosystem_root),
                central_platform_folder=central_for_write,
            ),
            cron_scheduler_spec(
                python_exe="python",
                project_root=project_root,
                ecosystem_root=Path(args.ecosystem_root),
                central_platform_folder=central_for_write,
            ),
        )
        runtime_records = write_runtime_artifacts(cycle, specs, project_root, central_platform_folder=central_for_write)
        # NOTE: the semantic state intentionally targets the *requested*
        # central folder (not the mkdir-validated one) and falls back on
        # its own write failure.
        semantic_counts, semantic_path_used, semantic_write_error = _write_semantic_state_with_fallback(
            central,
            project_root,
            report=report,
            plan=plan,
            cycle=cycle,
            specs=specs,
        )
        readiness = build_mesh_readiness_report(report, plan, cycle, specs, semantic_counts)
        readiness_records = write_readiness_artifacts(readiness, project_root, central_platform_folder=central_for_write)
        gitea_plan = build_gitea_mesh_plan(report)
        gitea_records = write_gitea_plan_artifacts(gitea_plan, project_root, central_platform_folder=central_for_write)
    finally:
        # Always release the round lock, even if an artifact write raised,
        # so a crashed round cannot block the next one.
        release_lock(lock)
    payload = mesh_summary_payload(report)
    payload["reconciliation"] = reconciliation_payload(plan)
    payload["runtime"] = {
        "cycleId": cycle.cycle_id,
        "allowed": cycle.allowed_count,
        "blocked": cycle.blocked_count,
        "skipped": cycle.skipped_count,
        "schedulers": scheduler_payload(specs),
    }
    payload["readiness"] = readiness.to_dict()
    payload["gitea"] = gitea_plan.to_dict()
    payload["centralWrite"] = {
        "requested": str(central) if central is not None else "",
        "used": str(central_for_write) if central_for_write is not None else "",
        "error": central_write_error,
        "semanticPath": semantic_path_used,
        "semanticError": semantic_write_error,
    }
    generated_files: list[str] = []
    for record_group in (records, reconciliation_records, runtime_records, readiness_records, gitea_records):
        generated_files.extend(record.path for record in record_group)
    payload["generatedFiles"] = generated_files
    if central_write_error:
        # Persist the central-write failure locally so operators can audit it.
        status_path = project_root / "dados" / "repository-mesh-central-write-status.json"
        status_path.parent.mkdir(parents=True, exist_ok=True)
        status_path.write_text(
            json.dumps(payload["centralWrite"], ensure_ascii=False, indent=2, sort_keys=True),
            encoding="utf-8",
        )
        payload["generatedFiles"].append(str(status_path))
    print(json.dumps(payload, ensure_ascii=False, indent=2))
    return 0
def main(argv: list[str] | None = None) -> int:
parser = build_parser()
args = parser.parse_args(argv)
@@ -165,6 +321,10 @@ def main(argv: list[str] | None = None) -> int:
return command_line_budget(args)
if args.command == "rulebook":
return command_rulebook(args)
if args.command == "mcp-provider":
return command_mcp_provider(args)
if args.command == "repo-mesh":
return command_repo_mesh(args)
parser.error(f"unknown command: {args.command}")
return 2