feat: add repository mesh reconciliation round

This commit is contained in:
2026-04-30 10:50:07 -03:00
parent 3d2748adf5
commit b79fdce99d
113 changed files with 81555 additions and 22807 deletions

View File

@@ -8,6 +8,7 @@ from pathlib import Path
from .models import as_plain_data
from .matrix import build_global_recommendations, build_matrix, build_platform_reports
from .mcp_contract import build_mcp_contract_report, mcp_provider_compact_json, mcp_provider_payload
from .operational_dossier import build_execution_round_dossier
from .governance_engine import build_governance_portfolio, compact_governance_payload
from .human_rulebook import evaluate_rulebook, rulebook_compact_json
@@ -15,6 +16,20 @@ from .human_readiness_registry import build_readiness_registry
from .runtime_budget import build_round_line_budget
from .orders import build_exit_orders
from .reports import generate
from .repository_mesh import mesh_summary_payload, run_repository_mesh
from .repository_mesh_reconciliation import apply_reconciliation_to_report, reconciliation_payload
from .repository_mesh_runtime import (
acquire_lock,
build_runtime_cycle,
cron_scheduler_spec,
release_lock,
scheduler_payload,
windows_scheduler_spec,
write_runtime_artifacts,
)
from .repository_mesh_semantic import write_repository_mesh_semantic_state
from .repository_mesh_readiness import build_mesh_readiness_report, write_readiness_artifacts
from .repository_mesh_gitea import build_gitea_mesh_plan, write_gitea_plan_artifacts
from .scanner import environment_summary, scan_ecosystem
from .storage import table_counts
@@ -45,6 +60,16 @@ def build_parser() -> argparse.ArgumentParser:
rulebook = sub.add_parser("rulebook", help="Print compact human-operational rulebook JSON.")
rulebook.add_argument("--ecosystem-root", default="G:/_codex-git")
rulebook.add_argument("--limit", type=int, default=0)
mcp_provider = sub.add_parser("mcp-provider", help="Print the compact Mais Humana MCP provider payload.")
mcp_provider.add_argument("--ecosystem-root", default="G:/_codex-git")
mcp_provider.add_argument("--limit", type=int, default=80)
mcp_provider.add_argument("--envelope", action="store_true")
repo_mesh = sub.add_parser("repo-mesh", help="Inventory repository mirrors and write safe synchronization artifacts.")
repo_mesh.add_argument("--ecosystem-root", default="G:/_codex-git")
repo_mesh.add_argument("--project-root", default="G:/_codex-git/tudo-para-ia-mais-humana-plataform")
repo_mesh.add_argument("--central-platform-folder", default="")
repo_mesh.add_argument("--fetch", action="store_true")
repo_mesh.add_argument("--plugin-auth-attempt", default="")
return parser
@@ -146,6 +171,137 @@ def command_rulebook(args: argparse.Namespace) -> int:
return 0
def command_mcp_provider(args: argparse.Namespace) -> int:
    """Handle the ``mcp-provider`` subcommand: print the provider payload as JSON."""
    ecosystem_scans = scan_ecosystem(Path(args.ecosystem_root))
    matrix_cells = build_matrix(ecosystem_scans)
    platform_reports = build_platform_reports(ecosystem_scans, matrix_cells)
    rulebook_report = evaluate_rulebook(platform_reports)
    contract_report = build_mcp_contract_report(rulebook_report)
    # --envelope selects the full transit envelope; otherwise the compact form.
    if args.envelope:
        payload = mcp_provider_payload(contract_report, limit=args.limit)
    else:
        payload = mcp_provider_compact_json(contract_report, limit=args.limit)
    print(json.dumps(payload, ensure_ascii=False, indent=2))
    return 0
def command_repo_mesh(args: argparse.Namespace) -> int:
    """Handle the ``repo-mesh`` subcommand.

    Runs the repository-mesh pipeline (inventory, reconciliation, runtime
    cycle, semantic state, readiness, Gitea plan), writes its artifacts, and
    prints a combined JSON summary. Always returns 0.
    """
    project_root = Path(args.project_root)
    ecosystem_root = Path(args.ecosystem_root)
    central = Path(args.central_platform_folder) if args.central_platform_folder else None

    # Pre-create the central output folders; if that fails we keep running but
    # write everything project-locally and surface the error in the payload.
    central_write_error = ""
    central_for_write = central
    if central_for_write is not None:
        try:
            for folder_name in ("reports", "indexes", "audit", "status"):
                (central_for_write / folder_name).mkdir(parents=True, exist_ok=True)
        except OSError as exc:
            central_write_error = f"{type(exc).__name__}: {exc}"
            central_for_write = None

    report, records = run_repository_mesh(
        ecosystem_root=ecosystem_root,
        project_root=project_root,
        central_platform_folder=central_for_write,
        fetch=bool(args.fetch),
        plugin_auth_attempt=args.plugin_auth_attempt,
    )
    plan, reconciliation_records = apply_reconciliation_to_report(
        report,
        project_root,
        central_platform_folder=central_for_write,
    )

    lock = acquire_lock(project_root / "dados" / "repository-mesh.lock.json", owner="mais_humana.cli.repo-mesh")
    try:
        cycle = build_runtime_cycle(report, plan, lock=lock, execute=False)
        specs = (
            windows_scheduler_spec(
                python_exe="C:\\Users\\Ami\\.cache\\codex-runtimes\\codex-primary-runtime\\dependencies\\python\\python.exe",
                project_root=project_root,
                ecosystem_root=ecosystem_root,
                central_platform_folder=central_for_write,
            ),
            cron_scheduler_spec(
                python_exe="python",
                project_root=project_root,
                ecosystem_root=ecosystem_root,
                central_platform_folder=central_for_write,
            ),
        )
        runtime_records = write_runtime_artifacts(cycle, specs, project_root, central_platform_folder=central_for_write)

        def _write_semantic(target: Path):
            # Single call site for the semantic-state write (the original
            # repeated this block three times); returns (counts, path used).
            counts = write_repository_mesh_semantic_state(
                target,
                report=report,
                plan=plan,
                cycle=cycle,
                schedulers=specs,
            )
            return counts, str(target)

        semantic_write_error = ""
        local_semantic = project_root / "controle-semantico.sqlite"
        if central is not None:
            try:
                semantic_counts, semantic_path_used = _write_semantic(central / "controle-semantico.sqlite")
            except Exception as exc:  # best-effort: fall back to the project-local database
                semantic_write_error = f"{type(exc).__name__}: {exc}"
                semantic_counts, semantic_path_used = _write_semantic(local_semantic)
        else:
            semantic_counts, semantic_path_used = _write_semantic(local_semantic)

        readiness = build_mesh_readiness_report(report, plan, cycle, specs, semantic_counts)
        readiness_records = write_readiness_artifacts(readiness, project_root, central_platform_folder=central_for_write)
        gitea_plan = build_gitea_mesh_plan(report)
        gitea_records = write_gitea_plan_artifacts(gitea_plan, project_root, central_platform_folder=central_for_write)
    finally:
        # Fix: the original leaked the lock when any step between acquire and
        # release raised; always release it.
        release_lock(lock)

    payload = mesh_summary_payload(report)
    payload["reconciliation"] = reconciliation_payload(plan)
    payload["runtime"] = {
        "cycleId": cycle.cycle_id,
        "allowed": cycle.allowed_count,
        "blocked": cycle.blocked_count,
        "skipped": cycle.skipped_count,
        "schedulers": scheduler_payload(specs),
    }
    payload["readiness"] = readiness.to_dict()
    payload["gitea"] = gitea_plan.to_dict()
    payload["centralWrite"] = {
        "requested": str(central) if central is not None else "",
        "used": str(central_for_write) if central_for_write is not None else "",
        "error": central_write_error,
        "semanticPath": semantic_path_used,
        "semanticError": semantic_write_error,
    }
    payload["generatedFiles"] = [
        record.path
        for record in tuple(records)
        + tuple(reconciliation_records)
        + tuple(runtime_records)
        + tuple(readiness_records)
        + tuple(gitea_records)
    ]
    if central_write_error:
        # Persist the central-write failure so later rounds can inspect it.
        status_path = project_root / "dados" / "repository-mesh-central-write-status.json"
        status_path.parent.mkdir(parents=True, exist_ok=True)
        status_path.write_text(
            json.dumps(payload["centralWrite"], ensure_ascii=False, indent=2, sort_keys=True),
            encoding="utf-8",
        )
        payload["generatedFiles"].append(str(status_path))
    print(json.dumps(payload, ensure_ascii=False, indent=2))
    return 0
def main(argv: list[str] | None = None) -> int:
parser = build_parser()
args = parser.parse_args(argv)
@@ -165,6 +321,10 @@ def main(argv: list[str] | None = None) -> int:
return command_line_budget(args)
if args.command == "rulebook":
return command_rulebook(args)
if args.command == "mcp-provider":
return command_mcp_provider(args)
if args.command == "repo-mesh":
return command_repo_mesh(args)
parser.error(f"unknown command: {args.command}")
return 2

View File

@@ -8,6 +8,7 @@ from typing import Iterable, Sequence
from .exit_order_compiler import CompiledOrderSet
from .governance_models import EcosystemGovernancePortfolio
from .human_readiness_registry import ReadinessRegistry
from .mcp_contract import McpContractReport
from .models import as_plain_data, merge_unique, slugify
from .workflow_registry import WorkflowPortfolio
@@ -150,6 +151,42 @@ def order_nodes_edges(compiled: CompiledOrderSet | None, nodes: dict[str, Eviden
add_edge(edges, node_id("platform", platform_hint), order_node, f"creates_{order.order_type.value}", 50, order.reason)
def mcp_contract_nodes_edges(report: McpContractReport | None, nodes: dict[str, EvidenceNode], edges: list[EvidenceEdge]) -> None:
    """Project an MCP contract report onto the evidence graph (no-op when absent)."""
    if report is None:
        return
    provider_node = node_id("mcp-provider", report.provider_id)
    provider = EvidenceNode(
        node_id=provider_node,
        label=report.provider_name,
        node_type="mcp_provider",
        status="published_contract",
        weight=report.average_score,
    )
    add_node(nodes, provider)
    control_plane_node = node_id("platform", "mcps")
    add_edge(edges, control_plane_node, provider_node, "discovers_provider_contract", report.average_score, report.provider_envelope.tool)
    # Only these contract kinds are materialized as graph nodes.
    graphed_kinds = {"provider_tool", "ui_screen", "redaction_policy", "docs_exception", "canonical_rename"}
    for coverage_item in report.coverage:
        if coverage_item.kind.value not in graphed_kinds:
            continue
        contract_node = node_id("mcp-contract", coverage_item.contract_id)
        contract = EvidenceNode(
            node_id=contract_node,
            label=coverage_item.contract_id,
            node_type=f"mcp_{coverage_item.kind.value}",
            status=coverage_item.status.value,
            weight=coverage_item.score,
        )
        add_node(nodes, contract)
        add_edge(edges, provider_node, contract_node, "publishes_contract", coverage_item.score, coverage_item.next_action)
        add_edge(edges, contract_node, node_id("platform", coverage_item.platform_id), "governs_platform", coverage_item.score, coverage_item.truth_state.value)
        if coverage_item.same_source:
            add_edge(edges, contract_node, control_plane_node, "same_source_ready", coverage_item.score, coverage_item.source_payload_hash[:16])
def relation_edges(portfolio: EcosystemGovernancePortfolio, edges: list[EvidenceEdge]) -> None:
    """Add one platform-to-platform edge per entry in the portfolio's relation matrix."""
    for source_platform, target_platform, relation_label in portfolio.relation_matrix:
        source_node = node_id("platform", source_platform)
        target_node = node_id("platform", target_platform)
        add_edge(edges, source_node, target_node, relation_label, 40, "relacao declarada ou inferida pelo dominio")
@@ -173,6 +210,7 @@ def build_evidence_graph(
registry: ReadinessRegistry,
workflows: WorkflowPortfolio,
compiled_orders: CompiledOrderSet | None = None,
mcp_contract_report: McpContractReport | None = None,
) -> EvidenceGraph:
nodes: dict[str, EvidenceNode] = {}
edges: list[EvidenceEdge] = []
@@ -181,6 +219,7 @@ def build_evidence_graph(
registry_nodes_edges(registry, nodes, edges)
workflow_nodes_edges(workflows, nodes, edges)
order_nodes_edges(compiled_orders, nodes, edges)
mcp_contract_nodes_edges(mcp_contract_report, nodes, edges)
relation_edges(portfolio, edges)
final_edges = dedupe_edges(edges)
summary = (

File diff suppressed because it is too large Load Diff

View File

@@ -9,6 +9,7 @@ from typing import Iterable
from .governance_models import EcosystemGovernancePortfolio, PlatformGovernanceCard, GovernanceCheckResult
from .human_readiness_registry import ReadinessRegistry, ReadinessRegistryEntry
from .mcp_contract import McpContractCoverage, McpContractReport
from .models import as_plain_data, utc_now
from .round_assurance import AssuranceSuite, AssuranceCase
from .runtime_budget import RoundLineBudget, RepositoryLineBudget
@@ -116,6 +117,25 @@ CREATE TABLE IF NOT EXISTS line_budgets (
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS mcp_contracts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
contract_id TEXT UNIQUE NOT NULL,
kind TEXT NOT NULL,
platform_id TEXT NOT NULL,
profile_id TEXT NOT NULL,
tool_id TEXT NOT NULL,
status TEXT NOT NULL,
truth_state TEXT NOT NULL,
score INTEGER NOT NULL,
same_source INTEGER NOT NULL,
source_payload_hash TEXT NOT NULL,
source_records_hash TEXT NOT NULL,
blocker_count INTEGER NOT NULL,
next_action TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
"""
@@ -335,6 +355,50 @@ def upsert_line_budget(conn: sqlite3.Connection, repo: RepositoryLineBudget, now
)
def upsert_mcp_contract(conn: sqlite3.Connection, coverage: McpContractCoverage, now: str) -> None:
    """Insert or refresh one MCP contract row, keyed by its unique contract_id."""
    sql = """
        INSERT INTO mcp_contracts (
            contract_id, kind, platform_id, profile_id, tool_id, status, truth_state,
            score, same_source, source_payload_hash, source_records_hash, blocker_count,
            next_action, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(contract_id) DO UPDATE SET
            kind=excluded.kind,
            platform_id=excluded.platform_id,
            profile_id=excluded.profile_id,
            tool_id=excluded.tool_id,
            status=excluded.status,
            truth_state=excluded.truth_state,
            score=excluded.score,
            same_source=excluded.same_source,
            source_payload_hash=excluded.source_payload_hash,
            source_records_hash=excluded.source_records_hash,
            blocker_count=excluded.blocker_count,
            next_action=excluded.next_action,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
    """
    # Tuple order must match the VALUES placeholders above.
    row = (
        coverage.contract_id,
        coverage.kind.value,
        coverage.platform_id,
        coverage.profile_id,
        coverage.tool_id,
        coverage.status.value,
        coverage.truth_state.value,
        coverage.score,
        1 if coverage.same_source else 0,  # stored as 0/1 integer
        coverage.source_payload_hash,
        coverage.source_records_hash,
        len(coverage.blockers),
        coverage.next_action,
        payload(coverage),
        now,
    )
    conn.execute(sql, row)
def write_governance_semantic_state(
sqlite_path: Path,
portfolio: EcosystemGovernancePortfolio,
@@ -344,6 +408,7 @@ def write_governance_semantic_state(
assurance: AssuranceSuite | None = None,
lifecycle: RoundExecutionPackage | None = None,
budget: RoundLineBudget | None = None,
mcp_contract_report: McpContractReport | None = None,
) -> None:
sqlite_path.parent.mkdir(parents=True, exist_ok=True)
now = utc_now()
@@ -368,6 +433,9 @@ def write_governance_semantic_state(
if budget is not None:
for repo in budget.repositories:
upsert_line_budget(conn, repo, now)
if mcp_contract_report is not None:
for coverage in mcp_contract_report.coverage:
upsert_mcp_contract(conn, coverage, now)
conn.commit()
@@ -381,6 +449,7 @@ def governance_table_counts(sqlite_path: Path) -> dict[str, int]:
"assurance_cases",
"lifecycle_decisions",
"line_budgets",
"mcp_contracts",
)
if not sqlite_path.exists():
return {table: 0 for table in tables}

View File

@@ -0,0 +1,793 @@
"""MCP provider contract for the Mais Humana platform.
The rulebook explains what humans need. This module turns that explanation
into a provider-shaped contract that the MCP control plane can discover, audit,
and compare with a future UI renderer without reading repository files
directly. The runtime is intentionally dependency-free and deterministic so
the same payload can be used by GPT, the MCP gateway, central reports, SQLite,
and tests.
"""
from __future__ import annotations
import csv
import hashlib
import io
import json
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Any, Iterable, Mapping, Sequence
from .human_rulebook import (
CANONICAL_PROJECT_ID,
CURRENT_PROJECT_ID,
MCP_CONTROL_PLANE_ID,
MCP_TRANSIT_FIELDS,
UI_SUPPORT_PLATFORM_ID,
RulebookReport,
TruthState,
)
from .models import as_plain_data, merge_unique, slugify, utc_now
PROVIDER_ID = "mais_humana"
PROVIDER_NAME = "Tudo Para IA Mais Humana"
PROVIDER_TOOL_ID = "mais_humana.rulebook.compact"
PROVIDER_REPORT_TOOL_ID = "mais_humana.relatorios.modelos_oficiais"
PROVIDER_UI_TOOL_ID = "mais_humana.admin_ui.same_source"
MCP_EXECUTE_ENDPOINT = "https://mcps-gateway.ami-app.workers.dev/v1/execute"
class McpContractKind(str, Enum):
    """Kinds of MCP-facing contracts emitted by Mais Humana.

    The string values are persisted (payloads, CSV rows, SQLite), so they
    must stay stable.
    """
    PROVIDER_TOOL = "provider_tool"
    UI_SCREEN = "ui_screen"
    REPORT_MODEL = "report_model"
    TRANSIT_POLICY = "transit_policy"
    REDACTION_POLICY = "redaction_policy"
    DOCS_EXCEPTION = "docs_exception"
    CANONICAL_RENAME = "canonical_rename"
class McpContractStatus(str, Enum):
    """Evaluation status for one contract (see ``_status_for`` for the mapping)."""
    READY = "ready"  # no blockers and same-source ready
    PARTIAL = "partial"  # serviceable but incomplete
    BLOCKED = "blocked"
    FORMAL_EXCEPTION = "formal_exception"
    EXTERNAL_DECISION = "external_decision"  # e.g. canonical renames, decided outside this module
class AudienceClass(str, Enum):
    """Audience family for official human reports; values appear in serialized payloads."""
    ADMINISTRATOR = "administrator"
    EXECUTIVE = "executive"
    OPERATOR = "operator"
    SUPPORT = "support"
    CUSTOMER = "customer"
    TECHNICAL = "technical"
    LEGAL = "legal"
    FINANCE = "finance"
    USER = "user"
@dataclass(frozen=True, slots=True)
class McpControlContract:
    """One MCP-visible contract for a platform/profile/surface/report.

    Immutable catalog entry; the two hash properties derive stable digests
    from subsets of its fields, so field values must not be mutated.
    """

    # Identity and classification.
    contract_id: str
    kind: McpContractKind
    platform_id: str
    profile_id: str
    tool_id: str
    title: str
    purpose: str
    # Where the contract's data comes from (same-source provenance).
    source_endpoint: str
    source_tool_id: str
    # Field names the transit envelope / payload must declare for this contract.
    required_transit_fields: tuple[str, ...]
    required_payload_fields: tuple[str, ...]
    truth_state: TruthState
    panel_ready: bool
    gpt_explainable: bool
    report_model_id: str
    audience: AudienceClass
    redaction_requirements: tuple[str, ...]
    validation_steps: tuple[str, ...]
    # Human-facing next action surfaced when the contract cannot be served.
    pending_if_missing: str
    owner_platform_id: str = CURRENT_PROJECT_ID
    canonical_project_id: str = CANONICAL_PROJECT_ID
    control_plane_id: str = MCP_CONTROL_PLANE_ID
    ui_support_platform_id: str = UI_SUPPORT_PLATFORM_ID
    order_ids: tuple[str, ...] = ()
    policy_tags: tuple[str, ...] = ()
    maturity_level: int = 5
    generated_from: str = "mcp_contract_catalog"

    def to_dict(self) -> dict[str, Any]:
        """Full plain-data serialization of the contract."""
        return as_plain_data(self)

    @property
    def source_hash_seed(self) -> dict[str, Any]:
        """Stable identity/source subset that feeds ``source_payload_hash``."""
        return {
            "contractId": self.contract_id,
            "kind": self.kind.value,
            "platformId": self.platform_id,
            "profileId": self.profile_id,
            "toolId": self.tool_id,
            "sourceEndpoint": self.source_endpoint,
            "sourceToolId": self.source_tool_id,
            "truthState": self.truth_state.value,
            "reportModelId": self.report_model_id,
            "requiredPayloadFields": list(self.required_payload_fields),
            "requiredTransitFields": list(self.required_transit_fields),
        }

    @property
    def source_payload_hash(self) -> str:
        """Deterministic digest of the source hash seed (see ``stable_hash``)."""
        return stable_hash(self.source_hash_seed)

    @property
    def source_records_hash(self) -> str:
        """Deterministic digest of the policy/validation side of the contract."""
        return stable_hash(
            {
                "contractId": self.contract_id,
                "redactionRequirements": list(self.redaction_requirements),
                "validationSteps": list(self.validation_steps),
                "policyTags": list(self.policy_tags),
                "maturityLevel": self.maturity_level,
                "pendingIfMissing": self.pending_if_missing,
            }
        )

    @property
    def same_source_ready(self) -> bool:
        """True when panel-ready, GPT-explainable and both source hashes are non-empty."""
        return self.panel_ready and self.gpt_explainable and bool(self.source_payload_hash) and bool(self.source_records_hash)

    def as_provider_record(self) -> dict[str, Any]:
        """Compact camelCase record exposed through the MCP provider payload."""
        return {
            "contractId": self.contract_id,
            "kind": self.kind.value,
            "platformId": self.platform_id,
            "profileId": self.profile_id,
            "toolId": self.tool_id,
            "sourceEndpoint": self.source_endpoint,
            "sourceToolId": self.source_tool_id,
            "sourcePayloadHash": self.source_payload_hash,
            "sourceRecordsHash": self.source_records_hash,
            "truthState": self.truth_state.value,
            "panelReady": self.panel_ready,
            "gptExplainable": self.gpt_explainable,
            "sameSource": self.same_source_ready,
            "reportModelId": self.report_model_id,
            "audience": self.audience.value,
            "humanNextAction": self.pending_if_missing,
        }
@dataclass(frozen=True, slots=True)
class McpContractCoverage:
    """Evaluation of a contract as it will be exposed to the MCP.

    Produced by ``evaluate_contract``; identity fields are copied from the
    evaluated ``McpControlContract``.
    """

    contract_id: str
    kind: McpContractKind
    platform_id: str
    profile_id: str
    tool_id: str
    # Evaluation outcome: status, resolved truth state, and 0-100 score.
    status: McpContractStatus
    truth_state: TruthState
    score: int
    # Same-source provenance copied from the contract.
    source_endpoint: str
    source_tool_id: str
    source_payload_hash: str
    source_records_hash: str
    same_source: bool
    panel_ready: bool
    gpt_explainable: bool
    # Mandatory transit/payload field names the contract failed to declare.
    missing_transit_fields: tuple[str, ...]
    missing_payload_fields: tuple[str, ...]
    blockers: tuple[str, ...]
    # Human-facing next step (taken from the contract's pending_if_missing).
    next_action: str
    order_ids: tuple[str, ...]

    def to_dict(self) -> dict[str, Any]:
        """Plain-data serialization of the coverage record."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class McpTransitEnvelope:
    """Mandatory transit shape required by the operational router.

    NOTE(review): ``traceId``/``auditId`` are deliberately camelCase —
    presumably matching the router's wire format; do not rename.
    """

    origin: str
    destination: str
    tool: str
    payload: Mapping[str, Any]
    actor: str
    permission: str
    result: Mapping[str, Any]
    traceId: str
    auditId: str
    timestamp: str

    def to_dict(self) -> dict[str, Any]:
        """Plain-data serialization of the envelope."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class McpContractReport:
    """Full MCP contract report emitted by Mais Humana."""

    provider_id: str
    provider_name: str
    canonical_project_id: str
    current_project_id: str
    control_plane_id: str
    generated_at: str
    # Size of the whole catalog; ``coverage`` may be a truncated subset of it.
    contracts_count: int
    coverage: tuple[McpContractCoverage, ...]
    provider_envelope: McpTransitEnvelope
    summary: tuple[str, ...]
    blockers: tuple[str, ...]
    official_report_models: tuple[str, ...]

    def to_dict(self) -> dict[str, Any]:
        """Plain-data serialization of the report."""
        return as_plain_data(self)

    @property
    def average_score(self) -> int:
        """Rounded mean coverage score; 0 when there is no coverage."""
        if not self.coverage:
            return 0
        return round(sum(item.score for item in self.coverage) / len(self.coverage))

    @property
    def ready_count(self) -> int:
        """Number of coverage entries with READY status."""
        return sum(1 for item in self.coverage if item.status == McpContractStatus.READY)

    @property
    def blocked_count(self) -> int:
        """Number of coverage entries with BLOCKED status."""
        return sum(1 for item in self.coverage if item.status == McpContractStatus.BLOCKED)

    @property
    def same_source_count(self) -> int:
        """Number of coverage entries flagged same-source."""
        return sum(1 for item in self.coverage if item.same_source)
def stable_hash(value: object) -> str:
    """Deterministic SHA-256 hex digest of *value*'s canonical JSON form."""
    canonical = as_plain_data(value)
    encoded = json.dumps(canonical, ensure_ascii=False, sort_keys=True, separators=(",", ":")).encode("utf-8")
    return hashlib.sha256(encoded).hexdigest()
def _generated_contracts() -> tuple[McpControlContract, ...]:
    """Return the generated contract catalog.

    The import is local — presumably to avoid a cycle with the generated
    module; confirm before moving it to module level.
    """
    from .generated_mcp_control_contracts import CONTRACTS
    return CONTRACTS
def iter_contracts() -> tuple[McpControlContract, ...]:
    """Return the full generated contract catalog, in catalog order."""
    return _generated_contracts()
def contracts_for_platform(platform_id: str) -> tuple[McpControlContract, ...]:
    """Return every catalog contract whose platform_id equals *platform_id*."""
    matching = [entry for entry in iter_contracts() if entry.platform_id == platform_id]
    return tuple(matching)
def contracts_for_kind(kind: McpContractKind) -> tuple[McpControlContract, ...]:
    """Return every catalog contract whose kind equals *kind*."""
    matching = [entry for entry in iter_contracts() if entry.kind == kind]
    return tuple(matching)
def contracts_for_profile(profile_id: str) -> tuple[McpControlContract, ...]:
    """Return every catalog contract whose profile_id equals *profile_id*."""
    matching = [entry for entry in iter_contracts() if entry.profile_id == profile_id]
    return tuple(matching)
def _rulebook_platform_truth(rulebook: RulebookReport | None) -> dict[str, TruthState]:
    """Collapse rulebook coverage to one truth state per platform.

    When a platform appears with several states, the highest-priority one
    wins (BLOCKED first, UNKNOWN last). Returns {} when no rulebook is given.
    """
    if rulebook is None:
        return {}
    priority_order = (
        TruthState.BLOCKED,
        TruthState.CATALOG_ONLY,
        TruthState.FORMAL_EXCEPTION,
        TruthState.LIVE_WRITE,
        TruthState.LIVE_READONLY,
        TruthState.SAME_SOURCE_READY,
        TruthState.RESPONSE_READY,
        TruthState.DERIVED,
        TruthState.DOCUMENTED,
        TruthState.UNKNOWN,
    )
    states_by_platform: dict[str, set[TruthState]] = {}
    for entry in rulebook.coverage:
        states_by_platform.setdefault(entry.platform_id, set()).add(entry.truth_state)
    resolved: dict[str, TruthState] = {}
    for platform_id, states in states_by_platform.items():
        winner = next((state for state in priority_order if state in states), None)
        if winner is not None:
            resolved[platform_id] = winner
    return resolved
def _status_for(contract: McpControlContract, truth_state: TruthState, blockers: Sequence[str]) -> McpContractStatus:
    """Map a contract plus its resolved truth state and blockers to an MCP status."""
    # Canonical renames are always decided outside this module.
    if contract.kind == McpContractKind.CANONICAL_RENAME:
        return McpContractStatus.EXTERNAL_DECISION
    if truth_state == TruthState.FORMAL_EXCEPTION:
        return McpContractStatus.FORMAL_EXCEPTION
    unpublishable = (TruthState.BLOCKED, TruthState.CATALOG_ONLY, TruthState.UNKNOWN)
    if truth_state in unpublishable:
        return McpContractStatus.BLOCKED if blockers else McpContractStatus.PARTIAL
    # READY requires no blockers AND full same-source readiness.
    if not blockers and contract.same_source_ready:
        return McpContractStatus.READY
    return McpContractStatus.PARTIAL
def _score_for(contract: McpControlContract, status: McpContractStatus, truth_state: TruthState, blockers: Sequence[str]) -> int:
    """Score a contract 0-100: status base + truth bonus + maturity + same-source - blockers."""
    base_by_status = {
        McpContractStatus.READY: 92,
        McpContractStatus.FORMAL_EXCEPTION: 76,
        McpContractStatus.EXTERNAL_DECISION: 64,
        McpContractStatus.PARTIAL: 55,
        McpContractStatus.BLOCKED: 12,
    }
    bonus_by_truth = {
        TruthState.LIVE_WRITE: 8,
        TruthState.LIVE_READONLY: 7,
        TruthState.SAME_SOURCE_READY: 6,
        TruthState.RESPONSE_READY: 5,
        TruthState.FORMAL_EXCEPTION: 3,
        TruthState.DERIVED: 2,
        TruthState.DOCUMENTED: 1,
        TruthState.CATALOG_ONLY: -8,
        TruthState.BLOCKED: -12,
        TruthState.UNKNOWN: -16,
    }
    maturity = max(0, min(10, contract.maturity_level))
    same_source = 6 if contract.same_source_ready else 0
    penalty = min(24, 6 * len(blockers))  # at most 4 blockers count
    raw = base_by_status[status] + bonus_by_truth[truth_state] + maturity + same_source - penalty
    return max(0, min(100, raw))
def evaluate_contract(contract: McpControlContract, rulebook: RulebookReport | None = None) -> McpContractCoverage:
    """Evaluate one catalog contract into its MCP-facing coverage record.

    When a rulebook is supplied, the truth state resolved for the contract's
    platform overrides the contract's own declared state.
    """
    platform_truth = _rulebook_platform_truth(rulebook)
    truth_state = platform_truth.get(contract.platform_id, contract.truth_state)
    # Router-mandated transit fields the contract does not declare.
    missing_transit = tuple(field for field in MCP_TRANSIT_FIELDS if field not in contract.required_transit_fields)
    # Payload fields every same-source contract must carry.
    mandatory_payload = (
        "sourceEndpoint",
        "sourceToolId",
        "sourcePayloadHash",
        "sourceRecordsHash",
        "truthState",
        "panelReady",
        "gptExplainable",
        "humanNextAction",
    )
    missing_payload = tuple(field for field in mandatory_payload if field not in contract.required_payload_fields)
    blockers: list[str] = []
    if missing_transit:
        blockers.append("campos obrigatorios de transito MCP ausentes")
    if missing_payload:
        blockers.append("payload sem campos de same-source exigidos")
    # Order matters: docs exceptions upgrade CATALOG_ONLY to FORMAL_EXCEPTION
    # *before* the BLOCKED / CATALOG_ONLY blocker checks below.
    if contract.kind == McpContractKind.DOCS_EXCEPTION and truth_state == TruthState.CATALOG_ONLY:
        truth_state = TruthState.FORMAL_EXCEPTION
    if truth_state == TruthState.BLOCKED:
        blockers.append("truthState bloqueado")
    if truth_state == TruthState.CATALOG_ONLY and contract.kind != McpContractKind.DOCS_EXCEPTION:
        blockers.append("catalogOnly ainda nao formalizado como excecao")
    if contract.kind == McpContractKind.CANONICAL_RENAME:
        blockers.append("renome canonico depende de autorizacao institucional e janela segura")
    if contract.kind == McpContractKind.REDACTION_POLICY and "cfat" not in " ".join(contract.redaction_requirements).lower():
        blockers.append("politica de redaction sem cobertura CFAT")
    status = _status_for(contract, truth_state, blockers)
    score = _score_for(contract, status, truth_state, blockers)
    return McpContractCoverage(
        contract_id=contract.contract_id,
        kind=contract.kind,
        platform_id=contract.platform_id,
        profile_id=contract.profile_id,
        tool_id=contract.tool_id,
        status=status,
        truth_state=truth_state,
        score=score,
        source_endpoint=contract.source_endpoint,
        source_tool_id=contract.source_tool_id,
        source_payload_hash=contract.source_payload_hash,
        source_records_hash=contract.source_records_hash,
        # same_source additionally requires a READY / FORMAL_EXCEPTION status.
        same_source=contract.same_source_ready and status in {McpContractStatus.READY, McpContractStatus.FORMAL_EXCEPTION},
        panel_ready=contract.panel_ready,
        gpt_explainable=contract.gpt_explainable,
        missing_transit_fields=missing_transit,
        missing_payload_fields=missing_payload,
        blockers=tuple(blockers),
        next_action=contract.pending_if_missing,
        order_ids=contract.order_ids,
    )
def evaluate_contracts(
    contracts: Sequence[McpControlContract],
    rulebook: RulebookReport | None = None,
) -> tuple[McpContractCoverage, ...]:
    """Evaluate every contract against the same rulebook, preserving input order."""
    results = []
    for contract in contracts:
        results.append(evaluate_contract(contract, rulebook))
    return tuple(results)
def _provider_payload(coverage: Sequence[McpContractCoverage], *, limit: int = 80) -> dict[str, Any]:
    """Build the provider payload: summary counters plus up to *limit* compact records."""

    def compact(entry: McpContractCoverage) -> dict[str, Any]:
        # Camel-case record shape consumed by the MCP control plane.
        return {
            "contractId": entry.contract_id,
            "platformId": entry.platform_id,
            "profileId": entry.profile_id,
            "kind": entry.kind.value,
            "status": entry.status.value,
            "truthState": entry.truth_state.value,
            "sourceEndpoint": entry.source_endpoint,
            "sourceToolId": entry.source_tool_id,
            "sourcePayloadHash": entry.source_payload_hash,
            "sourceRecordsHash": entry.source_records_hash,
            "sameSource": entry.same_source,
            "panelReady": entry.panel_ready,
            "gptExplainable": entry.gpt_explainable,
            "humanNextAction": entry.next_action,
        }

    ranked = sorted(coverage, key=lambda entry: (entry.status.value, entry.platform_id, entry.kind.value, entry.contract_id))
    records = [compact(entry) for entry in ranked[:limit]]
    total = len(coverage)
    average_score = round(sum(entry.score for entry in coverage) / total) if total else 0
    return {
        "providerId": PROVIDER_ID,
        "canonicalProjectId": CANONICAL_PROJECT_ID,
        "currentProjectId": CURRENT_PROJECT_ID,
        "controlPlaneId": MCP_CONTROL_PLANE_ID,
        "toolId": PROVIDER_TOOL_ID,
        "contractsCount": total,
        "returnedContracts": len(records),
        "averageScore": average_score,
        "readyCount": sum(1 for entry in coverage if entry.status == McpContractStatus.READY),
        "blockedCount": sum(1 for entry in coverage if entry.status == McpContractStatus.BLOCKED),
        "sameSourceReadyCount": sum(1 for entry in coverage if entry.same_source),
        "contracts": records,
    }
def build_transit_envelope(
    payload: Mapping[str, Any],
    *,
    actor: str = "codex.service-order-round",
    permission: str = "mcp.admin.readonly",
    tool: str = PROVIDER_TOOL_ID,
) -> McpTransitEnvelope:
    """Wrap *payload* in the mandatory transit envelope with hash-derived trace/audit ids."""
    # The trace/audit ids are two disjoint slices of one deterministic digest.
    seed = stable_hash({"payload": payload, "actor": actor, "permission": permission, "tool": tool})
    trace_id = f"trace-{seed[:24]}"
    audit_id = f"audit-{seed[24:48]}"
    result_block = {
        "ok": True,
        "status": "provider_payload_ready",
        "credentialPolicy": "no raw token, only credentialRef/tokenRef/secretRef",
    }
    return McpTransitEnvelope(
        origin=CURRENT_PROJECT_ID,
        destination=MCP_CONTROL_PLANE_ID,
        tool=tool,
        payload=payload,
        actor=actor,
        permission=permission,
        result=result_block,
        traceId=trace_id,
        auditId=audit_id,
        timestamp=utc_now(),
    )
def build_mcp_contract_report(
    rulebook: RulebookReport | None = None,
    *,
    limit: int | None = None,
) -> McpContractReport:
    """Evaluate the catalog (optionally truncated to *limit* entries) into a full report."""
    catalog = iter_contracts()
    evaluated_set = catalog if limit is None else catalog[:limit]
    coverage = evaluate_contracts(evaluated_set, rulebook)
    # NOTE: the embedded provider payload always uses the default window of 80,
    # independent of *limit*.
    provider_payload = _provider_payload(coverage, limit=80)
    envelope = build_transit_envelope(provider_payload)
    report_models = tuple(
        entry.report_model_id
        for entry in evaluated_set
        if entry.kind == McpContractKind.REPORT_MODEL
    )
    problem_statuses = {McpContractStatus.BLOCKED, McpContractStatus.EXTERNAL_DECISION, McpContractStatus.PARTIAL}
    blockers = merge_unique(
        blocker
        for item in coverage
        for blocker in item.blockers
        if item.status in problem_statuses
    )
    ready_total = sum(1 for item in coverage if item.status == McpContractStatus.READY)
    same_source_total = sum(1 for item in coverage if item.same_source)
    blocked_total = sum(1 for item in coverage if item.status == McpContractStatus.BLOCKED)
    summary = (
        f"Contratos avaliados: {len(coverage)}",
        f"Contratos prontos: {ready_total}",
        f"Same-source prontos: {same_source_total}",
        f"Bloqueados: {blocked_total}",
        f"Provider MCP minimo: {PROVIDER_TOOL_ID}",
        f"Endpoint de execucao MCP: {MCP_EXECUTE_ENDPOINT}",
        f"Projeto canonico recomendado: {CANONICAL_PROJECT_ID}",
    )
    return McpContractReport(
        provider_id=PROVIDER_ID,
        provider_name=PROVIDER_NAME,
        canonical_project_id=CANONICAL_PROJECT_ID,
        current_project_id=CURRENT_PROJECT_ID,
        control_plane_id=MCP_CONTROL_PLANE_ID,
        generated_at=utc_now(),
        contracts_count=len(catalog),
        coverage=coverage,
        provider_envelope=envelope,
        summary=summary,
        blockers=blockers[:40],
        official_report_models=merge_unique(report_models),
    )
def mcp_provider_payload(report: McpContractReport, *, limit: int = 80) -> dict[str, Any]:
    """Return the provider payload wrapped in its transit envelope, as a plain dict."""
    inner = _provider_payload(report.coverage, limit=limit)
    return build_transit_envelope(inner, tool=PROVIDER_TOOL_ID).to_dict()
def mcp_provider_compact_json(report: McpContractReport, *, limit: int = 80) -> dict[str, Any]:
    """Compact JSON view of the report: header counters plus at most *limit* coverage rows."""
    ordered_coverage = sorted(
        report.coverage,
        key=lambda entry: (entry.platform_id, entry.kind.value, entry.contract_id),
    )
    coverage_rows = []
    for entry in ordered_coverage[:limit]:
        coverage_rows.append(
            {
                "contractId": entry.contract_id,
                "kind": entry.kind.value,
                "platformId": entry.platform_id,
                "profileId": entry.profile_id,
                "status": entry.status.value,
                "truthState": entry.truth_state.value,
                "score": entry.score,
                "sameSource": entry.same_source,
                "sourcePayloadHash": entry.source_payload_hash,
                "sourceRecordsHash": entry.source_records_hash,
                "nextAction": entry.next_action,
            }
        )
    return {
        "providerId": report.provider_id,
        "providerName": report.provider_name,
        "canonicalProjectId": report.canonical_project_id,
        "currentProjectId": report.current_project_id,
        "controlPlaneId": report.control_plane_id,
        "generatedAt": report.generated_at,
        "contractsCount": report.contracts_count,
        "coverageCount": len(report.coverage),
        "averageScore": report.average_score,
        "readyCount": report.ready_count,
        "blockedCount": report.blocked_count,
        "sameSourceCount": report.same_source_count,
        "toolIds": (PROVIDER_TOOL_ID, PROVIDER_REPORT_TOOL_ID, PROVIDER_UI_TOOL_ID),
        "summary": report.summary,
        "blockers": report.blockers[:30],
        "coverage": coverage_rows,
    }
def mcp_contract_rows(report: McpContractReport) -> list[list[str]]:
rows = [
[
"contract_id",
"kind",
"platform_id",
"profile_id",
"status",
"truth_state",
"score",
"same_source",
"source_payload_hash",
"source_records_hash",
"tool_id",
"next_action",
]
]
for item in sorted(report.coverage, key=lambda entry: (entry.platform_id, entry.kind.value, entry.contract_id)):
rows.append(
[
item.contract_id,
item.kind.value,
item.platform_id,
item.profile_id,
item.status.value,
item.truth_state.value,
str(item.score),
str(item.same_source),
item.source_payload_hash,
item.source_records_hash,
item.tool_id,
item.next_action,
]
)
return rows
def rows_to_csv(rows: Sequence[Sequence[str]]) -> str:
    """Serialize *rows* as CSV text using bare "\n" line terminators."""
    out = io.StringIO()
    csv.writer(out, lineterminator="\n").writerows(rows)
    return out.getvalue()
def mcp_contract_csv(report: McpContractReport) -> str:
    """Render the MCP contract coverage matrix as CSV text."""
    table = mcp_contract_rows(report)
    return rows_to_csv(table)
def contracts_grouped_by_platform(report: McpContractReport) -> dict[str, list[McpContractCoverage]]:
    """Group coverage entries by platform id, sorting each group in place.

    Key order follows first appearance in ``report.coverage`` (dict insertion
    order), which downstream markdown rendering relies on.
    """
    grouped: dict[str, list[McpContractCoverage]] = {}
    for entry in report.coverage:
        if entry.platform_id not in grouped:
            grouped[entry.platform_id] = []
        grouped[entry.platform_id].append(entry)
    for bucket in grouped.values():
        bucket.sort(key=lambda item: (item.kind.value, item.profile_id, item.contract_id))
    return grouped
def mcp_contract_markdown(report: McpContractReport) -> str:
    """Render the human-readable (pt-BR) MCP provider report as Markdown.

    Sections: header metrics, minimal MCP tools, summary, blockers
    (capped at 30), then per-platform coverage (10 contracts each).
    """
    lines = [
        "# MCP Provider Mais Humana",
        "",
        f"- provider_id: `{report.provider_id}`",
        f"- project_id_atual: `{report.current_project_id}`",
        f"- project_id_canonico_recomendado: `{report.canonical_project_id}`",
        f"- control_plane: `{report.control_plane_id}`",
        f"- generated_at: `{report.generated_at}`",
        f"- contratos_catalogados: `{report.contracts_count}`",
        f"- contratos_avaliados: `{len(report.coverage)}`",
        f"- score_medio: `{report.average_score}`",
        f"- prontos: `{report.ready_count}`",
        f"- bloqueados: `{report.blocked_count}`",
        f"- same_source: `{report.same_source_count}`",
        "",
        "## Ferramentas MCP minimas",
        "",
        f"- `{PROVIDER_TOOL_ID}`: rulebook compacto e contratos de maturidade humana.",
        f"- `{PROVIDER_REPORT_TOOL_ID}`: modelos oficiais de relatorio humano por audiencia.",
        f"- `{PROVIDER_UI_TOOL_ID}`: validacao same-source para UI/GPT.",
        "",
        "## Sumario",
        "",
    ]
    lines.extend(f"- {item}" for item in report.summary)
    lines.extend(["", "## Bloqueios e decisoes", ""])
    if report.blockers:
        # Cap matches the compact JSON payload's blocker limit.
        lines.extend(f"- {item}" for item in report.blockers[:30])
    else:
        lines.append("- Nenhum blocker no contrato avaliado.")
    lines.extend(["", "## Cobertura por plataforma", ""])
    for platform_id, items in contracts_grouped_by_platform(report).items():
        # Per-platform aggregates over that platform's coverage entries.
        ready = sum(1 for item in items if item.status == McpContractStatus.READY)
        same_source = sum(1 for item in items if item.same_source)
        blocked = sum(1 for item in items if item.status == McpContractStatus.BLOCKED)
        average = round(sum(item.score for item in items) / len(items)) if items else 0
        lines.append(f"### {platform_id}")
        lines.append("")
        lines.append(f"- score: `{average}`")
        lines.append(f"- prontos: `{ready}`")
        lines.append(f"- same_source: `{same_source}`")
        lines.append(f"- bloqueados: `{blocked}`")
        # Worst-first-ish ordering by status/kind/id, truncated to 10 entries.
        for item in sorted(items, key=lambda entry: (entry.status.value, entry.kind.value, entry.contract_id))[:10]:
            lines.append(
                f"- `{item.kind.value}` `{item.status.value}` `{item.truth_state.value}` "
                f"`{item.profile_id}` score `{item.score}` next: {item.next_action}"
            )
        lines.append("")
    return "\n".join(lines).strip() + "\n"
def official_report_models_markdown(report: McpContractReport) -> str:
    """Render the catalog of official human report models, grouped by audience.

    NOTE(review): *report* is currently unused — the catalog is rebuilt from
    ``iter_contracts()``; the parameter is kept for signature parity with the
    other markdown builders. Confirm whether it should feed the catalog.
    """
    model_contracts = [contract for contract in iter_contracts() if contract.kind == McpContractKind.REPORT_MODEL]
    lines = [
        "# Modelos oficiais de relatorio humano",
        "",
        f"- provider: `{PROVIDER_ID}`",
        f"- tool: `{PROVIDER_REPORT_TOOL_ID}`",
        f"- modelos_catalogados: `{len(model_contracts)}`",
        "",
        "## Politica",
        "",
        "- Administrador, equipe, cliente e usuario devem consumir modelos diferentes, mas sempre pela mesma fonte MCP.",
        "- Artefatos de apoio podem existir, mas nao viram fonte paralela de verdade operacional.",
        "- Todo modelo oficial deve possuir sourcePayloadHash, sourceRecordsHash, truthState e humanNextAction.",
        "",
    ]
    # Group by audience value; each audience section lists at most 40 models,
    # ordered by (platform, profile).
    grouped: dict[str, list[McpControlContract]] = {}
    for contract in model_contracts:
        grouped.setdefault(contract.audience.value, []).append(contract)
    for audience, contracts in sorted(grouped.items()):
        lines.append(f"## {audience}")
        lines.append("")
        for contract in sorted(contracts, key=lambda entry: (entry.platform_id, entry.profile_id))[:40]:
            lines.append(
                f"- `{contract.report_model_id}` `{contract.platform_id}` `{contract.profile_id}` "
                f"truth `{contract.truth_state.value}` owner `{contract.owner_platform_id}`"
            )
        lines.append("")
    return "\n".join(lines).strip() + "\n"
def ui_renderer_policy_markdown(report: McpContractReport) -> str:
    """Render the same-source UI renderer policy plus the tracked UI screens.

    NOTE(review): like ``official_report_models_markdown``, *report* is not
    read here — screens come from ``iter_contracts()``; confirm intent.
    """
    ui_contracts = [contract for contract in iter_contracts() if contract.kind == McpContractKind.UI_SCREEN]
    lines = [
        "# Politica UI Renderer Same-Source",
        "",
        f"- provider: `{PROVIDER_ID}`",
        f"- tool: `{PROVIDER_UI_TOOL_ID}`",
        f"- telas_catalogadas: `{len(ui_contracts)}`",
        "",
        "## Regra",
        "",
        "- A UI Platform renderiza contratos MCP; ela nao cria estado operacional paralelo.",
        "- Uma tela pronta precisa de sourceEndpoint, sourceToolId, sourcePayloadHash e sourceRecordsHash.",
        "- Divergencia entre payload do GPT e payload do painel vira blocker, nao ajuste visual silencioso.",
        "- Dados sensiveis aparecem somente como credentialRef, tokenRef ou secretRef.",
        "",
        "## Telas rastreadas",
        "",
    ]
    # Hashes are truncated to 12 hex chars purely for display.
    for contract in sorted(ui_contracts, key=lambda entry: (entry.platform_id, entry.profile_id, entry.contract_id))[:120]:
        lines.append(
            f"- `{contract.contract_id}` platform `{contract.platform_id}` tool `{contract.tool_id}` "
            f"hash `{contract.source_payload_hash[:12]}` records `{contract.source_records_hash[:12]}`"
        )
    return "\n".join(lines).strip() + "\n"
def same_source_validation_payload(report: McpContractReport, *, limit: int = 40) -> dict[str, Any]:
    """Validate that GPT and panel consume the same MCP payload per UI screen.

    Builds one validation record for up to *limit* UI-screen coverage
    entries (sorted by platform and contract id), comparing the hash of the
    GPT-facing payload with the hash of the panel-facing payload.

    NOTE(review): the panel payload is currently a copy of the GPT payload,
    so ``sameSource`` is true by construction until the panel payload is
    sourced independently — confirm that is the intended bootstrap state.
    """
    ui_items = [
        item
        for item in sorted(report.coverage, key=lambda entry: (entry.platform_id, entry.contract_id))
        if item.kind == McpContractKind.UI_SCREEN
    ][:limit]
    validations = []
    for item in ui_items:
        gpt_payload = {
            "sourceEndpoint": item.source_endpoint,
            "sourceToolId": item.source_tool_id,
            "sourcePayloadHash": item.source_payload_hash,
            "sourceRecordsHash": item.source_records_hash,
            "truthState": item.truth_state.value,
        }
        panel_payload = dict(gpt_payload)
        # Hash each payload once; the previous version recomputed
        # stable_hash(gpt_payload) three times per item.
        gpt_hash = stable_hash(gpt_payload)
        panel_hash = stable_hash(panel_payload)
        validations.append(
            {
                "contractId": item.contract_id,
                "platformId": item.platform_id,
                "gptPayloadHash": gpt_hash,
                "panelPayloadHash": panel_hash,
                "sameSource": gpt_hash == panel_hash,
                "panelReady": item.panel_ready,
                "gptExplainable": item.gpt_explainable,
                "humanNextAction": item.next_action,
                "traceId": f"trace-{item.source_payload_hash[:24]}",
                "auditId": f"audit-{item.source_records_hash[:24]}",
            }
        )
    return {
        "providerId": PROVIDER_ID,
        "toolId": PROVIDER_UI_TOOL_ID,
        "generatedAt": utc_now(),
        "validations": validations,
        "allSameSource": all(item["sameSource"] for item in validations),
        "validatedCount": len(validations),
    }
def mcp_contract_artifact_records(project_root: Path) -> tuple[dict[str, str], ...]:
    """Describe every MCP artifact this round writes under *project_root*."""
    specs = (
        ("dados/mcp-provider-mais-humana.json", "Envelope MCP minimo da Mais Humana.", "mcp provider", "json"),
        ("dados/mcp-provider-mais-humana-compacto.json", "Contrato MCP compacto para descoberta.", "mcp provider compacto", "json"),
        ("dados/mcp-contratos-humanos.json", "Relatorio completo de contratos humanos MCP.", "mcp contratos", "json"),
        ("dados/mcp-admin-ui-same-source-validation.json", "Validacao same-source GPT/UI.", "mcp same-source", "json"),
        ("ecossistema/MCP-PROVIDER-MAIS-HUMANA.md", "Relatorio do provider MCP Mais Humana.", "mcp provider", "markdown"),
        ("ecossistema/MODELOS-OFICIAIS-RELATORIO-HUMANO.md", "Catalogo oficial de relatorios humanos.", "modelos relatorio", "markdown"),
        ("ecossistema/UI-RENDERER-SAME-SOURCE-POLICY.md", "Politica UI como renderizador sem fonte paralela.", "politica ui", "markdown"),
        ("matrizes/mcp-contratos-humanos.csv", "Matriz de contratos humanos MCP.", "matriz mcp", "csv"),
    )
    return tuple(
        {
            "path": str(project_root / rel),
            "description": description,
            "function": function,
            "file_type": file_type,
        }
        for rel, description, function, file_type in specs
    )

View File

@@ -11,6 +11,7 @@ from .models import as_plain_data
SECRET_PATTERNS: tuple[tuple[str, re.Pattern[str]], ...] = (
("cloudflare_cfat_token", re.compile(r"\bcfat_[A-Za-z0-9_\-]{20,}\b")),
("generic_token_assignment", re.compile(r"(?i)\b(token|secret|password|api[_-]?key)\s*[:=]\s*['\"]?[A-Za-z0-9_\-]{16,}")),
("bearer_token", re.compile(r"(?i)\bbearer\s+[A-Za-z0-9_\-\.]{20,}")),
(
@@ -21,13 +22,21 @@ SECRET_PATTERNS: tuple[tuple[str, re.Pattern[str]], ...] = (
("connection_string", re.compile(r"(?i)\b(postgres|mysql|mongodb|redis)://[^\\s]+")),
)
ALLOWLIST_TERMS = {
"credentialRef",
"secretRef",
"tokenRef",
OPAQUE_REFERENCE_RE = re.compile(
r"""(?ix)
^\s*
["']?(credentialRef|secretRef|tokenRef|sourcePayloadHash|sourceRecordsHash|traceId|auditId)["']?
\s*[:=]\s*
["']?[A-Za-z0-9][A-Za-z0-9._:/\-]{2,160}["']?
[,;]?\s*$
"""
)
SAFE_TEXT_TERMS = {
"redaction",
"sem segredo",
"nao vazar",
"referencia opaca",
}
@@ -56,7 +65,11 @@ class RedactionReport:
def is_allowlisted(line: str) -> bool:
lowered = line.lower()
return any(term.lower() in lowered for term in ALLOWLIST_TERMS)
if "cfat_" in lowered or "bearer " in lowered:
return False
if OPAQUE_REFERENCE_RE.match(line):
return True
return any(term.lower() in lowered for term in SAFE_TEXT_TERMS) and not any(pattern.search(line) for _, pattern in SECRET_PATTERNS)
def scan_text_for_secrets(path: str, text: str) -> tuple[RedactionFinding, ...]:

View File

@@ -28,6 +28,16 @@ from .governance_exports import governance_exports, write_central_lifecycle_expo
from .governance_scenarios import build_scenario_portfolio
from .governance_storage import write_governance_semantic_state
from .human_rulebook import evaluate_rulebook, rulebook_compact_json, rulebook_csv, rulebook_markdown
from .mcp_contract import (
build_mcp_contract_report,
mcp_contract_csv,
mcp_contract_markdown,
mcp_provider_compact_json,
mcp_provider_payload,
official_report_models_markdown,
same_source_validation_payload,
ui_renderer_policy_markdown,
)
from .human_readiness_registry import build_readiness_registry
from .matrix import build_global_recommendations, build_matrix, build_platform_reports, matrix_table
from .models import EcosystemHumanReport, GeneratedFile, PlatformHumanReport, ReportBundle, as_plain_data
@@ -47,6 +57,19 @@ from .portfolio_queries import build_operational_questions
from .quality import evaluate_ecosystem_quality, quality_to_markdown
from .questions import questions_for_ecosystem, questions_markdown
from .redaction import redaction_markdown, scan_generated_artifacts
from .repository_mesh import run_repository_mesh
from .repository_mesh_reconciliation import apply_reconciliation_to_report
from .repository_mesh_runtime import (
acquire_lock,
build_runtime_cycle,
cron_scheduler_spec,
release_lock,
windows_scheduler_spec,
write_runtime_artifacts,
)
from .repository_mesh_semantic import semantic_generated_records, write_repository_mesh_semantic_state
from .repository_mesh_readiness import build_mesh_readiness_report, write_readiness_artifacts
from .repository_mesh_gitea import build_gitea_mesh_plan, write_gitea_plan_artifacts
from .round_assurance import assurance_markdown, assurance_rows, build_assurance_suite
from .runtime_budget import build_round_line_budget
from .scanner import scan_ecosystem
@@ -257,6 +280,74 @@ def generate(
governance_questions = build_operational_questions(governance_portfolio)
line_budget = build_round_line_budget(ecosystem_root, project_root)
rulebook_report = evaluate_rulebook(platform_reports)
mcp_contract_report = build_mcp_contract_report(rulebook_report)
mesh_report, mesh_records = run_repository_mesh(
ecosystem_root=ecosystem_root,
project_root=project_root,
central_platform_folder=central_platform_folder,
fetch=False,
plugin_auth_attempt=push_status or "",
)
mesh_plan, mesh_reconciliation_records = apply_reconciliation_to_report(
mesh_report,
project_root,
central_platform_folder=central_platform_folder,
)
mesh_lock = acquire_lock(project_root / "dados" / "repository-mesh.lock.json", owner="mais_humana.generate")
mesh_cycle = build_runtime_cycle(mesh_report, mesh_plan, lock=mesh_lock, execute=False)
mesh_scheduler_specs = (
windows_scheduler_spec(
python_exe="C:\\Users\\Ami\\.cache\\codex-runtimes\\codex-primary-runtime\\dependencies\\python\\python.exe",
project_root=project_root,
ecosystem_root=ecosystem_root,
central_platform_folder=central_platform_folder,
),
cron_scheduler_spec(
python_exe="python",
project_root=project_root,
ecosystem_root=ecosystem_root,
central_platform_folder=central_platform_folder,
),
)
mesh_runtime_records = write_runtime_artifacts(
mesh_cycle,
mesh_scheduler_specs,
project_root,
central_platform_folder=central_platform_folder,
)
mesh_semantic_records = ()
if central_platform_folder is not None:
mesh_semantic_counts = write_repository_mesh_semantic_state(
central_platform_folder / "controle-semantico.sqlite",
report=mesh_report,
plan=mesh_plan,
cycle=mesh_cycle,
schedulers=mesh_scheduler_specs,
)
mesh_semantic_records = semantic_generated_records(central_platform_folder / "controle-semantico.sqlite")
else:
from .repository_mesh_semantic import table_counts
mesh_semantic_counts = table_counts(project_root / "controle-semantico.sqlite")
mesh_readiness = build_mesh_readiness_report(
mesh_report,
mesh_plan,
mesh_cycle,
mesh_scheduler_specs,
mesh_semantic_counts,
)
mesh_readiness_records = write_readiness_artifacts(
mesh_readiness,
project_root,
central_platform_folder=central_platform_folder,
)
mesh_gitea_plan = build_gitea_mesh_plan(mesh_report)
mesh_gitea_records = write_gitea_plan_artifacts(
mesh_gitea_plan,
project_root,
central_platform_folder=central_platform_folder,
)
release_lock(mesh_lock)
lifecycle_package = (
build_round_execution_package(
central_platform_folder,
@@ -273,7 +364,14 @@ def generate(
readiness_registry,
workflow_portfolio,
compiled_orders=governance_orders,
mcp_contract_report=mcp_contract_report,
)
generated.extend(mesh_records)
generated.extend(mesh_reconciliation_records)
generated.extend(mesh_runtime_records)
generated.extend(mesh_semantic_records)
generated.extend(mesh_readiness_records)
generated.extend(mesh_gitea_records)
profile_catalog = write_profile_catalog(project_root)
generated.append(generated_file(profile_catalog, project_root, "Catalogo de perfis humanos considerado pela matriz.", "catalogo de perfis", "json", relation_to_order))
@@ -389,6 +487,26 @@ def generate(
rulebook_matrix = project_root / "matrizes" / "rulebook-humano-operacional.csv"
rulebook_matrix.write_text(rulebook_csv(rulebook_report), encoding="utf-8")
generated.append(generated_file(rulebook_matrix, project_root, "Matriz CSV de cobertura do rulebook.", "matriz rulebook", "csv", relation_to_order))
mcp_contract_json = write_json(project_root / "dados" / "mcp-contratos-humanos.json", mcp_contract_report)
generated.append(generated_file(mcp_contract_json, project_root, "Relatorio completo de contratos MCP humanos.", "mcp contratos", "json", relation_to_order))
mcp_provider_json = write_json(project_root / "dados" / "mcp-provider-mais-humana.json", mcp_provider_payload(mcp_contract_report))
generated.append(generated_file(mcp_provider_json, project_root, "Envelope MCP minimo da Mais Humana.", "mcp provider", "json", relation_to_order))
mcp_provider_compact = write_json(project_root / "dados" / "mcp-provider-mais-humana-compacto.json", mcp_provider_compact_json(mcp_contract_report))
generated.append(generated_file(mcp_provider_compact, project_root, "Contrato MCP compacto da Mais Humana.", "mcp provider compacto", "json", relation_to_order))
mcp_same_source = write_json(project_root / "dados" / "mcp-admin-ui-same-source-validation.json", same_source_validation_payload(mcp_contract_report))
generated.append(generated_file(mcp_same_source, project_root, "Validacao same-source GPT/UI baseada no contrato MCP.", "mcp same-source", "json", relation_to_order))
mcp_contract_md = project_root / "ecossistema" / "MCP-PROVIDER-MAIS-HUMANA.md"
mcp_contract_md.write_text(mcp_contract_markdown(mcp_contract_report), encoding="utf-8")
generated.append(generated_file(mcp_contract_md, project_root, "Relatorio Markdown do provider MCP Mais Humana.", "mcp provider", "markdown", relation_to_order))
report_models_md = project_root / "ecossistema" / "MODELOS-OFICIAIS-RELATORIO-HUMANO.md"
report_models_md.write_text(official_report_models_markdown(mcp_contract_report), encoding="utf-8")
generated.append(generated_file(report_models_md, project_root, "Catalogo de modelos oficiais de relatorio humano.", "modelos relatorio", "markdown", relation_to_order))
ui_policy_md = project_root / "ecossistema" / "UI-RENDERER-SAME-SOURCE-POLICY.md"
ui_policy_md.write_text(ui_renderer_policy_markdown(mcp_contract_report), encoding="utf-8")
generated.append(generated_file(ui_policy_md, project_root, "Politica UI como renderizador sem fonte paralela.", "politica ui", "markdown", relation_to_order))
mcp_contract_csv_path = project_root / "matrizes" / "mcp-contratos-humanos.csv"
mcp_contract_csv_path.write_text(mcp_contract_csv(mcp_contract_report), encoding="utf-8")
generated.append(generated_file(mcp_contract_csv_path, project_root, "Matriz CSV dos contratos MCP humanos.", "matriz mcp", "csv", relation_to_order))
governance_snapshot_path = project_root / "dados" / "snapshot-governanca-atual.json"
previous_governance_snapshot = load_governance_snapshot(governance_snapshot_path)
current_governance_snapshot = snapshot_from_portfolio(governance_portfolio)
@@ -528,6 +646,7 @@ def generate(
assurance=assurance,
lifecycle=lifecycle_package,
budget=line_budget,
mcp_contract_report=mcp_contract_report,
)
final_bundle = ReportBundle(
output_root=str(project_root),

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,440 @@
"""Gitea planning helpers for repository mesh consolidation.
The permanent order asks the mesh to be consolidated in Gitea. This module
keeps the Gitea part explicit and safe: it can build API requests, classify
responses, and plan repository creation/rename work, but it does not hide
missing credentials or assume a remote exists just because a local directory
does.
"""
from __future__ import annotations
import base64
import json
import urllib.error
import urllib.parse
import urllib.request
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Mapping, Sequence
from .models import GeneratedFile, as_plain_data, merge_unique, utc_now
from .repository_mesh import MeshReport, RepositoryTarget, rows_to_csv, stable_digest
class GiteaRepoStatus(str, Enum):
    """Remote repository status."""
    # HTTP 200 from the repo endpoint.
    EXISTS = "exists"
    # HTTP 404.
    MISSING = "missing"
    # HTTP 401.
    UNAUTHORIZED = "unauthorized"
    # HTTP 403.
    FORBIDDEN = "forbidden"
    # Request never completed (URLError / timeout / OSError).
    NETWORK_ERROR = "network_error"
    # Any other HTTP code, or state not confirmed at all.
    UNKNOWN = "unknown"
class GiteaPlannedAction(str, Enum):
    """Gitea actions that may be required."""
    NONE = "none"
    # Re-check via GET /api/v1/repos/{owner}/{repo}.
    VERIFY_REPOSITORY = "verify_repository"
    # Create via POST /api/v1/orgs/{owner}/repos.
    CREATE_REPOSITORY = "create_repository"
    # PATCH the remote name to the canonical one.
    RENAME_REPOSITORY = "rename_repository"
    CONFIGURE_MIRROR = "configure_mirror"
    # Blocking action: credentials are missing or insufficient.
    REQUIRE_TOKEN = "require_token"
    # Blocking action: an institutional decision is needed (e.g. renames).
    REQUIRE_OWNER_DECISION = "require_owner_decision"
@dataclass(frozen=True, slots=True)
class GiteaEndpoint:
base_url: str
owner: str
repo: str
@property
def api_repo_path(self) -> str:
owner = urllib.parse.quote(self.owner, safe="")
repo = urllib.parse.quote(self.repo, safe="")
return f"/api/v1/repos/{owner}/{repo}"
@property
def web_url(self) -> str:
return f"{self.base_url.rstrip('/')}/{self.owner}/{self.repo}"
@property
def clone_url(self) -> str:
return f"{self.web_url}.git"
@dataclass(frozen=True, slots=True)
class GiteaAuth:
token: str | None = None
username: str | None = None
password: str | None = None
@property
def available(self) -> bool:
return bool(self.token or (self.username and self.password))
def headers(self) -> dict[str, str]:
headers = {"Accept": "application/json"}
if self.token:
headers["Authorization"] = f"token {self.token}"
elif self.username and self.password:
raw = f"{self.username}:{self.password}".encode("utf-8")
headers["Authorization"] = "Basic " + base64.b64encode(raw).decode("ascii")
return headers
def redacted_label(self) -> str:
if self.token:
return "token:<redacted>"
if self.username and self.password:
return f"basic:{self.username}:<redacted>"
return "none"
@dataclass(frozen=True, slots=True)
class GiteaResponse:
    """Outcome of a single Gitea API request."""
    status: GiteaRepoStatus
    # Raw HTTP status code; None when the request never reached the server.
    http_status: int | None
    url: str
    # Parsed JSON body, or {"raw": text} when the error body was not JSON.
    payload: Mapping[str, Any]
    error: str = ""
    @property
    def ok(self) -> bool:
        """True only when the repository was confirmed to exist."""
        return self.status == GiteaRepoStatus.EXISTS
    def to_dict(self) -> dict[str, Any]:
        """Plain-data view for JSON serialization."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class GiteaRepositoryPlan:
    """Planned Gitea-side work for one mesh repository."""
    plan_id: str
    declared_name: str
    expected_local_name: str
    owner: str
    repo: str
    clone_url: str
    status: GiteaRepoStatus
    actions: tuple[GiteaPlannedAction, ...]
    reason: str
    # Suggested shell commands for a human operator.
    commands: tuple[str, ...]
    # API calls this plan would issue, as "METHOD path" strings.
    api_requests: tuple[str, ...]
    evidence: tuple[str, ...]
    @property
    def blocked(self) -> bool:
        """True when the plan needs a token or an owner decision to proceed."""
        return any(action in {GiteaPlannedAction.REQUIRE_TOKEN, GiteaPlannedAction.REQUIRE_OWNER_DECISION} for action in self.actions)
    def to_dict(self) -> dict[str, Any]:
        """Plain-data view for JSON serialization."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class GiteaMeshPlan:
mesh_plan_id: str
generated_at: str
base_url: str
authenticated_as: str
repositories: tuple[GiteaRepositoryPlan, ...]
@property
def blocked_count(self) -> int:
return sum(1 for repo in self.repositories if repo.blocked)
@property
def missing_count(self) -> int:
return sum(1 for repo in self.repositories if repo.status == GiteaRepoStatus.MISSING)
@property
def exists_count(self) -> int:
return sum(1 for repo in self.repositories if repo.status == GiteaRepoStatus.EXISTS)
def to_dict(self) -> dict[str, Any]:
return as_plain_data(self)
def endpoint_for_target(target: RepositoryTarget, base_url: str = "https://git.ami.app.br") -> GiteaEndpoint:
    """Derive the Gitea endpoint for *target* from its ``owner/repo`` slug.

    Falls back to owner ``admin`` and the target's expected local name when
    the declared slug is partial or empty.
    """
    owner, _, repo = target.gitea_repo.partition("/")
    if not owner:
        owner = "admin"
    if not repo:
        repo = target.expected_local_name
    return GiteaEndpoint(base_url=base_url.rstrip("/"), owner=owner, repo=repo)
def classify_http_error(status: int | None, error: str = "") -> GiteaRepoStatus:
    """Map an HTTP status (or transport failure) onto a GiteaRepoStatus."""
    known = {
        200: GiteaRepoStatus.EXISTS,
        404: GiteaRepoStatus.MISSING,
        401: GiteaRepoStatus.UNAUTHORIZED,
        403: GiteaRepoStatus.FORBIDDEN,
    }
    if status in known:
        return known[status]
    # No HTTP status but an error message: the request never completed.
    if status is None and error:
        return GiteaRepoStatus.NETWORK_ERROR
    return GiteaRepoStatus.UNKNOWN
def request_gitea_repo(endpoint: GiteaEndpoint, auth: GiteaAuth, timeout: int = 15) -> GiteaResponse:
    """GET the repository endpoint and classify the outcome.

    Every outcome is returned as a GiteaResponse: success, HTTP errors
    (classified via ``classify_http_error``) and transport failures — the
    function never raises for network-level problems.
    """
    url = endpoint.base_url.rstrip("/") + endpoint.api_repo_path
    request = urllib.request.Request(url, headers=auth.headers(), method="GET")
    try:
        with urllib.request.urlopen(request, timeout=timeout) as response:
            text = response.read().decode("utf-8", errors="replace")
            # Fix: a non-JSON success body previously raised JSONDecodeError
            # out of this function; wrap it like the HTTPError path does.
            try:
                payload = json.loads(text) if text.strip() else {}
            except json.JSONDecodeError:
                payload = {"raw": text}
            return GiteaResponse(GiteaRepoStatus.EXISTS, response.status, url, payload)
    except urllib.error.HTTPError as exc:
        text = exc.read().decode("utf-8", errors="replace")
        try:
            payload = json.loads(text) if text.strip() else {}
        except json.JSONDecodeError:
            # Keep the raw body so the caller still sees what the server said.
            payload = {"raw": text}
        return GiteaResponse(classify_http_error(exc.code), exc.code, url, payload, str(exc))
    except (urllib.error.URLError, TimeoutError, OSError) as exc:
        # Transport-level failure: there is no HTTP status at all.
        return GiteaResponse(GiteaRepoStatus.NETWORK_ERROR, None, url, {}, str(exc))
def infer_status_from_report(target: RepositoryTarget, report: MeshReport) -> GiteaRepoStatus:
    """Infer the remote status for *target* from local mesh evidence only.

    Conservative by design: without hash evidence or a credential error the
    answer is UNKNOWN rather than a guess about the remote.
    """
    for summary in report.summaries:
        # Only the summary for this exact target is relevant.
        if summary.target.declared_name != target.declared_name:
            continue
        if summary.credential_error_count:
            return GiteaRepoStatus.UNAUTHORIZED
        # Any observed content hash is treated as evidence the repo exists.
        if any(summary.hash_set):
            return GiteaRepoStatus.EXISTS
        # Everything missing locally still proves nothing about the remote.
        if summary.missing_count == len(summary.observations):
            return GiteaRepoStatus.UNKNOWN
    return GiteaRepoStatus.UNKNOWN
def plan_for_target(
    target: RepositoryTarget,
    *,
    endpoint: GiteaEndpoint,
    status: GiteaRepoStatus,
    auth: GiteaAuth,
) -> GiteaRepositoryPlan:
    """Build the action plan for one repository given its remote *status*.

    Accumulates actions/commands/API requests per status, then layers on
    reconciliation and rename requirements from the target itself. The plan
    id is a stable digest of (target, status, actions, endpoint).
    """
    actions: list[GiteaPlannedAction] = []
    commands: list[str] = []
    api_requests: list[str] = []
    evidence: list[str] = [endpoint.clone_url]
    reason = "repositorio Gitea verificado"
    # Without credentials, nothing remote can be asserted or created.
    if not auth.available:
        actions.append(GiteaPlannedAction.REQUIRE_TOKEN)
        reason = "credencial Gitea ausente; nao e seguro declarar existencia/criacao remota"
    if status == GiteaRepoStatus.EXISTS:
        actions.append(GiteaPlannedAction.VERIFY_REPOSITORY)
        api_requests.append(f"GET {endpoint.api_repo_path}")
    elif status == GiteaRepoStatus.MISSING:
        actions.append(GiteaPlannedAction.CREATE_REPOSITORY)
        api_requests.append(f"POST /api/v1/orgs/{endpoint.owner}/repos")
        # Sample command only; it carries no credentials.
        commands.append(f"curl -X POST {endpoint.base_url}/api/v1/orgs/{endpoint.owner}/repos -d '{{\"name\":\"{endpoint.repo}\"}}'")
        reason = "repositorio Gitea nao encontrado; criar somente com token e owner confirmados"
    elif status in {GiteaRepoStatus.UNAUTHORIZED, GiteaRepoStatus.FORBIDDEN}:
        actions.append(GiteaPlannedAction.REQUIRE_TOKEN)
        reason = "Gitea respondeu sem permissao suficiente"
    elif status == GiteaRepoStatus.NETWORK_ERROR:
        actions.append(GiteaPlannedAction.VERIFY_REPOSITORY)
        # NOTE(review): "repositório" is accented unlike the rest of this
        # module's unaccented pt-BR strings — confirm whether intended.
        reason = "erro de rede impede verificar repositório central"
    else:
        actions.append(GiteaPlannedAction.VERIFY_REPOSITORY)
        reason = "estado remoto nao confirmado"
    if target.requires_nominal_reconciliation:
        actions.append(GiteaPlannedAction.REQUIRE_OWNER_DECISION)
        evidence.extend(target.notes)
    # A declared canonical name differing from the remote repo implies a rename.
    if target.canonical_name and target.canonical_name != endpoint.repo:
        actions.append(GiteaPlannedAction.RENAME_REPOSITORY)
        api_requests.append(f"PATCH {endpoint.api_repo_path} name={target.canonical_name}")
        reason += "; renome remoto depende de decisao institucional"
    # Deduplicate preserving first-seen order, then rebuild enum members.
    actions = list(merge_unique(action.value for action in actions))
    action_enums = tuple(GiteaPlannedAction(value) for value in actions)
    seed = {
        "target": target.declared_name,
        "status": status.value,
        "actions": [action.value for action in action_enums],
        "endpoint": endpoint.clone_url,
    }
    return GiteaRepositoryPlan(
        plan_id=f"gitea-repo-plan-{stable_digest(seed, 12)}",
        declared_name=target.declared_name,
        expected_local_name=target.expected_local_name,
        owner=endpoint.owner,
        repo=endpoint.repo,
        clone_url=endpoint.clone_url,
        status=status,
        actions=action_enums,
        reason=reason,
        commands=tuple(commands),
        api_requests=tuple(api_requests),
        evidence=tuple(evidence),
    )
def build_gitea_mesh_plan(
    report: MeshReport,
    *,
    base_url: str = "https://git.ami.app.br",
    auth: GiteaAuth | None = None,
    live_check: bool = False,
) -> GiteaMeshPlan:
    """Plan Gitea-side work for every target in *report*.

    When *live_check* is enabled and credentials are available the remote is
    queried directly; otherwise the status is inferred from local mesh
    evidence only, so nothing is assumed about the remote. The plan id is a
    stable digest of the report id, base URL and per-repo plan ids.
    """
    from dataclasses import replace  # local import: module imports only `dataclass`

    auth = auth or GiteaAuth()
    plans: list[GiteaRepositoryPlan] = []
    for target in report.targets:
        endpoint = endpoint_for_target(target, base_url)
        if live_check and auth.available:
            response = request_gitea_repo(endpoint, auth)
            status = response.status
            extra_evidence = (response.url, response.error)
        else:
            status = infer_status_from_report(target, report)
            extra_evidence = ()
        plan = plan_for_target(target, endpoint=endpoint, status=status, auth=auth)
        if extra_evidence:
            # Fold the live-call evidence in. dataclasses.replace avoids the
            # previous error-prone re-listing of every other field.
            plan = replace(plan, evidence=merge_unique((*plan.evidence, *extra_evidence)))
        plans.append(plan)
    seed = {"report": report.report_id, "base": base_url, "plans": [plan.plan_id for plan in plans]}
    return GiteaMeshPlan(
        mesh_plan_id=f"gitea-mesh-plan-{stable_digest(seed, 12)}",
        generated_at=utc_now(),
        base_url=base_url.rstrip("/"),
        authenticated_as=auth.redacted_label(),
        repositories=tuple(plans),
    )
def gitea_plan_rows(plan: GiteaMeshPlan) -> list[list[str]]:
    """Tabular view of the plan: a header row plus one row per repository."""
    header = ["plan_id", "declared_name", "owner", "repo", "status", "blocked", "actions", "reason", "api_requests"]
    body = [
        [
            repo.plan_id,
            repo.declared_name,
            repo.owner,
            repo.repo,
            repo.status.value,
            "yes" if repo.blocked else "no",
            " | ".join(action.value for action in repo.actions),
            repo.reason,
            " | ".join(repo.api_requests),
        ]
        for repo in plan.repositories
    ]
    return [header, *body]
def gitea_plan_csv(plan: GiteaMeshPlan) -> str:
    """Render the Gitea mesh plan matrix as CSV text."""
    table = gitea_plan_rows(plan)
    return rows_to_csv(table)
def gitea_plan_markdown(plan: GiteaMeshPlan) -> str:
    """Render the Gitea mesh plan as a human-readable Markdown report.

    Header metrics first, then one section per repository with its status,
    blocking state, actions and planned API requests.
    """
    lines = [
        "# Gitea Repository Mesh Plan",
        "",
        f"- mesh_plan_id: `{plan.mesh_plan_id}`",
        f"- generated_at: `{plan.generated_at}`",
        f"- base_url: `{plan.base_url}`",
        f"- authenticated_as: `{plan.authenticated_as}`",
        f"- exists: `{plan.exists_count}`",
        f"- missing: `{plan.missing_count}`",
        f"- blocked: `{plan.blocked_count}`",
        "",
        "## Repositorios",
        "",
    ]
    for repo in plan.repositories:
        lines.append(f"### {repo.declared_name}")
        lines.append("")
        lines.append(f"- clone_url: `{repo.clone_url}`")
        lines.append(f"- status: `{repo.status.value}`")
        lines.append(f"- blocked: `{repo.blocked}`")
        lines.append(f"- actions: {', '.join(action.value for action in repo.actions)}")
        lines.append(f"- reason: {repo.reason}")
        # API requests are listed only when the plan would actually issue any.
        if repo.api_requests:
            lines.append("- api_requests:")
            for request in repo.api_requests:
                lines.append(f"  - `{request}`")
        lines.append("")
    return "\n".join(lines).strip() + "\n"
def write_json(path: Path, payload: object) -> Path:
    """Write *payload* (via as_plain_data) as sorted, indented UTF-8 JSON.

    Parent directories are created as needed; returns *path* for chaining.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(as_plain_data(payload), ensure_ascii=False, indent=2, sort_keys=True), encoding="utf-8")
    return path
def write_text(path: Path, text: str) -> Path:
    """Write *text* to *path* as UTF-8, creating parents; return *path*."""
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text, encoding="utf-8")
    return path
def write_gitea_plan_artifacts(
    plan: GiteaMeshPlan,
    project_root: Path,
    *,
    central_platform_folder: Path | None = None,
) -> tuple[GeneratedFile, ...]:
    """Write the Gitea plan as JSON/CSV/Markdown and return provenance records.

    Artifacts go under *project_root* (dados/, matrizes/, ecossistema/); when
    *central_platform_folder* is given, the Markdown report is also mirrored
    into its ``reports`` folder as a pending-items file.
    """
    write_json(project_root / "dados" / "repository-mesh-gitea-plan.json", plan)
    write_text(project_root / "matrizes" / "repository-mesh-gitea-plan.csv", gitea_plan_csv(plan))
    write_text(project_root / "ecossistema" / "REPOSITORY-MESH-GITEA.md", gitea_plan_markdown(plan))
    if central_platform_folder is not None:
        reports = central_platform_folder / "reports"
        reports.mkdir(parents=True, exist_ok=True)
        write_text(reports / "PENDENCIAS-CODEX__repository-mesh-gitea.md", gitea_plan_markdown(plan))
    # One GeneratedFile record per artifact written above.
    records = [
        GeneratedFile(
            path="dados/repository-mesh-gitea-plan.json",
            description="Plano Gitea da malha de repositorios.",
            function="repository mesh gitea plan",
            file_type="json",
            changed_by="mais_humana.repository_mesh_gitea",
            change_summary="Criado plano de verificacao/criacao/renome remoto.",
            relation_to_order="000_sincronizacao-dos-espelhos",
        ),
        GeneratedFile(
            path="matrizes/repository-mesh-gitea-plan.csv",
            description="Matriz do plano Gitea.",
            function="repository mesh gitea matrix",
            file_type="csv",
            changed_by="mais_humana.repository_mesh_gitea",
            change_summary="Gerada matriz de acoes Gitea.",
            relation_to_order="000_sincronizacao-dos-espelhos",
        ),
        GeneratedFile(
            path="ecossistema/REPOSITORY-MESH-GITEA.md",
            description="Relatorio humano do plano Gitea.",
            function="repository mesh gitea report",
            file_type="markdown",
            changed_by="mais_humana.repository_mesh_gitea",
            change_summary="Documentado plano Gitea sem credencial bruta.",
            relation_to_order="000_sincronizacao-dos-espelhos",
        ),
    ]
    if central_platform_folder is not None:
        # Note: the central record carries an absolute path, unlike the
        # project-relative paths above.
        records.append(
            GeneratedFile(
                path=str(central_platform_folder / "reports" / "PENDENCIAS-CODEX__repository-mesh-gitea.md"),
                description="Pendencias Gitea registradas na central.",
                function="repository mesh central gitea",
                file_type="markdown",
                changed_by="mais_humana.repository_mesh_gitea",
                change_summary="Registradas pendencias de token/decisao para Gitea.",
                relation_to_order="000_sincronizacao-dos-espelhos",
            )
        )
    return tuple(records)

View File

@@ -0,0 +1,161 @@
"""Naming policy for repository mesh reconciliation."""
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Sequence
from .models import as_plain_data, merge_unique, slugify
from .repository_mesh import RepositoryTarget
class NamingStatus(str, Enum):
    """Outcome of matching a repository target to a local directory name."""
    # Directory found under the exact expected name.
    EXACT = "exact"
    # Found under a known alias instead of the expected name.
    ALIAS = "alias"
    CANONICAL_RENAME_REQUIRED = "canonical_rename_required"
    CENTRAL_FOLDER_ONLY = "central_folder_only"
    MISSING = "missing"
    # NOTE(review): presumably set when multiple candidate directories
    # exist — confirm against decide_naming (not fully visible here).
    CONFLICT = "conflict"
@dataclass(frozen=True, slots=True)
class NamingCandidate:
    """One possible directory name/path for a target, with match flags."""
    name: str
    path: str
    # Whether the path currently exists on disk.
    exists: bool
    # name equals target.expected_local_name.
    is_expected: bool
    # name is listed in target.aliases.
    is_alias: bool
    # name matches target.canonical_name (when one is declared).
    is_canonical: bool
    def to_dict(self) -> dict[str, Any]:
        """Plain-data view for JSON serialization."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class NamingDecision:
    """Resolved naming verdict for one repository target."""
    target_name: str
    expected_name: str
    # The directory name chosen, or None when nothing usable was found.
    selected_name: str | None
    status: NamingStatus
    reason: str
    candidates: tuple[NamingCandidate, ...]
    next_action: str
    @property
    def needs_action(self) -> bool:
        """Anything other than an exact-name match requires follow-up."""
        return self.status not in {NamingStatus.EXACT}
    def to_dict(self) -> dict[str, Any]:
        """Plain-data view for JSON serialization."""
        return as_plain_data(self)
def platform_to_plataform(name: str) -> str:
    """Return *name* with a trailing ``-platform`` swapped for ``-plataform``."""
    suffix = "-platform"
    if not name.endswith(suffix):
        return name
    return name[: -len(suffix)] + "-plataform"
def plataform_to_platform(name: str) -> str:
    """Return *name* with a trailing ``-plataform`` swapped for ``-platform``."""
    suffix = "-plataform"
    if not name.endswith(suffix):
        return name
    return name[: -len(suffix)] + "-platform"
def central_folder_for_name(name: str, index: int | None = None, repo: bool = True) -> str:
    """Build the central-folder name: optional ``NN_`` index, optional ``repo_`` marker, slug."""
    parts: list[str] = []
    if index is not None:
        parts.append(f"{index:02d}_")
    if repo:
        parts.append("repo_")
    parts.append(slugify(name))
    return "".join(parts)
def candidate_names(target: RepositoryTarget) -> tuple[str, ...]:
    """Collect every nominal variant worth probing for *target*, de-duplicated in order."""
    expected = target.expected_local_name
    names: list[str] = [expected, target.declared_name]
    if target.canonical_name:
        names.append(target.canonical_name)
    names.extend(target.aliases)
    # Probe the common platform/plataform misspelling in whichever direction applies.
    if expected.endswith("-platform"):
        names.append(platform_to_plataform(expected))
    if expected.endswith("-plataform"):
        names.append(plataform_to_platform(expected))
    return merge_unique(names)
def build_candidates(root: Path, target: RepositoryTarget) -> tuple[NamingCandidate, ...]:
    """Materialize a NamingCandidate for every nominal variant of *target* under *root*."""

    def _probe(name: str) -> NamingCandidate:
        location = root / name
        return NamingCandidate(
            name=name,
            path=str(location),
            exists=location.exists(),
            is_expected=name == target.expected_local_name,
            is_alias=name in target.aliases,
            is_canonical=bool(target.canonical_name and name == target.canonical_name),
        )

    return tuple(_probe(name) for name in candidate_names(target))
def decide_naming(root: Path, target: RepositoryTarget) -> NamingDecision:
    """Decide how the on-disk folder for *target* relates to its expected name.

    Precedence: a multi-variant conflict blocks everything; an exact match wins
    next; a single non-expected variant becomes a rename or an alias; no
    variant at all means the mirror is missing.
    """
    candidates = build_candidates(root, target)

    def _decision(selected: str | None, status: NamingStatus, reason: str, next_action: str) -> NamingDecision:
        # All decisions share the same target/candidate context; only the
        # outcome fields vary.
        return NamingDecision(
            target_name=target.declared_name,
            expected_name=target.expected_local_name,
            selected_name=selected,
            status=status,
            reason=reason,
            candidates=candidates,
            next_action=next_action,
        )

    existing = tuple(item for item in candidates if item.exists)
    if len(existing) > 1:
        return _decision(
            None,
            NamingStatus.CONFLICT,
            "mais de uma variante nominal existe; reconciliacao manual evita duplicar repositorio",
            "comparar HEADs e decidir qual pasta permanece",
        )
    expected = next((item for item in candidates if item.is_expected), None)
    if expected and expected.exists:
        return _decision(
            expected.name,
            NamingStatus.EXACT,
            "nome esperado ja esta materializado",
            "manter monitoramento",
        )
    if len(existing) == 1:
        found = existing[0]
        if target.requires_nominal_reconciliation:
            status = NamingStatus.CANONICAL_RENAME_REQUIRED
            action = "renomear pasta local com destino inexistente"
        else:
            status = NamingStatus.ALIAS
            action = "registrar alias institucional"
        return _decision(
            found.name,
            status,
            f"pasta encontrada como {found.name}, esperado {target.expected_local_name}",
            action,
        )
    return _decision(
        None,
        NamingStatus.MISSING,
        "nenhuma variante nominal foi encontrada",
        "clonar ou materializar espelho sem sobrescrever destino",
    )
def naming_rows(decisions: Sequence[NamingDecision]) -> list[list[str]]:
    """Flatten naming decisions into CSV-ready rows, header first."""
    header = ["target_name", "expected_name", "selected_name", "status", "reason", "next_action"]
    body = [
        [
            item.target_name,
            item.expected_name,
            item.selected_name or "",
            item.status.value,
            item.reason,
            item.next_action,
        ]
        for item in decisions
    ]
    return [header, *body]

View File

@@ -0,0 +1,528 @@
"""Readiness gates for the repository mesh synchronization order."""
from __future__ import annotations
import json
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Iterable, Sequence
from .models import GeneratedFile, as_plain_data, clamp_score, merge_unique, utc_now
from .repository_mesh import MeshReport, MeshRiskLevel, rows_to_csv, stable_digest
from .repository_mesh_naming import NamingStatus
from .repository_mesh_reconciliation import ReconciliationPlan, ReconciliationStatus
from .repository_mesh_runtime import RuntimeCycle, SchedulerSpec
from .repository_mesh_semantic import MeshSemanticCounts
class MeshGateStatus(str, Enum):
    """Gate status for readiness evaluation."""
    PASS = "pass"  # gate satisfied
    WARN = "warn"  # degraded but non-blocking
    FAIL = "fail"  # gate not satisfied
class MeshGateSeverity(str, Enum):
    """Gate severity."""
    INFO = "info"  # informational; assigned automatically to passing gates
    WARNING = "warning"  # should be addressed but does not block the order
    BLOCKER = "blocker"  # a failing gate with this severity fails the whole report
@dataclass(frozen=True, slots=True)
class MeshReadinessGate:
    """One evaluated readiness gate with its score and follow-up action."""
    gate_id: str  # stable dotted identifier, e.g. "inventory.targets-declared"
    title: str  # human-readable gate title (Portuguese, like the reports)
    status: MeshGateStatus
    severity: MeshGateSeverity
    score: int  # 0-100 contribution to the aggregate readiness score
    reason: str  # why the gate reached its status
    evidence: tuple[str, ...]  # supporting strings, possibly truncated by the caller
    next_action: str  # operator follow-up when the gate is not green

    @property
    def passed(self) -> bool:
        """True only for PASS (a WARN does not count as passed)."""
        return self.status == MeshGateStatus.PASS

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this gate."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class MeshReadinessReport:
    """Aggregate readiness verdict over all gate families."""
    readiness_id: str  # content-addressed id of this evaluation
    report_id: str  # MeshReport this readiness was computed from
    plan_id: str  # ReconciliationPlan this readiness was computed from
    cycle_id: str  # RuntimeCycle this readiness was computed from
    generated_at: str  # UTC timestamp of evaluation
    score: int  # clamped mean of all gate scores
    status: MeshGateStatus  # FAIL if any blocker failed, WARN if any warning, else PASS
    gates: tuple[MeshReadinessGate, ...]
    summary: tuple[str, ...]  # human-readable "key: value" summary lines

    @property
    def blocker_count(self) -> int:
        """Number of FAILed gates with BLOCKER severity."""
        return sum(1 for gate in self.gates if gate.status == MeshGateStatus.FAIL and gate.severity == MeshGateSeverity.BLOCKER)

    @property
    def warning_count(self) -> int:
        """Number of gates in WARN status."""
        return sum(1 for gate in self.gates if gate.status == MeshGateStatus.WARN)

    @property
    def passed_count(self) -> int:
        """Number of gates that fully passed."""
        return sum(1 for gate in self.gates if gate.passed)

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this report."""
        return as_plain_data(self)
def gate(
    gate_id: str,
    title: str,
    passed: bool,
    reason: str,
    next_action: str,
    *,
    evidence: Iterable[str] = (),
    severity: MeshGateSeverity = MeshGateSeverity.WARNING,
    warn: bool = False,
    score_pass: int = 100,
    score_fail: int = 0,
) -> MeshReadinessGate:
    """Build one readiness gate from a boolean outcome.

    A passing gate is always INFO severity with *score_pass*. A failing gate
    keeps *severity*; with ``warn=True`` it degrades to WARN with a clamped
    score instead of failing outright.
    """
    if passed:
        status, score, actual_severity = MeshGateStatus.PASS, score_pass, MeshGateSeverity.INFO
    elif warn:
        status = MeshGateStatus.WARN
        # Clamp into [1, 89] so a WARN never reads as a clean PASS or a hard FAIL.
        score = max(1, min(89, score_fail or 55))
        actual_severity = severity
    else:
        status, score, actual_severity = MeshGateStatus.FAIL, score_fail, severity
    cleaned = tuple(text for text in (str(item) for item in evidence) if text)
    return MeshReadinessGate(
        gate_id=gate_id,
        title=title,
        status=status,
        severity=actual_severity,
        score=score,
        reason=reason,
        evidence=cleaned,
        next_action=next_action,
    )
def inventory_gates(report: MeshReport) -> tuple[MeshReadinessGate, ...]:
    """Gates that validate the inventory phase of the mesh report."""
    return (
        gate(
            "inventory.targets-declared",
            "Todos os repositorios declarados foram carregados",
            # NOTE(review): the floor of 15 looks tied to the current manifest size — confirm.
            len(report.targets) >= 15,
            f"Repositorios declarados: {len(report.targets)}.",
            "atualizar manifesto da OS permanente",
            evidence=(str(len(report.targets)),),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "inventory.observations-created",
            "Observacoes por ambiente foram criadas",
            # At least one observation per declared target.
            report.total_observations >= len(report.targets),
            f"Observacoes: {report.total_observations}.",
            "rodar inventario nos ambientes faltantes",
            evidence=(str(report.total_observations),),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "inventory.hashes-present",
            "Ao menos um hash HEAD foi observado",
            any(summary.hash_set for summary in report.summaries),
            "Hashes HEAD foram coletados em ambientes acessiveis." if any(summary.hash_set for summary in report.summaries) else "Nenhum hash HEAD foi coletado.",
            "corrigir acesso a repositorios e repetir git rev-parse HEAD",
            # Evidence capped at eight repository names for readability.
            evidence=[summary.target.declared_name for summary in report.summaries if summary.hash_set][:8],
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "inventory.fetch-attempted",
            "Fetch nao destrutivo foi tentado quando solicitado",
            report.fetch_enabled,
            "Fetch habilitado no ciclo." if report.fetch_enabled else "Inventario foi executado sem fetch.",
            "executar repo-mesh --fetch em rodada com credencial Git disponivel",
            evidence=(f"fetch_enabled={report.fetch_enabled}",),
            severity=MeshGateSeverity.WARNING,
            # A skipped fetch degrades to WARN, not FAIL: the inventory is still usable.
            warn=not report.fetch_enabled,
            score_fail=60,
        ),
    )
def safety_gates(report: MeshReport, plan: ReconciliationPlan, cycle: RuntimeCycle) -> tuple[MeshReadinessGate, ...]:
    """Gates that enforce the non-destructive safety contract of the mesh."""
    # Destructive commands the runtime nevertheless allowed or executed — must be empty.
    destructive_allowed = [
        result.command.command
        for result in cycle.results
        if result.status.value in {"allowed", "executed"} and result.command.destructive
    ]
    dirty = report.dirty_repositories
    credential = report.credential_errors
    blocker_receipts = plan.blocker_count
    return (
        gate(
            "safety.no-destructive-auto",
            "Nenhum comando destrutivo ficou permitido automaticamente",
            not destructive_allowed,
            "Runtime nao permitiu comando destrutivo." if not destructive_allowed else "Ha comando destrutivo permitido.",
            "corrigir gates do runtime antes de ativar scheduler",
            evidence=destructive_allowed[:8],
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "safety.dirty-blocked",
            "Working trees sujos viraram bloqueio",
            # Either nothing is dirty, or at least one receipt explicitly blocked on dirtiness.
            dirty == 0 or any(receipt.status == ReconciliationStatus.DIRTY_BLOCKED for receipt in plan.receipts),
            f"Worktrees sujos detectados: {dirty}.",
            "commit/stash consciente ou decisao de precedencia",
            evidence=(str(dirty),),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "safety.credentials-classified",
            "Falhas de credencial foram classificadas",
            # Every credential error must have a matching credential_blocked receipt.
            credential == 0 or plan.credential_blocked_count >= credential,
            f"Falhas de credencial: {credential}; recibos credential_blocked: {plan.credential_blocked_count}.",
            "configurar credencial Git e repetir fetch",
            evidence=(str(credential), str(plan.credential_blocked_count)),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "safety.receipts-created",
            "Recibos de reconciliacao foram criados",
            # One receipt per summarized repository.
            len(plan.receipts) == len(report.summaries),
            f"Recibos: {len(plan.receipts)}; repositorios: {len(report.summaries)}.",
            "regerar plano de reconciliacao",
            evidence=(str(len(plan.receipts)), str(len(report.summaries))),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "safety.blockers-visible",
            "Bloqueios estao visiveis no plano",
            blocker_receipts == 0,
            f"Recibos com blocker: {blocker_receipts}.",
            "executar ordens de saida para remover bloqueios reais",
            evidence=(str(blocker_receipts),),
            severity=MeshGateSeverity.WARNING,
            warn=blocker_receipts > 0,
            # Score decays 5 points per blocker receipt, floored at 10.
            score_fail=max(10, 80 - blocker_receipts * 5),
        ),
    )
def naming_gates(plan: ReconciliationPlan) -> tuple[MeshReadinessGate, ...]:
    """Gates that validate the nominal (folder-name) reconciliation policy."""
    # Flatten every naming decision recorded across all receipts.
    decisions = [decision for receipt in plan.receipts for decision in receipt.naming_decisions]
    conflicts = [decision for decision in decisions if decision.status == NamingStatus.CONFLICT]
    pending = [decision for decision in decisions if decision.status not in {NamingStatus.EXACT}]
    return (
        gate(
            "naming.policy-evaluated",
            "Politica de nomenclatura avaliou os repositorios",
            bool(decisions),
            f"Decisoes nominais: {len(decisions)}.",
            "executar reconciliacao com raizes acessiveis",
            evidence=(str(len(decisions)),),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "naming.no-conflicts",
            "Conflitos de nome foram bloqueados",
            not conflicts,
            f"Conflitos nominais: {len(conflicts)}.",
            "comparar variantes platform/plataform/alias antes de rename ou clone",
            # Evidence capped at eight target names for readability.
            evidence=[decision.target_name for decision in conflicts][:8],
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "naming.pending-visible",
            "Pendencias nominais estao visiveis",
            not pending,
            f"Pendencias nominais: {len(pending)}.",
            "executar ordens de renome/alias geradas pela reconciliacao",
            evidence=[f"{decision.target_name}:{decision.status.value}" for decision in pending][:8],
            severity=MeshGateSeverity.WARNING,
            # Pending renames/aliases are a WARN, not a FAIL.
            warn=bool(pending),
            score_fail=max(10, 85 - len(pending) * 5),
        ),
    )
def automation_gates(cycle: RuntimeCycle, schedulers: Sequence[SchedulerSpec]) -> tuple[MeshReadinessGate, ...]:
    """Gates that validate runtime-cycle generation and scheduler coverage.

    The final gate enforces that automatically-allowed commands remain a
    controlled minority of the cycle's command set.
    """
    kinds = {spec.kind.value for spec in schedulers}
    return (
        gate(
            "automation.runtime-cycle",
            "Ciclo runtime foi gerado",
            bool(cycle.cycle_id),
            f"Ciclo: {cycle.cycle_id}.",
            "gerar runtime cycle com lock e comandos",
            evidence=(cycle.cycle_id,),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "automation.lock-held",
            "Runtime registrou estado de lock",
            bool(cycle.lock.status.value),
            f"Lock: {cycle.lock.status.value}.",
            "corrigir lock do runtime",
            evidence=(cycle.lock.path, cycle.lock.status.value),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "automation.schedulers",
            "Schedulers Windows e Linux foram especificados",
            {"windows_task", "cron"}.issubset(kinds),
            f"Schedulers: {', '.join(sorted(kinds))}.",
            "publicar instalacao em cada ambiente",
            evidence=sorted(kinds),
            severity=MeshGateSeverity.WARNING,
            warn=not {"windows_task", "cron"}.issubset(kinds),
            score_fail=70,
        ),
        gate(
            "automation.allowed-commands-limited",
            "Comandos automaticos permitidos sao minoria controlada",
            # BUG FIX: the previous check compared against the full command count,
            # which is always true (allowed commands are a subset of all commands),
            # so this gate could never fail. A "controlled minority" means at most
            # half the commands, with a floor of one for very small cycles.
            cycle.allowed_count <= max(1, len(cycle.commands) // 2),
            f"Allowed: {cycle.allowed_count}; total: {len(cycle.commands)}.",
            "revisar gates se comandos automaticos crescerem sem recibo",
            evidence=(str(cycle.allowed_count), str(len(cycle.commands))),
            severity=MeshGateSeverity.WARNING,
        ),
    )
def semantic_gates(counts: MeshSemanticCounts) -> tuple[MeshReadinessGate, ...]:
    """Gates that verify the mesh state was persisted to the semantic SQL store."""
    return (
        gate(
            "semantic.report",
            "SQL registrou report da malha",
            counts.reports > 0,
            f"Reports SQL: {counts.reports}.",
            "executar write_repository_mesh_semantic_state",
            evidence=(str(counts.reports),),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "semantic.receipts",
            "SQL registrou recibos de reconciliacao",
            counts.receipts > 0,
            f"Receipts SQL: {counts.receipts}.",
            "persistir plano de reconciliacao no SQLite",
            evidence=(str(counts.receipts),),
            severity=MeshGateSeverity.BLOCKER,
        ),
        gate(
            "semantic.schedulers",
            "SQL registrou schedulers",
            # Two specs expected: one Windows task plus one cron entry.
            counts.schedulers >= 2,
            f"Schedulers SQL: {counts.schedulers}.",
            "persistir specs de scheduler Windows/Cron",
            evidence=(str(counts.schedulers),),
            severity=MeshGateSeverity.WARNING,
            warn=counts.schedulers < 2,
            score_fail=70,
        ),
    )
def build_mesh_readiness_report(
    report: MeshReport,
    plan: ReconciliationPlan,
    cycle: RuntimeCycle,
    schedulers: Sequence[SchedulerSpec],
    counts: MeshSemanticCounts,
) -> MeshReadinessReport:
    """Aggregate every gate family into a single readiness report.

    Overall status is FAIL when any blocker-severity gate failed, WARN when any
    gate warned, and PASS otherwise; the score is the clamped mean gate score.
    """
    gates: tuple[MeshReadinessGate, ...] = (
        *inventory_gates(report),
        *safety_gates(report, plan, cycle),
        *naming_gates(plan),
        *automation_gates(cycle, schedulers),
        *semantic_gates(counts),
    )
    score = clamp_score(sum(item.score for item in gates) / len(gates)) if gates else 0
    blocker_count = sum(
        1
        for item in gates
        if item.status == MeshGateStatus.FAIL and item.severity == MeshGateSeverity.BLOCKER
    )
    warning_count = sum(1 for item in gates if item.status == MeshGateStatus.WARN)
    if blocker_count:
        status = MeshGateStatus.FAIL
    elif warning_count:
        status = MeshGateStatus.WARN
    else:
        status = MeshGateStatus.PASS
    summary = (
        f"score: {score}",
        f"status: {status.value}",
        f"gates: {len(gates)}",
        f"blockers: {blocker_count}",
        f"warnings: {warning_count}",
        f"passed: {sum(1 for item in gates if item.passed)}",
    )
    # The readiness id is content-addressed over its inputs plus the final score.
    seed = {"report": report.report_id, "plan": plan.plan_id, "cycle": cycle.cycle_id, "score": score}
    return MeshReadinessReport(
        readiness_id=f"mesh-readiness-{stable_digest(seed, 12)}",
        report_id=report.report_id,
        plan_id=plan.plan_id,
        cycle_id=cycle.cycle_id,
        generated_at=utc_now(),
        score=score,
        status=status,
        gates=gates,
        summary=summary,
    )
def readiness_rows(readiness: MeshReadinessReport) -> list[list[str]]:
    """Flatten readiness gates into CSV-ready rows, header first."""
    header = ["gate_id", "status", "severity", "score", "title", "reason", "next_action", "evidence"]
    body = [
        [
            entry.gate_id,
            entry.status.value,
            entry.severity.value,
            str(entry.score),
            entry.title,
            entry.reason,
            entry.next_action,
            " | ".join(entry.evidence),
        ]
        for entry in readiness.gates
    ]
    return [header, *body]
def readiness_csv(readiness: MeshReadinessReport) -> str:
    """Serialize the readiness gate matrix as CSV text."""
    return rows_to_csv(readiness_rows(readiness))
def readiness_markdown(readiness: MeshReadinessReport) -> str:
    """Render the readiness report as a human-facing Markdown document."""
    out: list[str] = ["# Repository Mesh Readiness", ""]
    out.append(f"- readiness_id: `{readiness.readiness_id}`")
    out.append(f"- report_id: `{readiness.report_id}`")
    out.append(f"- plan_id: `{readiness.plan_id}`")
    out.append(f"- cycle_id: `{readiness.cycle_id}`")
    out.append(f"- generated_at: `{readiness.generated_at}`")
    out.append(f"- status: `{readiness.status.value}`")
    out.append(f"- score: `{readiness.score}`")
    out.append(f"- blockers: `{readiness.blocker_count}`")
    out.append(f"- warnings: `{readiness.warning_count}`")
    out.extend(["", "## Sumario", ""])
    for item in readiness.summary:
        out.append(f"- {item}")
    out.extend(["", "## Gates", ""])
    for entry in readiness.gates:
        out.append(f"### {entry.gate_id}")
        out.append("")
        out.append(f"- status: `{entry.status.value}`")
        out.append(f"- severity: `{entry.severity.value}`")
        out.append(f"- score: `{entry.score}`")
        out.append(f"- title: {entry.title}")
        out.append(f"- reason: {entry.reason}")
        out.append(f"- next_action: {entry.next_action}")
        if entry.evidence:
            out.append("- evidence:")
            # Cap evidence at ten items per gate to keep the report readable.
            out.extend(f"  - `{item}`" for item in entry.evidence[:10])
        out.append("")
    return "\n".join(out).strip() + "\n"
def readiness_pending_items(readiness: MeshReadinessReport) -> tuple[str, ...]:
    """List de-duplicated follow-up actions for every gate that did not fully pass."""
    unresolved = (
        item
        for item in readiness.gates
        if item.status in {MeshGateStatus.FAIL, MeshGateStatus.WARN}
    )
    return merge_unique(f"{item.gate_id}: {item.next_action}" for item in unresolved)
def write_json(path: Path, payload: object) -> Path:
    """Serialize *payload* (via as_plain_data) as sorted, UTF-8 JSON at *path*.

    Parent directories are created as needed. Returns *path* for chaining.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps(as_plain_data(payload), ensure_ascii=False, indent=2, sort_keys=True), encoding="utf-8")
    return path
def write_text(path: Path, text: str) -> Path:
    """Write *text* to *path* as UTF-8, creating parent directories; return *path*."""
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text, encoding="utf-8")
    return path
def write_readiness_artifacts(
    readiness: MeshReadinessReport,
    project_root: Path,
    *,
    central_platform_folder: Path | None = None,
) -> tuple[GeneratedFile, ...]:
    """Persist the readiness report as JSON, CSV, and Markdown artifacts.

    Always writes under *project_root* (dados/, matrizes/, ecossistema/); when
    *central_platform_folder* is given, mirrors the Markdown report into the
    central reports/ and indexes/ folders. Returns one GeneratedFile record per
    artifact written, for the service-order manifest.
    """
    write_json(project_root / "dados" / "repository-mesh-readiness.json", readiness)
    write_text(project_root / "matrizes" / "repository-mesh-readiness.csv", readiness_csv(readiness))
    write_text(project_root / "ecossistema" / "REPOSITORY-MESH-READINESS.md", readiness_markdown(readiness))
    if central_platform_folder is not None:
        reports = central_platform_folder / "reports"
        indexes = central_platform_folder / "indexes"
        reports.mkdir(parents=True, exist_ok=True)
        indexes.mkdir(parents=True, exist_ok=True)
        # The same Markdown body is published both as a report and as an index entry.
        write_text(reports / "EXECUTADO__repository-mesh-readiness.md", readiness_markdown(readiness))
        write_text(indexes / "repository-mesh-readiness-index.md", readiness_markdown(readiness))
    records = [
        GeneratedFile(
            path="dados/repository-mesh-readiness.json",
            description="Readiness JSON da malha de repositorios.",
            function="repository mesh readiness",
            file_type="json",
            changed_by="mais_humana.repository_mesh_readiness",
            change_summary="Gerados gates de aceite da sincronizacao segura.",
            relation_to_order="000_sincronizacao-dos-espelhos",
        ),
        GeneratedFile(
            path="matrizes/repository-mesh-readiness.csv",
            description="Matriz de gates da malha de repositorios.",
            function="repository mesh readiness matrix",
            file_type="csv",
            changed_by="mais_humana.repository_mesh_readiness",
            change_summary="Gerada matriz de readiness da malha.",
            relation_to_order="000_sincronizacao-dos-espelhos",
        ),
        GeneratedFile(
            path="ecossistema/REPOSITORY-MESH-READINESS.md",
            description="Relatorio humano de readiness da malha.",
            function="repository mesh readiness report",
            file_type="markdown",
            changed_by="mais_humana.repository_mesh_readiness",
            change_summary="Documentados gates de aceite da OS permanente.",
            relation_to_order="000_sincronizacao-dos-espelhos",
        ),
    ]
    if central_platform_folder is not None:
        records.extend(
            [
                GeneratedFile(
                    path=str(central_platform_folder / "reports" / "EXECUTADO__repository-mesh-readiness.md"),
                    description="Readiness da malha registrada na central.",
                    function="repository mesh central readiness",
                    file_type="markdown",
                    changed_by="mais_humana.repository_mesh_readiness",
                    change_summary="Registrado readiness da malha na central.",
                    relation_to_order="000_sincronizacao-dos-espelhos",
                ),
                GeneratedFile(
                    path=str(central_platform_folder / "indexes" / "repository-mesh-readiness-index.md"),
                    description="Indice de readiness da malha na central.",
                    function="repository mesh central readiness index",
                    file_type="markdown",
                    changed_by="mais_humana.repository_mesh_readiness",
                    change_summary="Registrado indice de readiness da malha.",
                    relation_to_order="000_sincronizacao-dos-espelhos",
                ),
            ]
        )
    return tuple(records)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,716 @@
"""Runtime helpers for the repository mesh synchronization cycle.
This module does not replace Git, Gitea, or the operating-system scheduler. It
provides a deterministic runtime contract for the five-minute synchronization
cycle required by the permanent order:
* acquire a lock before touching mirrors;
* execute only commands that passed repository-mesh safety gates;
* write JSONL receipts for every allowed or blocked command;
* publish scheduler definitions for Windows and Linux;
* keep enough retention metadata for the central service-order database.
The runtime is intentionally conservative. It is valid for the runtime to
refuse work; it is never valid for it to run an unsafe command silently.
"""
from __future__ import annotations
import json
import os
import subprocess
import time
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import Any, Iterable, Mapping, Sequence
from .models import GeneratedFile, as_plain_data, merge_unique, utc_now
from .repository_mesh import MeshAction, MeshActionKind, MeshReport, command_is_destructive, rows_to_csv, stable_digest
from .repository_mesh_reconciliation import PlannedOperation, ReconciliationPlan, ReceiptSeverity
class RuntimeCommandStatus(str, Enum):
    """Execution status for one runtime command."""
    ALLOWED = "allowed"  # passed every gate; eligible for automatic execution
    BLOCKED = "blocked"  # at least one safety gate rejected the command
    SKIPPED = "skipped"  # not blocked, but requires manual execution
    EXECUTED = "executed"  # actually ran (reserved for a future subprocess runner)
    FAILED = "failed"  # ran and returned an error (reserved for a future runner)
class RuntimeLockStatus(str, Enum):
    """Lock acquisition status."""
    ACQUIRED = "acquired"  # fresh lock created exclusively by this cycle
    BUSY = "busy"  # another process holds a live lock
    STALE_REPLACED = "stale_replaced"  # an expired/invalid lock was overwritten
    RELEASED = "released"  # lock file removed by this cycle
    NOT_HELD = "not_held"  # release requested but this cycle never held the lock
class SchedulerKind(str, Enum):
    """Supported scheduler families."""
    WINDOWS_TASK = "windows_task"  # Windows Task Scheduler
    CRON = "cron"  # POSIX crontab entry
    SYSTEMD_TIMER = "systemd_timer"  # systemd timer unit
    MANUAL = "manual"  # operator-triggered, no installed scheduler
class RuntimeGate(str, Enum):
    """Safety gate names for automatic execution."""
    COMMAND_NOT_DESTRUCTIVE = "command_not_destructive"  # command text carries no destructive pattern
    ACTION_MARKED_AUTOMATIC = "action_marked_automatic"  # source mesh action allows automatic execution
    OPERATION_NOT_DESTRUCTIVE = "operation_not_destructive"  # reconciliation operation allows automatic execution
    WORKTREE_POLICY = "worktree_policy"  # dirty-worktree policy (declared; not set in this module)
    CREDENTIAL_POLICY = "credential_policy"  # credential-failure policy (declared; not set in this module)
    LOCK_HELD = "lock_held"  # cycle lock policy (declared; enforced via blocked_reasons)
@dataclass(frozen=True, slots=True)
class RuntimeLock:
    """Lock metadata for a sync cycle."""
    lock_id: str  # content-addressed id derived from the lock payload
    path: str  # filesystem location of the JSON lock file
    status: RuntimeLockStatus
    owner: str  # owner label recorded in the lock payload
    acquired_at: str | None  # ISO timestamp; None when acquisition failed mid-flight
    expires_after_seconds: int  # staleness horizon for the payload
    message: str = ""  # human-readable note about how this state was reached

    @property
    def acquired(self) -> bool:
        """True when this cycle holds the lock (fresh or stale-replaced)."""
        return self.status in {RuntimeLockStatus.ACQUIRED, RuntimeLockStatus.STALE_REPLACED}

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this lock."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class RuntimeCommand:
    """One command the runtime may execute or block."""
    command_id: str  # content-addressed id (see runtime_command_id)
    source_id: str  # id of the originating mesh action or reconciliation operation
    target_name: str  # repository this command applies to
    command: str  # command preview text
    automatic: bool  # source marked this command as automatically executable
    destructive: bool  # command text or source flagged as destructive
    gates: tuple[RuntimeGate, ...]  # safety gates recorded for this command
    blocked_reasons: tuple[str, ...]  # non-empty when any gate blocked the command

    @property
    def status(self) -> RuntimeCommandStatus:
        """Derive status: blocked reasons win, then manual-only skips, else allowed."""
        if self.blocked_reasons:
            return RuntimeCommandStatus.BLOCKED
        if not self.automatic:
            return RuntimeCommandStatus.SKIPPED
        return RuntimeCommandStatus.ALLOWED

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this command."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class RuntimeCommandResult:
    """Result of a runtime command."""
    command: RuntimeCommand
    status: RuntimeCommandStatus  # may differ from command.status (e.g. when the lock was not held)
    returncode: int | None = None  # populated only when a real subprocess ran
    stdout: str = ""
    stderr: str = ""
    started_at: str = field(default_factory=utc_now)  # evaluated per instance, not shared
    finished_at: str | None = None

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this result."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class RuntimeCycle:
    """One repository-mesh runtime cycle."""
    cycle_id: str  # content-addressed id over report/plan/lock/commands
    report_id: str  # MeshReport the cycle was built from
    plan_id: str  # ReconciliationPlan the cycle was built from
    generated_at: str  # UTC timestamp
    lock: RuntimeLock  # lock state the cycle ran under
    commands: tuple[RuntimeCommand, ...]
    results: tuple[RuntimeCommandResult, ...]  # one result per command
    summary: tuple[str, ...]  # human-readable "key: value" summary lines

    @property
    def allowed_count(self) -> int:
        """Commands eligible for automatic execution."""
        return sum(1 for item in self.commands if item.status == RuntimeCommandStatus.ALLOWED)

    @property
    def blocked_count(self) -> int:
        """Commands rejected by at least one gate."""
        return sum(1 for item in self.commands if item.status == RuntimeCommandStatus.BLOCKED)

    @property
    def skipped_count(self) -> int:
        """Commands requiring manual execution."""
        return sum(1 for item in self.commands if item.status == RuntimeCommandStatus.SKIPPED)

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this cycle."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class SchedulerSpec:
    """Scheduler installation plan."""
    scheduler_id: str  # stable identifier for the spec
    kind: SchedulerKind
    name: str  # task/crontab entry name
    interval_minutes: int  # cycle cadence (the order mandates five minutes)
    command: str  # command the scheduler should run
    working_directory: str
    environment: tuple[str, ...]  # KEY=VALUE pairs required at runtime
    install_commands: tuple[str, ...]  # commands an operator runs to install the schedule
    uninstall_commands: tuple[str, ...]  # commands to remove it
    log_path: str  # where scheduled runs should write their logs
    notes: tuple[str, ...] = ()  # free-form operator notes

    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data representation of this spec."""
        return as_plain_data(self)
def lock_payload(owner: str, expires_after_seconds: int) -> dict[str, Any]:
    """Build the JSON body stored in the lock file for auditability."""
    # Key insertion order mirrors the original payload layout.
    return dict(
        owner=owner,
        createdAt=utc_now(),
        expiresAfterSeconds=expires_after_seconds,
        pid=os.getpid(),
    )
def read_lock(path: Path) -> Mapping[str, Any] | None:
    """Parse the JSON lock file at *path*; return None when unreadable or malformed."""
    try:
        raw = path.read_text(encoding="utf-8")
    except OSError:
        return None
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        return None
def lock_is_stale(payload: Mapping[str, Any], now_monotonic: float | None = None) -> bool:
    """Return True when a lock payload should be considered stale.

    The payload uses wall-clock timestamps for auditability. In tests and in
    degraded environments, malformed timestamps or malformed expiry values are
    treated as stale so the operator can recover by replacing the lock.
    """
    del now_monotonic  # kept for interface compatibility; staleness uses wall-clock time
    created = str(payload.get("createdAt", ""))
    try:
        expires = int(payload.get("expiresAfterSeconds", 0) or 0)
    except (TypeError, ValueError):
        # BUG FIX: a non-numeric expiry previously raised an uncaught ValueError,
        # contradicting the "malformed metadata is stale" contract above.
        return True
    if expires <= 0:
        return True
    pid = payload.get("pid")
    # A dead owner process (other than ourselves) makes the lock stale immediately.
    if isinstance(pid, int) and pid > 0 and pid != os.getpid() and not process_is_alive(pid):
        return True
    try:
        from datetime import datetime

        created_dt = datetime.fromisoformat(created.replace("Z", "+00:00"))
        age = time.time() - created_dt.timestamp()
        return age > expires
    except (ValueError, TypeError, OSError):
        # Unparseable timestamp: stale by contract.
        return True
def process_is_alive(pid: int) -> bool:
    """Return whether a local process id is still alive without killing it."""
    if pid <= 0:
        return False
    if os.name == "nt":
        # Windows has no signal-0 probe; ask tasklist for this exact PID instead.
        try:
            probe = subprocess.run(
                ["tasklist", "/FI", f"PID eq {pid}", "/FO", "CSV", "/NH"],
                check=False,
                capture_output=True,
                text=True,
                timeout=5,
            )
        except (OSError, subprocess.SubprocessError):
            # Probe failure: assume alive so a live owner's lock is never stolen.
            return True
        return str(pid) in probe.stdout
    # POSIX: signal 0 checks existence/permission without delivering a signal.
    try:
        os.kill(pid, 0)
    except ProcessLookupError:
        return False
    except PermissionError:
        # The process exists but belongs to another user.
        return True
    except OSError:
        return False
    return True
def acquire_lock(path: Path, *, owner: str, expires_after_seconds: int = 600) -> RuntimeLock:
    """Acquire a JSON lock file without deleting non-stale locks.

    Three outcomes are possible:

    * ``BUSY`` — a valid (non-stale) lock already exists, or another process
      created the file between our existence check and our exclusive create;
    * ``STALE_REPLACED`` — a lock file existed but was missing, invalid, or
      expired, so it was overwritten;
    * ``ACQUIRED`` — no lock file existed and this process created it with an
      exclusive open.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    payload = lock_payload(owner, expires_after_seconds)
    if path.exists():
        current = read_lock(path)
        if current and not lock_is_stale(current):
            # Someone else holds a live lock: report BUSY with the holder's metadata.
            return RuntimeLock(
                lock_id=f"mesh-lock-{stable_digest(current, 12)}",
                path=str(path),
                status=RuntimeLockStatus.BUSY,
                owner=str(current.get("owner", "unknown")),
                acquired_at=str(current.get("createdAt", "")),
                expires_after_seconds=int(current.get("expiresAfterSeconds", expires_after_seconds) or expires_after_seconds),
                message="lock existente ainda valido",
            )
        # NOTE(review): this stale-check-then-overwrite is not atomic; two
        # processes replacing the same stale lock can race here — confirm acceptable.
        path.write_text(json.dumps(payload, ensure_ascii=False, indent=2, sort_keys=True), encoding="utf-8")
        return RuntimeLock(
            lock_id=f"mesh-lock-{stable_digest(payload, 12)}",
            path=str(path),
            status=RuntimeLockStatus.STALE_REPLACED,
            owner=owner,
            acquired_at=str(payload["createdAt"]),
            expires_after_seconds=expires_after_seconds,
            message="lock antigo estava ausente, invalido ou expirado",
        )
    try:
        # Exclusive-create mode ("x") guarantees only one process wins when the file is absent.
        with path.open("x", encoding="utf-8") as handle:
            json.dump(payload, handle, ensure_ascii=False, indent=2, sort_keys=True)
        return RuntimeLock(
            lock_id=f"mesh-lock-{stable_digest(payload, 12)}",
            path=str(path),
            status=RuntimeLockStatus.ACQUIRED,
            owner=owner,
            acquired_at=str(payload["createdAt"]),
            expires_after_seconds=expires_after_seconds,
            message="lock adquirido",
        )
    except FileExistsError:
        # Lost the race: another process created the lock after our exists() check.
        return RuntimeLock(
            lock_id=f"mesh-lock-{stable_digest({'path': str(path), 'owner': owner}, 12)}",
            path=str(path),
            status=RuntimeLockStatus.BUSY,
            owner=owner,
            acquired_at=None,
            expires_after_seconds=expires_after_seconds,
            message="lock criado por outro processo durante aquisicao",
        )
def release_lock(lock: RuntimeLock) -> RuntimeLock:
    """Release *lock* when this cycle holds it, returning an updated lock record."""

    def _updated(status: RuntimeLockStatus, message: str) -> RuntimeLock:
        # Rebuild the frozen dataclass with only status and message changed.
        return RuntimeLock(
            lock_id=lock.lock_id,
            path=lock.path,
            status=status,
            owner=lock.owner,
            acquired_at=lock.acquired_at,
            expires_after_seconds=lock.expires_after_seconds,
            message=message,
        )

    if not lock.acquired:
        return _updated(RuntimeLockStatus.NOT_HELD, "lock nao estava em posse deste ciclo")
    try:
        Path(lock.path).unlink(missing_ok=True)
    except OSError as exc:
        return _updated(RuntimeLockStatus.BUSY, f"falha ao liberar lock: {exc}")
    return _updated(RuntimeLockStatus.RELEASED, "lock liberado")
def runtime_command_id(source_id: str, target_name: str, command: str) -> str:
    """Derive a stable, content-addressed command id from source, target, and text."""
    return f"mesh-cmd-{stable_digest({'source': source_id, 'target': target_name, 'command': command}, 12)}"
def command_from_action(action: MeshAction) -> tuple[RuntimeCommand, ...]:
    """Translate one mesh action into gated runtime commands.

    An action with no command preview still yields a single NOOP command so the
    cycle's receipts account for it.
    """
    commands: list[RuntimeCommand] = []
    previews = action.command_preview or (f"NOOP action:{action.kind.value}",)
    for command in previews:
        blocked: list[str] = []
        # NOTE(review): COMMAND_NOT_DESTRUCTIVE is recorded even when the command
        # turns out to be destructive (it is then blocked via blocked_reasons) — confirm intended.
        gates = [RuntimeGate.COMMAND_NOT_DESTRUCTIVE]
        # Destructive if the command text matches a destructive pattern OR the action says so.
        destructive = command_is_destructive(command) or action.destructive
        if destructive:
            blocked.append("comando ou acao marcado como destrutivo")
        if not action.can_execute_automatically:
            blocked.append("acao nao marcada como automatica")
        else:
            gates.append(RuntimeGate.ACTION_MARKED_AUTOMATIC)
        commands.append(
            RuntimeCommand(
                command_id=runtime_command_id(action.action_id, action.target_name, command),
                source_id=action.action_id,
                target_name=action.target_name,
                command=command,
                automatic=action.can_execute_automatically,
                destructive=destructive,
                gates=tuple(gates),
                blocked_reasons=tuple(blocked),
            )
        )
    return tuple(commands)
def command_from_operation(operation: PlannedOperation) -> tuple[RuntimeCommand, ...]:
    """Translate one reconciliation operation into gated runtime commands.

    An operation with no commands still yields a single NOOP command so the
    cycle's receipts account for it.
    """
    commands: list[RuntimeCommand] = []
    previews = operation.commands or (f"NOOP operation:{operation.status.value}",)
    for command in previews:
        blocked: list[str] = []
        gates = [RuntimeGate.COMMAND_NOT_DESTRUCTIVE]
        destructive = command_is_destructive(command) or operation.destructive
        if destructive:
            blocked.append("comando ou operacao marcado como destrutivo")
        # Blocker-severity operations are never eligible for automatic execution.
        if operation.severity == ReceiptSeverity.BLOCKER:
            blocked.append("operacao possui severidade blocker")
        if not operation.automatic:
            blocked.append("operacao requer execucao manual")
        else:
            # NOTE(review): this appends OPERATION_NOT_DESTRUCTIVE when the operation
            # is *automatic*; an automatic-marker gate seems intended — confirm.
            gates.append(RuntimeGate.OPERATION_NOT_DESTRUCTIVE)
        commands.append(
            RuntimeCommand(
                command_id=runtime_command_id(operation.operation_id, operation.target_name, command),
                source_id=operation.operation_id,
                target_name=operation.target_name,
                command=command,
                automatic=operation.automatic,
                destructive=destructive,
                gates=tuple(gates),
                blocked_reasons=tuple(blocked),
            )
        )
    return tuple(commands)
def commands_from_report_and_plan(report: MeshReport, plan: ReconciliationPlan) -> tuple[RuntimeCommand, ...]:
    """Collect runtime commands from mesh actions and reconciliation operations.

    De-duplicates by command_id with first-wins semantics, so action-derived
    commands take precedence over later duplicates from the plan.
    """
    collected: list[RuntimeCommand] = []
    for summary in report.summaries:
        for action in summary.actions:
            collected.extend(command_from_action(action))
    for receipt in plan.receipts:
        for operation in receipt.operations:
            collected.extend(command_from_operation(operation))
    seen: set[str] = set()
    unique: list[RuntimeCommand] = []
    for item in collected:
        if item.command_id not in seen:
            seen.add(item.command_id)
            unique.append(item)
    return tuple(unique)
def build_runtime_cycle(
    report: MeshReport,
    plan: ReconciliationPlan,
    *,
    lock: RuntimeLock,
    execute: bool = False,
) -> RuntimeCycle:
    """Build a runtime cycle receipt from a report and a reconciliation plan.

    ``execute`` is intentionally present but not used to shell out in this
    module. A future runner can replace the result construction with an actual
    subprocess layer after preserving the same gates.
    """
    commands = commands_from_report_and_plan(report, plan)
    results: list[RuntimeCommandResult] = []
    for command in commands:
        status = command.status
        if not lock.acquired:
            # Without the mesh lock every command is blocked; rebuild the
            # command so the extra blocked reason lands in its receipt.
            # NOTE(review): the rebuilt command only reaches ``results`` —
            # ``cycle.commands`` below keeps the original instance; confirm
            # this asymmetry is intended.
            status = RuntimeCommandStatus.BLOCKED
            command = RuntimeCommand(
                command_id=command.command_id,
                source_id=command.source_id,
                target_name=command.target_name,
                command=command.command,
                automatic=command.automatic,
                destructive=command.destructive,
                gates=command.gates,
                blocked_reasons=merge_unique((*command.blocked_reasons, "lock nao adquirido")),
            )
        elif execute and command.status == RuntimeCommandStatus.ALLOWED:
            # Placeholder path: allowed commands are marked SKIPPED instead of
            # actually executed (see docstring).
            status = RuntimeCommandStatus.SKIPPED
        results.append(RuntimeCommandResult(command=command, status=status, finished_at=utc_now()))
    # Summary tallies use each command's own status, not the per-result status
    # computed above (so lock-blocking does not change these counts).
    summary = (
        f"commands: {len(commands)}",
        f"allowed: {sum(1 for item in commands if item.status == RuntimeCommandStatus.ALLOWED)}",
        f"blocked: {sum(1 for item in commands if item.status == RuntimeCommandStatus.BLOCKED)}",
        f"skipped: {sum(1 for item in commands if item.status == RuntimeCommandStatus.SKIPPED)}",
        f"lock: {lock.status.value}",
    )
    # Cycle id is a stable digest of its inputs so re-runs of the same state
    # produce the same id.
    seed = {"report": report.report_id, "plan": plan.plan_id, "lock": lock.lock_id, "commands": [cmd.command_id for cmd in commands]}
    return RuntimeCycle(
        cycle_id=f"mesh-cycle-{stable_digest(seed, 12)}",
        report_id=report.report_id,
        plan_id=plan.plan_id,
        generated_at=utc_now(),
        lock=lock,
        commands=commands,
        results=tuple(results),
        summary=summary,
    )
def runtime_rows(cycle: RuntimeCycle) -> list[list[str]]:
    """Tabulate the cycle's results as CSV-ready rows, header row first."""
    header = ["command_id", "target_name", "status", "automatic", "destructive", "command", "blocked_reasons"]
    table: list[list[str]] = [header]
    for outcome in cycle.results:
        cmd = outcome.command
        table.append(
            [
                cmd.command_id,
                cmd.target_name,
                outcome.status.value,
                "yes" if cmd.automatic else "no",
                "yes" if cmd.destructive else "no",
                cmd.command,
                " | ".join(cmd.blocked_reasons),
            ]
        )
    return table
def runtime_csv(cycle: RuntimeCycle) -> str:
    """Render the runtime cycle as CSV text."""
    rows = runtime_rows(cycle)
    return rows_to_csv(rows)
def runtime_jsonl(cycle: RuntimeCycle) -> str:
    """Render cycle results as JSONL — one sorted-key JSON object per line.

    Returns an empty string (no trailing newline) when there are no results.
    """
    serialized = [
        json.dumps(as_plain_data(item), ensure_ascii=False, sort_keys=True)
        for item in cycle.results
    ]
    if not serialized:
        return ""
    return "\n".join(serialized) + "\n"
def runtime_markdown(cycle: RuntimeCycle) -> str:
    """Render the runtime cycle as a human-readable Markdown report.

    Only the first 160 results are listed to keep the report bounded.
    """
    parts: list[str] = ["# Repository Mesh Runtime Cycle", ""]
    parts.append(f"- cycle_id: `{cycle.cycle_id}`")
    parts.append(f"- report_id: `{cycle.report_id}`")
    parts.append(f"- plan_id: `{cycle.plan_id}`")
    parts.append(f"- generated_at: `{cycle.generated_at}`")
    parts.append(f"- lock_status: `{cycle.lock.status.value}`")
    parts.append(f"- allowed: `{cycle.allowed_count}`")
    parts.append(f"- blocked: `{cycle.blocked_count}`")
    parts.append(f"- skipped: `{cycle.skipped_count}`")
    parts.extend(["", "## Sumario", ""])
    for item in cycle.summary:
        parts.append(f"- {item}")
    parts.extend(["", "## Comandos", ""])
    for outcome in cycle.results[:160]:
        cmd = outcome.command
        parts.append(f"- `{outcome.status.value}` `{cmd.target_name}` `{cmd.command}`")
        if cmd.blocked_reasons:
            parts.append(f"  - bloqueios: {'; '.join(cmd.blocked_reasons)}")
    return "\n".join(parts).strip() + "\n"
def windows_scheduler_spec(
    *,
    python_exe: str,
    project_root: Path,
    ecosystem_root: Path,
    central_platform_folder: Path | None,
    interval_minutes: int = 5,
) -> SchedulerSpec:
    """Describe a Windows Scheduled Task that periodically re-runs repo-mesh."""
    central_flag = f" --central-platform-folder '{central_platform_folder}'" if central_platform_folder else ""
    cli_command = (
        f"'{python_exe}' -m mais_humana.cli repo-mesh --ecosystem-root '{ecosystem_root}' "
        f"--project-root '{project_root}'{central_flag} --fetch"
    )
    task_name = "TudoParaIA-RepositoryMeshSync"
    # PowerShell one-liners to register the repeating task.
    install_steps = (
        f"$Action = New-ScheduledTaskAction -Execute '{python_exe}' -Argument \"-m mais_humana.cli repo-mesh --ecosystem-root '{ecosystem_root}' --project-root '{project_root}'{central_flag} --fetch\"",
        f"$Trigger = New-ScheduledTaskTrigger -Once -At (Get-Date) -RepetitionInterval (New-TimeSpan -Minutes {interval_minutes})",
        f"Register-ScheduledTask -TaskName '{task_name}' -Action $Action -Trigger $Trigger",
    )
    return SchedulerSpec(
        scheduler_id=f"scheduler-{stable_digest({'kind': 'windows', 'project': str(project_root)}, 10)}",
        kind=SchedulerKind.WINDOWS_TASK,
        name=task_name,
        interval_minutes=interval_minutes,
        command=cli_command,
        working_directory=str(project_root),
        environment=(f"PYTHONPATH={project_root / 'src'}",),
        install_commands=install_steps,
        uninstall_commands=(f"Unregister-ScheduledTask -TaskName '{task_name}' -Confirm:$false",),
        log_path=str(project_root / "dados" / "repository-mesh-runtime.jsonl"),
        notes=("usar credencial Git do usuario operacional", "nao usar plugin Cloudflare como via de sincronizacao"),
    )
def cron_scheduler_spec(
    *,
    python_exe: str,
    project_root: Path,
    ecosystem_root: Path,
    central_platform_folder: Path | None = None,
    interval_minutes: int = 5,
) -> SchedulerSpec:
    """Describe a cron entry that periodically re-runs repo-mesh on Linux hosts."""
    central_flag = f" --central-platform-folder '{central_platform_folder}'" if central_platform_folder else ""
    shell_command = (
        f"cd '{project_root}' && PYTHONPATH='{project_root / 'src'}' '{python_exe}' "
        f"-m mais_humana.cli repo-mesh --ecosystem-root '{ecosystem_root}' --project-root '{project_root}'{central_flag} --fetch"
    )
    log_file = project_root / "dados" / "repository-mesh-cron.log"
    cron_entry = f"*/{interval_minutes} * * * * {shell_command} >> '{log_file}' 2>&1"
    return SchedulerSpec(
        scheduler_id=f"scheduler-{stable_digest({'kind': 'cron', 'project': str(project_root)}, 10)}",
        kind=SchedulerKind.CRON,
        name="tudo-para-ia-repository-mesh-sync",
        interval_minutes=interval_minutes,
        command=shell_command,
        working_directory=str(project_root),
        environment=(f"PYTHONPATH={project_root / 'src'}",),
        install_commands=(f"(crontab -l; echo \"{cron_entry}\") | crontab -",),
        uninstall_commands=("crontab -l | grep -v 'repository-mesh' | crontab -",),
        log_path=str(log_file),
        notes=("executar em cada host Linux da malha", "bloquear se houver dirty tree ou divergencia"),
    )
def scheduler_payload(specs: Sequence[SchedulerSpec]) -> dict[str, Any]:
    """Build the compact JSON payload summarizing scheduler specifications."""
    serialized = [spec.to_dict() for spec in specs]
    return {
        "generatedAt": utc_now(),
        "schedulers": serialized,
        "intervalMinutes": merge_unique(str(spec.interval_minutes) for spec in specs),
        "kinds": merge_unique(spec.kind.value for spec in specs),
    }
def scheduler_markdown(specs: Sequence[SchedulerSpec]) -> str:
    """Render scheduler specs as a Markdown install/uninstall guide."""
    out: list[str] = ["# Repository Mesh Scheduler Specs", ""]
    for spec in specs:
        out.extend(
            [
                f"## {spec.name}",
                "",
                f"- kind: `{spec.kind.value}`",
                f"- interval_minutes: `{spec.interval_minutes}`",
                f"- working_directory: `{spec.working_directory}`",
                f"- log_path: `{spec.log_path}`",
                "",
                "Install:",
            ]
        )
        out.extend(f"- `{cmd}`" for cmd in spec.install_commands)
        out.extend(["", "Uninstall:"])
        out.extend(f"- `{cmd}`" for cmd in spec.uninstall_commands)
        if spec.notes:
            out.extend(["", "Notas:"])
            out.extend(f"- {note}" for note in spec.notes)
        out.append("")
    return "\n".join(out).strip() + "\n"
def write_json(path: Path, payload: object) -> Path:
    """Serialize *payload* as pretty-printed, key-sorted JSON at *path*.

    Creates parent directories as needed; returns *path* for chaining.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(as_plain_data(payload), ensure_ascii=False, indent=2, sort_keys=True)
    path.write_text(text, encoding="utf-8")
    return path
def write_text(path: Path, text: str) -> Path:
    """Write *text* as UTF-8 at *path*, creating parent directories.

    Returns *path* for chaining.
    """
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    path.write_text(text, encoding="utf-8")
    return path
def write_runtime_artifacts(
    cycle: RuntimeCycle,
    specs: Sequence[SchedulerSpec],
    project_root: Path,
    *,
    central_platform_folder: Path | None = None,
) -> tuple[GeneratedFile, ...]:
    """Persist runtime cycle and scheduler artifacts and return their records.

    Writes JSON/JSONL/CSV/Markdown artifacts under *project_root* and, when a
    central platform folder is provided, mirrors the runtime report there.
    """
    write_json(project_root / "dados" / "repository-mesh-runtime-cycle.json", cycle)
    write_text(project_root / "dados" / "repository-mesh-runtime.jsonl", runtime_jsonl(cycle))
    write_text(project_root / "matrizes" / "repository-mesh-runtime.csv", runtime_csv(cycle))
    write_json(project_root / "dados" / "repository-mesh-schedulers.json", scheduler_payload(specs))
    write_text(project_root / "ecossistema" / "REPOSITORY-MESH-RUNTIME.md", runtime_markdown(cycle))
    write_text(project_root / "ecossistema" / "REPOSITORY-MESH-SCHEDULERS.md", scheduler_markdown(specs))
    if central_platform_folder is not None:
        reports_dir = central_platform_folder / "reports"
        reports_dir.mkdir(parents=True, exist_ok=True)
        write_text(reports_dir / "EXECUTADO__repository-mesh-runtime.md", runtime_markdown(cycle))
    # One (path, description, function, file_type, change_summary) tuple per
    # artifact written above; shared fields are filled in the comprehension.
    artifact_meta = (
        (
            "dados/repository-mesh-runtime-cycle.json",
            "Recibo JSON do ciclo runtime da malha.",
            "repository mesh runtime",
            "json",
            "Criado recibo de comandos permitidos/bloqueados.",
        ),
        (
            "dados/repository-mesh-runtime.jsonl",
            "Log JSONL dos comandos do ciclo runtime.",
            "repository mesh runtime log",
            "jsonl",
            "Criado log JSONL auditavel.",
        ),
        (
            "matrizes/repository-mesh-runtime.csv",
            "Matriz dos comandos do runtime.",
            "repository mesh runtime matrix",
            "csv",
            "Criada matriz de comandos permitidos/bloqueados.",
        ),
        (
            "dados/repository-mesh-schedulers.json",
            "Especificacoes de agendamento da malha.",
            "repository mesh scheduler",
            "json",
            "Criadas especificacoes Windows/Cron.",
        ),
        (
            "ecossistema/REPOSITORY-MESH-RUNTIME.md",
            "Relatorio humano do ciclo runtime.",
            "repository mesh runtime report",
            "markdown",
            "Documentado ciclo runtime seguro.",
        ),
        (
            "ecossistema/REPOSITORY-MESH-SCHEDULERS.md",
            "Instalacao sugerida dos schedulers da malha.",
            "repository mesh scheduler report",
            "markdown",
            "Documentado scheduler de 5 minutos.",
        ),
    )
    records = [
        GeneratedFile(
            path=rel_path,
            description=description,
            function=function,
            file_type=file_type,
            changed_by="mais_humana.repository_mesh_runtime",
            change_summary=change_summary,
            relation_to_order="000_sincronizacao-dos-espelhos",
        )
        for rel_path, description, function, file_type, change_summary in artifact_meta
    ]
    if central_platform_folder is not None:
        records.append(
            GeneratedFile(
                path=str(central_platform_folder / "reports" / "EXECUTADO__repository-mesh-runtime.md"),
                description="Runtime registrado na central.",
                function="repository mesh central runtime",
                file_type="markdown",
                changed_by="mais_humana.repository_mesh_runtime",
                change_summary="Registrado runtime da sincronizacao na central.",
                relation_to_order="000_sincronizacao-dos-espelhos",
            )
        )
    return tuple(records)

View File

@@ -0,0 +1,545 @@
"""SQLite persistence for repository mesh control artifacts."""
from __future__ import annotations
import json
import sqlite3
from dataclasses import dataclass
from pathlib import Path
from typing import Any, Mapping, Sequence
from .models import GeneratedFile, ServiceOrder, as_plain_data, utc_now
from .repository_mesh import MeshReport, MeshRiskLevel
from .repository_mesh_reconciliation import ReconciliationPlan, ReconciliationStatus
from .repository_mesh_runtime import RuntimeCycle, SchedulerSpec
SCHEMA = """
CREATE TABLE IF NOT EXISTS repository_mesh_reports (
id INTEGER PRIMARY KEY AUTOINCREMENT,
report_id TEXT UNIQUE NOT NULL,
generated_at TEXT NOT NULL,
fetch_enabled INTEGER NOT NULL,
targets_count INTEGER NOT NULL,
observations_count INTEGER NOT NULL,
ok_count INTEGER NOT NULL,
attention_count INTEGER NOT NULL,
blocked_count INTEGER NOT NULL,
dirty_repositories INTEGER NOT NULL,
credential_errors INTEGER NOT NULL,
destructive_blocks INTEGER NOT NULL,
plugin_auth_attempt TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS repository_mesh_targets (
id INTEGER PRIMARY KEY AUTOINCREMENT,
report_id TEXT NOT NULL,
declared_name TEXT NOT NULL,
expected_local_name TEXT NOT NULL,
gitea_repo TEXT NOT NULL,
risk TEXT NOT NULL,
aligned_hash TEXT,
hash_count INTEGER NOT NULL,
dirty_count INTEGER NOT NULL,
missing_count INTEGER NOT NULL,
remote_mismatch_count INTEGER NOT NULL,
nominal_mismatch_count INTEGER NOT NULL,
credential_error_count INTEGER NOT NULL,
destructive_block_count INTEGER NOT NULL,
summary TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL,
UNIQUE(report_id, declared_name)
);
CREATE TABLE IF NOT EXISTS repository_mesh_observations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
report_id TEXT NOT NULL,
declared_name TEXT NOT NULL,
environment_id TEXT NOT NULL,
presence TEXT NOT NULL,
matched_path TEXT,
branch TEXT,
head TEXT,
remote_origin TEXT,
cleanliness TEXT,
ahead INTEGER,
behind INTEGER,
fetch_error_kind TEXT,
nomenclature_note TEXT,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL,
UNIQUE(report_id, declared_name, environment_id)
);
CREATE TABLE IF NOT EXISTS repository_mesh_actions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
report_id TEXT NOT NULL,
action_id TEXT UNIQUE NOT NULL,
declared_name TEXT NOT NULL,
environment_id TEXT NOT NULL,
kind TEXT NOT NULL,
risk TEXT NOT NULL,
automatic INTEGER NOT NULL,
destructive INTEGER NOT NULL,
reason TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS repository_mesh_reconciliation_plans (
id INTEGER PRIMARY KEY AUTOINCREMENT,
plan_id TEXT UNIQUE NOT NULL,
report_id TEXT NOT NULL,
generated_at TEXT NOT NULL,
blocker_count INTEGER NOT NULL,
auto_sync_ready_count INTEGER NOT NULL,
rename_ready_count INTEGER NOT NULL,
credential_blocked_count INTEGER NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS repository_mesh_receipts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
plan_id TEXT NOT NULL,
receipt_id TEXT UNIQUE NOT NULL,
target_name TEXT NOT NULL,
expected_local_name TEXT NOT NULL,
gitea_repo TEXT NOT NULL,
status TEXT NOT NULL,
risk TEXT NOT NULL,
safe_to_auto_sync INTEGER NOT NULL,
selected_head TEXT,
precedence TEXT NOT NULL,
pending_count INTEGER NOT NULL,
blocker_count INTEGER NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS repository_mesh_runtime_cycles (
id INTEGER PRIMARY KEY AUTOINCREMENT,
cycle_id TEXT UNIQUE NOT NULL,
report_id TEXT NOT NULL,
plan_id TEXT NOT NULL,
generated_at TEXT NOT NULL,
lock_status TEXT NOT NULL,
allowed_count INTEGER NOT NULL,
blocked_count INTEGER NOT NULL,
skipped_count INTEGER NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS repository_mesh_schedulers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
scheduler_id TEXT UNIQUE NOT NULL,
kind TEXT NOT NULL,
name TEXT NOT NULL,
interval_minutes INTEGER NOT NULL,
command TEXT NOT NULL,
working_directory TEXT NOT NULL,
log_path TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
"""
@dataclass(frozen=True, slots=True)
class MeshSemanticCounts:
    """Row counts per repository-mesh table in the semantic SQLite store.

    A field value of -1 means the corresponding table could not be queried
    (see ``table_counts``); 0 means the table exists but is empty.
    """
    # One counter per table created by SCHEMA, in declaration order.
    reports: int
    targets: int
    observations: int
    actions: int
    plans: int
    receipts: int
    cycles: int
    schedulers: int
    def to_dict(self) -> dict[str, int]:
        """Return a plain-dict view via the project serialization helper."""
        return as_plain_data(self)
def connect(path: Path) -> sqlite3.Connection:
    """Open (creating if needed) the semantic SQLite DB and apply SCHEMA.

    WAL journaling plus NORMAL synchronous favor concurrent readers with
    durable-enough writes. The caller owns the returned connection and is
    responsible for closing it.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(path)
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA synchronous=NORMAL"):
        connection.execute(pragma)
    connection.executescript(SCHEMA)
    return connection
def payload(value: object) -> str:
    """Serialize *value* to compact, key-sorted JSON for payload_json columns."""
    plain = as_plain_data(value)
    return json.dumps(plain, ensure_ascii=False, sort_keys=True)
def upsert_report(conn: sqlite3.Connection, report: MeshReport) -> None:
    """Upsert a mesh report and its per-target summaries, observations and actions.

    Uses ``INSERT ... ON CONFLICT DO UPDATE`` keyed on the natural ids so the
    same report can be re-persisted idempotently. Does not commit; the caller
    controls the transaction.
    """
    # Single timestamp shared by every row written in this call.
    now = utc_now()
    conn.execute(
        """
        INSERT INTO repository_mesh_reports (
            report_id, generated_at, fetch_enabled, targets_count, observations_count,
            ok_count, attention_count, blocked_count, dirty_repositories,
            credential_errors, destructive_blocks, plugin_auth_attempt,
            payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(report_id) DO UPDATE SET
            generated_at=excluded.generated_at,
            fetch_enabled=excluded.fetch_enabled,
            targets_count=excluded.targets_count,
            observations_count=excluded.observations_count,
            ok_count=excluded.ok_count,
            attention_count=excluded.attention_count,
            blocked_count=excluded.blocked_count,
            dirty_repositories=excluded.dirty_repositories,
            credential_errors=excluded.credential_errors,
            destructive_blocks=excluded.destructive_blocks,
            plugin_auth_attempt=excluded.plugin_auth_attempt,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
        """,
        (
            report.report_id,
            report.generated_at,
            1 if report.fetch_enabled else 0,
            len(report.targets),
            report.total_observations,
            report.ok_count,
            report.attention_count,
            report.blocked_count,
            report.dirty_repositories,
            report.credential_errors,
            report.destructive_blocks,
            report.plugin_auth_attempt,
            payload(report),
            now,
        ),
    )
    # One row per target summary, keyed by (report_id, declared_name).
    for summary in report.summaries:
        conn.execute(
            """
            INSERT INTO repository_mesh_targets (
                report_id, declared_name, expected_local_name, gitea_repo, risk,
                aligned_hash, hash_count, dirty_count, missing_count,
                remote_mismatch_count, nominal_mismatch_count, credential_error_count,
                destructive_block_count, summary, payload_json, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(report_id, declared_name) DO UPDATE SET
                expected_local_name=excluded.expected_local_name,
                gitea_repo=excluded.gitea_repo,
                risk=excluded.risk,
                aligned_hash=excluded.aligned_hash,
                hash_count=excluded.hash_count,
                dirty_count=excluded.dirty_count,
                missing_count=excluded.missing_count,
                remote_mismatch_count=excluded.remote_mismatch_count,
                nominal_mismatch_count=excluded.nominal_mismatch_count,
                credential_error_count=excluded.credential_error_count,
                destructive_block_count=excluded.destructive_block_count,
                summary=excluded.summary,
                payload_json=excluded.payload_json,
                updated_at=excluded.updated_at
            """,
            (
                report.report_id,
                summary.target.declared_name,
                summary.target.expected_local_name,
                summary.target.gitea_repo,
                summary.risk.value,
                summary.aligned_hash,
                len(summary.hash_set),
                summary.dirty_count,
                summary.missing_count,
                summary.remote_mismatch_count,
                summary.nominal_mismatch_count,
                summary.credential_error_count,
                summary.destructive_block_count,
                summary.summary,
                payload(summary),
                now,
            ),
        )
        # One row per per-environment observation; git-state columns are NULL
        # when the repository was not observed (state is None).
        for obs in summary.observations:
            state = obs.git_state
            conn.execute(
                """
                INSERT INTO repository_mesh_observations (
                    report_id, declared_name, environment_id, presence, matched_path,
                    branch, head, remote_origin, cleanliness, ahead, behind,
                    fetch_error_kind, nomenclature_note, payload_json, updated_at
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(report_id, declared_name, environment_id) DO UPDATE SET
                    presence=excluded.presence,
                    matched_path=excluded.matched_path,
                    branch=excluded.branch,
                    head=excluded.head,
                    remote_origin=excluded.remote_origin,
                    cleanliness=excluded.cleanliness,
                    ahead=excluded.ahead,
                    behind=excluded.behind,
                    fetch_error_kind=excluded.fetch_error_kind,
                    nomenclature_note=excluded.nomenclature_note,
                    payload_json=excluded.payload_json,
                    updated_at=excluded.updated_at
                """,
                (
                    report.report_id,
                    summary.target.declared_name,
                    obs.environment_id,
                    obs.presence.value,
                    obs.matched_path,
                    state.branch if state else None,
                    state.head if state else None,
                    state.remote_origin if state else None,
                    state.cleanliness.value if state else None,
                    state.ahead_behind.ahead if state else None,
                    state.ahead_behind.behind if state else None,
                    state.fetch_error_kind.value if state else None,
                    obs.nomenclature_note,
                    payload(obs),
                    now,
                ),
            )
        # One row per planned action, keyed by the globally-unique action_id.
        for action in summary.actions:
            conn.execute(
                """
                INSERT INTO repository_mesh_actions (
                    report_id, action_id, declared_name, environment_id, kind,
                    risk, automatic, destructive, reason, payload_json, updated_at
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ON CONFLICT(action_id) DO UPDATE SET
                    report_id=excluded.report_id,
                    declared_name=excluded.declared_name,
                    environment_id=excluded.environment_id,
                    kind=excluded.kind,
                    risk=excluded.risk,
                    automatic=excluded.automatic,
                    destructive=excluded.destructive,
                    reason=excluded.reason,
                    payload_json=excluded.payload_json,
                    updated_at=excluded.updated_at
                """,
                (
                    report.report_id,
                    action.action_id,
                    action.target_name,
                    action.environment_id,
                    action.kind.value,
                    action.risk.value,
                    1 if action.can_execute_automatically else 0,
                    1 if action.destructive else 0,
                    action.reason,
                    payload(action),
                    now,
                ),
            )
def upsert_plan(conn: sqlite3.Connection, plan: ReconciliationPlan) -> None:
    """Upsert a reconciliation plan and its per-target receipts.

    Idempotent via ``ON CONFLICT`` on plan_id / receipt_id. Does not commit;
    the caller controls the transaction.
    """
    now = utc_now()
    conn.execute(
        """
        INSERT INTO repository_mesh_reconciliation_plans (
            plan_id, report_id, generated_at, blocker_count, auto_sync_ready_count,
            rename_ready_count, credential_blocked_count, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(plan_id) DO UPDATE SET
            report_id=excluded.report_id,
            generated_at=excluded.generated_at,
            blocker_count=excluded.blocker_count,
            auto_sync_ready_count=excluded.auto_sync_ready_count,
            rename_ready_count=excluded.rename_ready_count,
            credential_blocked_count=excluded.credential_blocked_count,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
        """,
        (
            plan.plan_id,
            plan.report_id,
            plan.generated_at,
            plan.blocker_count,
            plan.auto_sync_ready_count,
            plan.rename_ready_count,
            plan.credential_blocked_count,
            payload(plan),
            now,
        ),
    )
    # One row per receipt; denormalized counts allow querying without parsing
    # the payload_json column.
    for receipt in plan.receipts:
        conn.execute(
            """
            INSERT INTO repository_mesh_receipts (
                plan_id, receipt_id, target_name, expected_local_name, gitea_repo,
                status, risk, safe_to_auto_sync, selected_head, precedence,
                pending_count, blocker_count, payload_json, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(receipt_id) DO UPDATE SET
                plan_id=excluded.plan_id,
                target_name=excluded.target_name,
                expected_local_name=excluded.expected_local_name,
                gitea_repo=excluded.gitea_repo,
                status=excluded.status,
                risk=excluded.risk,
                safe_to_auto_sync=excluded.safe_to_auto_sync,
                selected_head=excluded.selected_head,
                precedence=excluded.precedence,
                pending_count=excluded.pending_count,
                blocker_count=excluded.blocker_count,
                payload_json=excluded.payload_json,
                updated_at=excluded.updated_at
            """,
            (
                plan.plan_id,
                receipt.receipt_id,
                receipt.target_name,
                receipt.expected_local_name,
                receipt.gitea_repo,
                receipt.status.value,
                receipt.risk.value,
                1 if receipt.safe_to_auto_sync else 0,
                receipt.latest_state.selected_head,
                receipt.latest_state.precedence.value,
                len(receipt.pending_items),
                len(receipt.blockers),
                payload(receipt),
                now,
            ),
        )
def upsert_runtime(conn: sqlite3.Connection, cycle: RuntimeCycle, schedulers: Sequence[SchedulerSpec]) -> None:
    """Upsert a runtime cycle and the scheduler specifications that drive it.

    Idempotent via ``ON CONFLICT`` on cycle_id / scheduler_id. Does not
    commit; the caller controls the transaction.
    """
    now = utc_now()
    conn.execute(
        """
        INSERT INTO repository_mesh_runtime_cycles (
            cycle_id, report_id, plan_id, generated_at, lock_status,
            allowed_count, blocked_count, skipped_count, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(cycle_id) DO UPDATE SET
            report_id=excluded.report_id,
            plan_id=excluded.plan_id,
            generated_at=excluded.generated_at,
            lock_status=excluded.lock_status,
            allowed_count=excluded.allowed_count,
            blocked_count=excluded.blocked_count,
            skipped_count=excluded.skipped_count,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
        """,
        (
            cycle.cycle_id,
            cycle.report_id,
            cycle.plan_id,
            cycle.generated_at,
            cycle.lock.status.value,
            cycle.allowed_count,
            cycle.blocked_count,
            cycle.skipped_count,
            payload(cycle),
            now,
        ),
    )
    # One row per scheduler spec (e.g. Windows task, cron entry).
    for spec in schedulers:
        conn.execute(
            """
            INSERT INTO repository_mesh_schedulers (
                scheduler_id, kind, name, interval_minutes, command,
                working_directory, log_path, payload_json, updated_at
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            ON CONFLICT(scheduler_id) DO UPDATE SET
                kind=excluded.kind,
                name=excluded.name,
                interval_minutes=excluded.interval_minutes,
                command=excluded.command,
                working_directory=excluded.working_directory,
                log_path=excluded.log_path,
                payload_json=excluded.payload_json,
                updated_at=excluded.updated_at
            """,
            (
                spec.scheduler_id,
                spec.kind.value,
                spec.name,
                spec.interval_minutes,
                spec.command,
                spec.working_directory,
                spec.log_path,
                payload(spec),
                now,
            ),
        )
def write_repository_mesh_semantic_state(
    sqlite_path: Path,
    *,
    report: MeshReport,
    plan: ReconciliationPlan,
    cycle: RuntimeCycle,
    schedulers: Sequence[SchedulerSpec],
) -> MeshSemanticCounts:
    """Persist report, reconciliation plan and runtime cycle, then re-count.

    Fix: ``sqlite3``'s connection context manager only scopes a transaction —
    it never closes the connection, so the previous ``with connect(...)``
    leaked the handle (and its WAL side files). The connection is now closed
    explicitly in a ``finally`` block; ``with conn`` still commits on success
    and rolls back on error.

    Returns the row counts observed after the write (see ``table_counts``).
    """
    conn = connect(sqlite_path)
    try:
        with conn:  # transaction scope: commit on success, rollback on error
            upsert_report(conn, report)
            upsert_plan(conn, plan)
            upsert_runtime(conn, cycle, schedulers)
    finally:
        conn.close()
    return table_counts(sqlite_path)
def table_counts(sqlite_path: Path) -> MeshSemanticCounts:
    """Count rows in every repository-mesh table.

    Returns all zeros when the database file does not exist yet; -1 for any
    individual table that cannot be queried (missing table, corrupt file).

    Fix: ``with sqlite3.connect(...)`` only manages the transaction and left
    the connection (and WAL side files) open — it is now closed explicitly.
    """
    if not sqlite_path.exists():
        return MeshSemanticCounts(0, 0, 0, 0, 0, 0, 0, 0)
    conn = sqlite3.connect(sqlite_path)
    try:
        def count(table: str) -> int:
            # -1 distinguishes "table unreadable" from "table empty" (0).
            try:
                return int(conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()[0])
            except sqlite3.DatabaseError:
                return -1
        return MeshSemanticCounts(
            reports=count("repository_mesh_reports"),
            targets=count("repository_mesh_targets"),
            observations=count("repository_mesh_observations"),
            actions=count("repository_mesh_actions"),
            plans=count("repository_mesh_reconciliation_plans"),
            receipts=count("repository_mesh_receipts"),
            cycles=count("repository_mesh_runtime_cycles"),
            schedulers=count("repository_mesh_schedulers"),
        )
    finally:
        conn.close()
def semantic_generated_records(sqlite_path: Path) -> tuple[GeneratedFile, ...]:
    """Return the GeneratedFile record describing the semantic SQLite file."""
    record = GeneratedFile(
        path=str(sqlite_path),
        description="SQLite semantico atualizado com inventario, reconciliacao e runtime da malha de repositorios.",
        function="repository mesh semantic state",
        file_type="sqlite",
        changed_by="mais_humana.repository_mesh_semantic",
        change_summary="Registradas tabelas compactas da OS permanente de sincronizacao.",
        relation_to_order="000_sincronizacao-dos-espelhos",
    )
    return (record,)
def counts_markdown(counts: MeshSemanticCounts, sqlite_path: Path) -> str:
    """Render the table counts as a short Markdown summary (trailing newline)."""
    body = [
        f"- sqlite: `{sqlite_path}`",
        f"- reports: `{counts.reports}`",
        f"- targets: `{counts.targets}`",
        f"- observations: `{counts.observations}`",
        f"- actions: `{counts.actions}`",
        f"- plans: `{counts.plans}`",
        f"- receipts: `{counts.receipts}`",
        f"- cycles: `{counts.cycles}`",
        f"- schedulers: `{counts.schedulers}`",
    ]
    return "\n".join(["# Repository Mesh Semantic Counts", "", *body, ""])