auto-sync: tudo-para-ia-mais-humana 2026-05-02 07:24:14

This commit is contained in:
2026-05-02 07:24:14 -03:00
parent 0951751ebe
commit 89e69ff7bd
22 changed files with 156891 additions and 5347 deletions

View File

@@ -0,0 +1,875 @@
"""Executable canonical-name migration plan for Mais Humana.
The router made the institutional name explicit:
``tudo-para-ia-mais-humana-platform`` is canonical, while the physical
repository can remain materialized as ``tudo-para-ia-mais-humana`` until Git,
MCP, Docs, UI, and synchronization windows are coordinated. This module turns
that decision into machine-checkable controls and MCP acceptance cases so the
platform can migrate without creating a duplicate repository or bypassing the
MCP control plane.
"""
from __future__ import annotations
import csv
import io
import json
import sqlite3
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
from typing import Any, Iterable, Mapping, Sequence
from .identity_policy import (
ACCEPTED_PROJECT_IDS,
CANONICAL_COMPATIBILITY_RULE,
CANONICAL_DECISION_SOURCE,
CANONICAL_DECISION_STATUS,
CANONICAL_PROJECT_ID,
CENTRAL_FOLDER_NAME,
CURRENT_PROJECT_ID,
LEGACY_PLATAFORM_ALIAS,
MCP_CONTROL_PLANE_ID,
MCP_EXECUTE_ENDPOINT,
)
from .models import GeneratedFile, as_plain_data, merge_unique, utc_now
from .repository_mesh import RepositoryTarget, default_repository_targets, stable_digest
from .repository_mesh_naming import plataform_to_platform, platform_to_plataform
from .storage import connect, upsert_files
class MigrationLayer(str, Enum):
    """Migration area controlled by the canonical-name plan.

    Each member maps one-to-one to a control emitted by
    ``build_migration_controls``.
    """
    IDENTITY = "identity"                # canonical project id vs. historical aliases
    FILESYSTEM = "filesystem"            # physical repository directory rename
    GIT_REMOTE = "git_remote"            # origin remote canonicalization
    CENTRAL_DOSSIER = "central_dossier"  # numbered central management folder
    MCP_CONTRACT = "mcp_contract"        # MCP provider/contract owner fields
    MCP_EXECUTE = "mcp_execute"          # live /v1/execute endpoint readback
    DOCS_UI = "docs_ui"                  # Docs and UI same-source rendering
    SYNC_MESH = "sync_mesh"              # fetch/push synchronization safety
    ROLLBACK = "rollback"                # documented reversal procedure
class MigrationStatus(str, Enum):
    """Status for one migration control or the whole plan.

    ``CanonicalMigrationPlan.status`` aggregates control statuses with the
    precedence BLOCKED > MANUAL_WINDOW_REQUIRED > PARTIAL > READY.
    """
    READY = "ready"                                      # no outstanding work
    PARTIAL = "partial"                                  # some evidence still missing
    BLOCKED = "blocked"                                  # a real blocker prevents progress
    MANUAL_WINDOW_REQUIRED = "manual_window_required"    # needs a coordinated change window
class AcceptanceStatus(str, Enum):
    """Decision for one identifier found in a migration payload.

    Produced by ``classify_candidate`` for every (field, candidate) pair.
    """
    CANONICAL = "canonical"                                              # value already canonical
    ALIAS_ACCEPTED = "alias_accepted"                                    # historical alias, traceable
    CANONICAL_FIELD_REWRITE_REQUIRED = "canonical_field_rewrite_required"  # owner field holds an alias
    REMOTE_ALIAS_ACCEPTED = "remote_alias_accepted"                      # historical git remote kept
    CENTRAL_FOLDER_ACCEPTED = "central_folder_accepted"                  # central dossier folder name
    BLOCKED_UNKNOWN_IDENTIFIER = "blocked_unknown_identifier"            # not in the accepted set
# Payload fields that must carry the canonical project id; classify_candidate
# demands a rewrite when one of them holds an alias instead.
CANONICAL_OWNER_FIELDS: tuple[str, ...] = (
    "canonicalProjectId",
    "ownerPlatformId",
    "owner_platform_id",
)
# Fields where historical aliases remain acceptable during the compatibility
# window (camelCase and snake_case variants of the same identifiers).
COMPATIBILITY_FIELDS: tuple[str, ...] = (
    "projectId",
    "project_id",
    "currentProjectId",
    "current_project_id",
    "platformId",
    "platform_id",
    "origin",
    "destination",
    "targetPlatformId",
    "target_platform_id",
    "consumerPlatformId",
    "consumer_platform_id",
)
# Fields holding a Git remote locator (slug or full URL).
REMOTE_FIELDS: tuple[str, ...] = (
    "repoRemote",
    "remoteOrigin",
    "remote_origin",
    "originRemote",
)
# Fields referencing the numbered central management (dossier) folder.
CENTRAL_FIELDS: tuple[str, ...] = (
    "centralFolder",
    "central_folder",
    "centralPlatformFolder",
    "central_platform_folder",
)
# (operation, permission scope) pairs crossed with every field/candidate in
# build_migration_acceptance_cases. Operation names are in Portuguese, as
# they appear in the persisted acceptance matrix.
MCP_MIGRATION_OPERATIONS: tuple[tuple[str, str], ...] = (
    ("consulta", "mcp.admin.readonly"),
    ("diagnostico", "mcp.admin.diagnostic"),
    ("acao", "mcp.admin.action.request"),
    ("auditoria", "mcp.admin.audit"),
    ("explicacao", "mcp.admin.explain"),
)
# Mandatory transit fields attached to every migration control; all
# inter-platform migration traffic must carry these through the MCP.
MCP_TRANSIT_FIELDS: tuple[str, ...] = (
    "origin",
    "destination",
    "tool",
    "payload",
    "actor",
    "permission",
    "result",
    "traceId",
    "auditId",
    "timestamp",
)
@dataclass(frozen=True, slots=True)
class CanonicalMigrationControl:
    """One actionable control required to migrate without losing history."""
    control_id: str                        # stable id, e.g. "canonical-identity-approved"
    layer: MigrationLayer                  # migration area this control governs
    title: str                             # short human title (Portuguese)
    current_state: str                     # state as observed today
    canonical_state: str                   # state after the migration completes
    status: MigrationStatus                # readiness of this individual control
    required_evidence: tuple[str, ...]     # artifacts proving the control is satisfied
    blockers: tuple[str, ...]              # real, currently-open impediments
    allowed_actions: tuple[str, ...]       # operations permitted under this control
    forbidden_actions: tuple[str, ...]     # operations explicitly prohibited
    rollback_steps: tuple[str, ...]        # how to revert if the step goes wrong
    mcp_transit_fields: tuple[str, ...]    # mandatory MCP transit fields (MCP_TRANSIT_FIELDS)
    order_ids: tuple[str, ...]             # related service-order identifiers
    owner: str = "codex-operational-round"
    @property
    def ready(self) -> bool:
        """True when this control has no outstanding work."""
        return self.status == MigrationStatus.READY
    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data (JSON-serializable) view of the control."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class CanonicalMigrationAcceptanceCase:
    """One MCP acceptance case for a field/candidate/operation tuple."""
    case_id: str                           # "canonical-migration-<digest>" (stable across runs)
    target_name: str                       # declared name of the repository target
    field_name: str                        # payload field under test
    candidate_value: str                   # identifier found in the payload
    canonical_value: str                   # canonical name the candidate resolves to
    operation: str                         # MCP operation name (see MCP_MIGRATION_OPERATIONS)
    permission_scope: str                  # MCP permission scope for the operation
    accepted: bool                         # whether the payload may proceed
    status: AcceptanceStatus               # classification from classify_candidate
    required_action: str                   # operator-facing action (Portuguese)
    reason: str                            # operator-facing justification (Portuguese)
    mcp_transit_required: bool             # payload must transit through the MCP
    direct_repository_write_blocked: bool  # direct (non-MCP) writes are refused
    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data (JSON-serializable) view of the case."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class SemanticWriteStatus:
    """Compact result of writing generated file functions to SQLite."""
    sqlite_path: str      # database the write targeted
    attempted: bool       # whether a write was attempted at all
    ok: bool              # True only when the upsert committed cleanly
    files_count: int      # number of records in the attempted batch
    error: str = ""       # "<ExceptionType>: <message>" when ok is False
    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data (JSON-serializable) view of the status."""
        return as_plain_data(self)
@dataclass(frozen=True, slots=True)
class CanonicalMigrationPlan:
    """Full canonical-name migration plan and MCP acceptance matrix."""
    plan_id: str                     # "canonical-migration-<digest>"
    generated_at: str                # timestamp from utc_now()
    canonical_project_id: str        # institutional canonical name
    current_project_id: str          # name the repository is materialized under today
    legacy_alias: str                # historical "plataform" alias
    central_folder: str              # numbered central dossier folder name
    decision_status: str
    decision_source: str
    compatibility_rule: str          # institutional alias-compatibility rule text
    mcp_control_plane_id: str
    mcp_execute_endpoint: str
    controls: tuple[CanonicalMigrationControl, ...]
    acceptance_cases: tuple[CanonicalMigrationAcceptanceCase, ...]
    generated_from: str              # "runtime" or "generated_canonical_migration_plan"
    semantic_write: SemanticWriteStatus | None = None          # project SQLite write result
    central_semantic_write: SemanticWriteStatus | None = None  # central SQLite write result
    @property
    def controls_count(self) -> int:
        """Number of migration controls in the plan."""
        return len(self.controls)
    @property
    def accepted_cases_count(self) -> int:
        """Number of acceptance cases that may proceed."""
        return sum(1 for case in self.acceptance_cases if case.accepted)
    @property
    def blocked_cases_count(self) -> int:
        """Number of acceptance cases that are blocked."""
        return sum(1 for case in self.acceptance_cases if not case.accepted)
    @property
    def blockers(self) -> tuple[str, ...]:
        """Deduplicated blockers aggregated from every control."""
        return merge_unique(blocker for control in self.controls for blocker in control.blockers)
    @property
    def status(self) -> MigrationStatus:
        """Worst control status wins: blocked > manual window > partial > ready."""
        if any(control.status == MigrationStatus.BLOCKED for control in self.controls):
            return MigrationStatus.BLOCKED
        if any(control.status == MigrationStatus.MANUAL_WINDOW_REQUIRED for control in self.controls):
            return MigrationStatus.MANUAL_WINDOW_REQUIRED
        if any(control.status == MigrationStatus.PARTIAL for control in self.controls):
            return MigrationStatus.PARTIAL
        return MigrationStatus.READY
    def to_dict(self) -> dict[str, Any]:
        """Return a plain-data view including the derived (property) fields."""
        data = as_plain_data(self)
        # Properties are not dataclass fields, so copy them in explicitly.
        data["status"] = self.status.value
        data["controls_count"] = self.controls_count
        data["accepted_cases_count"] = self.accepted_cases_count
        data["blocked_cases_count"] = self.blocked_cases_count
        data["blockers"] = list(self.blockers)
        return data
def expected_remote_for(repo_name: str) -> str:
    """Return the expected Gitea HTTPS remote for a repo name or gitea slug."""
    slug = repo_name.strip().removeprefix("admin/")
    # A value that is already a full URL is passed through untouched.
    if slug.startswith(("http://", "https://")):
        return slug
    return "https://git.ami.app.br/admin/" + slug + ".git"
def target_canonical_name(target: RepositoryTarget) -> str:
    """Return the migration canonical name for one repository target."""
    # Fall back to the expected local name when no canonical name is declared.
    if target.canonical_name:
        return target.canonical_name
    return target.expected_local_name
def target_identifier_candidates(target: RepositoryTarget) -> tuple[str, ...]:
    """Return all accepted identifiers and locators for a target.

    Covers canonical/local/declared names, aliases, plataform/platform
    spelling variants, the central folder, admin/ slugs, and the expected
    Gitea remote URLs. Order matters: merge_unique keeps first occurrences.
    """
    canonical = target_canonical_name(target)
    local = target.expected_local_name
    candidates: list[str] = [canonical, local, target.declared_name]
    candidates.extend(target.aliases)
    candidates.extend(
        (
            platform_to_plataform(local),
            plataform_to_platform(local),
            target.central_folder,
            f"admin/{canonical}",
            f"admin/{local}",
            target.gitea_repo,
            expected_remote_for(canonical),
            expected_remote_for(local),
            expected_remote_for(target.gitea_repo),
        )
    )
    # The canonical platform target additionally accepts the historical
    # project id, the legacy "plataform" alias, and the central folder name.
    if target.declared_name == CANONICAL_PROJECT_ID:
        candidates.extend(
            (
                CURRENT_PROJECT_ID,
                LEGACY_PLATAFORM_ALIAS,
                CENTRAL_FOLDER_NAME,
                f"admin/{CURRENT_PROJECT_ID}",
                f"admin/{LEGACY_PLATAFORM_ALIAS}",
                expected_remote_for(CURRENT_PROJECT_ID),
                expected_remote_for(LEGACY_PLATAFORM_ALIAS),
            )
        )
    # Drop empty strings before deduplicating.
    return merge_unique(candidate for candidate in candidates if candidate)
def canonical_remote_values(target: RepositoryTarget) -> tuple[str, ...]:
    """Return canonical remote forms for a target."""
    name = target_canonical_name(target)
    # Gitea slug first, then the full HTTPS remote URL.
    return (f"admin/{name}", expected_remote_for(name))
def classify_candidate(target: RepositoryTarget, field_name: str, candidate: str) -> tuple[bool, AcceptanceStatus, str, str]:
    """Classify one field/candidate pair for MCP migration payloads.

    Returns ``(accepted, status, required_action, reason)``; the last two
    are operator-facing strings (Portuguese, as persisted in the matrix).
    Unknown identifiers are always blocked regardless of field.
    """
    canonical = target_canonical_name(target)
    accepted = set(target_identifier_candidates(target))
    canonical_values = {canonical, f"admin/{canonical}", expected_remote_for(canonical)}
    # NOTE(review): the original also built {target.central_folder} here but
    # never checked it; any accepted identifier in a CENTRAL_FIELDS field is
    # treated as the central folder. Dead local removed — confirm intent.
    remote_values = set(canonical_remote_values(target))
    remote_values.update({target.gitea_repo, expected_remote_for(target.gitea_repo), expected_remote_for(target.expected_local_name)})
    # Anything outside the accepted identifier set is blocked outright.
    if candidate not in accepted:
        return (
            False,
            AcceptanceStatus.BLOCKED_UNKNOWN_IDENTIFIER,
            "bloquear payload ate que o identificador seja mapeado no grafo canonico",
            "identificador nao pertence ao conjunto aceito de aliases, remotes ou pasta central",
        )
    # Owner/canonical fields must carry the canonical name itself.
    if field_name in CANONICAL_OWNER_FIELDS:
        if candidate == canonical:
            return (True, AcceptanceStatus.CANONICAL, "manter valor canonico", "campo canonico ja usa o nome institucional")
        return (
            True,
            AcceptanceStatus.CANONICAL_FIELD_REWRITE_REQUIRED,
            "reescrever campo canonico para tudo-para-ia-mais-humana-platform antes de persistir",
            "alias aceito apenas como compatibilidade; campos owner/canonical devem carregar o canonico",
        )
    # Remote fields: canonical remote preferred, historical remotes tolerated.
    if field_name in REMOTE_FIELDS:
        if candidate in canonical_values:
            return (True, AcceptanceStatus.CANONICAL, "manter remote canonico", "remote ja aponta para forma canonica")
        if candidate in remote_values or candidate in accepted:
            return (
                True,
                AcceptanceStatus.REMOTE_ALIAS_ACCEPTED,
                "registrar remote alias e migrar apenas em janela coordenada",
                "remote historico aceito para nao duplicar repositorio nem perder commits",
            )
    # Central dossier fields are administrative aliases, always accepted.
    if field_name in CENTRAL_FIELDS:
        return (
            True,
            AcceptanceStatus.CENTRAL_FOLDER_ACCEPTED,
            "manter pasta central numerada como dossie gerencial, sem propagar numero ao repo",
            "pasta central e alias administrativo aceito, nao nome tecnico do projeto",
        )
    # Compatibility fields: canonical passes as-is, aliases need trace/audit.
    if candidate == canonical:
        return (True, AcceptanceStatus.CANONICAL, "usar valor canonico", "identificador ja esta canonico")
    return (
        True,
        AcceptanceStatus.ALIAS_ACCEPTED,
        "resolver alias para canonical_project_id no MCP e registrar trace/audit",
        "alias historico aceito somente com rastreabilidade e sem bypass direto",
    )
def build_migration_acceptance_cases(targets: Sequence[RepositoryTarget] | None = None) -> tuple[CanonicalMigrationAcceptanceCase, ...]:
    """Build deterministic MCP acceptance cases for repository-name migration.

    One case is emitted per (target, field, candidate, operation) tuple;
    case ids are stable digests of that tuple.
    """
    resolved_targets = tuple(targets or default_repository_targets())
    field_names = (*CANONICAL_OWNER_FIELDS, *COMPATIBILITY_FIELDS, *REMOTE_FIELDS, *CENTRAL_FIELDS)
    cases: list[CanonicalMigrationAcceptanceCase] = []
    for target in resolved_targets:
        canonical = target_canonical_name(target)
        candidates = target_identifier_candidates(target)
        for field_name in field_names:
            for candidate in candidates:
                # Classification does not depend on the operation, so it is
                # computed once per (field, candidate) pair.
                accepted, status, action, reason = classify_candidate(target, field_name, candidate)
                for operation, permission in MCP_MIGRATION_OPERATIONS:
                    seed = {
                        "target": target.declared_name,
                        "field": field_name,
                        "candidate": candidate,
                        "operation": operation,
                        "permission": permission,
                    }
                    cases.append(
                        CanonicalMigrationAcceptanceCase(
                            case_id=f"canonical-migration-{stable_digest(seed, 24)}",
                            target_name=target.declared_name,
                            field_name=field_name,
                            candidate_value=candidate,
                            canonical_value=canonical,
                            operation=operation,
                            permission_scope=permission,
                            accepted=accepted,
                            status=status,
                            required_action=action,
                            reason=reason,
                            mcp_transit_required=True,
                            direct_repository_write_blocked=True,
                        )
                    )
    return tuple(cases)
def build_migration_controls() -> tuple[CanonicalMigrationControl, ...]:
    """Build the controls that gate a safe canonical-name migration.

    Returns a fixed, ordered tuple of nine controls — one per
    ``MigrationLayer`` — each with its own status, required evidence,
    blockers, allowed/forbidden actions, rollback steps, and related
    service-order ids. All operator-facing strings are in Portuguese.
    """
    # Destructive Git operations forbidden by several controls below.
    forbidden_git = (
        "git reset --hard",
        "git checkout -- <arquivo>",
        "git restore destrutivo",
        "git clean",
        "pull/merge nao fast-forward",
        "renomear pasta criando repositorio duplicado",
    )
    return (
        # IDENTITY: canonical name approved, aliases preserved (ready).
        CanonicalMigrationControl(
            control_id="canonical-identity-approved",
            layer=MigrationLayer.IDENTITY,
            title="Nome canonico aprovado e aliases preservados",
            current_state=f"repo materializado como {CURRENT_PROJECT_ID}; alias historico {LEGACY_PLATAFORM_ALIAS}",
            canonical_state=CANONICAL_PROJECT_ID,
            status=MigrationStatus.READY,
            required_evidence=(CANONICAL_DECISION_SOURCE, "dados/canonical-identity-graph.json"),
            blockers=(),
            allowed_actions=("resolver aliases para canonicalProjectId", "manter aliases em payloads de compatibilidade"),
            forbidden_actions=("apagar historico de aliases", "trocar ownerPlatformId sem trace/audit"),
            rollback_steps=("voltar a aceitar CURRENT_PROJECT_ID como alias se consumidor legado falhar",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0035_EXECUTIVA__reconciliar-nome-canonico-real-alias-platform", "0052_GERENCIAL__formalizar-nome-canonico-mais-humana-platform-e-aliases"),
        ),
        # FILESYSTEM: physical rename only in a coordinated window.
        CanonicalMigrationControl(
            control_id="filesystem-alias-window",
            layer=MigrationLayer.FILESYSTEM,
            title="Diretorio fisico so muda em janela coordenada",
            current_state=f"G:/_codex-git/{CURRENT_PROJECT_ID}",
            canonical_state=f"G:/_codex-git/{CANONICAL_PROJECT_ID}",
            status=MigrationStatus.MANUAL_WINDOW_REQUIRED,
            required_evidence=("git status limpo antes/depois", "hash HEAD antes/depois", "ausencia de pasta duplicada"),
            blockers=("rename fisico depende de janela institucional e comparacao de remotes",),
            allowed_actions=("registrar alias de diretorio", "validar que destino canonico nao existe antes de renomear"),
            forbidden_actions=forbidden_git,
            rollback_steps=("renomear de volta somente se HEAD e remote forem iguais aos hashes pre-migracao",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0057_GERENCIAL__governar-migracao-coordenada-do-nome-canonico-platform",),
        ),
        # GIT_REMOTE: blocked on credentials and remote rename by the owner.
        CanonicalMigrationControl(
            control_id="git-remote-canonicalization",
            layer=MigrationLayer.GIT_REMOTE,
            title="Remote canonico sem sobrescrever remoto historico",
            current_state=f"origin historico https://git.ami.app.br/admin/{CURRENT_PROJECT_ID}.git",
            canonical_state=f"https://git.ami.app.br/admin/{CANONICAL_PROJECT_ID}.git",
            status=MigrationStatus.BLOCKED,
            required_evidence=("git remote -v", "git ls-remote --heads remoto canonico main", "push dry-run com credencial valida"),
            blockers=("SEC_E_NO_CREDENTIALS no Git Windows", "renome remoto depende de Gitea/owner"),
            allowed_actions=("mapear remote historico como alias", "bloquear push se ahead/behind divergente"),
            forbidden_actions=forbidden_git,
            rollback_steps=("restaurar origin historico se remoto canonico nao resolver o mesmo HEAD",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0038_EXECUTIVA__corrigir-acl-index-lock-e-commitar-artefatos-mcp", "0050_GERENCIAL__decidir-politica-de-sync-automatico-e-credencial-gitea"),
        ),
        # CENTRAL_DOSSIER: central folder is a management dossier, not the repo name.
        CanonicalMigrationControl(
            control_id="central-dossier-materialized",
            layer=MigrationLayer.CENTRAL_DOSSIER,
            title="Pasta central registra canonico e materializa ordens",
            current_state=CENTRAL_FOLDER_NAME,
            canonical_state=f"{CENTRAL_FOLDER_NAME} como dossie; project_id canonico {CANONICAL_PROJECT_ID}",
            status=MigrationStatus.PARTIAL,
            required_evidence=("orders/executivas e orders/gerenciais materializadas", "controle-semantico.sqlite atualizado"),
            blockers=("central ja possui alteracoes concorrentes nao pertencentes a esta rodada",),
            allowed_actions=("criar arquivos ausentes da plataforma 15", "upsert SQL semantico escopado"),
            forbidden_actions=("alterar pastas de outras plataformas para resolver esta OS",),
            rollback_steps=("recriar ordens a partir de os-orientadoras/central-materialization-fallback",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0040_EXECUTIVA__materializar-escrita-central-e-sql-semantico-sem-permissionerror",),
        ),
        # MCP_CONTRACT: provider contracts already carry the canonical owner id.
        CanonicalMigrationControl(
            control_id="mcp-contract-owner-canonical",
            layer=MigrationLayer.MCP_CONTRACT,
            title="Contratos MCP usam ownerPlatformId canonico",
            current_state="provider Mais Humana aceita aliases e tools live",
            canonical_state=f"ownerPlatformId/canonicalProjectId = {CANONICAL_PROJECT_ID}",
            status=MigrationStatus.READY,
            required_evidence=("dados/mcp-provider-mais-humana.json", "dados/mcp-contratos-humanos.json"),
            blockers=(),
            allowed_actions=("publicar aliasPolicy no provider", "validar sameSource por MCP"),
            forbidden_actions=("chamada direta de plataforma sem traceId/auditId",),
            rollback_steps=("manter aliases aceitos no gateway durante rollout",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0037_EXECUTIVA__homologar-rotas-administrativas-mcp-no-gateway", "0049_GERENCIAL__homologar-mcp-only-administration-routes-como-politica-de-ecossistema"),
        ),
        # MCP_EXECUTE: live endpoint smoke tests are only partially available.
        CanonicalMigrationControl(
            control_id="mcp-execute-live-readback",
            layer=MigrationLayer.MCP_EXECUTE,
            title="Gateway MCP confirma tools e aliases via /v1/execute",
            current_state="tools Mais Humana respondem no endpoint live quando publicadas",
            canonical_state=f"{MCP_EXECUTE_ENDPOINT} audita alias e canonico pelo MCPs Internos",
            status=MigrationStatus.PARTIAL,
            required_evidence=("HTTP 200 ok=true por tool", "traceId/auditId", "source hash"),
            blockers=("deploy live e smoke dependem de runner/deploy manual quando wrangler local falhar",),
            allowed_actions=("smoke HTTP com bearer simulado informado", "registrar resposta redigida"),
            forbidden_actions=("usar falha do plugin Cloudflare como diagnostico operacional",),
            rollback_steps=("voltar para ultimo deployment Cloudflare observado por wrangler/manual",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0039_EXECUTIVA__reexecutar-wrangler-em-runner-homologado-e-registrar-deploy",),
        ),
        # DOCS_UI: blocked until Docs/UI render the same MCP source.
        CanonicalMigrationControl(
            control_id="docs-ui-same-source",
            layer=MigrationLayer.DOCS_UI,
            title="Docs e UI renderizam a mesma fonte MCP",
            current_state="Docs pode estar catalogOnly; UI deve renderizar fonte MCP",
            canonical_state="GPT e painel explicam a mesma instancia administrativa canonica",
            status=MigrationStatus.BLOCKED,
            required_evidence=("decisao Docs responseReady/catalogOnly", "sameSourceValidation", "instancia administrativa MCP"),
            blockers=("Docs responseReady ou excecao catalogOnly depende de owner",),
            allowed_actions=("registrar excecao formal", "exigir sameSource antes de release UI"),
            forbidden_actions=("criar fonte paralela de painel fora do MCP",),
            rollback_steps=("desabilitar renderizacao da fonte nao-MCP ate correcao",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0053_GERENCIAL__pactuar-docs-catalogonly-ou-response-ready-como-excecao-global",),
        ),
        # SYNC_MESH: blocked; no destructive sync while hashes diverge.
        CanonicalMigrationControl(
            control_id="sync-mesh-no-reversal",
            layer=MigrationLayer.SYNC_MESH,
            title="Sincronizacao preserva a alteracao valida mais recente",
            current_state="fetch/push podem falhar por credencial ou ACL; central tem deltas concorrentes",
            canonical_state="hashes comparados antes de qualquer renome, push ou materializacao remota",
            status=MigrationStatus.BLOCKED,
            required_evidence=("targeted-sync-audit", "ahead/behind", "hash local/remoto", "logs de autosync"),
            blockers=("SEC_E_NO_CREDENTIALS", "FETCH_HEAD/index.lock Permission denied", "worktree central divergente"),
            allowed_actions=("fetch seguro quando credencial existir", "bloquear sincronizacao destrutiva em divergencia"),
            forbidden_actions=forbidden_git,
            rollback_steps=("preservar backup de hashes e bloquear autosync ate reconciliacao segura",),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0038_EXECUTIVA__corrigir-acl-index-lock-e-commitar-artefatos-mcp", "0050_GERENCIAL__decidir-politica-de-sync-automatico-e-credencial-gitea"),
        ),
        # ROLLBACK: reversal procedure documented before any physical rename.
        CanonicalMigrationControl(
            control_id="rollback-and-audit-pack",
            layer=MigrationLayer.ROLLBACK,
            title="Rollback documentado antes da migracao fisica",
            current_state="aliases preservam operacao enquanto remoto/diretorio seguem historicos",
            canonical_state="rollback inclui hashes, remotes, deployment id, ordem de servico e ledger MCP",
            status=MigrationStatus.PARTIAL,
            required_evidence=("ledger MCP", "AUDITORIA-GPT", "PENDENCIAS-CODEX", "deployment id"),
            blockers=("sem push remoto confirmado nao ha rollback remoto auditavel",),
            allowed_actions=("registrar rollback textual e hashes", "manter aliases por janela de deprecacao"),
            forbidden_actions=("remover alias no mesmo ciclo do rename canonico",),
            rollback_steps=("reativar alias historico", "restaurar remote anterior", "registrar auditoria de reversao no MCP"),
            mcp_transit_fields=MCP_TRANSIT_FIELDS,
            order_ids=("0057_GERENCIAL__governar-migracao-coordenada-do-nome-canonico-platform",),
        ),
    )
def build_canonical_migration_plan(*, use_generated: bool = True) -> CanonicalMigrationPlan:
    """Build the full canonical-name migration plan.

    When *use_generated* is true, acceptance cases are loaded from the
    pre-generated registry module if it is importable and exposes
    ``iter_acceptance_cases``; otherwise the cases are rebuilt from runtime
    repository targets.
    """
    cases: tuple[CanonicalMigrationAcceptanceCase, ...]
    generated_from = "runtime"
    if use_generated:
        try:
            # Local import: the generated module may not exist in every checkout.
            from .generated_canonical_migration_plan import iter_acceptance_cases
            cases = tuple(iter_acceptance_cases())
            generated_from = "generated_canonical_migration_plan"
        except (ImportError, AttributeError):
            # Fall back to the runtime builder when the registry is absent/partial.
            cases = build_migration_acceptance_cases()
    else:
        cases = build_migration_acceptance_cases()
    # Plan id is a stable digest over the identity decision and plan size.
    seed = {
        "canonical": CANONICAL_PROJECT_ID,
        "current": CURRENT_PROJECT_ID,
        "alias": LEGACY_PLATAFORM_ALIAS,
        "cases": len(cases),
        "controls": [control.control_id for control in build_migration_controls()],
    }
    return CanonicalMigrationPlan(
        plan_id=f"canonical-migration-{stable_digest(seed, 20)}",
        generated_at=utc_now(),
        canonical_project_id=CANONICAL_PROJECT_ID,
        current_project_id=CURRENT_PROJECT_ID,
        legacy_alias=LEGACY_PLATAFORM_ALIAS,
        central_folder=CENTRAL_FOLDER_NAME,
        decision_status=CANONICAL_DECISION_STATUS,
        decision_source=CANONICAL_DECISION_SOURCE,
        compatibility_rule=CANONICAL_COMPATIBILITY_RULE,
        mcp_control_plane_id=MCP_CONTROL_PLANE_ID,
        mcp_execute_endpoint=MCP_EXECUTE_ENDPOINT,
        controls=build_migration_controls(),
        acceptance_cases=cases,
        generated_from=generated_from,
    )
def migration_plan_payload(plan: CanonicalMigrationPlan, *, limit_cases: int = 80) -> dict[str, Any]:
    """Return compact JSON payload for humans, GPT, and MCP discovery."""
    case_cap = max(0, limit_cases)
    payload = plan.to_dict()
    payload["accepted_project_ids"] = list(ACCEPTED_PROJECT_IDS)
    # Keep the payload compact: truncate embedded cases, report the full total.
    payload["acceptance_cases"] = [case.to_dict() for case in plan.acceptance_cases[:case_cap]]
    payload["acceptance_cases_total"] = len(plan.acceptance_cases)
    return payload
def migration_case_rows(cases: Sequence[CanonicalMigrationAcceptanceCase]) -> list[list[str]]:
    """Return CSV rows (header first) for the acceptance matrix."""
    header = [
        "case_id",
        "target_name",
        "field_name",
        "candidate_value",
        "canonical_value",
        "operation",
        "permission_scope",
        "accepted",
        "status",
        "required_action",
        "reason",
    ]
    # One row per case, mirroring the header column order.
    body = [
        [
            case.case_id,
            case.target_name,
            case.field_name,
            case.candidate_value,
            case.canonical_value,
            case.operation,
            case.permission_scope,
            "yes" if case.accepted else "no",
            case.status.value,
            case.required_action,
            case.reason,
        ]
        for case in cases
    ]
    return [header, *body]
def migration_control_rows(controls: Sequence[CanonicalMigrationControl]) -> list[list[str]]:
    """Return CSV rows (header first) for migration controls."""
    header = ["control_id", "layer", "status", "title", "blockers", "required_evidence", "order_ids"]
    # Multi-valued columns are flattened with " | " separators.
    body = [
        [
            control.control_id,
            control.layer.value,
            control.status.value,
            control.title,
            " | ".join(control.blockers),
            " | ".join(control.required_evidence),
            " | ".join(control.order_ids),
        ]
        for control in controls
    ]
    return [header, *body]
def rows_to_csv(rows: Sequence[Sequence[str]]) -> str:
    """Serialize rows to CSV text with LF line endings."""
    # An in-memory sink keeps the csv module's quoting rules without disk I/O.
    sink = io.StringIO()
    csv.writer(sink, lineterminator="\n").writerows(rows)
    return sink.getvalue()
def migration_plan_markdown(plan: CanonicalMigrationPlan, *, limit_cases: int = 30) -> str:
    """Render a concise human report for the canonical migration plan.

    Sections: plan summary, institutional rule, per-control details, a
    truncated list of MCP acceptance cases (*limit_cases* max), real
    blockers, and the MCP transit policy. Output is in Portuguese.
    """
    # Summary header: scalar plan metadata rendered as a bullet list.
    lines = [
        "# Plano de migracao canonica - Mais Humana",
        "",
        f"- status: `{plan.status.value}`",
        f"- plan_id: `{plan.plan_id}`",
        f"- canonical_project_id: `{plan.canonical_project_id}`",
        f"- current_project_id: `{plan.current_project_id}`",
        f"- legacy_alias: `{plan.legacy_alias}`",
        f"- central_folder: `{plan.central_folder}`",
        f"- decision_source: `{plan.decision_source}`",
        f"- generated_from: `{plan.generated_from}`",
        f"- controls: `{plan.controls_count}`",
        f"- acceptance_cases: `{len(plan.acceptance_cases)}`",
        f"- accepted_cases: `{plan.accepted_cases_count}`",
        f"- blocked_cases: `{plan.blocked_cases_count}`",
        "",
        "## Regra institucional",
        "",
        plan.compatibility_rule,
        "",
        "## Controles",
        "",
    ]
    # One subsection per control.
    for control in plan.controls:
        blockers = "; ".join(control.blockers) if control.blockers else "nenhum"
        lines.extend(
            [
                f"### {control.control_id}",
                f"- layer: `{control.layer.value}`",
                f"- status: `{control.status.value}`",
                f"- current_state: `{control.current_state}`",
                f"- canonical_state: `{control.canonical_state}`",
                f"- blockers: `{blockers}`",
                f"- order_ids: `{', '.join(control.order_ids)}`",
                "",
            ]
        )
    # Acceptance cases are truncated to keep the report readable.
    lines.extend(["## Casos MCP de aceitacao", ""])
    for case in plan.acceptance_cases[: max(0, limit_cases)]:
        lines.append(
            f"- `{case.case_id}` `{case.target_name}` `{case.field_name}` -> `{case.candidate_value}` "
            f"status `{case.status.value}` action `{case.required_action}`"
        )
    lines.extend(
        [
            "",
            "## Bloqueios reais",
            "",
        ]
    )
    # "nenhum" placeholder when no control reports a blocker.
    for blocker in plan.blockers or ("nenhum",):
        lines.append(f"- {blocker}")
    lines.extend(
        [
            "",
            "## Politica de transito MCP",
            "",
            "- Toda migracao interplataforma deve passar pelo MCPs Internos.",
            "- Campos de transito obrigatorios: " + ", ".join(f"`{field}`" for field in MCP_TRANSIT_FIELDS) + ".",
            "- Escrita direta em repositorio, painel ou plataforma permanece bloqueada quando nao houver traceId/auditId.",
            "",
        ]
    )
    return "\n".join(lines)
def migration_generated_files(project_root: Path, central_platform_folder: Path | None = None) -> tuple[GeneratedFile, ...]:
    """Return semantic file records for generated migration artifacts.

    One record per project artifact (JSON, CSV, markdown); an extra record
    for the central report is appended when *central_platform_folder* is
    given. Records describe files only — nothing is written here.
    """
    relation = "0057_GERENCIAL__governar-migracao-coordenada-do-nome-canonico-platform"
    # (relative path, description, function, file type) for each artifact.
    specs = [
        ("dados/canonical-migration-plan.json", "Plano JSON de migracao canonica.", "canonical migration plan", "json"),
        ("dados/canonical-migration-central-write-status.json", "Status da escrita central do plano de migracao canonica.", "canonical migration central write status", "json"),
        ("matrizes/canonical-migration-acceptance-cases.csv", "Matriz MCP de aceite de aliases.", "canonical migration cases", "csv"),
        ("matrizes/canonical-migration-controls.csv", "Controles de migracao canonica.", "canonical migration controls", "csv"),
        ("ecossistema/CANONICAL-MIGRATION-PLAN.md", "Relatorio humano da migracao canonica.", "canonical migration report", "markdown"),
    ]
    records = [
        GeneratedFile(
            path=str(project_root / relative),
            description=description,
            function=function,
            file_type=file_type,
            changed_by="mais_humana.canonical_migration",
            change_summary="Gerado plano de migracao canonica -platform com aliases rastreaveis e controles MCP.",
            relation_to_order=relation,
        )
        for relative, description, function, file_type in specs
    ]
    if central_platform_folder is not None:
        # Central dossier copy of the human report, tracked separately.
        records.append(
            GeneratedFile(
                path=str(central_platform_folder / "reports" / "EXECUTADO__canonical-migration-plan.md"),
                description="Registro central do plano de migracao canonica.",
                function="canonical migration central report",
                file_type="markdown",
                changed_by="mais_humana.canonical_migration",
                change_summary="Registrada migracao canonica no dossie central da plataforma 15.",
                relation_to_order=relation,
            )
        )
    return tuple(records)
def write_semantic_records(sqlite_path: Path, files: Iterable[GeneratedFile]) -> SemanticWriteStatus:
    """Upsert generated file functions into a semantic SQLite database."""
    batch = tuple(files)
    location = str(sqlite_path)
    try:
        with connect(sqlite_path) as conn:
            upsert_files(conn, batch)
            conn.commit()
    except (OSError, sqlite3.Error) as exc:
        # Report the failure as a status object so callers can embed it
        # in plan payloads instead of handling the exception themselves.
        return SemanticWriteStatus(location, True, False, len(batch), f"{type(exc).__name__}: {exc}")
    return SemanticWriteStatus(location, True, True, len(batch))
def write_canonical_migration_artifacts(
    plan: CanonicalMigrationPlan,
    project_root: Path,
    *,
    central_platform_folder: Path | None = None,
) -> tuple[GeneratedFile, SemanticWriteStatus, SemanticWriteStatus | None]:
    """Write project and optional central artifacts for the migration plan.

    Writes JSON/CSV/markdown artifacts under *project_root*, optionally a
    central report plus semantic SQLite upsert under
    *central_platform_folder*, and finally upserts the project semantic
    SQLite. Central write failures are captured in the returned status
    rather than raised. Returns ``(records, project_write, central_write)``.
    NOTE(review): the declared return annotation says GeneratedFile but the
    first element is actually a tuple of GeneratedFile records — confirm.
    """
    project_root.mkdir(parents=True, exist_ok=True)
    central_report_path = central_platform_folder / "reports" / "EXECUTADO__canonical-migration-plan.md" if central_platform_folder is not None else None
    central_file_error = ""
    central_write: SemanticWriteStatus | None = None
    central_records: tuple[GeneratedFile, ...] = ()
    if central_platform_folder is not None:
        # Only the records whose path lies under the central folder.
        central_records = tuple(
            record for record in migration_generated_files(project_root, central_platform_folder) if str(central_platform_folder) in record.path
        )
        try:
            assert central_report_path is not None
            central_report_path.parent.mkdir(parents=True, exist_ok=True)
            central_report_path.write_text(migration_plan_markdown(plan, limit_cases=60), encoding="utf-8")
            # Fall back to the full record list if path filtering matched nothing.
            central_write = write_semantic_records(central_platform_folder / "controle-semantico.sqlite", central_records or migration_generated_files(project_root, central_platform_folder))
        except OSError as exc:
            # Central write is best-effort: capture the error in the status.
            central_file_error = f"{type(exc).__name__}: {exc}"
            central_write = SemanticWriteStatus(
                str(central_platform_folder / "controle-semantico.sqlite"),
                True,
                False,
                len(central_records),
                central_file_error,
            )
    # (path, text) pairs for every project-side artifact.
    targets = [
        (project_root / "dados" / "canonical-migration-plan.json", json.dumps(migration_plan_payload(plan, limit_cases=400), ensure_ascii=False, indent=2, sort_keys=True)),
        (
            project_root / "dados" / "canonical-migration-central-write-status.json",
            json.dumps(
                {
                    "centralPlatformFolder": str(central_platform_folder) if central_platform_folder is not None else "",
                    "centralReportPath": str(central_report_path) if central_report_path is not None else "",
                    "centralReportOk": bool(central_platform_folder is not None and not central_file_error),
                    "centralReportError": central_file_error,
                    "centralSemanticWrite": central_write.to_dict() if central_write is not None else None,
                },
                ensure_ascii=False,
                indent=2,
                sort_keys=True,
            ),
        ),
        (project_root / "matrizes" / "canonical-migration-acceptance-cases.csv", rows_to_csv(migration_case_rows(plan.acceptance_cases))),
        (project_root / "matrizes" / "canonical-migration-controls.csv", rows_to_csv(migration_control_rows(plan.controls))),
        (project_root / "ecossistema" / "CANONICAL-MIGRATION-PLAN.md", migration_plan_markdown(plan, limit_cases=120)),
    ]
    for path, text in targets:
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(text, encoding="utf-8")
    records = migration_generated_files(project_root, central_platform_folder)
    project_write = write_semantic_records(project_root / "controle-semantico.sqlite", records)
    return records, project_write, central_write
def run_canonical_migration_plan(
    *,
    project_root: Path,
    central_platform_folder: Path | None = None,
    use_generated: bool = True,
) -> CanonicalMigrationPlan:
    """Build, write, and return the canonical migration plan.

    Args:
        project_root: destination for project artifacts (JSON, CSV, markdown).
        central_platform_folder: optional central dossier folder; when given,
            a central report and semantic SQLite upsert are attempted too.
        use_generated: prefer the pre-generated acceptance-case registry over
            rebuilding cases from runtime targets.

    Returns:
        The plan with ``semantic_write``/``central_semantic_write`` populated
        from the artifact-writing step.
    """
    plan = build_canonical_migration_plan(use_generated=use_generated)
    # The generated-file records are already persisted by the writer; only
    # the two write statuses are folded back into the returned plan.
    # (The original bound them to an unused local.)
    _, project_write, central_write = write_canonical_migration_artifacts(
        plan,
        project_root,
        central_platform_folder=central_platform_folder,
    )
    # dataclasses.replace copies the frozen plan, overriding only the two
    # write-status fields, instead of re-listing all 17 fields by hand.
    return replace(plan, semantic_write=project_write, central_semantic_write=central_write)

View File

@@ -10,6 +10,7 @@ from .models import as_plain_data
from .central_consolidation import run_consolidated_report
from .central_materialization import run_central_materialization
from .canonical_identity import identity_graph_payload, run_canonical_identity_graph
from .canonical_migration import migration_plan_payload, run_canonical_migration_plan
from .matrix import build_global_recommendations, build_matrix, build_platform_reports
from .mcp_contract import build_mcp_contract_report, build_mcp_execute_probe, mcp_provider_compact_json, mcp_provider_payload
from .mcp_contract import (
@@ -153,6 +154,14 @@ def build_parser() -> argparse.ArgumentParser:
default="G:/_codex-git/nucleo-gestao-operacional/central-de-ordem-de-servico/projects/15_repo_tudo-para-ia-mais-humana-platform",
)
canonical_identity.add_argument("--no-generated", action="store_true", help="Build graph from runtime targets instead of generated registry.")
canonical_migration = sub.add_parser("canonical-migration-plan", help="Write canonical-name migration controls and MCP acceptance cases.")
canonical_migration.add_argument("--project-root", default="G:/_codex-git/tudo-para-ia-mais-humana")
canonical_migration.add_argument(
"--central-platform-folder",
default="G:/_codex-git/nucleo-gestao-operacional/central-de-ordem-de-servico/projects/15_repo_tudo-para-ia-mais-humana-platform",
)
canonical_migration.add_argument("--no-generated", action="store_true", help="Build acceptance cases from runtime repository targets.")
canonical_migration.add_argument("--limit", type=int, default=40)
return parser
@@ -609,6 +618,17 @@ def command_canonical_identity(args: argparse.Namespace) -> int:
return 0
def command_canonical_migration_plan(args: argparse.Namespace) -> int:
    """CLI handler: build/write the canonical migration plan and print its payload."""
    # An empty --central-platform-folder means "skip the central dossier write".
    central_platform_folder = Path(args.central_platform_folder) if args.central_platform_folder else None
    plan = run_canonical_migration_plan(
        project_root=Path(args.project_root),
        central_platform_folder=central_platform_folder,
        use_generated=not bool(args.no_generated),
    )
    # JSON on stdout; --limit caps how many acceptance cases are embedded.
    print(json.dumps(migration_plan_payload(plan, limit_cases=int(args.limit)), ensure_ascii=False, indent=2))
    return 0
def main(argv: list[str] | None = None) -> int:
parser = build_parser()
args = parser.parse_args(argv)
@@ -654,6 +674,8 @@ def main(argv: list[str] | None = None) -> int:
return command_central_materialization(args)
if args.command == "canonical-identity":
return command_canonical_identity(args)
if args.command == "canonical-migration-plan":
return command_canonical_migration_plan(args)
parser.error(f"unknown command: {args.command}")
return 2

File diff suppressed because it is too large Load Diff