feat: add repository mesh reconciliation round
This commit is contained in:
@@ -35,6 +35,9 @@ class FullGenerationTests(unittest.TestCase):
|
||||
self.assertTrue((project / "dados" / "snapshot-ecossistema.json").exists())
|
||||
self.assertTrue((project / "relatorios-docx" / "RELATORIO-GERAL-DO-ECOSSISTEMA-humana.docx").exists())
|
||||
self.assertTrue((project / "graficos" / "matriz-plataforma-perfil.svg").exists())
|
||||
self.assertTrue((project / "dados" / "mcp-provider-mais-humana.json").exists())
|
||||
self.assertTrue((project / "matrizes" / "mcp-contratos-humanos.csv").exists())
|
||||
self.assertTrue((project / "ecossistema" / "UI-RENDERER-SAME-SOURCE-POLICY.md").exists())
|
||||
self.assertTrue((central / "controle-semantico.sqlite").exists())
|
||||
counts = table_counts(central / "controle-semantico.sqlite")
|
||||
self.assertGreater(counts["files"], 0)
|
||||
|
||||
115
tests/test_mcp_provider_contract.py
Normal file
115
tests/test_mcp_provider_contract.py
Normal file
@@ -0,0 +1,115 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from mais_humana.catalog import get_platform
|
||||
from mais_humana.cli import main
|
||||
from mais_humana.human_rulebook import MCP_TRANSIT_FIELDS, evaluate_rulebook
|
||||
from mais_humana.matrix import build_matrix, build_platform_reports
|
||||
from mais_humana.mcp_contract import (
|
||||
CANONICAL_PROJECT_ID,
|
||||
MCP_CONTROL_PLANE_ID,
|
||||
McpContractKind,
|
||||
build_mcp_contract_report,
|
||||
contracts_for_kind,
|
||||
iter_contracts,
|
||||
mcp_contract_csv,
|
||||
mcp_contract_markdown,
|
||||
mcp_provider_payload,
|
||||
same_source_validation_payload,
|
||||
)
|
||||
from mais_humana.scanner import scan_platform
|
||||
from tests.helpers import make_tmp
|
||||
|
||||
|
||||
class McpProviderContractTests(unittest.TestCase):
|
||||
def make_repo(self, root: Path, platform_id: str, text: str) -> None:
|
||||
platform = get_platform(platform_id)
|
||||
repo = root / platform.repo_name
|
||||
repo.mkdir(parents=True)
|
||||
(repo / "README.md").write_text(text, encoding="utf-8")
|
||||
(repo / "src").mkdir()
|
||||
(repo / "src" / "index.ts").write_text(
|
||||
"\n".join(
|
||||
[
|
||||
"export const panel = 'admin_ui panelReady sameSource sourcePayloadHash sourceRecordsHash';",
|
||||
"export const trace = 'traceId auditId actor permission result timestamp';",
|
||||
"export const docs = 'responseReady readiness health evidence';",
|
||||
]
|
||||
)
|
||||
+ "\n",
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
def make_report(self, root: Path):
|
||||
self.make_repo(
|
||||
root,
|
||||
"business",
|
||||
"business checkout billing entitlements sameSource panelReady responseReady traceId auditId",
|
||||
)
|
||||
self.make_repo(
|
||||
root,
|
||||
"docs",
|
||||
"docs catalogOnly formal exception contract canonical sourcePayloadHash sourceRecordsHash",
|
||||
)
|
||||
scans = tuple(scan_platform(root, get_platform(pid)) for pid in ("business", "docs"))
|
||||
cells = build_matrix(scans)
|
||||
reports = build_platform_reports(scans, cells)
|
||||
rulebook = evaluate_rulebook(reports, limit=60)
|
||||
return build_mcp_contract_report(rulebook)
|
||||
|
||||
def test_generated_contract_registry_is_large_and_mcp_bound(self) -> None:
|
||||
contracts = iter_contracts()
|
||||
self.assertGreaterEqual(len(contracts), 500)
|
||||
self.assertTrue(any(contract.contract_id == "docs.formal-exception.docs-catalogonly" for contract in contracts))
|
||||
self.assertTrue(any(contract.control_plane_id == MCP_CONTROL_PLANE_ID for contract in contracts))
|
||||
self.assertTrue(all(tuple(MCP_TRANSIT_FIELDS) == contract.required_transit_fields for contract in contracts))
|
||||
|
||||
def test_contract_report_exports_provider_envelope(self) -> None:
|
||||
report = self.make_report(make_tmp())
|
||||
envelope = mcp_provider_payload(report, limit=5)
|
||||
self.assertEqual(envelope["origin"], "tudo-para-ia-mais-humana")
|
||||
self.assertEqual(envelope["destination"], MCP_CONTROL_PLANE_ID)
|
||||
self.assertIn("traceId", envelope)
|
||||
self.assertIn("auditId", envelope)
|
||||
self.assertEqual(envelope["payload"]["canonicalProjectId"], CANONICAL_PROJECT_ID)
|
||||
self.assertLessEqual(len(envelope["payload"]["contracts"]), 5)
|
||||
|
||||
def test_contract_markdown_csv_and_same_source_payload_are_stable(self) -> None:
|
||||
report = self.make_report(make_tmp())
|
||||
markdown = mcp_contract_markdown(report)
|
||||
csv_text = mcp_contract_csv(report)
|
||||
same_source = same_source_validation_payload(report, limit=8)
|
||||
self.assertIn("MCP Provider Mais Humana", markdown)
|
||||
self.assertIn("contract_id,kind,platform_id", csv_text)
|
||||
self.assertTrue(same_source["allSameSource"])
|
||||
self.assertGreater(same_source["validatedCount"], 0)
|
||||
|
||||
def test_report_model_and_ui_contracts_exist(self) -> None:
|
||||
report_models = contracts_for_kind(McpContractKind.REPORT_MODEL)
|
||||
ui_screens = contracts_for_kind(McpContractKind.UI_SCREEN)
|
||||
self.assertGreater(len(report_models), 100)
|
||||
self.assertGreater(len(ui_screens), 20)
|
||||
self.assertTrue(all(contract.report_model_id for contract in report_models))
|
||||
|
||||
def test_cli_mcp_provider_returns_json(self) -> None:
|
||||
root = make_tmp()
|
||||
self.make_repo(
|
||||
root,
|
||||
"business",
|
||||
"business checkout billing entitlements sameSource panelReady responseReady traceId auditId",
|
||||
)
|
||||
code = main(["mcp-provider", "--ecosystem-root", str(root), "--limit", "3"])
|
||||
self.assertEqual(code, 0)
|
||||
|
||||
def test_provider_payload_is_serializable(self) -> None:
|
||||
report = self.make_report(make_tmp())
|
||||
payload = mcp_provider_payload(report, limit=3)
|
||||
text = json.dumps(payload, ensure_ascii=False)
|
||||
self.assertIn("sourcePayloadHash", text)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
@@ -61,6 +61,15 @@ Bearer abcdefghijklmnopqrstuvwxyz1234567890
|
||||
self.assertEqual(len(findings), 1)
|
||||
self.assertEqual(findings[0].pattern_id, "cloudflare_token_assignment")
|
||||
|
||||
def test_redaction_flags_cfat_even_when_named_as_reference(self) -> None:
|
||||
findings = scan_text_for_secrets("route.md", "tokenRef=cfat_abcdefghijklmnopqrstuvwxyz1234567890")
|
||||
self.assertEqual(len(findings), 1)
|
||||
self.assertEqual(findings[0].pattern_id, "cloudflare_cfat_token")
|
||||
|
||||
def test_redaction_allows_single_opaque_reference_line(self) -> None:
|
||||
findings = scan_text_for_secrets("route.md", "credentialRef: cf-token-prod-readonly")
|
||||
self.assertEqual(findings, ())
|
||||
|
||||
def test_acceptance_report_handles_missing_artifacts(self) -> None:
|
||||
root = make_tmp()
|
||||
bundle = ReportBundle(
|
||||
|
||||
389
tests/test_repository_mesh.py
Normal file
389
tests/test_repository_mesh.py
Normal file
@@ -0,0 +1,389 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from typing import Sequence
|
||||
|
||||
from mais_humana.cli import main
|
||||
from mais_humana.repository_mesh import (
|
||||
CommandResult,
|
||||
MeshActionKind,
|
||||
MeshEnvironment,
|
||||
MeshEnvironmentKind,
|
||||
MeshErrorKind,
|
||||
MeshPresence,
|
||||
MeshRiskLevel,
|
||||
RepositoryTarget,
|
||||
automation_markdown,
|
||||
build_mesh_report,
|
||||
classify_command_error,
|
||||
command_is_destructive,
|
||||
default_repository_targets,
|
||||
mesh_actions_csv,
|
||||
mesh_inventory_csv,
|
||||
mesh_markdown,
|
||||
mesh_orders_payload,
|
||||
mesh_summary_payload,
|
||||
normalize_remote_url,
|
||||
repository_mesh_artifact_records,
|
||||
run_repository_mesh,
|
||||
validate_report,
|
||||
)
|
||||
from tests.helpers import make_tmp
|
||||
|
||||
|
||||
def make_repo(root: Path, name: str) -> Path:
|
||||
repo = root / name
|
||||
(repo / ".git").mkdir(parents=True)
|
||||
return repo
|
||||
|
||||
|
||||
class FakeGit:
|
||||
def __init__(self) -> None:
|
||||
self.calls: list[tuple[str, ...]] = []
|
||||
self.status_by_repo: dict[str, tuple[str, ...]] = {}
|
||||
self.remote_by_repo: dict[str, str] = {}
|
||||
self.head_by_repo: dict[str, str] = {}
|
||||
self.branch_by_repo: dict[str, str] = {}
|
||||
self.upstream_by_repo: dict[str, str] = {}
|
||||
self.ahead_behind_by_repo: dict[str, str] = {}
|
||||
self.fetch_error_by_repo: dict[str, str] = {}
|
||||
|
||||
def set_repo(
|
||||
self,
|
||||
repo: Path,
|
||||
*,
|
||||
branch: str = "main",
|
||||
head: str = "abc1230000000000000000000000000000000000",
|
||||
remote: str = "https://git.ami.app.br/admin/repo.git",
|
||||
status: Sequence[str] = (),
|
||||
upstream: str = "origin/main",
|
||||
ahead_behind: str = "0 0",
|
||||
fetch_error: str = "",
|
||||
) -> None:
|
||||
key = str(repo)
|
||||
self.branch_by_repo[key] = branch
|
||||
self.head_by_repo[key] = head
|
||||
self.remote_by_repo[key] = remote
|
||||
self.status_by_repo[key] = tuple(status)
|
||||
self.upstream_by_repo[key] = upstream
|
||||
self.ahead_behind_by_repo[key] = ahead_behind
|
||||
if fetch_error:
|
||||
self.fetch_error_by_repo[key] = fetch_error
|
||||
|
||||
def __call__(self, argv: Sequence[str], cwd: Path | None = None, timeout: int = 60) -> CommandResult:
|
||||
del cwd, timeout
|
||||
args = tuple(str(item) for item in argv)
|
||||
self.calls.append(args)
|
||||
try:
|
||||
repo = args[args.index("-C") + 1]
|
||||
except ValueError:
|
||||
repo = ""
|
||||
command = args[args.index(repo) + 1 :] if repo else args
|
||||
if command[:3] == ("fetch", "--all", "--prune"):
|
||||
if repo in self.fetch_error_by_repo:
|
||||
stderr = self.fetch_error_by_repo[repo]
|
||||
return CommandResult(args, 1, "", stderr, error_kind=classify_command_error(stderr, 1))
|
||||
return CommandResult(args, 0, "", "")
|
||||
if command[:2] == ("branch", "--show-current"):
|
||||
return CommandResult(args, 0, self.branch_by_repo.get(repo, "main") + "\n", "")
|
||||
if command[:2] == ("rev-parse", "HEAD"):
|
||||
return CommandResult(args, 0, self.head_by_repo.get(repo, "abc123") + "\n", "")
|
||||
if command[:3] == ("remote", "get-url", "origin"):
|
||||
return CommandResult(args, 0, self.remote_by_repo.get(repo, "https://git.ami.app.br/admin/repo.git") + "\n", "")
|
||||
if command[:2] == ("status", "--short"):
|
||||
return CommandResult(args, 0, "\n".join(self.status_by_repo.get(repo, ())) + "\n", "")
|
||||
if command[:3] == ("log", "-1", "--oneline"):
|
||||
return CommandResult(args, 0, self.head_by_repo.get(repo, "abc123")[:7] + " test\n", "")
|
||||
if command[:4] == ("rev-parse", "--abbrev-ref", "--symbolic-full-name", "@{u}"):
|
||||
upstream = self.upstream_by_repo.get(repo, "origin/main")
|
||||
if upstream:
|
||||
return CommandResult(args, 0, upstream + "\n", "")
|
||||
return CommandResult(args, 1, "", "fatal: no upstream configured", error_kind=MeshErrorKind.UNKNOWN)
|
||||
if command[:3] == ("rev-list", "--left-right", "--count"):
|
||||
return CommandResult(args, 0, self.ahead_behind_by_repo.get(repo, "0 0") + "\n", "")
|
||||
return CommandResult(args, 1, "", "unexpected command", error_kind=MeshErrorKind.UNKNOWN)
|
||||
|
||||
|
||||
class RepositoryMeshTests(unittest.TestCase):
|
||||
def test_default_targets_include_nominal_reconciliation_cases(self) -> None:
|
||||
targets = default_repository_targets()
|
||||
names = {item.declared_name: item for item in targets}
|
||||
self.assertIn("tudo-para-ia-mais-humana-plataform", names)
|
||||
self.assertIn("tudo-para-ia-mais-humana", names["tudo-para-ia-mais-humana-plataform"].aliases)
|
||||
self.assertTrue(names["tudo-para-ia-mais-humana-plataform"].requires_nominal_reconciliation)
|
||||
self.assertIn("tudo-para-ia-integracoes-platform", names)
|
||||
self.assertEqual(names["tudo-para-ia-integracoes-platform"].canonical_name, "tudo-para-ia-integracoes-plataform")
|
||||
|
||||
def test_remote_normalization_ignores_git_suffix_and_slash(self) -> None:
|
||||
self.assertEqual(
|
||||
normalize_remote_url("https://git.ami.app.br/admin/repo.git/"),
|
||||
normalize_remote_url("https://git.ami.app.br/admin/repo"),
|
||||
)
|
||||
|
||||
def test_error_classifier_handles_dubious_credentials_auth_network_and_repo(self) -> None:
|
||||
self.assertEqual(classify_command_error("fatal: detected dubious ownership", 1), MeshErrorKind.DUBIOUS_OWNERSHIP)
|
||||
self.assertEqual(classify_command_error("SEC_E_NO_CREDENTIALS credenciais nao disponiveis", 1), MeshErrorKind.CREDENTIALS_MISSING)
|
||||
self.assertEqual(classify_command_error("Authentication failed for https://x", 1), MeshErrorKind.AUTHENTICATION)
|
||||
self.assertEqual(classify_command_error("Could not resolve host: git.ami.app.br", 1), MeshErrorKind.NETWORK)
|
||||
self.assertEqual(classify_command_error("fatal: not a git repository", 1), MeshErrorKind.NOT_A_REPOSITORY)
|
||||
self.assertEqual(classify_command_error("", 0), MeshErrorKind.NONE)
|
||||
|
||||
def test_command_destructive_guard_allows_fetch_and_ff_only_but_blocks_reset_pull_clean(self) -> None:
|
||||
self.assertFalse(command_is_destructive("git fetch --all --prune"))
|
||||
self.assertFalse(command_is_destructive("git merge --ff-only @{u}"))
|
||||
self.assertTrue(command_is_destructive("git reset --hard HEAD"))
|
||||
self.assertTrue(command_is_destructive("git clean -fdx"))
|
||||
self.assertTrue(command_is_destructive("git pull origin main"))
|
||||
self.assertTrue(command_is_destructive("git checkout main"))
|
||||
self.assertTrue(command_is_destructive("git restore ."))
|
||||
|
||||
def test_clean_repository_gets_safe_fetch_action(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(
|
||||
repo,
|
||||
remote="https://git.ami.app.br/admin/alpha.git",
|
||||
head="1111111111111111111111111111111111111111",
|
||||
)
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=fake, fetch=True)
|
||||
self.assertEqual(report.ok_count, 1)
|
||||
self.assertEqual(report.blocked_count, 0)
|
||||
self.assertEqual(report.summaries[0].risk, MeshRiskLevel.OK)
|
||||
self.assertTrue(any(action.kind == MeshActionKind.FETCH for action in report.summaries[0].actions))
|
||||
self.assertTrue(any("--prune" in " ".join(call) for call in fake.calls))
|
||||
self.assertFalse(validate_report(report))
|
||||
|
||||
def test_dirty_repository_blocks_destructive_sync(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(
|
||||
repo,
|
||||
remote="https://git.ami.app.br/admin/alpha.git",
|
||||
status=(" M README.md", "?? src/new.py"),
|
||||
)
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=fake, fetch=False)
|
||||
summary = report.summaries[0]
|
||||
self.assertEqual(summary.risk, MeshRiskLevel.BLOCKED)
|
||||
self.assertEqual(summary.dirty_count, 1)
|
||||
self.assertTrue(any(action.kind == MeshActionKind.BLOCK_DESTRUCTIVE_SYNC for action in summary.actions))
|
||||
self.assertIn("working tree sujo", mesh_markdown(report))
|
||||
|
||||
def test_divergent_branch_blocks_automatic_reconciliation(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(
|
||||
repo,
|
||||
remote="https://git.ami.app.br/admin/alpha.git",
|
||||
ahead_behind="2 3",
|
||||
)
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=fake, fetch=False)
|
||||
self.assertEqual(report.summaries[0].risk, MeshRiskLevel.BLOCKED)
|
||||
reasons = " ".join(action.reason for action in report.summaries[0].actions)
|
||||
self.assertIn("ahead/behind", reasons)
|
||||
self.assertIn("bloqueios contra sync destrutiva", mesh_markdown(report))
|
||||
|
||||
def test_remote_mismatch_creates_fix_remote_action_without_auto_execution(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(repo, remote="https://git.ami.app.br/admin/wrong.git")
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=fake, fetch=False)
|
||||
actions = report.summaries[0].actions
|
||||
self.assertEqual(report.summaries[0].remote_mismatch_count, 1)
|
||||
fix_actions = [item for item in actions if item.kind == MeshActionKind.FIX_REMOTE_URL]
|
||||
self.assertEqual(len(fix_actions), 1)
|
||||
self.assertFalse(fix_actions[0].can_execute_automatically)
|
||||
self.assertIn("git remote set-url origin", " ".join(fix_actions[0].command_preview))
|
||||
|
||||
def test_missing_repository_creates_clone_plan_but_not_success(self) -> None:
|
||||
root = make_tmp()
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=FakeGit(), fetch=False)
|
||||
summary = report.summaries[0]
|
||||
self.assertEqual(summary.risk, MeshRiskLevel.ATTENTION)
|
||||
self.assertEqual(summary.missing_count, 1)
|
||||
self.assertTrue(any(action.kind == MeshActionKind.CLONE_MISSING for action in summary.actions))
|
||||
self.assertIn("espelho ausente", mesh_markdown(report))
|
||||
|
||||
def test_unreachable_environment_is_blocked_as_external_access(self) -> None:
|
||||
root = make_tmp()
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("server", MeshEnvironmentKind.CODEX_SERVER, str(root / "missing-root"), "remote", local=False)
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=FakeGit(), fetch=False)
|
||||
summary = report.summaries[0]
|
||||
self.assertEqual(summary.risk, MeshRiskLevel.BLOCKED)
|
||||
self.assertEqual(summary.observations[0].presence, MeshPresence.UNREACHABLE_ENVIRONMENT)
|
||||
self.assertTrue(any(action.kind == MeshActionKind.REQUIRE_ENVIRONMENT_ACCESS for action in summary.actions))
|
||||
|
||||
def test_present_non_git_directory_blocks_replace_or_clone(self) -> None:
|
||||
root = make_tmp()
|
||||
(root / "alpha").mkdir()
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=FakeGit(), fetch=False)
|
||||
summary = report.summaries[0]
|
||||
self.assertEqual(summary.risk, MeshRiskLevel.BLOCKED)
|
||||
self.assertEqual(summary.observations[0].presence, MeshPresence.PRESENT_NOT_GIT)
|
||||
self.assertTrue(any(action.destructive for action in summary.actions))
|
||||
|
||||
def test_alias_materialization_creates_nominal_rename_action(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha-old")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(repo, remote="https://git.ami.app.br/admin/alpha.git")
|
||||
target = RepositoryTarget(
|
||||
"alpha-new",
|
||||
"admin/alpha",
|
||||
"alpha-new",
|
||||
"01_alpha",
|
||||
aliases=("alpha-old",),
|
||||
canonical_name="alpha-new",
|
||||
requires_nominal_reconciliation=True,
|
||||
)
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=fake, fetch=False)
|
||||
summary = report.summaries[0]
|
||||
self.assertEqual(summary.nominal_mismatch_count, 1)
|
||||
self.assertTrue(any(action.kind == MeshActionKind.RENAME_LOCAL_FOLDER for action in summary.actions))
|
||||
self.assertIn("materializado como alias", mesh_markdown(report))
|
||||
|
||||
def test_fetch_credential_error_is_reported_as_blocker_not_plugin_blocker(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(
|
||||
repo,
|
||||
remote="https://git.ami.app.br/admin/alpha.git",
|
||||
fetch_error="fatal: unable to access url: SEC_E_NO_CREDENTIALS",
|
||||
)
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(
|
||||
root,
|
||||
targets=(target,),
|
||||
environments=(env,),
|
||||
runner=fake,
|
||||
fetch=True,
|
||||
plugin_auth_attempt="user rejected MCP tool call",
|
||||
)
|
||||
self.assertEqual(report.credential_errors, 1)
|
||||
self.assertEqual(report.summaries[0].risk, MeshRiskLevel.BLOCKED)
|
||||
md = mesh_markdown(report)
|
||||
self.assertIn("user rejected MCP tool call", md)
|
||||
self.assertIn("credentials_missing", json.dumps(mesh_summary_payload(report), ensure_ascii=False))
|
||||
|
||||
def test_inventory_and_actions_csv_include_required_columns(self) -> None:
|
||||
root = make_tmp()
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(repo, remote="https://git.ami.app.br/admin/alpha.git")
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=fake, fetch=False)
|
||||
inventory = mesh_inventory_csv(report)
|
||||
actions = mesh_actions_csv(report)
|
||||
self.assertIn("nome_declarado,nome_esperado,repositorio_gitea", inventory)
|
||||
self.assertIn("ahead_behind", inventory)
|
||||
self.assertIn("action_id,nome_declarado,ambiente,tipo", actions)
|
||||
|
||||
def test_automation_markdown_documents_five_minute_safe_cycle(self) -> None:
|
||||
root = make_tmp()
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, targets=(target,), environments=(env,), runner=FakeGit(), fetch=False)
|
||||
text = automation_markdown(report)
|
||||
self.assertIn("5 minutos", text)
|
||||
self.assertIn("New-ScheduledTaskAction", text)
|
||||
self.assertIn("*/5 * * * *", text)
|
||||
self.assertIn("nunca executar reset", text.lower())
|
||||
|
||||
def test_orders_payload_maps_central_active_orders(self) -> None:
|
||||
root = make_tmp()
|
||||
central_root = root / "central" / "projects"
|
||||
order_dir = central_root / "01_alpha" / "orders" / "executivas"
|
||||
order_dir.mkdir(parents=True)
|
||||
(order_dir / "0001_EXECUTIVA__teste.md").write_text("# teste\n", encoding="utf-8")
|
||||
repo = make_repo(root, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(repo, remote="https://git.ami.app.br/admin/alpha.git")
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
|
||||
report = build_mesh_report(root, central_root=central_root, targets=(target,), environments=(env,), runner=fake)
|
||||
payload = mesh_orders_payload(report)
|
||||
self.assertEqual(payload["repositories"][0]["centralFolder"], "01_alpha")
|
||||
self.assertEqual(len(payload["repositories"][0]["activeOrders"]), 1)
|
||||
|
||||
def test_run_repository_mesh_writes_all_artifacts(self) -> None:
|
||||
tmp = make_tmp()
|
||||
ecosystem = tmp / "eco"
|
||||
project = tmp / "human"
|
||||
central = tmp / "central" / "projects" / "15_repo_tudo-para-ia-mais-humana"
|
||||
ecosystem.mkdir()
|
||||
project.mkdir()
|
||||
central.mkdir(parents=True)
|
||||
repo = make_repo(ecosystem, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(repo, remote="https://git.ami.app.br/admin/alpha.git")
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(ecosystem), "test")
|
||||
report = build_mesh_report(ecosystem, targets=(target,), environments=(env,), runner=fake)
|
||||
self.assertFalse(validate_report(report))
|
||||
records = repository_mesh_artifact_records(project, central)
|
||||
self.assertTrue(any("repository-mesh-inventory.json" in item["path"] for item in records))
|
||||
report, written = run_repository_mesh(
|
||||
ecosystem,
|
||||
project,
|
||||
central_platform_folder=central,
|
||||
fetch=False,
|
||||
plugin_auth_attempt="user rejected MCP tool call",
|
||||
runner=fake,
|
||||
)
|
||||
self.assertTrue((project / "dados" / "repository-mesh-inventory.json").exists())
|
||||
self.assertTrue((project / "matrizes" / "repository-mesh-actions.csv").exists())
|
||||
self.assertTrue((project / "ecossistema" / "REPOSITORY-MESH-SYNC.md").exists())
|
||||
self.assertTrue((central / "reports" / "EXECUTADO__repository-mesh-sync.md").exists())
|
||||
self.assertGreaterEqual(len(written), 8)
|
||||
|
||||
def test_cli_repo_mesh_writes_payload(self) -> None:
|
||||
tmp = make_tmp()
|
||||
ecosystem = tmp / "eco"
|
||||
project = tmp / "human"
|
||||
central = tmp / "central" / "projects" / "15_repo_tudo-para-ia-mais-humana"
|
||||
ecosystem.mkdir()
|
||||
project.mkdir()
|
||||
central.mkdir(parents=True)
|
||||
code = main(
|
||||
[
|
||||
"repo-mesh",
|
||||
"--ecosystem-root",
|
||||
str(ecosystem),
|
||||
"--project-root",
|
||||
str(project),
|
||||
"--central-platform-folder",
|
||||
str(central),
|
||||
"--plugin-auth-attempt",
|
||||
"user rejected MCP tool call",
|
||||
]
|
||||
)
|
||||
self.assertEqual(code, 0)
|
||||
summary = json.loads((project / "dados" / "repository-mesh-summary.json").read_text(encoding="utf-8"))
|
||||
self.assertEqual(summary["pluginAuthAttempt"], "user rejected MCP tool call")
|
||||
self.assertEqual(summary["targets"], len(default_repository_targets()))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
102
tests/test_repository_mesh_gitea.py
Normal file
102
tests/test_repository_mesh_gitea.py
Normal file
@@ -0,0 +1,102 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from mais_humana.repository_mesh import MeshEnvironment, MeshEnvironmentKind, RepositoryTarget, build_mesh_report
|
||||
from mais_humana.repository_mesh_gitea import (
|
||||
GiteaAuth,
|
||||
GiteaPlannedAction,
|
||||
GiteaRepoStatus,
|
||||
build_gitea_mesh_plan,
|
||||
classify_http_error,
|
||||
endpoint_for_target,
|
||||
gitea_plan_csv,
|
||||
gitea_plan_markdown,
|
||||
plan_for_target,
|
||||
write_gitea_plan_artifacts,
|
||||
)
|
||||
from tests.helpers import make_tmp
|
||||
from tests.test_repository_mesh import FakeGit, make_repo
|
||||
|
||||
|
||||
class RepositoryMeshGiteaTests(unittest.TestCase):
|
||||
def test_endpoint_for_target_builds_api_and_clone_urls(self) -> None:
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
endpoint = endpoint_for_target(target)
|
||||
self.assertEqual(endpoint.api_repo_path, "/api/v1/repos/admin/alpha")
|
||||
self.assertEqual(endpoint.clone_url, "https://git.ami.app.br/admin/alpha.git")
|
||||
|
||||
def test_auth_headers_are_redacted_and_support_token_or_basic(self) -> None:
|
||||
token = GiteaAuth(token="secret")
|
||||
self.assertIn("Authorization", token.headers())
|
||||
self.assertEqual(token.redacted_label(), "token:<redacted>")
|
||||
basic = GiteaAuth(username="ami", password="pw")
|
||||
self.assertIn("Authorization", basic.headers())
|
||||
self.assertEqual(basic.redacted_label(), "basic:ami:<redacted>")
|
||||
|
||||
def test_http_error_classifier_maps_common_statuses(self) -> None:
|
||||
self.assertEqual(classify_http_error(200), GiteaRepoStatus.EXISTS)
|
||||
self.assertEqual(classify_http_error(404), GiteaRepoStatus.MISSING)
|
||||
self.assertEqual(classify_http_error(401), GiteaRepoStatus.UNAUTHORIZED)
|
||||
self.assertEqual(classify_http_error(403), GiteaRepoStatus.FORBIDDEN)
|
||||
self.assertEqual(classify_http_error(None, "network"), GiteaRepoStatus.NETWORK_ERROR)
|
||||
|
||||
def test_plan_for_missing_repo_requires_token_and_create_action(self) -> None:
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
endpoint = endpoint_for_target(target)
|
||||
plan = plan_for_target(target, endpoint=endpoint, status=GiteaRepoStatus.MISSING, auth=GiteaAuth())
|
||||
self.assertTrue(plan.blocked)
|
||||
self.assertIn(GiteaPlannedAction.REQUIRE_TOKEN, plan.actions)
|
||||
self.assertIn(GiteaPlannedAction.CREATE_REPOSITORY, plan.actions)
|
||||
self.assertTrue(plan.api_requests)
|
||||
|
||||
def test_nominal_reconciliation_target_requires_owner_decision(self) -> None:
|
||||
target = RepositoryTarget(
|
||||
"alpha-plataform",
|
||||
"admin/alpha",
|
||||
"alpha-plataform",
|
||||
"01_alpha",
|
||||
aliases=("alpha",),
|
||||
canonical_name="alpha-plataform",
|
||||
requires_nominal_reconciliation=True,
|
||||
)
|
||||
plan = plan_for_target(target, endpoint=endpoint_for_target(target), status=GiteaRepoStatus.EXISTS, auth=GiteaAuth(token="x"))
|
||||
self.assertIn(GiteaPlannedAction.REQUIRE_OWNER_DECISION, plan.actions)
|
||||
self.assertIn(GiteaPlannedAction.RENAME_REPOSITORY, plan.actions)
|
||||
|
||||
def test_build_gitea_mesh_plan_infers_status_from_mesh_report(self) -> None:
|
||||
tmp = make_tmp()
|
||||
repo = make_repo(tmp, "alpha")
|
||||
fake = FakeGit()
|
||||
fake.set_repo(repo, remote="https://git.ami.app.br/admin/alpha.git")
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
env = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(tmp), "test")
|
||||
report = build_mesh_report(tmp, targets=(target,), environments=(env,), runner=fake)
|
||||
plan = build_gitea_mesh_plan(report)
|
||||
self.assertEqual(plan.exists_count, 1)
|
||||
self.assertEqual(plan.blocked_count, 1)
|
||||
self.assertEqual(plan.repositories[0].status, GiteaRepoStatus.EXISTS)
|
||||
|
||||
def test_gitea_renderers_and_artifacts_are_written(self) -> None:
|
||||
tmp = make_tmp()
|
||||
project = tmp / "project"
|
||||
central = tmp / "central"
|
||||
project.mkdir()
|
||||
central.mkdir()
|
||||
target = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
|
||||
endpoint = endpoint_for_target(target)
|
||||
repo_plan = plan_for_target(target, endpoint=endpoint, status=GiteaRepoStatus.MISSING, auth=GiteaAuth())
|
||||
from mais_humana.repository_mesh_gitea import GiteaMeshPlan
|
||||
|
||||
plan = GiteaMeshPlan("plan-1", "2026-01-01T00:00:00+00:00", "https://git.ami.app.br", "none", (repo_plan,))
|
||||
self.assertIn("plan_id,declared_name", gitea_plan_csv(plan))
|
||||
self.assertIn("Gitea Repository Mesh Plan", gitea_plan_markdown(plan))
|
||||
records = write_gitea_plan_artifacts(plan, project, central_platform_folder=central)
|
||||
self.assertTrue((project / "dados" / "repository-mesh-gitea-plan.json").exists())
|
||||
self.assertTrue((project / "ecossistema" / "REPOSITORY-MESH-GITEA.md").exists())
|
||||
self.assertTrue((central / "reports" / "PENDENCIAS-CODEX__repository-mesh-gitea.md").exists())
|
||||
self.assertGreaterEqual(len(records), 4)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
97
tests/test_repository_mesh_naming.py
Normal file
97
tests/test_repository_mesh_naming.py
Normal file
@@ -0,0 +1,97 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from mais_humana.repository_mesh import MeshEnvironment, MeshEnvironmentKind, RepositoryTarget, build_mesh_report
|
||||
from mais_humana.repository_mesh_naming import (
|
||||
NamingStatus,
|
||||
candidate_names,
|
||||
central_folder_for_name,
|
||||
decide_naming,
|
||||
naming_rows,
|
||||
plataform_to_platform,
|
||||
platform_to_plataform,
|
||||
)
|
||||
from mais_humana.repository_mesh_reconciliation import ReconciliationStatus, build_reconciliation_plan
|
||||
from tests.helpers import make_tmp
|
||||
from tests.test_repository_mesh import FakeGit, make_repo
|
||||
|
||||
|
||||
class RepositoryMeshNamingTests(unittest.TestCase):
    """Behavioural tests for repository-mesh naming helpers and decisions."""

    def test_platform_and_plataform_helpers_are_reversible_for_suffixes(self) -> None:
        """The -platform/-plataform converters round-trip and leave other names alone."""
        self.assertEqual(platform_to_plataform("tudo-para-ia-ui-platform"), "tudo-para-ia-ui-plataform")
        self.assertEqual(plataform_to_platform("tudo-para-ia-docs-plataform"), "tudo-para-ia-docs-platform")
        self.assertEqual(platform_to_plataform("sem-sufixo"), "sem-sufixo")
        self.assertEqual(
            central_folder_for_name("Tudo Para IA UI Platform", index=14),
            "14_repo_tudo-para-ia-ui-platform",
        )

    def test_candidate_names_include_expected_alias_canonical_and_suffix_variant(self) -> None:
        """Candidates come out as declared name, then canonical variant, then alias."""
        declared = RepositoryTarget(
            "alpha-platform",
            "admin/alpha-platform",
            "alpha-platform",
            "01_alpha",
            aliases=("alpha",),
            canonical_name="alpha-plataform",
            requires_nominal_reconciliation=True,
        )
        expected = ("alpha-platform", "alpha-plataform", "alpha")
        self.assertEqual(candidate_names(declared), expected)

    def test_decide_naming_marks_exact_alias_missing_and_conflict(self) -> None:
        """Status progresses MISSING -> ALIAS -> CONFLICT as folders appear on disk."""
        base = make_tmp()
        declared = RepositoryTarget("alpha-new", "admin/alpha", "alpha-new", "01_alpha", aliases=("alpha-old",))

        # Nothing materialised yet.
        self.assertEqual(decide_naming(base, declared).status, NamingStatus.MISSING)

        # Only the alias folder exists: the alias name is selected.
        (base / "alpha-old").mkdir()
        alias_decision = decide_naming(base, declared)
        self.assertEqual(alias_decision.status, NamingStatus.ALIAS)
        self.assertEqual(alias_decision.selected_name, "alpha-old")

        # Declared name and alias both exist: conflict.
        (base / "alpha-new").mkdir()
        self.assertEqual(decide_naming(base, declared).status, NamingStatus.CONFLICT)

        # Alias + canonical variants also conflict, and the CSV rows render it.
        declared = RepositoryTarget(
            "beta-new",
            "admin/beta",
            "beta-new",
            "01_beta",
            aliases=("beta-old",),
            canonical_name="beta-canonical",
        )
        (base / "beta-old").mkdir()
        (base / "beta-canonical").mkdir()
        conflict_decision = decide_naming(base, declared)
        self.assertEqual(conflict_decision.status, NamingStatus.CONFLICT)
        rendered = "\n".join(",".join(row) for row in naming_rows((conflict_decision,)))
        self.assertIn("CONFLICT", rendered.upper())

    def test_reconciliation_blocks_when_two_alias_variants_exist(self) -> None:
        """Two materialised name variants force a manual reconciliation decision."""
        base = make_tmp()
        old_repo = make_repo(base, "alpha-old")
        canonical_repo = make_repo(base, "alpha-canonical")
        git_stub = FakeGit()
        git_stub.set_repo(old_repo, remote="https://git.ami.app.br/admin/alpha.git")
        git_stub.set_repo(canonical_repo, remote="https://git.ami.app.br/admin/alpha.git")
        declared = RepositoryTarget(
            "alpha-new",
            "admin/alpha",
            "alpha-new",
            "01_alpha",
            aliases=("alpha-old",),
            canonical_name="alpha-canonical",
            requires_nominal_reconciliation=True,
        )
        environment = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(base), "test")
        mesh_report = build_mesh_report(base, targets=(declared,), environments=(environment,), runner=git_stub)
        recon_plan = build_reconciliation_plan(mesh_report)
        first_receipt = recon_plan.receipts[0]
        self.assertEqual(first_receipt.status, ReconciliationStatus.MANUAL_DECISION_REQUIRED)
        self.assertEqual(first_receipt.naming_decisions[0].status, NamingStatus.CONFLICT)
        self.assertTrue(any("conflito nominal" in operation.title.lower() for operation in first_receipt.operations))


if __name__ == "__main__":
    unittest.main()
|
||||
84
tests/test_repository_mesh_readiness.py
Normal file
84
tests/test_repository_mesh_readiness.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
|
||||
from mais_humana.repository_mesh import MeshEnvironment, MeshEnvironmentKind, RepositoryTarget, build_mesh_report
|
||||
from mais_humana.repository_mesh_reconciliation import build_reconciliation_plan
|
||||
from mais_humana.repository_mesh_runtime import acquire_lock, build_runtime_cycle, cron_scheduler_spec, release_lock, windows_scheduler_spec
|
||||
from mais_humana.repository_mesh_semantic import write_repository_mesh_semantic_state
|
||||
from mais_humana.repository_mesh_readiness import (
|
||||
MeshGateStatus,
|
||||
build_mesh_readiness_report,
|
||||
readiness_csv,
|
||||
readiness_markdown,
|
||||
readiness_pending_items,
|
||||
write_readiness_artifacts,
|
||||
)
|
||||
from tests.helpers import make_tmp
|
||||
from tests.test_repository_mesh import FakeGit, make_repo
|
||||
|
||||
|
||||
class RepositoryMeshReadinessTests(unittest.TestCase):
    """Gate-level checks for the repository-mesh readiness report."""

    def build_bundle(self, *, fetch: bool = False, dirty: bool = False):
        """Build report/plan/cycle/scheduler/count inputs for readiness checks.

        Returns ``(tmp, project, central, report, plan, cycle, specs, counts,
        lock)``; callers are responsible for releasing the returned lock.
        """
        workspace = make_tmp()
        ecosystem = workspace / "eco"
        project_dir = workspace / "project"
        central_dir = workspace / "central"
        for folder in (ecosystem, project_dir, central_dir):
            folder.mkdir()
        repo_path = make_repo(ecosystem, "alpha")
        git_stub = FakeGit()
        porcelain = (" M README.md",) if dirty else ()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", status=porcelain)
        declared = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
        environment = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(ecosystem), "test")
        mesh_report = build_mesh_report(ecosystem, targets=(declared,), environments=(environment,), runner=git_stub, fetch=fetch)
        recon_plan = build_reconciliation_plan(mesh_report)
        runtime_lock = acquire_lock(project_dir / "dados" / "lock.json", owner="readiness-test")
        runtime_cycle = build_runtime_cycle(mesh_report, recon_plan, lock=runtime_lock)
        scheduler_specs = (
            windows_scheduler_spec(python_exe="python.exe", project_root=project_dir, ecosystem_root=ecosystem, central_platform_folder=central_dir),
            cron_scheduler_spec(python_exe="python", project_root=project_dir, ecosystem_root=ecosystem, central_platform_folder=central_dir),
        )
        semantic_counts = write_repository_mesh_semantic_state(
            central_dir / "controle-semantico.sqlite",
            report=mesh_report,
            plan=recon_plan,
            cycle=runtime_cycle,
            schedulers=scheduler_specs,
        )
        return workspace, project_dir, central_dir, mesh_report, recon_plan, runtime_cycle, scheduler_specs, semantic_counts, runtime_lock

    def test_readiness_report_warns_when_fetch_not_attempted(self) -> None:
        """Skipping fetch leaves the fetch gate at WARN (overall WARN or FAIL)."""
        *_, report, plan, cycle, specs, counts, lock = self.build_bundle(fetch=False)
        readiness = build_mesh_readiness_report(report, plan, cycle, specs, counts)
        self.assertIn(readiness.status, {MeshGateStatus.WARN, MeshGateStatus.FAIL})
        self.assertTrue(
            any(
                gate.gate_id == "inventory.fetch-attempted" and gate.status == MeshGateStatus.WARN
                for gate in readiness.gates
            )
        )
        release_lock(lock)

    def test_readiness_report_classifies_dirty_tree_as_blocked(self) -> None:
        """A dirty working tree fails the overall report through the safety gate."""
        *_, report, plan, cycle, specs, counts, lock = self.build_bundle(fetch=True, dirty=True)
        readiness = build_mesh_readiness_report(report, plan, cycle, specs, counts)
        self.assertEqual(readiness.status, MeshGateStatus.FAIL)
        self.assertTrue(any(gate.gate_id == "safety.dirty-blocked" for gate in readiness.gates))
        self.assertTrue(readiness_pending_items(readiness))
        release_lock(lock)

    def test_readiness_csv_and_markdown_include_gate_details(self) -> None:
        """CSV header and Markdown body both expose the gate identifiers."""
        *_, report, plan, cycle, specs, counts, lock = self.build_bundle(fetch=True)
        readiness = build_mesh_readiness_report(report, plan, cycle, specs, counts)
        self.assertIn("gate_id,status,severity", readiness_csv(readiness))
        rendered = readiness_markdown(readiness)
        self.assertIn("Repository Mesh Readiness", rendered)
        self.assertIn("inventory.targets-declared", rendered)
        release_lock(lock)

    def test_write_readiness_artifacts_creates_project_and_central_records(self) -> None:
        """Readiness artifacts land in the project tree and the central reports folder."""
        _, project_dir, central_dir, report, plan, cycle, specs, counts, lock = self.build_bundle(fetch=True)
        readiness = build_mesh_readiness_report(report, plan, cycle, specs, counts)
        written = write_readiness_artifacts(readiness, project_dir, central_platform_folder=central_dir)
        self.assertTrue((project_dir / "dados" / "repository-mesh-readiness.json").exists())
        self.assertTrue((project_dir / "matrizes" / "repository-mesh-readiness.csv").exists())
        self.assertTrue((project_dir / "ecossistema" / "REPOSITORY-MESH-READINESS.md").exists())
        self.assertTrue((central_dir / "reports" / "EXECUTADO__repository-mesh-readiness.md").exists())
        self.assertGreaterEqual(len(written), 5)
        release_lock(lock)


if __name__ == "__main__":
    unittest.main()
|
||||
205
tests/test_repository_mesh_reconciliation.py
Normal file
205
tests/test_repository_mesh_reconciliation.py
Normal file
@@ -0,0 +1,205 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
|
||||
from mais_humana.models import OrderType
|
||||
from mais_humana.repository_mesh import MeshEnvironment, MeshEnvironmentKind, RepositoryTarget, build_mesh_report
|
||||
from mais_humana.repository_mesh_reconciliation import (
|
||||
ReconciliationStatus,
|
||||
StatePrecedence,
|
||||
apply_reconciliation_to_report,
|
||||
build_reconciliation_plan,
|
||||
candidate_states,
|
||||
operations_csv,
|
||||
reconciliation_csv,
|
||||
reconciliation_markdown,
|
||||
receipt_to_service_order,
|
||||
select_latest_state,
|
||||
service_orders_from_plan,
|
||||
)
|
||||
from tests.helpers import make_tmp
|
||||
from tests.test_repository_mesh import FakeGit, make_repo
|
||||
|
||||
|
||||
def one_env(root: Path) -> MeshEnvironment:
    """Return a single primary Windows environment anchored at *root*."""
    environment = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
    return environment
|
||||
|
||||
|
||||
def one_target(name: str = "alpha") -> RepositoryTarget:
    """Return a declared target whose admin path and central folder derive from *name*."""
    admin_path = f"admin/{name}"
    central_folder = f"01_{name}"
    return RepositoryTarget(name, admin_path, name, central_folder)
|
||||
|
||||
|
||||
class RepositoryMeshReconciliationTests(unittest.TestCase):
    """State selection, receipt classification, rendering, and artifact tests."""

    def test_single_hash_is_selected_as_latest_valid_state(self) -> None:
        """One clean observation with a single HEAD hash wins without blockers."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", head="a" * 40)
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        selection = select_latest_state(mesh_report.summaries[0])
        self.assertEqual(selection.precedence, StatePrecedence.SINGLE_HASH)
        self.assertEqual(selection.selected_head, "a" * 40)
        self.assertFalse(selection.blockers)

    def test_dirty_tree_prevents_latest_state_selection(self) -> None:
        """A dirty working tree blocks state selection entirely."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", status=(" M README.md",))
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        selection = select_latest_state(mesh_report.summaries[0])
        self.assertEqual(selection.precedence, StatePrecedence.DIRTY_TREE)
        self.assertIsNone(selection.selected_head)
        self.assertTrue(selection.blockers)

    def test_divergent_ahead_behind_requires_manual_precedence(self) -> None:
        """Being ahead and behind simultaneously is divergence needing a manual call."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", ahead_behind="2 2")
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        selection = select_latest_state(mesh_report.summaries[0])
        self.assertEqual(selection.precedence, StatePrecedence.DIVERGENT)
        self.assertIn("ahead=2 behind=2", " ".join(selection.blockers))

    def test_local_ahead_is_candidate_but_still_needs_push_review(self) -> None:
        """Local-only commits still select a single hash and keep the receipt aligned."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", ahead_behind="3 0")
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        selection = select_latest_state(mesh_report.summaries[0])
        self.assertEqual(selection.precedence, StatePrecedence.SINGLE_HASH)
        recon_plan = build_reconciliation_plan(mesh_report)
        self.assertEqual(recon_plan.receipts[0].status, ReconciliationStatus.ALIGNED)

    def test_missing_hash_creates_missing_materialization_receipt(self) -> None:
        """No materialised clone produces a non-auto-syncable pending receipt."""
        base = make_tmp()
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=FakeGit())
        recon_plan = build_reconciliation_plan(mesh_report)
        first_receipt = recon_plan.receipts[0]
        self.assertEqual(first_receipt.status, ReconciliationStatus.MISSING_MATERIALIZATION)
        self.assertFalse(first_receipt.safe_to_auto_sync)
        self.assertTrue(first_receipt.pending_items)

    def test_environment_blocked_receipt_has_blocker_operation(self) -> None:
        """An unreachable declared environment blocks the receipt outright."""
        base = make_tmp()
        remote_env = MeshEnvironment("server", MeshEnvironmentKind.CODEX_SERVER, str(base / "missing"), "remote", local=False)
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(remote_env,), runner=FakeGit())
        recon_plan = build_reconciliation_plan(mesh_report)
        first_receipt = recon_plan.receipts[0]
        self.assertEqual(first_receipt.status, ReconciliationStatus.ENVIRONMENT_BLOCKED)
        self.assertTrue(first_receipt.blockers)
        self.assertIn("ambiente declarado", first_receipt.blockers[0])

    def test_credential_blocked_receipt_has_credential_operation(self) -> None:
        """A fetch credential failure surfaces as a credential pending item."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(
            repo_path,
            remote="https://git.ami.app.br/admin/alpha.git",
            fetch_error="fatal: SEC_E_NO_CREDENTIALS",
        )
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub, fetch=True)
        recon_plan = build_reconciliation_plan(mesh_report)
        first_receipt = recon_plan.receipts[0]
        self.assertEqual(first_receipt.status, ReconciliationStatus.CREDENTIAL_BLOCKED)
        self.assertTrue(any("credencial" in item.lower() for item in first_receipt.pending_items))

    def test_nominal_rename_ready_receipt_is_created_for_clean_alias(self) -> None:
        """A clean alias clone yields a rename-ready receipt with a Rename-Item op."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha-old")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git")
        declared = RepositoryTarget(
            "alpha-new",
            "admin/alpha",
            "alpha-new",
            "01_alpha",
            aliases=("alpha-old",),
            canonical_name="alpha-new",
            requires_nominal_reconciliation=True,
        )
        mesh_report = build_mesh_report(base, targets=(declared,), environments=(one_env(base),), runner=git_stub)
        recon_plan = build_reconciliation_plan(mesh_report)
        first_receipt = recon_plan.receipts[0]
        self.assertEqual(first_receipt.status, ReconciliationStatus.NOMINAL_RENAME_READY)
        self.assertTrue(any("Rename-Item" in " ".join(operation.commands) for operation in first_receipt.operations))

    def test_remote_mismatch_receipt_prioritizes_remote_operation(self) -> None:
        """A wrong origin URL produces a remote-mismatch receipt with a fix operation."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/wrong.git")
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        recon_plan = build_reconciliation_plan(mesh_report)
        first_receipt = recon_plan.receipts[0]
        self.assertEqual(first_receipt.status, ReconciliationStatus.REMOTE_MISMATCH)
        self.assertTrue(any("remote origin" in operation.title.lower() for operation in first_receipt.operations))

    def test_candidate_states_extract_clean_commit_details(self) -> None:
        """Candidate extraction keeps the short hash and the local-only-commits flag."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", head="b" * 40, ahead_behind="1 0")
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        extracted = candidate_states(mesh_report.summaries[0])
        self.assertEqual(len(extracted), 1)
        self.assertEqual(extracted[0].short_head, "b" * 12)
        self.assertTrue(extracted[0].has_local_only_commits)

    def test_reconciliation_markdown_and_csv_are_rendered(self) -> None:
        """Markdown and both CSV renderers include their identifying headers."""
        base = make_tmp()
        repo_path = make_repo(base, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git")
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=git_stub)
        recon_plan = build_reconciliation_plan(mesh_report)
        self.assertIn("Repository Mesh Reconciliation Plan", reconciliation_markdown(recon_plan))
        self.assertIn("receipt_id,target_name", reconciliation_csv(recon_plan))
        self.assertIn("operation_id,target_name", operations_csv(recon_plan))

    def test_service_orders_from_plan_builds_executive_and_managerial_orders(self) -> None:
        """One executive and one managerial order are derived from the plan."""
        base = make_tmp()
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=FakeGit())
        recon_plan = build_reconciliation_plan(mesh_report)
        orders = service_orders_from_plan(recon_plan, start_executive=200, start_managerial=300)
        self.assertEqual(len(orders), 2)
        self.assertEqual(orders[0].order_type, OrderType.EXECUTIVE)
        self.assertEqual(orders[1].order_type, OrderType.MANAGERIAL)
        self.assertIn("repo-mesh", " ".join(orders[0].validations))

    def test_receipt_to_service_order_preserves_paths_and_ready_criteria(self) -> None:
        """Receipt-to-order conversion keeps affected paths and the planned status."""
        base = make_tmp()
        mesh_report = build_mesh_report(base, targets=(one_target(),), environments=(one_env(base),), runner=FakeGit())
        recon_plan = build_reconciliation_plan(mesh_report)
        order = receipt_to_service_order(recon_plan.receipts[0], 222, OrderType.EXECUTIVE)
        self.assertIn("G:/_codex-git/alpha", order.affected_paths)
        self.assertTrue(order.ready_criteria)
        self.assertEqual(order.status.value, "planejada")

    def test_apply_reconciliation_writes_project_and_central_artifacts(self) -> None:
        """Applying the plan writes project artifacts and the central EXECUTADO report."""
        workspace = make_tmp()
        ecosystem = workspace / "eco"
        project_dir = workspace / "human"
        central_dir = workspace / "central" / "projects" / "15_repo_tudo-para-ia-mais-humana"
        ecosystem.mkdir()
        project_dir.mkdir()
        central_dir.mkdir(parents=True)
        mesh_report = build_mesh_report(ecosystem, targets=(one_target(),), environments=(one_env(ecosystem),), runner=FakeGit())
        recon_plan, written = apply_reconciliation_to_report(mesh_report, project_dir, central_platform_folder=central_dir)
        self.assertTrue((project_dir / "dados" / "repository-mesh-reconciliation.json").exists())
        self.assertTrue((project_dir / "matrizes" / "repository-mesh-operations.csv").exists())
        self.assertTrue((project_dir / "ecossistema" / "REPOSITORY-MESH-RECONCILIATION.md").exists())
        self.assertTrue((central_dir / "reports" / "EXECUTADO__repository-mesh-reconciliation.md").exists())
        self.assertGreaterEqual(len(written), 7)
        self.assertEqual(recon_plan.report_id, mesh_report.report_id)


if __name__ == "__main__":
    unittest.main()
|
||||
162
tests/test_repository_mesh_runtime.py
Normal file
162
tests/test_repository_mesh_runtime.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import unittest
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
from mais_humana.repository_mesh import MeshEnvironment, MeshEnvironmentKind, RepositoryTarget, build_mesh_report
|
||||
from mais_humana.repository_mesh_reconciliation import build_reconciliation_plan
|
||||
from mais_humana.repository_mesh_runtime import (
|
||||
RuntimeCommandStatus,
|
||||
RuntimeLockStatus,
|
||||
acquire_lock,
|
||||
build_runtime_cycle,
|
||||
commands_from_report_and_plan,
|
||||
cron_scheduler_spec,
|
||||
lock_is_stale,
|
||||
release_lock,
|
||||
runtime_csv,
|
||||
runtime_jsonl,
|
||||
runtime_markdown,
|
||||
scheduler_markdown,
|
||||
scheduler_payload,
|
||||
windows_scheduler_spec,
|
||||
write_runtime_artifacts,
|
||||
)
|
||||
from tests.helpers import make_tmp
|
||||
from tests.test_repository_mesh import FakeGit, make_repo
|
||||
|
||||
|
||||
def target() -> RepositoryTarget:
    """Return the single declared repository target used across these tests."""
    declared = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
    return declared
|
||||
|
||||
|
||||
def env(root: Path) -> MeshEnvironment:
    """Return a primary Windows environment rooted at *root*."""
    environment = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(root), "test")
    return environment
|
||||
|
||||
|
||||
def clean_report(root: Path):
    """Materialise one clean 'alpha' repository under *root* and build a report on it."""
    repo_path = make_repo(root, "alpha")
    git_stub = FakeGit()
    git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git")
    return build_mesh_report(root, targets=(target(),), environments=(env(root),), runner=git_stub)
|
||||
|
||||
|
||||
class RepositoryMeshRuntimeTests(unittest.TestCase):
    """Locking, command gating, rendering, and artifact tests for the runtime."""

    def test_lock_acquire_busy_release_cycle(self) -> None:
        """A held lock reports BUSY to others and can be re-acquired after release."""
        workdir = make_tmp()
        lock_path = workdir / "mesh.lock.json"
        holder = acquire_lock(lock_path, owner="one", expires_after_seconds=600)
        self.assertEqual(holder.status, RuntimeLockStatus.ACQUIRED)
        contender = acquire_lock(lock_path, owner="two", expires_after_seconds=600)
        self.assertEqual(contender.status, RuntimeLockStatus.BUSY)
        released = release_lock(holder)
        self.assertEqual(released.status, RuntimeLockStatus.RELEASED)
        successor = acquire_lock(lock_path, owner="three", expires_after_seconds=600)
        self.assertEqual(successor.status, RuntimeLockStatus.ACQUIRED)
        release_lock(successor)

    def test_lock_stale_detection_accepts_malformed_payload_as_recoverable(self) -> None:
        """Malformed, expired, or dead-pid lock payloads are all treated as stale."""
        self.assertTrue(lock_is_stale({"createdAt": "bad", "expiresAfterSeconds": 600}))
        self.assertTrue(lock_is_stale({"createdAt": "2026-01-01T00:00:00+00:00", "expiresAfterSeconds": 0}))
        # A PID that cannot belong to a live process on either platform.
        dead_pid = 99999999 if os.name == "nt" else 999999
        fresh_but_orphaned = {
            "createdAt": datetime.now(timezone.utc).isoformat(),
            "expiresAfterSeconds": 600,
            "pid": dead_pid,
        }
        self.assertTrue(lock_is_stale(fresh_but_orphaned))

    def test_commands_from_clean_report_allow_only_safe_fetch_like_commands(self) -> None:
        """A clean report yields at least one allowed command and nothing destructive."""
        workdir = make_tmp()
        mesh_report = clean_report(workdir)
        recon_plan = build_reconciliation_plan(mesh_report)
        planned = commands_from_report_and_plan(mesh_report, recon_plan)
        self.assertTrue(planned)
        self.assertTrue(any(entry.status == RuntimeCommandStatus.ALLOWED for entry in planned))
        self.assertFalse(any("reset" in entry.command for entry in planned))

    def test_dirty_report_blocks_runtime_commands(self) -> None:
        """A dirty working tree blocks commands with an explanatory reason."""
        workdir = make_tmp()
        repo_path = make_repo(workdir, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git", status=(" M README.md",))
        mesh_report = build_mesh_report(workdir, targets=(target(),), environments=(env(workdir),), runner=git_stub)
        recon_plan = build_reconciliation_plan(mesh_report)
        planned = commands_from_report_and_plan(mesh_report, recon_plan)
        self.assertTrue(any(entry.status == RuntimeCommandStatus.BLOCKED for entry in planned))
        self.assertTrue(
            any(
                "destrutivo" in " ".join(entry.blocked_reasons) or "manual" in " ".join(entry.blocked_reasons)
                for entry in planned
            )
        )

    def test_runtime_cycle_blocks_everything_when_lock_not_acquired(self) -> None:
        """A BUSY lock forces every command result in the cycle to BLOCKED."""
        workdir = make_tmp()
        mesh_report = clean_report(workdir)
        recon_plan = build_reconciliation_plan(mesh_report)
        holder = acquire_lock(workdir / "lock.json", owner="one")
        busy_attempt = acquire_lock(workdir / "lock.json", owner="two")
        cycle = build_runtime_cycle(mesh_report, recon_plan, lock=busy_attempt)
        self.assertEqual(cycle.lock.status, RuntimeLockStatus.BUSY)
        self.assertTrue(all(result.status == RuntimeCommandStatus.BLOCKED for result in cycle.results))
        release_lock(holder)

    def test_runtime_outputs_csv_jsonl_and_markdown(self) -> None:
        """The cycle renders to CSV, Markdown, and parseable JSONL."""
        workdir = make_tmp()
        mesh_report = clean_report(workdir)
        recon_plan = build_reconciliation_plan(mesh_report)
        holder = acquire_lock(workdir / "lock.json", owner="one")
        cycle = build_runtime_cycle(mesh_report, recon_plan, lock=holder)
        self.assertIn("command_id,target_name,status", runtime_csv(cycle))
        self.assertIn("Repository Mesh Runtime Cycle", runtime_markdown(cycle))
        head_record = runtime_jsonl(cycle).splitlines()[0]
        self.assertIn("command", json.loads(head_record))
        release_lock(holder)

    def test_scheduler_specs_include_five_minute_commands(self) -> None:
        """Windows and cron scheduler specs both surface their scheduling commands."""
        workdir = make_tmp()
        windows_spec = windows_scheduler_spec(
            python_exe="python.exe",
            project_root=workdir,
            ecosystem_root=workdir / "eco",
            central_platform_folder=workdir / "central",
        )
        cron_spec = cron_scheduler_spec(
            python_exe="python",
            project_root=workdir,
            ecosystem_root=workdir / "eco",
            central_platform_folder=workdir / "central",
        )
        payload = scheduler_payload((windows_spec, cron_spec))
        rendered = scheduler_markdown((windows_spec, cron_spec))
        self.assertIn("windows_task", payload["kinds"])
        self.assertIn("cron", payload["kinds"])
        self.assertIn("New-ScheduledTaskAction", rendered)
        self.assertIn("crontab", rendered)

    def test_write_runtime_artifacts_records_project_and_central_files(self) -> None:
        """Runtime artifacts land in the project tree and the central reports folder."""
        workdir = make_tmp()
        project_dir = workdir / "project"
        central_dir = workdir / "central"
        project_dir.mkdir()
        central_dir.mkdir()
        mesh_report = clean_report(workdir / "eco")
        recon_plan = build_reconciliation_plan(mesh_report)
        holder = acquire_lock(project_dir / "dados" / "lock.json", owner="one")
        cycle = build_runtime_cycle(mesh_report, recon_plan, lock=holder)
        scheduler_specs = (
            windows_scheduler_spec(python_exe="python.exe", project_root=project_dir, ecosystem_root=workdir / "eco", central_platform_folder=central_dir),
            cron_scheduler_spec(python_exe="python", project_root=project_dir, ecosystem_root=workdir / "eco", central_platform_folder=central_dir),
        )
        written = write_runtime_artifacts(cycle, scheduler_specs, project_dir, central_platform_folder=central_dir)
        self.assertTrue((project_dir / "dados" / "repository-mesh-runtime-cycle.json").exists())
        self.assertTrue((project_dir / "dados" / "repository-mesh-runtime.jsonl").exists())
        self.assertTrue((project_dir / "ecossistema" / "REPOSITORY-MESH-SCHEDULERS.md").exists())
        self.assertTrue((central_dir / "reports" / "EXECUTADO__repository-mesh-runtime.md").exists())
        self.assertGreaterEqual(len(written), 6)
        release_lock(holder)


if __name__ == "__main__":
    unittest.main()
|
||||
78
tests/test_repository_mesh_semantic.py
Normal file
78
tests/test_repository_mesh_semantic.py
Normal file
@@ -0,0 +1,78 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
import unittest
|
||||
|
||||
from mais_humana.repository_mesh import MeshEnvironment, MeshEnvironmentKind, RepositoryTarget, build_mesh_report
|
||||
from mais_humana.repository_mesh_reconciliation import build_reconciliation_plan
|
||||
from mais_humana.repository_mesh_runtime import acquire_lock, build_runtime_cycle, cron_scheduler_spec, release_lock, windows_scheduler_spec
|
||||
from mais_humana.repository_mesh_semantic import counts_markdown, semantic_generated_records, table_counts, write_repository_mesh_semantic_state
|
||||
from tests.helpers import make_tmp
|
||||
from tests.test_repository_mesh import FakeGit, make_repo
|
||||
|
||||
|
||||
class RepositoryMeshSemanticTests(unittest.TestCase):
    """Persistence tests for the repository-mesh semantic SQLite state."""

    def build_objects(self):
        """Assemble report, plan, cycle, scheduler specs, and a held lock."""
        workspace = make_tmp()
        ecosystem = workspace / "eco"
        project_dir = workspace / "project"
        ecosystem.mkdir()
        project_dir.mkdir()
        repo_path = make_repo(ecosystem, "alpha")
        git_stub = FakeGit()
        git_stub.set_repo(repo_path, remote="https://git.ami.app.br/admin/alpha.git")
        declared = RepositoryTarget("alpha", "admin/alpha", "alpha", "01_alpha")
        environment = MeshEnvironment("primary", MeshEnvironmentKind.WINDOWS_PRIMARY, str(ecosystem), "test")
        mesh_report = build_mesh_report(ecosystem, targets=(declared,), environments=(environment,), runner=git_stub)
        recon_plan = build_reconciliation_plan(mesh_report)
        runtime_lock = acquire_lock(project_dir / "dados" / "lock.json", owner="semantic-test")
        runtime_cycle = build_runtime_cycle(mesh_report, recon_plan, lock=runtime_lock)
        scheduler_specs = (
            windows_scheduler_spec(python_exe="python.exe", project_root=project_dir, ecosystem_root=ecosystem, central_platform_folder=None),
            cron_scheduler_spec(python_exe="python", project_root=project_dir, ecosystem_root=ecosystem, central_platform_folder=None),
        )
        return workspace, mesh_report, recon_plan, runtime_cycle, scheduler_specs, runtime_lock

    def test_write_repository_mesh_semantic_state_populates_tables(self) -> None:
        """One report/target/observation plus both schedulers land in the tables."""
        workspace, mesh_report, recon_plan, runtime_cycle, scheduler_specs, runtime_lock = self.build_objects()
        sqlite_path = workspace / "central" / "controle-semantico.sqlite"
        counts = write_repository_mesh_semantic_state(sqlite_path, report=mesh_report, plan=recon_plan, cycle=runtime_cycle, schedulers=scheduler_specs)
        self.assertEqual(counts.reports, 1)
        self.assertEqual(counts.targets, 1)
        self.assertEqual(counts.observations, 1)
        self.assertGreaterEqual(counts.actions, 1)
        self.assertEqual(counts.plans, 1)
        self.assertEqual(counts.receipts, 1)
        self.assertEqual(counts.cycles, 1)
        self.assertEqual(counts.schedulers, 2)
        release_lock(runtime_lock)

    def test_semantic_state_is_idempotent_for_same_report_and_plan(self) -> None:
        """Re-writing the same report/plan leaves counts and report rows unchanged."""
        workspace, mesh_report, recon_plan, runtime_cycle, scheduler_specs, runtime_lock = self.build_objects()
        sqlite_path = workspace / "central" / "controle-semantico.sqlite"
        first = write_repository_mesh_semantic_state(sqlite_path, report=mesh_report, plan=recon_plan, cycle=runtime_cycle, schedulers=scheduler_specs)
        second = write_repository_mesh_semantic_state(sqlite_path, report=mesh_report, plan=recon_plan, cycle=runtime_cycle, schedulers=scheduler_specs)
        self.assertEqual(first.to_dict(), second.to_dict())
        with sqlite3.connect(sqlite_path) as conn:
            rows = conn.execute("SELECT report_id, blocked_count FROM repository_mesh_reports").fetchall()
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0][0], mesh_report.report_id)
        release_lock(runtime_lock)

    def test_table_counts_for_missing_database_returns_zeroes(self) -> None:
        """Counting a non-existent database yields all-zero counters."""
        missing_counts = table_counts(make_tmp() / "missing.sqlite")
        self.assertEqual(missing_counts.reports, 0)
        self.assertEqual(missing_counts.schedulers, 0)

    def test_semantic_generated_records_and_markdown_are_human_readable(self) -> None:
        """Generated-record listing and the counts markdown mention the semantic layer."""
        sqlite_path = make_tmp() / "central" / "controle-semantico.sqlite"
        generated = semantic_generated_records(sqlite_path)
        self.assertEqual(len(generated), 1)
        self.assertIn("repository mesh semantic", generated[0].function)
        rendered = counts_markdown(table_counts(sqlite_path), sqlite_path)
        self.assertIn("Repository Mesh Semantic Counts", rendered)
        self.assertIn("reports", rendered)


if __name__ == "__main__":
    unittest.main()
|
||||
Reference in New Issue
Block a user