Files
tudo-para-ia-mais-humana-pl…/tests/test_operational_dossier.py
2026-04-30 06:42:00 -03:00

120 lines
6.2 KiB
Python

from __future__ import annotations
import json
import unittest
from pathlib import Path
from mais_humana.catalog import get_platform
from mais_humana.cli import main
from mais_humana.matrix import build_global_recommendations, build_matrix, build_platform_reports
from mais_humana.operational_dossier import (
ACTIVE_ORDER_IDS,
build_execution_round_dossier,
build_platform_dossier,
dossier_compact_rows,
dossier_to_markdown,
order_justifications_markdown,
write_csv_lines,
)
from mais_humana.operational_models import GateOutcome, HumanReadinessStage, OrderClosureStatus
from mais_humana.orders import build_exit_orders
from mais_humana.reports import generate
from mais_humana.scanner import scan_platform
from mais_humana.storage import table_counts
from tests.helpers import make_tmp
class OperationalDossierTests(unittest.TestCase):
    """Exercise dossier construction, markdown/CSV rendering, artifact generation, and the CLI."""

    def make_repo(self, root: Path, platform_id: str, readme: str, code: str = "") -> Path:
        """Scaffold a minimal platform repository under *root* and return its path.

        The layout mirrors what the scanner expects: a README, a TypeScript
        source tree, an OpenAPI document, a package manifest with test/smoke
        scripts, and one test file.
        """
        repo_dir = root / get_platform(platform_id).repo_name
        repo_dir.mkdir(parents=True)
        (repo_dir / "src").mkdir()
        # Default source advertises every readiness signal the scanner looks for.
        default_source = (
            "export const readiness = { health: true, panelReady: true, "
            "sameSource: true, credentialRef: 'credential:test' };\n"
        )
        contents = {
            "README.md": readme,
            "src/index.ts": code or default_source,
            "openapi.json": json.dumps({"openapi": "3.1.0"}),
            "package.json": json.dumps({"scripts": {"test": "node --test", "smoke": "node smoke.js"}}),
            "index.test.ts": "test('ok', () => true)\n",
        }
        for rel_path, text in contents.items():
            (repo_dir / rel_path).write_text(text, encoding="utf-8")
        return repo_dir

    def build_reports(self, root: Path, platform_ids: tuple[str, ...]):
        """Scan each requested platform under *root* and build the per-platform reports."""
        scans = tuple(scan_platform(root, get_platform(pid)) for pid in platform_ids)
        return build_platform_reports(scans, build_matrix(scans))

    def test_platform_dossier_detects_panel_same_source_and_known_blocker(self) -> None:
        workdir = make_tmp()
        self.make_repo(
            workdir,
            "docs",
            "Docs catalogOnly precisa decisao formal; panelReady sameSource openapi readiness audit trace.",
        )
        reports = self.build_reports(workdir, ("docs",))
        dossier = build_platform_dossier(reports[0], build_global_recommendations(reports))
        self.assertEqual(dossier.platform_id, "docs")
        self.assertTrue(dossier.panel_ready)
        self.assertTrue(dossier.same_source_ready)
        self.assertGreaterEqual(dossier.blocker_count, 1)
        allowed_stages = {
            HumanReadinessStage.CATALOG_ONLY,
            HumanReadinessStage.BLOCKED,
            HumanReadinessStage.PANEL_READY,
        }
        self.assertIn(dossier.stage, allowed_stages)
        # Indexing [0] fails loudly if no docs-scoped gate was produced.
        docs_gates = [gate for gate in dossier.gates if gate.gate_id.endswith(".docs")]
        self.assertIn(docs_gates[0].outcome, {GateOutcome.BLOCKED, GateOutcome.PASS})

    def test_round_dossier_links_active_orders_to_platforms_and_output_orders(self) -> None:
        workdir = make_tmp()
        self.make_repo(workdir, "business", "Business entitlement checkout panelReady sameSource readiness invoice blocker policy.")
        self.make_repo(workdir, "compliance", "Compliance admin_view readiness panelReady sameSource retention policy audit openapi.")
        self.make_repo(workdir, "integracoes", "Integracoes BYOK credentialRef readiness Cloudflare wrangler smoke provider.")
        reports = self.build_reports(workdir, ("business", "compliance", "integracoes"))
        recommendations = build_global_recommendations(reports)
        exit_orders = build_exit_orders(recommendations)
        dossier = build_execution_round_dossier(
            workdir, reports, recommendations, exit_orders, active_order_ids=ACTIVE_ORDER_IDS[:5]
        )
        self.assertEqual(len(dossier.order_justifications), 5)
        self.assertTrue(dossier.output_orders)
        justified_platforms = {entry.platform_id for entry in dossier.order_justifications}
        self.assertIn("business", justified_platforms)
        self.assertIn("compliance", justified_platforms)
        seen_statuses = {entry.closure_status for entry in dossier.order_justifications}
        known_statuses = {OrderClosureStatus.COMPLETED, OrderClosureStatus.PARTIAL, OrderClosureStatus.BLOCKED}
        self.assertTrue(seen_statuses.intersection(known_statuses))
        rendered = dossier_to_markdown(dossier)
        self.assertIn("Dossie operacional humano", rendered)
        self.assertIn("Ordens executadas", rendered)
        self.assertIn("Justificativa das ordens", order_justifications_markdown(dossier))
        self.assertIn("platform,stage,status", write_csv_lines(dossier_compact_rows(dossier)))

    def test_generate_writes_dossier_artifacts_and_semantic_tables(self) -> None:
        workdir = make_tmp()
        ecosystem_root = workdir / "eco"
        project_root = workdir / "human"
        central_root = workdir / "central"
        ecosystem_root.mkdir()
        central_root.mkdir()
        self.make_repo(ecosystem_root, "docs", "Docs catalogOnly panelReady sameSource openapi readiness.")
        self.make_repo(ecosystem_root, "integracoes", "Integracoes BYOK credentialRef readiness wrangler Cloudflare smoke.")
        bundle = generate(ecosystem_root, project_root, central_root)
        expected_artifacts = (
            project_root / "dados" / "dossie-operacional-humano.json",
            project_root / "ecossistema" / "DOSSIE-OPERACIONAL-HUMANO.md",
            project_root / "ecossistema" / "JUSTIFICATIVA-ORDENS-DE-SERVICO.md",
            project_root / "matrizes" / "dossie-operacional-humano.csv",
            central_root / "reports" / "EXECUTADO__rodada-operacional-mais-humana.md",
        )
        for artifact in expected_artifacts:
            self.assertTrue(artifact.exists())
        counts = table_counts(central_root / "controle-semantico.sqlite")
        self.assertGreater(counts["round_dossiers"], 0)
        self.assertGreater(counts["order_justifications"], 0)
        generated = {entry.path for entry in bundle.generated_files}
        self.assertIn("dados/dossie-operacional-humano.json", generated)

    def test_cli_dossier_returns_success(self) -> None:
        workdir = make_tmp()
        args = ["dossier", "--ecosystem-root", str(workdir), "--project-root", str(workdir / "human")]
        self.assertEqual(main(args), 0)
# Allow running this test module directly (e.g. `python test_operational_dossier.py`).
if __name__ == "__main__":
    unittest.main()