Files
tudo-para-ia-mais-humana-pl…/src/mais_humana/storage.py
2026-04-30 06:42:00 -03:00

346 lines
12 KiB
Python

"""SQLite storage for compact semantic memory."""
from __future__ import annotations
import json
import sqlite3
from pathlib import Path
from typing import Iterable, Sequence
from .models import GeneratedFile, PlatformHumanReport, Recommendation, ServiceOrder, as_plain_data, utc_now
from .operational_models import ExecutionRoundDossier
# DDL executed by connect() on every open. Every statement uses
# CREATE TABLE IF NOT EXISTS, so re-running against an existing database
# is a no-op and safe. Each table pairs a set of queryable columns with a
# denormalised payload_json column holding the full serialized record,
# plus an updated_at timestamp; the TEXT column with the UNIQUE
# constraint in each table is the upsert key used by the upsert_* helpers.
SCHEMA = """
CREATE TABLE IF NOT EXISTS service_orders (
id INTEGER PRIMARY KEY AUTOINCREMENT,
order_id TEXT UNIQUE NOT NULL,
order_type TEXT NOT NULL,
project_id TEXT NOT NULL,
title TEXT NOT NULL,
purpose TEXT NOT NULL,
object_scope TEXT NOT NULL,
reason TEXT NOT NULL,
expected_result TEXT NOT NULL,
status_semantico TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
caminho_arquivo TEXT UNIQUE NOT NULL,
descricao TEXT NOT NULL,
funcao TEXT NOT NULL,
tipo_arquivo TEXT NOT NULL,
criado_ou_alterado_por TEXT NOT NULL,
o_que_mudou TEXT NOT NULL,
relacao_com_ordem TEXT NOT NULL,
status_semantico TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS platform_reports (
id INTEGER PRIMARY KEY AUTOINCREMENT,
platform_id TEXT UNIQUE NOT NULL,
average_score INTEGER NOT NULL,
code_lines INTEGER NOT NULL,
evidence_count INTEGER NOT NULL,
warnings_json TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS recommendations (
id INTEGER PRIMARY KEY AUTOINCREMENT,
recommendation_id TEXT UNIQUE NOT NULL,
platform_id TEXT NOT NULL,
title TEXT NOT NULL,
priority INTEGER NOT NULL,
order_type TEXT NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS round_dossiers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
round_id TEXT UNIQUE NOT NULL,
project_id TEXT NOT NULL,
generated_at TEXT NOT NULL,
blockers_count INTEGER NOT NULL,
pending_count INTEGER NOT NULL,
active_orders_count INTEGER NOT NULL,
output_orders_count INTEGER NOT NULL,
total_code_lines_analyzed INTEGER NOT NULL,
code_lines_available_in_project INTEGER NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS order_justifications (
id INTEGER PRIMARY KEY AUTOINCREMENT,
order_id TEXT UNIQUE NOT NULL,
order_type TEXT NOT NULL,
platform_id TEXT NOT NULL,
closure_status TEXT NOT NULL,
reason TEXT NOT NULL,
execution_summary TEXT NOT NULL,
pending_count INTEGER NOT NULL,
linked_signal_count INTEGER NOT NULL,
linked_gate_count INTEGER NOT NULL,
resulting_order_count INTEGER NOT NULL,
payload_json TEXT NOT NULL,
updated_at TEXT NOT NULL
);
"""
def connect(path: Path) -> sqlite3.Connection:
    """Open the SQLite database at *path*, creating it (and parents) if needed.

    Applies WAL journaling and NORMAL synchronous mode, then ensures the
    schema exists. Returns the live connection; the caller owns its
    lifecycle (commit/close).
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(path)
    for pragma in ("PRAGMA journal_mode=WAL", "PRAGMA synchronous=NORMAL"):
        connection.execute(pragma)
    connection.executescript(SCHEMA)
    return connection
def upsert_files(conn: sqlite3.Connection, files: Iterable[GeneratedFile], status: str = "atualizado") -> None:
    """Insert or refresh one ``files`` row per item, keyed on ``caminho_arquivo``.

    Each row stores the flattened columns plus the full JSON payload and a
    shared timestamp. The transaction is NOT committed here; callers commit.
    """
    stamp = utc_now()
    sql = """
        INSERT INTO files (
            caminho_arquivo, descricao, funcao, tipo_arquivo, criado_ou_alterado_por,
            o_que_mudou, relacao_com_ordem, status_semantico, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(caminho_arquivo) DO UPDATE SET
            descricao=excluded.descricao,
            funcao=excluded.funcao,
            tipo_arquivo=excluded.tipo_arquivo,
            criado_ou_alterado_por=excluded.criado_ou_alterado_por,
            o_que_mudou=excluded.o_que_mudou,
            relacao_com_ordem=excluded.relacao_com_ordem,
            status_semantico=excluded.status_semantico,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
    """
    rows = (
        (
            entry.path,
            entry.description,
            entry.function,
            entry.file_type,
            entry.changed_by,
            entry.change_summary,
            entry.relation_to_order,
            status,
            json.dumps(as_plain_data(entry), ensure_ascii=False, sort_keys=True),
            stamp,
        )
        for entry in files
    )
    conn.executemany(sql, rows)
def upsert_orders(conn: sqlite3.Connection, orders: Iterable[ServiceOrder]) -> None:
    """Insert or refresh one ``service_orders`` row per order, keyed on ``order_id``.

    Enum fields are stored via ``.value``; the full record is serialized into
    ``payload_json``. Committing is left to the caller.
    """
    stamp = utc_now()
    sql = """
        INSERT INTO service_orders (
            order_id, order_type, project_id, title, purpose, object_scope, reason,
            expected_result, status_semantico, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(order_id) DO UPDATE SET
            order_type=excluded.order_type,
            project_id=excluded.project_id,
            title=excluded.title,
            purpose=excluded.purpose,
            object_scope=excluded.object_scope,
            reason=excluded.reason,
            expected_result=excluded.expected_result,
            status_semantico=excluded.status_semantico,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
    """
    rows = (
        (
            record.order_id,
            record.order_type.value,
            record.project_id,
            record.title,
            record.purpose,
            record.object_scope,
            record.reason,
            record.expected_result,
            record.status.value,
            json.dumps(as_plain_data(record), ensure_ascii=False, sort_keys=True),
            stamp,
        )
        for record in orders
    )
    conn.executemany(sql, rows)
def upsert_reports(conn: sqlite3.Connection, reports: Sequence[PlatformHumanReport]) -> None:
    """Insert or refresh one ``platform_reports`` row per report, keyed on ``platform_id``.

    Scan warnings are flattened into their own JSON column so they are
    queryable without parsing the full payload. Caller commits.
    """
    stamp = utc_now()
    sql = """
        INSERT INTO platform_reports (
            platform_id, average_score, code_lines, evidence_count, warnings_json,
            payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(platform_id) DO UPDATE SET
            average_score=excluded.average_score,
            code_lines=excluded.code_lines,
            evidence_count=excluded.evidence_count,
            warnings_json=excluded.warnings_json,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
    """
    rows = (
        (
            rpt.platform.platform_id,
            rpt.average_score,
            rpt.scan.code_lines,
            len(rpt.scan.evidence),
            json.dumps(list(rpt.scan.warnings), ensure_ascii=False),
            json.dumps(as_plain_data(rpt), ensure_ascii=False, sort_keys=True),
            stamp,
        )
        for rpt in reports
    )
    conn.executemany(sql, rows)
def upsert_recommendations(conn: sqlite3.Connection, recommendations: Iterable[Recommendation]) -> None:
    """Insert or refresh one ``recommendations`` row per item, keyed on ``recommendation_id``.

    The suggested order type enum is stored as its ``.value``. Caller commits.
    """
    stamp = utc_now()
    sql = """
        INSERT INTO recommendations (
            recommendation_id, platform_id, title, priority, order_type, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(recommendation_id) DO UPDATE SET
            platform_id=excluded.platform_id,
            title=excluded.title,
            priority=excluded.priority,
            order_type=excluded.order_type,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
    """
    rows = (
        (
            rec.recommendation_id,
            rec.platform_id,
            rec.title,
            rec.priority,
            rec.suggested_order_type.value,
            json.dumps(as_plain_data(rec), ensure_ascii=False, sort_keys=True),
            stamp,
        )
        for rec in recommendations
    )
    conn.executemany(sql, rows)
def upsert_round_dossier(conn: sqlite3.Connection, dossier: ExecutionRoundDossier) -> None:
    """Insert or refresh the round dossier plus its per-order justifications.

    Writes one ``round_dossiers`` row keyed on ``round_id`` (collection sizes
    are flattened into count columns), then one ``order_justifications`` row
    per justification keyed on ``order_id``. Caller commits.
    """
    stamp = utc_now()
    conn.execute(
        """
        INSERT INTO round_dossiers (
            round_id, project_id, generated_at, blockers_count, pending_count,
            active_orders_count, output_orders_count, total_code_lines_analyzed,
            code_lines_available_in_project, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(round_id) DO UPDATE SET
            project_id=excluded.project_id,
            generated_at=excluded.generated_at,
            blockers_count=excluded.blockers_count,
            pending_count=excluded.pending_count,
            active_orders_count=excluded.active_orders_count,
            output_orders_count=excluded.output_orders_count,
            total_code_lines_analyzed=excluded.total_code_lines_analyzed,
            code_lines_available_in_project=excluded.code_lines_available_in_project,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
        """,
        (
            dossier.round_id,
            dossier.project_id,
            dossier.generated_at,
            len(dossier.blockers),
            len(dossier.pending_items),
            len(dossier.active_input_orders),
            len(dossier.output_orders),
            dossier.total_code_lines_analyzed,
            dossier.code_lines_available_in_project,
            json.dumps(as_plain_data(dossier), ensure_ascii=False, sort_keys=True),
            stamp,
        ),
    )
    justification_sql = """
        INSERT INTO order_justifications (
            order_id, order_type, platform_id, closure_status, reason,
            execution_summary, pending_count, linked_signal_count, linked_gate_count,
            resulting_order_count, payload_json, updated_at
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(order_id) DO UPDATE SET
            order_type=excluded.order_type,
            platform_id=excluded.platform_id,
            closure_status=excluded.closure_status,
            reason=excluded.reason,
            execution_summary=excluded.execution_summary,
            pending_count=excluded.pending_count,
            linked_signal_count=excluded.linked_signal_count,
            linked_gate_count=excluded.linked_gate_count,
            resulting_order_count=excluded.resulting_order_count,
            payload_json=excluded.payload_json,
            updated_at=excluded.updated_at
    """
    justification_rows = (
        (
            just.order_id,
            just.order_type.value,
            just.platform_id,
            just.closure_status.value,
            just.reason,
            just.execution_summary,
            len(just.pending_items),
            len(just.linked_signals),
            len(just.linked_gates),
            len(just.resulting_orders),
            json.dumps(as_plain_data(just), ensure_ascii=False, sort_keys=True),
            stamp,
        )
        for just in dossier.order_justifications
    )
    conn.executemany(justification_sql, justification_rows)
def write_semantic_state(
    sqlite_path: Path,
    files: Sequence[GeneratedFile],
    orders: Sequence[ServiceOrder],
    reports: Sequence[PlatformHumanReport],
    recommendations: Sequence[Recommendation],
    round_dossier: ExecutionRoundDossier | None = None,
) -> None:
    """Persist the full semantic state into the SQLite database at *sqlite_path*.

    Upserts files, orders, reports and recommendations, plus the round
    dossier when given, in a single transaction, then closes the connection.

    Bug fix: the previous ``with connect(...)`` only used sqlite3's
    transaction context manager, which commits/rolls back but does NOT
    close the connection — leaking a file handle (and WAL sidecar files)
    on every call. The connection is now closed in a ``finally``.
    """
    conn = connect(sqlite_path)
    try:
        # `with conn` commits on success and rolls back on error,
        # matching the explicit conn.commit() of the original.
        with conn:
            upsert_files(conn, files)
            upsert_orders(conn, orders)
            upsert_reports(conn, reports)
            upsert_recommendations(conn, recommendations)
            if round_dossier is not None:
                upsert_round_dossier(conn, round_dossier)
    finally:
        conn.close()
def table_counts(sqlite_path: Path) -> dict[str, int]:
    """Return per-table row counts for the known semantic-state tables.

    Returns {} when the database file does not exist. A table that cannot
    be counted (missing table, corrupt database) maps to -1 instead of
    raising, so callers get a best-effort summary.

    Bug fix: ``with sqlite3.connect(...)`` only scopes the transaction and
    never closes the connection, leaking a file handle per call; the
    connection is now closed in a ``finally``.
    """
    if not sqlite_path.exists():
        return {}
    tables = (
        "service_orders",
        "files",
        "platform_reports",
        "recommendations",
        "round_dossiers",
        "order_justifications",
    )
    conn = sqlite3.connect(sqlite_path)
    try:
        counts: dict[str, int] = {}
        for table in tables:
            try:
                # Table names come from the fixed tuple above, so the
                # f-string cannot inject untrusted SQL.
                row = conn.execute(f"SELECT COUNT(*) FROM {table}").fetchone()
                counts[table] = int(row[0])
            except sqlite3.DatabaseError:
                counts[table] = -1
        return counts
    finally:
        conn.close()