from __future__ import annotations
|
|
|
|
import logging
|
|
from datetime import datetime, timezone
|
|
from uuid import uuid4
|
|
|
|
from opentelemetry import trace
|
|
from sqlalchemy import DateTime, Integer, String, Text, JSON
|
|
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, sessionmaker, Session
|
|
|
|
LOGGER = logging.getLogger(__name__)
|
|
|
|
|
|
def _utcnow() -> datetime:
|
|
return datetime.now(timezone.utc)
|
|
|
|
|
|
def current_span_context() -> tuple[str | None, str | None]:
    """Return the active OTel (trace_id, span_id) as zero-padded hex strings.

    Yields ``(None, None)`` when no valid span is recording, so callers can
    store the pair directly into nullable columns.
    """
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return format(span_ctx.trace_id, "032x"), format(span_ctx.span_id, "016x")
    return None, None
|
|
|
|
|
|
class SharedBase(DeclarativeBase):
    """Declarative base shared by the cross-cutting observability models below."""
|
|
|
|
|
|
class AuditLog(SharedBase):
    """One append-only audit trail entry, correlated to an OTel span when one is active."""

    __tablename__ = "audit_log"

    # Primary key: random UUID4 rendered as its 36-character string form.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Timezone-aware UTC timestamp of the event; defaults to insert time via _utcnow.
    occurred_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    # Short machine-readable action name (indexed for lookups).
    action: Mapped[str] = mapped_column(String(100), index=True)
    # Outcome marker; "success" unless the writer says otherwise.
    status: Mapped[str] = mapped_column(String(20), default="success")
    # Kind of actor that triggered the action; exact vocabulary is defined by callers.
    actor_type: Mapped[str] = mapped_column(String(20), index=True)
    # Optional identifier of the concrete actor.
    actor_id: Mapped[str | None] = mapped_column(String(200), nullable=True)
    # Business domain and owning service the event belongs to.
    domain: Mapped[str] = mapped_column(String(50), index=True)
    service: Mapped[str] = mapped_column(String(50), index=True)
    # Optional type of the entity the action touched.
    entity_type: Mapped[str | None] = mapped_column(String(100), nullable=True, index=True)
    # OTel correlation ids as zero-padded hex (32 / 16 chars); NULL when no span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Arbitrary structured extras serialized as JSON; defaults to an empty dict.
    payload: Mapped[dict] = mapped_column(JSON, default=dict)
|
|
|
|
|
|
class JobExecution(SharedBase):
    """Lifecycle record for one background-job run (started via record_job_start)."""

    __tablename__ = "job_executions"

    # Primary key: random UUID4 string; record_job_start supplies it explicitly.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # UTC start timestamp (defaults to insert time) and completion timestamp
    # (NULL while the job is still running).
    started_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    completed_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True), nullable=True)
    # Job identity and owning business domain.
    job_name: Mapped[str] = mapped_column(String(100), index=True)
    domain: Mapped[str] = mapped_column(String(50), index=True)
    # Lifecycle state; the helpers in this module write "running" / "success" / "failure".
    status: Mapped[str] = mapped_column(String(20), index=True)
    # Outcome metrics, filled on completion; NULL until then.
    records_processed: Mapped[int | None] = mapped_column(Integer, nullable=True)
    duration_ms: Mapped[int | None] = mapped_column(Integer, nullable=True)
    # Failure detail; record_job_failure truncates it to 2000 characters.
    error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
    # OTel correlation ids as zero-padded hex (32 / 16 chars); NULL when unavailable.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
|
|
|
|
|
|
class ExportRecord(SharedBase):
    """Provenance row describing one data export (who exported what, from where)."""

    __tablename__ = "export_records"

    # Primary key: random UUID4 rendered as its 36-character string form.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # UTC timestamp of the export; defaults to insert time via _utcnow.
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    # Business domain and service that produced the export.
    domain: Mapped[str] = mapped_column(String(50), index=True)
    service: Mapped[str] = mapped_column(String(50))
    # Name of the view/query the data was read from.
    source_view: Mapped[str] = mapped_column(String(100), index=True)
    # Output format identifier (short code, e.g. a file-type tag — semantics set by callers).
    format: Mapped[str] = mapped_column(String(10))
    # Filter parameters that shaped the export, serialized as JSON; defaults to {}.
    filters_applied: Mapped[dict] = mapped_column(JSON, default=dict)
    # Size of the result: rows exported and bytes written.
    row_count: Mapped[int] = mapped_column(Integer)
    file_size_bytes: Mapped[int] = mapped_column(Integer)
    # Optional identifier of who requested the export.
    actor_id: Mapped[str | None] = mapped_column(String(200), nullable=True)
    # OTel correlation ids as zero-padded hex (32 / 16 chars); NULL when unavailable.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
|
|
|
|
|
|
def append_audit(
    factory: sessionmaker[Session],
    *,
    action: str,
    actor_type: str,
    domain: str,
    service: str,
    entity_type: str | None = None,
    actor_id: str | None = None,
    status: str = "success",
    payload: dict | None = None,
) -> None:
    """Write a single AuditLog row on a best-effort basis.

    The current OpenTelemetry span context (when valid) is attached for
    correlation. Any persistence failure is logged as a warning and
    swallowed so that auditing never breaks the calling code path.
    """
    trace_id, span_id = current_span_context()
    columns = {
        "action": action,
        "actor_type": actor_type,
        "actor_id": actor_id,
        "domain": domain,
        "service": service,
        "entity_type": entity_type,
        "trace_id": trace_id,
        "span_id": span_id,
        "status": status,
        "payload": payload or {},
    }
    try:
        with factory() as session:
            session.add(AuditLog(**columns))
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to write audit record (action=%s): %s", action, exc)
|
|
|
|
|
|
def record_job_start(
    factory: sessionmaker[Session],
    job_name: str,
    domain: str,
    trace_id: str | None,
    span_id: str | None,
) -> str:
    """Insert a JobExecution row in the "running" state.

    Returns the generated job id regardless of whether the insert
    succeeded; persistence failures are logged as warnings and swallowed.
    """
    job_id = str(uuid4())
    row = {
        "id": job_id,
        "job_name": job_name,
        "domain": domain,
        "status": "running",
        "trace_id": trace_id,
        "span_id": span_id,
    }
    try:
        with factory() as session:
            session.add(JobExecution(**row))
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record job start (job=%s): %s", job_name, exc)
    return job_id
|
|
|
|
|
|
def record_job_complete(
    factory: sessionmaker[Session],
    job_id: str,
    started_at: datetime,
    records_processed: int,
) -> None:
    """Mark a JobExecution row as successful, best-effort.

    Sets ``completed_at``, ``records_processed`` and the wall-clock
    ``duration_ms``. Like the other helpers in this module, any failure is
    logged as a warning rather than raised.
    """
    # Consistency fix: use the shared _utcnow() helper instead of an inline
    # datetime.now(timezone.utc).
    now = _utcnow()
    try:
        # Robustness fix: the subtraction is now inside the try so that a
        # naive `started_at` (aware-minus-naive raises TypeError) degrades to
        # a warning, matching this function's best-effort contract.
        duration_ms = int((now - started_at).total_seconds() * 1000)
        with factory() as session:
            session.query(JobExecution).filter_by(id=job_id).update({
                "status": "success",
                "completed_at": now,
                "records_processed": records_processed,
                "duration_ms": duration_ms,
            })
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record job completion (id=%s): %s", job_id, exc)
|
|
|
|
|
|
def record_job_failure(
    factory: sessionmaker[Session],
    job_id: str,
    started_at: datetime,
    error_message: str,
) -> None:
    """Mark a JobExecution row as failed, best-effort.

    Sets ``completed_at``, ``duration_ms`` and a truncated
    ``error_message``. Like the other helpers in this module, any failure
    is logged as a warning rather than raised.
    """
    # Consistency fix: use the shared _utcnow() helper instead of an inline
    # datetime.now(timezone.utc).
    now = _utcnow()
    try:
        # Robustness fix: the subtraction is now inside the try so that a
        # naive `started_at` (aware-minus-naive raises TypeError) degrades to
        # a warning, matching this function's best-effort contract.
        duration_ms = int((now - started_at).total_seconds() * 1000)
        with factory() as session:
            session.query(JobExecution).filter_by(id=job_id).update({
                "status": "failure",
                "completed_at": now,
                "duration_ms": duration_ms,
                # Cap the stored message so unbounded tracebacks stay manageable.
                "error_message": error_message[:2000],
            })
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record job failure (id=%s): %s", job_id, exc)
|