Add initial work from Codex
This commit is contained in:
1
backend/microservices/persistence/__init__.py
Normal file
1
backend/microservices/persistence/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
"""PostgreSQL persistence microservice."""
|
||||
176
backend/microservices/persistence/main.py
Normal file
176
backend/microservices/persistence/main.py
Normal file
@@ -0,0 +1,176 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
from fastapi import Depends, FastAPI, Query, Response
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.otel import (
|
||||
TelemetryProviders,
|
||||
configure_otel,
|
||||
instrument_fastapi,
|
||||
instrument_sqlalchemy_engines,
|
||||
shutdown_otel,
|
||||
)
|
||||
from app.core.security import InternalPrincipal, require_internal_principal
|
||||
from app.db.postgres import (
|
||||
create_postgres_engine,
|
||||
create_postgres_session_factory,
|
||||
initialize_postgres_schema,
|
||||
)
|
||||
from app.services.persistence_service import PersistenceService
|
||||
from microservices.common.http import current_trace_headers
|
||||
|
||||
logging.basicConfig(level=settings.log_level)
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AuditLogIn(BaseModel):
    """Request body for recording one HTTP audit-log entry.

    Field values are forwarded verbatim to
    ``PersistenceService.record_audit_log`` via ``model_dump()``.
    """

    # HTTP method of the audited request (e.g. "GET", "POST").
    method: str
    # Request path as seen by the audited service.
    path: str
    # Raw query string; empty when the request had none.
    query_string: str = ""
    # HTTP status code returned to the client.
    status_code: int
    # Request handling time in milliseconds.
    duration_ms: float
    # Optional distributed-tracing identifiers, when available.
    trace_id: str | None = None
    span_id: str | None = None
    # Optional client metadata captured at the edge.
    client_ip: str | None = None
    user_agent: str | None = None
    # Free-form extra context; defaults to an empty dict per instance.
    details: dict = Field(default_factory=dict)
|
||||
|
||||
|
||||
class ForecastRunIn(BaseModel):
    """Request body for persisting one forecast run.

    Field values are forwarded verbatim to
    ``PersistenceService.record_forecast_run`` via ``model_dump()``.
    """

    # Forecast horizon in days.
    horizon_days: int
    # Forecast output rows; schema is defined by the producing service.
    payload: list[dict]
    # What triggered the run (caller-supplied label).
    trigger_source: str
    # Optional distributed-tracing identifiers, when available.
    trace_id: str | None = None
    span_id: str | None = None
|
||||
|
||||
|
||||
class RankingRunIn(BaseModel):
    """Request body for persisting one ranking run.

    Field values are forwarded verbatim to
    ``PersistenceService.record_ranking_run`` via ``model_dump()``.
    """

    # Number of top-ranked items the run produced.
    top_n: int
    # Ranking output rows; schema is defined by the producing service.
    payload: list[dict]
    # What triggered the run (caller-supplied label).
    trigger_source: str
    # Optional distributed-tracing identifiers, when available.
    trace_id: str | None = None
    span_id: str | None = None
|
||||
|
||||
|
||||
class RecommendationRunIn(BaseModel):
    """Request body for persisting one recommendation run.

    Field values are forwarded verbatim to
    ``PersistenceService.record_recommendation_run`` via ``model_dump()``.
    """

    # Recommendation output rows; schema is defined by the producing service.
    payload: list[dict]
    # What triggered the run (caller-supplied label).
    trigger_source: str
    # Optional distributed-tracing identifiers, when available.
    trace_id: str | None = None
    span_id: str | None = None
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage service start-up and shutdown.

    On entry: configure OpenTelemetry, build the PostgreSQL engine,
    initialize the schema, instrument the engine, and attach a
    ``PersistenceService`` to ``app.state``. On exit: dispose of the
    engine and shut telemetry down.
    """
    # Telemetry first, so engine instrumentation below has providers to use.
    providers: TelemetryProviders = configure_otel(settings)

    pg_engine = create_postgres_engine()
    initialize_postgres_schema(pg_engine)
    instrument_sqlalchemy_engines({"appdb": pg_engine})

    session_factory = create_postgres_session_factory(pg_engine)
    app.state.persistence_service = PersistenceService(session_factory)
    LOGGER.info("Persistence service ready with PostgreSQL")

    yield

    # Tear down in reverse order of acquisition.
    pg_engine.dispose()
    shutdown_otel(providers)
|
||||
|
||||
|
||||
# Application instance; `lifespan` wires up telemetry and the database.
app = FastAPI(title="persistence-service", version="0.1.0", lifespan=lifespan)
# Attach OpenTelemetry middleware/instrumentation to this app.
instrument_fastapi(app)
|
||||
|
||||
|
||||
def _service() -> PersistenceService:
    """Return the ``PersistenceService`` installed on app state by ``lifespan``."""
    service: PersistenceService = app.state.persistence_service
    return service
|
||||
|
||||
|
||||
@app.get("/internal/health")
def health(response: Response) -> dict:
    """Liveness probe; echoes trace headers and reports the service name."""
    trace_headers = current_trace_headers()
    response.headers.update(trace_headers)
    return dict(status="ok", service="persistence-service")
|
||||
|
||||
|
||||
@app.post("/internal/audit-logs")
def create_audit_log(
    payload: AuditLogIn,
    response: Response,
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> dict:
    """Persist one audit-log entry; requires an internal principal."""
    response.headers.update(current_trace_headers())
    record = payload.model_dump()
    _service().record_audit_log(**record)
    return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/internal/forecast-runs")
def create_forecast_run(
    payload: ForecastRunIn,
    response: Response,
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> dict:
    """Persist one forecast run; requires an internal principal."""
    response.headers.update(current_trace_headers())
    record = payload.model_dump()
    _service().record_forecast_run(**record)
    return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/internal/ranking-runs")
def create_ranking_run(
    payload: RankingRunIn,
    response: Response,
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> dict:
    """Persist one ranking run; requires an internal principal."""
    response.headers.update(current_trace_headers())
    record = payload.model_dump()
    _service().record_ranking_run(**record)
    return {"status": "ok"}
|
||||
|
||||
|
||||
@app.post("/internal/recommendation-runs")
def create_recommendation_run(
    payload: RecommendationRunIn,
    response: Response,
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> dict:
    """Persist one recommendation run; requires an internal principal."""
    response.headers.update(current_trace_headers())
    record = payload.model_dump()
    _service().record_recommendation_run(**record)
    return {"status": "ok"}
|
||||
|
||||
|
||||
@app.get("/internal/audit-logs")
def list_audit_logs(
    response: Response,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> list[dict]:
    """Return the most recent audit-log entries, capped at ``limit`` (1-500)."""
    response.headers.update(current_trace_headers())
    service = _service()
    return service.list_audit_logs(limit=limit)
|
||||
|
||||
|
||||
@app.get("/internal/forecast-runs")
def list_forecast_runs(
    response: Response,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> list[dict]:
    """Return the most recent forecast runs, capped at ``limit`` (1-500)."""
    response.headers.update(current_trace_headers())
    service = _service()
    return service.list_forecast_runs(limit=limit)
|
||||
|
||||
|
||||
@app.get("/internal/ranking-runs")
def list_ranking_runs(
    response: Response,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> list[dict]:
    """Return the most recent ranking runs, capped at ``limit`` (1-500)."""
    response.headers.update(current_trace_headers())
    service = _service()
    return service.list_ranking_runs(limit=limit)
|
||||
|
||||
|
||||
@app.get("/internal/recommendation-runs")
def list_recommendation_runs(
    response: Response,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    _auth: InternalPrincipal = Depends(require_internal_principal),
) -> list[dict]:
    """Return the most recent recommendation runs, capped at ``limit`` (1-500)."""
    response.headers.update(current_trace_headers())
    service = _service()
    return service.list_recommendation_runs(limit=limit)
|
||||
Reference in New Issue
Block a user