Files
zavrsni-rad-otel-app/backend/app/routers/aw.py
2026-05-11 10:58:46 +02:00

465 lines
17 KiB
Python

from __future__ import annotations
import asyncio
import logging
from datetime import datetime, timezone
from typing import Any, Literal
import httpx
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
from opentelemetry import propagate, trace
from sqlalchemy.orm import sessionmaker, Session
from app.core.audit import ExportRecord, append_audit, current_span_context
from app.core.config import settings
from app.core.executor import get_executor
from app.core.export import to_pdf_bytes
from app.core.security import FrontendPrincipal, require_frontend_principal
from app.domain.aw import analytics
# Module-scoped logger and OTel tracer for this router.
LOGGER = logging.getLogger(__name__)
tracer = trace.get_tracer("otel-bi.routers.aw")
# All routes below are mounted under /api/aw.
router = APIRouter(prefix="/api/aw", tags=["aw"])
# MIME types for the two supported export formats.
_XLSX_MEDIA = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
_PDF_MEDIA = "application/pdf"
def _trace_headers() -> dict[str, str]:
    """Expose the active trace/span ids as response headers, if a span is recording."""
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return {
            "x-trace-id": format(span_ctx.trace_id, "032x"),
            "x-span-id": format(span_ctx.span_id, "016x"),
        }
    return {}
def _propagation_headers() -> dict[str, str]:
    """Build W3C trace-context headers for outbound calls to the analytics service."""
    carrier: dict[str, str] = {}
    propagate.inject(carrier)
    return carrier
async def _get(client: httpx.AsyncClient, path: str, params: dict | None = None) -> Any:
    """GET *path* on the analytics service and return the decoded JSON body.

    Raises:
        HTTPException: 502 when the service answered with an error status,
            503 when the service could not be reached at all.
    """
    try:
        r = await client.get(path, params=params, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause so the upstream failure stays visible in tracebacks/logs.
        raise HTTPException(
            status_code=502,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
async def _post(client: httpx.AsyncClient, path: str, json: dict) -> Any:
    """POST *json* to *path* on the analytics service and return the decoded body.

    A 404 from the service is passed through as 404 (unknown resource, e.g. a
    bad job name); every other error status maps to 502.

    Raises:
        HTTPException: 404/502 on an error response, 503 when unreachable.
    """
    try:
        r = await client.post(path, json=json, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause so the upstream failure stays visible in tracebacks/logs.
        raise HTTPException(
            status_code=502 if exc.response.status_code != 404 else 404,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
def _record_export(
    pg_factory: sessionmaker[Session],
    domain: str,
    source_view: str,
    fmt: str,
    filters: dict,
    row_count: int,
    file_size_bytes: int,
    actor_id: str,
    trace_id: str | None,
    span_id: str | None,
) -> None:
    """Persist an ExportRecord (best effort) and always append an audit entry.

    A failed metadata write is logged and swallowed so an export download is
    never blocked by bookkeeping; the audit trail entry is written regardless.
    """
    try:
        with pg_factory() as session:
            record = ExportRecord(
                domain=domain,
                service="otel-bi-backend",
                source_view=source_view,
                format=fmt,
                filters_applied=filters,
                row_count=row_count,
                file_size_bytes=file_size_bytes,
                actor_id=actor_id,
                trace_id=trace_id,
                span_id=span_id,
            )
            session.add(record)
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record export metadata: %s", exc)
    append_audit(
        pg_factory,
        action="export.created",
        actor_type="user",
        actor_id=actor_id,
        domain=domain,
        service="otel-bi-backend",
        entity_type=source_view,
        payload={
            "format": fmt,
            "row_count": row_count,
            "file_size_bytes": file_size_bytes,
            **filters,
        },
    )
async def _proxy_xlsx(
    client: httpx.AsyncClient,
    go_path: str,
    params: dict,
    filename_stem: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Fetch XLSX bytes from the Go analytics service, write an ExportRecord,
    and return the file as an attachment response.

    Raises:
        HTTPException: 502 on an error response from Go, 503 when unreachable.
    """
    try:
        r = await client.get(go_path, params=params, headers=_propagation_headers())
        r.raise_for_status()
    except httpx.HTTPStatusError as exc:
        raise HTTPException(
            status_code=502,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
    content = r.content
    # Go reports the exported row count in a response header.
    row_count = int(r.headers.get("X-Row-Count", "0"))
    today = datetime.now(timezone.utc).strftime("%Y%m%d")
    filename = f"{filename_stem}_{today}.xlsx"
    trace_id, span_id = current_span_context()
    # DB writes are blocking; keep them off the event loop.
    await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: _record_export(pg_factory, domain, source_view, "xlsx", filters,
                               row_count, len(content), actor_id, trace_id, span_id),
    )
    return Response(
        content=content,
        media_type=_XLSX_MEDIA,
        # BUG FIX: the computed dated filename was dropped and a literal
        # placeholder string was sent in the header; use the real filename.
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
def _make_pdf(
    data: list[dict],
    filename_stem: str,
    pdf_title: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Render *data* to PDF bytes, write an ExportRecord, and return the file
    as an attachment response.

    Runs synchronously (callers dispatch it to the shared executor); the
    rendering is wrapped in its own span with export attributes.
    """
    with tracer.start_as_current_span(f"export.{domain}.{source_view}") as span:
        span.set_attribute("export.format", "pdf")
        span.set_attribute("export.row_count", len(data))
        content = to_pdf_bytes(data, title=pdf_title)
        span.set_attribute("export.file_size_bytes", len(content))
        today = datetime.now(timezone.utc).strftime("%Y%m%d")
        filename = f"{filename_stem}_{today}.pdf"
        trace_id, span_id = current_span_context()
        _record_export(pg_factory, domain, source_view, "pdf", filters,
                       len(data), len(content), actor_id, trace_id, span_id)
        return Response(
            content=content,
            media_type=_PDF_MEDIA,
            # BUG FIX: the computed dated filename was dropped and a literal
            # placeholder string was sent in the header; use the real filename.
            headers={"Content-Disposition": f'attachment; filename="{filename}"'},
        )
# ---------------------------------------------------------------------------
# Sales
# ---------------------------------------------------------------------------
@router.get("/sales/kpis")
async def aw_sales_kpis(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Proxy the AdventureWorks sales KPI summary from the analytics service."""
    for key, value in _trace_headers().items():
        response.headers[key] = value
    analytics_client = request.app.state.analytics_client
    return await _get(analytics_client, "/aw/sales/kpis")
@router.get("/sales/history")
async def aw_sales_history(
    response: Response, request: Request,
    days_back: int = Query(default=settings.default_history_days, ge=30, le=1460),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Proxy the daily sales history series for the requested look-back window."""
    response.headers.update(_trace_headers())
    analytics_client = request.app.state.analytics_client
    query = {"days_back": days_back}
    return await _get(analytics_client, "/aw/sales/history", query)
@router.get("/sales/forecast")
async def aw_sales_forecast(
    response: Response, request: Request,
    horizon_days: int = Query(default=settings.forecast_horizon_days, ge=7, le=180),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch the sales forecast and persist it for later retrieval."""
    response.headers.update(_trace_headers())
    state = request.app.state
    rows = await _get(state.analytics_client, "/aw/sales/forecast",
                      {"horizon_days": horizon_days})

    def _persist() -> None:
        # Blocking DB write; runs on the shared executor, not the event loop.
        analytics.persist_forecast(state.pg_factory, rows, horizon_days, "api.sales.forecast")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return rows
# ---------------------------------------------------------------------------
# Rep scores & product demand
# ---------------------------------------------------------------------------
@router.get("/reps/scores")
async def aw_rep_scores(
    response: Response, request: Request,
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch the top-N sales rep scores and persist them for later retrieval."""
    response.headers.update(_trace_headers())
    state = request.app.state
    rows = await _get(state.analytics_client, "/aw/reps/scores", {"top_n": top_n})

    def _persist() -> None:
        # Blocking DB write; runs on the shared executor, not the event loop.
        analytics.persist_rep_scores(state.pg_factory, rows, top_n, "api.reps.scores")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return rows
@router.get("/products/demand")
async def aw_product_demand(
    response: Response, request: Request,
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch the top-N product demand scores and persist them for later retrieval."""
    response.headers.update(_trace_headers())
    state = request.app.state
    rows = await _get(state.analytics_client, "/aw/products/demand", {"top_n": top_n})

    def _persist() -> None:
        # Blocking DB write; runs on the shared executor, not the event loop.
        analytics.persist_product_demand(state.pg_factory, rows, top_n, "api.products.demand")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return rows
# ---------------------------------------------------------------------------
# Anomaly detection
# ---------------------------------------------------------------------------
@router.get("/anomalies")
async def aw_anomalies(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch detected anomalies and persist the run for later retrieval."""
    response.headers.update(_trace_headers())
    state = request.app.state
    rows = await _get(state.analytics_client, "/aw/anomalies")

    def _persist() -> None:
        # Blocking DB write; runs on the shared executor, not the event loop.
        analytics.persist_anomaly_run(state.pg_factory, rows, "api.aw.anomalies")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return rows
# ---------------------------------------------------------------------------
# Stored records
# ---------------------------------------------------------------------------
@router.get("/records/forecasts")
async def aw_records_forecasts(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return stored forecast records from Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _query() -> list[dict]:
        return analytics.list_forecasts(factory, limit=limit)

    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(get_executor(), _query)
@router.get("/records/rep-scores")
async def aw_records_rep_scores(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return stored sales-rep score records from Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _query() -> list[dict]:
        return analytics.list_rep_scores(factory, limit=limit)

    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(get_executor(), _query)
@router.get("/records/product-demand")
async def aw_records_product_demand(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return stored product-demand records from Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _query() -> list[dict]:
        return analytics.list_product_demand(factory, limit=limit)

    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(get_executor(), _query)
# ---------------------------------------------------------------------------
# Exports
# ---------------------------------------------------------------------------
@router.get("/export/sales-history")
async def export_aw_sales_history(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    days_back: int = Query(default=settings.default_history_days, ge=30, le=1460),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export sales history: XLSX proxied from Go, or a locally rendered PDF."""
    state = request.app.state
    actor = principal.subject
    query = {"days_back": days_back}
    if format != "pdf":
        return await _proxy_xlsx(
            state.analytics_client, "/aw/export/sales-history", query,
            "aw_sales_history", "aw", "sales-history", query, actor, state.pg_factory,
        )
    rows = await _get(state.analytics_client, "/aw/sales/history", query)
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "aw_sales_history", "AdventureWorks — Sales History",
                          "aw", "sales-history", query, actor, state.pg_factory),
    )
@router.get("/export/sales-forecast")
async def export_aw_sales_forecast(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    horizon_days: int = Query(default=settings.forecast_horizon_days, ge=7, le=180),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export the sales forecast: XLSX proxied from Go, or a locally rendered PDF."""
    state = request.app.state
    actor = principal.subject
    query = {"horizon_days": horizon_days}
    if format != "pdf":
        return await _proxy_xlsx(
            state.analytics_client, "/aw/export/sales-forecast", query,
            "aw_sales_forecast", "aw", "sales-forecast", query, actor, state.pg_factory,
        )
    rows = await _get(state.analytics_client, "/aw/sales/forecast", query)
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "aw_sales_forecast", "AdventureWorks — Sales Forecast",
                          "aw", "sales-forecast", query, actor, state.pg_factory),
    )
@router.get("/export/rep-scores")
async def export_aw_rep_scores(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export rep scores: XLSX proxied from Go, or a locally rendered PDF."""
    state = request.app.state
    actor = principal.subject
    query = {"top_n": top_n}
    if format != "pdf":
        return await _proxy_xlsx(
            state.analytics_client, "/aw/export/rep-scores", query,
            "aw_rep_scores", "aw", "rep-scores", query, actor, state.pg_factory,
        )
    rows = await _get(state.analytics_client, "/aw/reps/scores", query)
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "aw_rep_scores", "AdventureWorks — Sales Rep Performance",
                          "aw", "rep-scores", query, actor, state.pg_factory),
    )
@router.get("/export/product-demand")
async def export_aw_product_demand(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export product demand: XLSX proxied from Go, or a locally rendered PDF."""
    state = request.app.state
    actor = principal.subject
    query = {"top_n": top_n}
    if format != "pdf":
        return await _proxy_xlsx(
            state.analytics_client, "/aw/export/product-demand", query,
            "aw_product_demand", "aw", "product-demand", query, actor, state.pg_factory,
        )
    rows = await _get(state.analytics_client, "/aw/products/demand", query)
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "aw_product_demand", "AdventureWorks — Product Demand Scores",
                          "aw", "product-demand", query, actor, state.pg_factory),
    )
# ---------------------------------------------------------------------------
# Job triggers
# ---------------------------------------------------------------------------
@router.post("/jobs/{job_name}/trigger")
async def trigger_aw_job(
    job_name: str, response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Trigger the named AW scheduler job on the analytics service."""
    for key, value in _trace_headers().items():
        response.headers[key] = value
    client = request.app.state.analytics_client
    return await _post(client, f"/scheduler/aw/{job_name}/trigger", {})
@router.get("/jobs")
async def aw_job_history(
    response: Response, request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List the most recent AW job executions recorded in Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(get_executor(), _list_jobs, factory, "aw", limit)
def _list_jobs(pg_factory, domain: str, limit: int) -> list[dict]:
    """Return the newest job executions for *domain* as JSON-ready dicts."""
    # Imported locally to keep the module import graph light at startup.
    from app.core.audit import JobExecution

    def _serialize(job) -> dict:
        return {
            "id": job.id,
            "job_name": job.job_name,
            "domain": job.domain,
            "status": job.status,
            "started_at": job.started_at.isoformat(),
            "completed_at": job.completed_at.isoformat() if job.completed_at else None,
            "duration_ms": job.duration_ms,
            "records_processed": job.records_processed,
            "error_message": job.error_message,
            "trace_id": job.trace_id,
        }

    with pg_factory() as session:
        jobs = (
            session.query(JobExecution)
            .filter_by(domain=domain)
            .order_by(JobExecution.started_at.desc())
            .limit(limit)
            .all()
        )
        return [_serialize(job) for job in jobs]