Files
zavrsni-rad-otel-app/backend/app/routers/wwi.py
2026-05-11 10:58:46 +02:00

441 lines
16 KiB
Python

from __future__ import annotations
import asyncio
import logging
from datetime import datetime, timezone
from typing import Any, Literal
import httpx
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
from opentelemetry import propagate, trace
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker, Session
from app.core.audit import ExportRecord, append_audit, current_span_context
from app.core.config import settings
from app.core.executor import get_executor
from app.core.export import to_pdf_bytes
from app.core.security import FrontendPrincipal, require_frontend_principal
from app.domain.wwi import analytics
# Module-level logger and OpenTelemetry tracer for this router.
LOGGER = logging.getLogger(__name__)
tracer = trace.get_tracer("otel-bi.routers.wwi")
# All WideWorldImporters endpoints are mounted under /api/wwi.
router = APIRouter(prefix="/api/wwi", tags=["wwi"])
# MIME types for the two supported export formats.
_XLSX_MEDIA = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
_PDF_MEDIA = "application/pdf"
class WhatIfRequest(BaseModel):
    """Request body for creating a what-if demand scenario."""

    # Target stock item key; must be a positive identifier.
    stock_item_key: int = Field(..., ge=1)
    # Demand scaling factor applied by the scenario (0.1x–5x, default unchanged demand).
    demand_multiplier: float = Field(default=1.0, ge=0.1, le=5.0)
def _trace_headers() -> dict[str, str]:
    """Expose the current trace/span ids as response headers for the caller."""
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return {
            "x-trace-id": f"{span_ctx.trace_id:032x}",
            "x-span-id": f"{span_ctx.span_id:016x}",
        }
    # No active (recording) span context — emit nothing.
    return {}
def _propagation_headers() -> dict[str, str]:
    """Build trace-context propagation headers for outbound analytics calls."""
    carrier: dict[str, str] = {}
    # The configured propagator writes its context headers into the carrier.
    propagate.inject(carrier)
    return carrier
async def _get(client: httpx.AsyncClient, path: str, params: dict | None = None) -> Any:
    """GET *path* on the analytics service and return the decoded JSON body.

    Trace-context headers are attached to the outbound request.

    Raises:
        HTTPException: 502 when the upstream answers with an error status,
            503 when the upstream cannot be reached.
    """
    try:
        r = await client.get(path, params=params, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause so tracebacks keep the original upstream error.
        raise HTTPException(status_code=502, detail=f"Analytics service error: {exc.response.status_code}") from exc
    except httpx.RequestError as exc:
        raise HTTPException(status_code=503, detail=f"Analytics service unavailable: {exc}") from exc
async def _post(client: httpx.AsyncClient, path: str, json: dict) -> Any:
    """POST *json* to *path* on the analytics service and return the JSON body.

    A 404 from the upstream is passed through as 404 (resource not found);
    any other upstream error status maps to 502, connection failures to 503.

    Raises:
        HTTPException: 404/502 on upstream error status, 503 when unreachable.
    """
    try:
        r = await client.post(path, json=json, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause so tracebacks keep the original upstream error.
        raise HTTPException(status_code=502 if exc.response.status_code != 404 else 404,
                            detail=f"Analytics service error: {exc.response.status_code}") from exc
    except httpx.RequestError as exc:
        raise HTTPException(status_code=503, detail=f"Analytics service unavailable: {exc}") from exc
def _record_export(
    pg_factory: sessionmaker[Session],
    domain: str,
    source_view: str,
    fmt: str,
    filters: dict,
    row_count: int,
    file_size_bytes: int,
    actor_id: str,
    trace_id: str | None,
    span_id: str | None,
) -> None:
    """Persist an ExportRecord row plus an audit entry for a completed export.

    A failure while writing the export row is logged and swallowed on purpose:
    metadata bookkeeping must never break the export response itself. The
    audit entry is always attempted afterwards.
    """
    try:
        with pg_factory() as session:
            record = ExportRecord(
                domain=domain,
                service="otel-bi-backend",
                source_view=source_view,
                format=fmt,
                filters_applied=filters,
                row_count=row_count,
                file_size_bytes=file_size_bytes,
                actor_id=actor_id,
                trace_id=trace_id,
                span_id=span_id,
            )
            session.add(record)
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record export metadata: %s", exc)
    append_audit(
        pg_factory,
        action="export.created",
        actor_type="user",
        actor_id=actor_id,
        domain=domain,
        service="otel-bi-backend",
        entity_type=source_view,
        payload={"format": fmt, "row_count": row_count, "file_size_bytes": file_size_bytes, **filters},
    )
async def _proxy_xlsx(
    client: httpx.AsyncClient,
    go_path: str,
    params: dict,
    filename_stem: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Fetch XLSX bytes from Go, write ExportRecord, return response.

    Raises:
        HTTPException: 502 on an upstream error status, 503 when unreachable.
    """
    try:
        r = await client.get(go_path, params=params, headers=_propagation_headers())
        r.raise_for_status()
    except httpx.HTTPStatusError as exc:
        raise HTTPException(status_code=502, detail=f"Analytics service error: {exc.response.status_code}") from exc
    except httpx.RequestError as exc:
        raise HTTPException(status_code=503, detail=f"Analytics service unavailable: {exc}") from exc
    content = r.content
    # The Go service reports the exported row count in a custom header; default 0.
    row_count = int(r.headers.get("X-Row-Count", "0"))
    today = datetime.now(timezone.utc).strftime("%Y%m%d")
    filename = f"{filename_stem}_{today}.xlsx"
    trace_id, span_id = current_span_context()
    # Metadata write hits the DB synchronously — run it off the event loop.
    await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: _record_export(pg_factory, domain, source_view, "xlsx", filters,
                               row_count, len(content), actor_id, trace_id, span_id),
    )
    return Response(
        content=content, media_type=_XLSX_MEDIA,
        # Fix: the computed filename was previously dropped from the header.
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
def _make_pdf(
    data: list[dict],
    filename_stem: str,
    pdf_title: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Render *data* to PDF, record export metadata, and build the response.

    Runs synchronously (PDF rendering + DB write); callers dispatch it to the
    shared executor to keep the event loop free.
    """
    with tracer.start_as_current_span(f"export.{domain}.{source_view}") as span:
        span.set_attribute("export.format", "pdf")
        span.set_attribute("export.row_count", len(data))
        content = to_pdf_bytes(data, title=pdf_title)
        span.set_attribute("export.file_size_bytes", len(content))
        today = datetime.now(timezone.utc).strftime("%Y%m%d")
        filename = f"{filename_stem}_{today}.pdf"
        trace_id, span_id = current_span_context()
        _record_export(pg_factory, domain, source_view, "pdf", filters,
                       len(data), len(content), actor_id, trace_id, span_id)
        return Response(
            content=content, media_type=_PDF_MEDIA,
            # Fix: the computed filename was previously dropped from the header.
            headers={"Content-Disposition": f'attachment; filename="{filename}"'},
        )
# ---------------------------------------------------------------------------
# KPIs
# ---------------------------------------------------------------------------
@router.get("/sales/kpis")
async def wwi_sales_kpis(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Proxy the WWI sales KPI summary from the analytics service."""
    response.headers.update(_trace_headers())
    client = request.app.state.analytics_client
    return await _get(client, "/wwi/sales/kpis")
# ---------------------------------------------------------------------------
# Stock & reorder
# ---------------------------------------------------------------------------
@router.get("/stock/recommendations")
async def wwi_reorder_recommendations(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch reorder recommendations, persisting them and derived stock events."""
    response.headers.update(_trace_headers())
    client = request.app.state.analytics_client
    pg_factory = request.app.state.pg_factory
    data = await _get(client, "/wwi/stock/recommendations")

    def _persist() -> None:
        # Blocking DB side effects, run off the event loop: emit stock
        # events first, then store the recommendation rows.
        analytics.generate_stock_events(pg_factory, data)
        analytics.persist_reorder_recommendations(pg_factory, data, "api.stock.recommendations")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
# ---------------------------------------------------------------------------
# Supplier scores
# ---------------------------------------------------------------------------
@router.get("/suppliers/scores")
async def wwi_supplier_scores(
    response: Response, request: Request,
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Proxy supplier reliability scores and persist the returned ranking."""
    response.headers.update(_trace_headers())
    state = request.app.state
    scores = await _get(state.analytics_client, "/wwi/suppliers/scores", {"top_n": top_n})
    # Persisting the ranking is a blocking DB write — push it to the executor.
    await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: analytics.persist_supplier_scores(state.pg_factory, scores, top_n, "api.suppliers.scores"),
    )
    return scores
# ---------------------------------------------------------------------------
# Business events
# ---------------------------------------------------------------------------
@router.get("/events")
async def wwi_business_events(
    response: Response, request: Request,
    limit: int = Query(default=100, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return the most recent stored business events (newest-first per helper)."""
    response.headers.update(_trace_headers())
    pg_factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    # DB read is blocking — run it on the shared executor.
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.get_business_events(pg_factory, limit=limit)
    )
# ---------------------------------------------------------------------------
# What-if scenarios
# ---------------------------------------------------------------------------
@router.post("/scenarios")
async def wwi_create_scenario(
    body: WhatIfRequest, response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Run a what-if demand scenario on the analytics service and store the result."""
    response.headers.update(_trace_headers())
    state = request.app.state
    payload = {
        "stock_item_key": body.stock_item_key,
        "demand_multiplier": body.demand_multiplier,
    }
    result = await _post(state.analytics_client, "/wwi/scenarios", payload)
    # Persist the computed scenario off the event loop.
    await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: analytics.persist_whatif_scenario(state.pg_factory, result),
    )
    return result
@router.get("/scenarios")
async def wwi_list_scenarios(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored what-if scenarios, up to *limit* rows."""
    response.headers.update(_trace_headers())
    pg_factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.list_whatif_scenarios(pg_factory, limit=limit)
    )
# ---------------------------------------------------------------------------
# Stored records
# ---------------------------------------------------------------------------
@router.get("/records/reorder-recommendations")
async def wwi_records_reorder(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored reorder recommendations, up to *limit* rows."""
    response.headers.update(_trace_headers())
    pg_factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.list_reorder_recommendations(pg_factory, limit=limit)
    )
@router.get("/records/supplier-scores")
async def wwi_records_supplier_scores(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored supplier score snapshots, up to *limit* rows."""
    response.headers.update(_trace_headers())
    pg_factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.list_supplier_scores(pg_factory, limit=limit)
    )
# ---------------------------------------------------------------------------
# Exports
# ---------------------------------------------------------------------------
@router.get("/export/stock-recommendations")
async def export_wwi_stock_recommendations(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export stock reorder recommendations as XLSX (proxied from Go) or PDF."""
    client = request.app.state.analytics_client
    pg_factory = request.app.state.pg_factory
    actor_id = principal.subject
    if format == "xlsx":
        # XLSX is rendered by the Go service; we only proxy bytes and record it.
        return await _proxy_xlsx(client, "/wwi/export/stock-recommendations", {},
                                 "wwi_stock_recommendations", "wwi", "stock-recommendations",
                                 {}, actor_id, pg_factory)
    # PDF path: fetch raw rows, then render off the event loop.
    rows = await _get(client, "/wwi/stock/recommendations")
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "wwi_stock_recommendations",
                          "WideWorldImporters — Stock Reorder Recommendations",
                          "wwi", "stock-recommendations", {}, actor_id, pg_factory),
    )
@router.get("/export/supplier-scores")
async def export_wwi_supplier_scores(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export supplier reliability scores as XLSX (proxied from Go) or PDF."""
    client = request.app.state.analytics_client
    pg_factory = request.app.state.pg_factory
    actor_id = principal.subject
    filters = {"top_n": top_n}
    if format == "xlsx":
        # XLSX is rendered by the Go service; we only proxy bytes and record it.
        return await _proxy_xlsx(client, "/wwi/export/supplier-scores", filters,
                                 "wwi_supplier_scores", "wwi", "supplier-scores",
                                 filters, actor_id, pg_factory)
    # PDF path: fetch raw rows, then render off the event loop.
    rows = await _get(client, "/wwi/suppliers/scores", filters)
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "wwi_supplier_scores",
                          "WideWorldImporters — Supplier Reliability Scores",
                          "wwi", "supplier-scores", filters, actor_id, pg_factory),
    )
@router.get("/export/business-events")
async def export_wwi_business_events(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    limit: int = Query(default=100, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export recent business events as PDF.

    NOTE(review): the ``format`` parameter is accepted for symmetry with the
    other export endpoints, but this handler always renders PDF regardless of
    its value — confirm whether an XLSX path was intended.
    """
    pg_factory = request.app.state.pg_factory
    actor_id = principal.subject
    filters = {"limit": limit}
    loop = asyncio.get_running_loop()
    rows = await loop.run_in_executor(
        get_executor(), lambda: analytics.get_business_events(pg_factory, limit=limit)
    )
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(rows, "wwi_business_events",
                          "WideWorldImporters — Business Events",
                          "wwi", "business-events", filters, actor_id, pg_factory),
    )
# ---------------------------------------------------------------------------
# Job triggers
# ---------------------------------------------------------------------------
@router.post("/jobs/{job_name}/trigger")
async def trigger_wwi_job(
    job_name: str, response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Ask the analytics scheduler to trigger the named WWI job immediately."""
    response.headers.update(_trace_headers())
    client = request.app.state.analytics_client
    return await _post(client, f"/scheduler/wwi/{job_name}/trigger", {})
@router.get("/jobs")
async def wwi_job_history(
    response: Response, request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return recent WWI job executions, newest first, up to *limit* rows."""
    response.headers.update(_trace_headers())
    pg_factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: _list_jobs(pg_factory, "wwi", limit)
    )
def _list_jobs(pg_factory, domain: str, limit: int) -> list[dict]:
    """Return the most recent job executions for *domain* as plain dicts."""
    # Imported inside the function rather than at module level — presumably
    # to avoid an import cycle with app.core.audit; confirm before hoisting.
    from app.core.audit import JobExecution

    def _serialize(row) -> dict:
        return {
            "id": row.id,
            "job_name": row.job_name,
            "domain": row.domain,
            "status": row.status,
            "started_at": row.started_at.isoformat(),
            "completed_at": row.completed_at.isoformat() if row.completed_at else None,
            "duration_ms": row.duration_ms,
            "records_processed": row.records_processed,
            "error_message": row.error_message,
            "trace_id": row.trace_id,
        }

    with pg_factory() as session:
        query = (
            session.query(JobExecution)
            .filter_by(domain=domain)
            .order_by(JobExecution.started_at.desc())
            .limit(limit)
        )
        # Serialize while the session is still open so attribute access
        # never touches a detached instance.
        return [_serialize(row) for row in query.all()]