Push the rest

This commit is contained in:
2026-05-11 10:58:46 +02:00
parent adb5c1a439
commit 0031caf16c
94 changed files with 11777 additions and 3474 deletions

View File

464
backend/app/routers/aw.py Normal file
View File

@@ -0,0 +1,464 @@
from __future__ import annotations
import asyncio
import logging
from datetime import datetime, timezone
from typing import Any, Literal
import httpx
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
from opentelemetry import propagate, trace
from sqlalchemy.orm import sessionmaker, Session
from app.core.audit import ExportRecord, append_audit, current_span_context
from app.core.config import settings
from app.core.executor import get_executor
from app.core.export import to_pdf_bytes
from app.core.security import FrontendPrincipal, require_frontend_principal
from app.domain.aw import analytics
# Module logger, tracer, and router for the AdventureWorks ("aw") API surface.
LOGGER = logging.getLogger(__name__)
tracer = trace.get_tracer("otel-bi.routers.aw")
router = APIRouter(prefix="/api/aw", tags=["aw"])
# MIME types emitted by the export endpoints below.
_XLSX_MEDIA = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
_PDF_MEDIA = "application/pdf"
def _trace_headers() -> dict[str, str]:
    """Expose the active trace/span ids as response headers (empty if none)."""
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return {
            "x-trace-id": format(span_ctx.trace_id, "032x"),
            "x-span-id": format(span_ctx.span_id, "016x"),
        }
    return {}
def _propagation_headers() -> dict[str, str]:
    """Build trace-context headers for outbound calls to the analytics service."""
    carrier: dict[str, str] = {}
    propagate.inject(carrier)
    return carrier
async def _get(client: httpx.AsyncClient, path: str, params: dict | None = None) -> Any:
    """GET *path* from the analytics service and return its JSON body.

    Raises:
        HTTPException: 502 when the upstream returns an error status,
            503 when the service cannot be reached at all.
    """
    try:
        r = await client.get(path, params=params, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause (B904) so upstream details survive in tracebacks.
        raise HTTPException(
            status_code=502,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
async def _post(client: httpx.AsyncClient, path: str, json: dict) -> Any:
    """POST *json* to *path* on the analytics service and return its JSON body.

    Raises:
        HTTPException: upstream 404 is passed through as 404 so callers see
            missing resources; other upstream errors map to 502; connectivity
            failures map to 503.
    """
    try:
        r = await client.post(path, json=json, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause (B904) so upstream details survive in tracebacks.
        raise HTTPException(
            status_code=502 if exc.response.status_code != 404 else 404,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
def _record_export(
    pg_factory: sessionmaker[Session],
    domain: str,
    source_view: str,
    fmt: str,
    filters: dict,
    row_count: int,
    file_size_bytes: int,
    actor_id: str,
    trace_id: str | None,
    span_id: str | None,
) -> None:
    """Persist an ExportRecord row, then append an audit-log entry.

    The record write is best-effort: a failure is logged and swallowed so
    bookkeeping problems never break the export itself.
    """
    try:
        with pg_factory() as session:
            record = ExportRecord(
                domain=domain,
                service="otel-bi-backend",
                source_view=source_view,
                format=fmt,
                filters_applied=filters,
                row_count=row_count,
                file_size_bytes=file_size_bytes,
                actor_id=actor_id,
                trace_id=trace_id,
                span_id=span_id,
            )
            session.add(record)
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record export metadata: %s", exc)
    append_audit(
        pg_factory,
        action="export.created",
        actor_type="user",
        actor_id=actor_id,
        domain=domain,
        service="otel-bi-backend",
        entity_type=source_view,
        payload={
            "format": fmt,
            "row_count": row_count,
            "file_size_bytes": file_size_bytes,
            **filters,
        },
    )
async def _proxy_xlsx(
    client: httpx.AsyncClient,
    go_path: str,
    params: dict,
    filename_stem: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Fetch XLSX bytes from Go, write ExportRecord, return response."""
    try:
        r = await client.get(go_path, params=params, headers=_propagation_headers())
        r.raise_for_status()
    except httpx.HTTPStatusError as exc:
        raise HTTPException(
            status_code=502,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
    content = r.content
    # Row count is reported by the analytics service; default to 0 if absent.
    row_count = int(r.headers.get("X-Row-Count", "0"))
    today = datetime.now(timezone.utc).strftime("%Y%m%d")
    filename = f"{filename_stem}_{today}.xlsx"
    trace_id, span_id = current_span_context()
    # Persist export metadata off the event loop (sync SQLAlchemy session).
    await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: _record_export(pg_factory, domain, source_view, "xlsx", filters,
                               row_count, len(content), actor_id, trace_id, span_id),
    )
    return Response(
        content=content, media_type=_XLSX_MEDIA,
        # BUG FIX: the dated filename was computed but never used — the header
        # previously carried a hard-coded placeholder string.
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
def _make_pdf(
    data: list[dict],
    filename_stem: str,
    pdf_title: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Render *data* to PDF bytes, record the export, and build the response.

    Runs synchronously; callers dispatch it to the shared executor.
    """
    with tracer.start_as_current_span(f"export.{domain}.{source_view}") as span:
        span.set_attribute("export.format", "pdf")
        span.set_attribute("export.row_count", len(data))
        content = to_pdf_bytes(data, title=pdf_title)
        span.set_attribute("export.file_size_bytes", len(content))
        today = datetime.now(timezone.utc).strftime("%Y%m%d")
        filename = f"{filename_stem}_{today}.pdf"
        trace_id, span_id = current_span_context()
        _record_export(pg_factory, domain, source_view, "pdf", filters,
                       len(data), len(content), actor_id, trace_id, span_id)
        return Response(
            content=content, media_type=_PDF_MEDIA,
            # BUG FIX: the dated filename was computed but never used — the
            # header previously carried a hard-coded placeholder string.
            headers={"Content-Disposition": f'attachment; filename="{filename}"'},
        )
# ---------------------------------------------------------------------------
# Sales
# ---------------------------------------------------------------------------
@router.get("/sales/kpis")
async def aw_sales_kpis(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Proxy AdventureWorks sales KPIs from the analytics service."""
    for name, value in _trace_headers().items():
        response.headers[name] = value
    client = request.app.state.analytics_client
    return await _get(client, "/aw/sales/kpis")
@router.get("/sales/history")
async def aw_sales_history(
    response: Response, request: Request,
    days_back: int = Query(default=settings.default_history_days, ge=30, le=1460),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Proxy the sales history series for the requested look-back window."""
    response.headers.update(_trace_headers())
    client = request.app.state.analytics_client
    params = {"days_back": days_back}
    return await _get(client, "/aw/sales/history", params)
@router.get("/sales/forecast")
async def aw_sales_forecast(
    response: Response, request: Request,
    horizon_days: int = Query(default=settings.forecast_horizon_days, ge=7, le=180),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch the sales forecast and persist a copy of the run off the loop."""
    response.headers.update(_trace_headers())
    state = request.app.state
    data = await _get(state.analytics_client, "/aw/sales/forecast",
                      {"horizon_days": horizon_days})

    def _persist() -> None:
        # Sync SQLAlchemy work runs on the shared thread-pool executor.
        analytics.persist_forecast(state.pg_factory, data, horizon_days,
                                   "api.sales.forecast")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
# ---------------------------------------------------------------------------
# Rep scores & product demand
# ---------------------------------------------------------------------------
@router.get("/reps/scores")
async def aw_rep_scores(
    response: Response, request: Request,
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch top-N sales-rep scores and persist the run off the event loop."""
    response.headers.update(_trace_headers())
    state = request.app.state
    data = await _get(state.analytics_client, "/aw/reps/scores", {"top_n": top_n})

    def _persist() -> None:
        analytics.persist_rep_scores(state.pg_factory, data, top_n, "api.reps.scores")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
@router.get("/products/demand")
async def aw_product_demand(
    response: Response, request: Request,
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch top-N product demand scores and persist the run off the loop."""
    response.headers.update(_trace_headers())
    state = request.app.state
    data = await _get(state.analytics_client, "/aw/products/demand", {"top_n": top_n})

    def _persist() -> None:
        analytics.persist_product_demand(state.pg_factory, data, top_n,
                                         "api.products.demand")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
# ---------------------------------------------------------------------------
# Anomaly detection
# ---------------------------------------------------------------------------
@router.get("/anomalies")
async def aw_anomalies(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch anomaly-detection results and persist the run off the loop."""
    response.headers.update(_trace_headers())
    state = request.app.state
    data = await _get(state.analytics_client, "/aw/anomalies")

    def _persist() -> None:
        analytics.persist_anomaly_run(state.pg_factory, data, "api.aw.anomalies")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
# ---------------------------------------------------------------------------
# Stored records
# ---------------------------------------------------------------------------
@router.get("/records/forecasts")
async def aw_records_forecasts(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored forecast records; sync DB work runs on the executor."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return analytics.list_forecasts(factory, limit=limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)
@router.get("/records/rep-scores")
async def aw_records_rep_scores(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored rep-score records; sync DB work runs on the executor."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return analytics.list_rep_scores(factory, limit=limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)
@router.get("/records/product-demand")
async def aw_records_product_demand(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored product-demand records; sync DB work runs on the executor."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return analytics.list_product_demand(factory, limit=limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)
# ---------------------------------------------------------------------------
# Exports
# ---------------------------------------------------------------------------
@router.get("/export/sales-history")
async def export_aw_sales_history(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    days_back: int = Query(default=settings.default_history_days, ge=30, le=1460),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export AW sales history: XLSX proxied from Go, PDF rendered locally."""
    state = request.app.state
    actor = principal.subject
    filters = {"days_back": days_back}
    if format != "xlsx":
        rows = await _get(state.analytics_client, "/aw/sales/history", filters)

        def _render() -> Response:
            return _make_pdf(rows, "aw_sales_history",
                             "AdventureWorks — Sales History",
                             "aw", "sales-history", filters, actor,
                             state.pg_factory)

        return await asyncio.get_running_loop().run_in_executor(get_executor(), _render)
    return await _proxy_xlsx(state.analytics_client, "/aw/export/sales-history",
                             filters, "aw_sales_history", "aw", "sales-history",
                             filters, actor, state.pg_factory)
@router.get("/export/sales-forecast")
async def export_aw_sales_forecast(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    horizon_days: int = Query(default=settings.forecast_horizon_days, ge=7, le=180),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export AW sales forecast: XLSX proxied from Go, PDF rendered locally."""
    state = request.app.state
    actor = principal.subject
    filters = {"horizon_days": horizon_days}
    if format != "xlsx":
        rows = await _get(state.analytics_client, "/aw/sales/forecast", filters)

        def _render() -> Response:
            return _make_pdf(rows, "aw_sales_forecast",
                             "AdventureWorks — Sales Forecast",
                             "aw", "sales-forecast", filters, actor,
                             state.pg_factory)

        return await asyncio.get_running_loop().run_in_executor(get_executor(), _render)
    return await _proxy_xlsx(state.analytics_client, "/aw/export/sales-forecast",
                             filters, "aw_sales_forecast", "aw", "sales-forecast",
                             filters, actor, state.pg_factory)
@router.get("/export/rep-scores")
async def export_aw_rep_scores(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export AW rep scores: XLSX proxied from Go, PDF rendered locally."""
    state = request.app.state
    actor = principal.subject
    filters = {"top_n": top_n}
    if format != "xlsx":
        rows = await _get(state.analytics_client, "/aw/reps/scores", filters)

        def _render() -> Response:
            return _make_pdf(rows, "aw_rep_scores",
                             "AdventureWorks — Sales Rep Performance",
                             "aw", "rep-scores", filters, actor,
                             state.pg_factory)

        return await asyncio.get_running_loop().run_in_executor(get_executor(), _render)
    return await _proxy_xlsx(state.analytics_client, "/aw/export/rep-scores",
                             filters, "aw_rep_scores", "aw", "rep-scores",
                             filters, actor, state.pg_factory)
@router.get("/export/product-demand")
async def export_aw_product_demand(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export AW product demand: XLSX proxied from Go, PDF rendered locally."""
    state = request.app.state
    actor = principal.subject
    filters = {"top_n": top_n}
    if format != "xlsx":
        rows = await _get(state.analytics_client, "/aw/products/demand", filters)

        def _render() -> Response:
            return _make_pdf(rows, "aw_product_demand",
                             "AdventureWorks — Product Demand Scores",
                             "aw", "product-demand", filters, actor,
                             state.pg_factory)

        return await asyncio.get_running_loop().run_in_executor(get_executor(), _render)
    return await _proxy_xlsx(state.analytics_client, "/aw/export/product-demand",
                             filters, "aw_product_demand", "aw", "product-demand",
                             filters, actor, state.pg_factory)
# ---------------------------------------------------------------------------
# Job triggers
# ---------------------------------------------------------------------------
@router.post("/jobs/{job_name}/trigger")
async def trigger_aw_job(
    job_name: str, response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Ask the analytics scheduler to run the named AW job immediately."""
    for name, value in _trace_headers().items():
        response.headers[name] = value
    client = request.app.state.analytics_client
    return await _post(client, f"/scheduler/aw/{job_name}/trigger", {})
@router.get("/jobs")
async def aw_job_history(
    response: Response, request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List recent AW job executions, serialized via _list_jobs."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return _list_jobs(factory, "aw", limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)
def _list_jobs(pg_factory, domain: str, limit: int) -> list[dict]:
    """Serialize the most recent JobExecution rows for *domain* (newest first)."""
    # Imported lazily to mirror how platform-level routers borrow this helper.
    from app.core.audit import JobExecution

    def _serialize(row) -> dict:
        return {
            "id": row.id,
            "job_name": row.job_name,
            "domain": row.domain,
            "status": row.status,
            "started_at": row.started_at.isoformat(),
            "completed_at": row.completed_at.isoformat() if row.completed_at else None,
            "duration_ms": row.duration_ms,
            "records_processed": row.records_processed,
            "error_message": row.error_message,
            "trace_id": row.trace_id,
        }

    with pg_factory() as session:
        query = (
            session.query(JobExecution)
            .filter_by(domain=domain)
            .order_by(JobExecution.started_at.desc())
            .limit(limit)
        )
        return [_serialize(row) for row in query.all()]

View File

@@ -0,0 +1,254 @@
from __future__ import annotations
import asyncio
import logging
from fastapi import APIRouter, Depends, Query, Request, Response
from opentelemetry import propagate, trace
from app.core.audit import AuditLog, ExportRecord, append_audit
from app.core.config import settings
from app.core.executor import get_executor
from app.core.reports import save_report
from app.core.security import FrontendPrincipal, require_frontend_principal
from app.domain.wwi import analytics as wwi_analytics
# Module logger and router for platform-wide (cross-domain) endpoints.
LOGGER = logging.getLogger(__name__)
router = APIRouter(tags=["platform"])
def _trace_headers() -> dict[str, str]:
    """Expose the active trace/span ids as response headers (empty if none)."""
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return {
            "x-trace-id": format(span_ctx.trace_id, "032x"),
            "x-span-id": format(span_ctx.span_id, "016x"),
        }
    return {}
# ---------------------------------------------------------------------------
# System
# ---------------------------------------------------------------------------
@router.get("/api/config")
def frontend_config() -> dict:
    """Expose the OIDC configuration values the frontend needs at startup."""
    config = {
        "oidc_enabled": settings.require_frontend_auth,
        "oidc_authority": settings.frontend_jwt_issuer_url,
        "oidc_client_id": settings.frontend_oidc_client_id,
        "oidc_scope": settings.frontend_oidc_scope,
    }
    return config
@router.get("/api/health")
def health(response: Response) -> dict:
    """Health probe: static OK payload, trace ids echoed as headers."""
    for name, value in _trace_headers().items():
        response.headers[name] = value
    return {"status": "ok", "service": "otel-bi-backend"}
@router.get("/api/telemetry/status")
def telemetry_status(
    response: Response,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Report instrumentation status, echoing the current trace context."""
    trace_headers = _trace_headers()
    response.headers.update(trace_headers)
    body = {
        "status": "instrumented",
        "service": "otel-bi-backend",
        "collector_endpoint": settings.otel_collector_endpoint,
        "subject": principal.subject,
    }
    body.update(trace_headers)
    return body
# ---------------------------------------------------------------------------
# Cross-domain report generation
# ---------------------------------------------------------------------------
def _propagation_headers() -> dict[str, str]:
    """Build trace-context headers for outbound calls to the analytics service."""
    carrier: dict[str, str] = {}
    propagate.inject(carrier)
    return carrier
@router.post("/api/reports/generate")
async def generate_report(
    request: Request,
    response: Response,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Build a cross-domain (AW + WWI) report and persist it via save_report.

    Fetches all dashboard datasets concurrently from the analytics service —
    each fetch degrades to {} on failure so a partial report still renders —
    adds stored WWI business events from Postgres, writes the report files
    (xlsx + pdf, per the returned metadata), and appends an audit entry.
    Returns the report metadata plus output dir and trace headers.
    """
    response.headers.update(_trace_headers())
    client = request.app.state.analytics_client
    pg_factory = request.app.state.pg_factory
    actor_id = principal.subject
    loop = asyncio.get_running_loop()
    executor = get_executor()
    # httpx is imported locally; this module has no top-level httpx import.
    import httpx as _httpx

    async def _fetch(path: str, params: dict | None = None):
        # Best-effort fetch: any upstream or transport error yields {}.
        try:
            r = await client.get(path, params=params, headers=_propagation_headers())
            r.raise_for_status()
            return r.json()
        except (_httpx.HTTPStatusError, _httpx.RequestError):
            return {}

    # NOTE: the unpack order must match the gather() argument order exactly.
    (
        aw_kpis, aw_history, aw_forecast,
        aw_reps, aw_products,
        wwi_kpis, wwi_stock, wwi_suppliers,
    ) = await asyncio.gather(
        _fetch("/aw/sales/kpis"),
        _fetch("/aw/sales/history", {"days_back": settings.default_history_days}),
        _fetch("/aw/sales/forecast", {"horizon_days": settings.forecast_horizon_days}),
        _fetch("/aw/reps/scores", {"top_n": settings.ranking_default_top_n}),
        _fetch("/aw/products/demand", {"top_n": settings.ranking_default_top_n}),
        _fetch("/wwi/sales/kpis"),
        _fetch("/wwi/stock/recommendations"),
        _fetch("/wwi/suppliers/scores", {"top_n": settings.ranking_default_top_n}),
    )
    # Business events come from Postgres (sync), so run on the executor.
    wwi_events = await loop.run_in_executor(
        executor, lambda: wwi_analytics.get_business_events(pg_factory, 200)
    )
    data = {
        "aw_sales_kpis": aw_kpis,
        "aw_sales_history": aw_history,
        "aw_sales_forecast": aw_forecast,
        "aw_rep_scores": aw_reps,
        "aw_product_demand": aw_products,
        "wwi_sales_kpis": wwi_kpis,
        "wwi_stock_recommendations": wwi_stock,
        "wwi_supplier_scores": wwi_suppliers,
        "wwi_business_events": wwi_events,
    }
    # save_report does file I/O — keep it off the event loop as well.
    report = await loop.run_in_executor(
        executor, lambda: save_report(data, settings.report_output_dir)
    )
    append_audit(
        pg_factory,
        action="report.generated", actor_type="user", actor_id=actor_id,
        domain="platform", service="otel-bi-backend", entity_type="full_report",
        payload={
            "report_id": report["report_id"],
            "xlsx": report["xlsx"]["filename"],
            "pdf": report["pdf"]["filename"],
        },
    )
    return {**report, "output_dir": settings.report_output_dir, **_trace_headers()}
# ---------------------------------------------------------------------------
# Audit log
# ---------------------------------------------------------------------------
@router.get("/api/audit")
async def audit_log(
    response: Response, request: Request,
    limit: int = Query(default=100, ge=1, le=500),
    domain: str | None = Query(default=None),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List recent audit-log entries, optionally filtered by domain."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _serialize(row) -> dict:
        return {
            "id": row.id,
            "occurred_at": row.occurred_at.isoformat(),
            "action": row.action,
            "status": row.status,
            "actor_type": row.actor_type,
            "actor_id": row.actor_id,
            "domain": row.domain,
            "service": row.service,
            "entity_type": row.entity_type,
            "trace_id": row.trace_id,
            "payload": row.payload,
        }

    def _query() -> list[dict]:
        with factory() as session:
            query = session.query(AuditLog).order_by(AuditLog.occurred_at.desc())
            if domain:
                query = query.filter_by(domain=domain)
            return [_serialize(row) for row in query.limit(limit).all()]

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _query)
# ---------------------------------------------------------------------------
# Export history
# ---------------------------------------------------------------------------
@router.get("/api/exports")
async def export_history(
    response: Response, request: Request,
    limit: int = Query(default=100, ge=1, le=500),
    domain: str | None = Query(default=None),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List recent export records, optionally filtered by domain."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _serialize(row) -> dict:
        return {
            "id": row.id,
            "exported_at": row.created_at.isoformat(),
            "domain": row.domain,
            "service": row.service,
            "source_view": row.source_view,
            "format": row.format,
            "filters_applied": row.filters_applied,
            "row_count": row.row_count,
            "file_size_bytes": row.file_size_bytes,
            "actor_id": row.actor_id,
            "trace_id": row.trace_id,
        }

    def _query() -> list[dict]:
        with factory() as session:
            query = session.query(ExportRecord).order_by(ExportRecord.created_at.desc())
            if domain:
                query = query.filter_by(domain=domain)
            return [_serialize(row) for row in query.limit(limit).all()]

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _query)
# ---------------------------------------------------------------------------
# Job history (platform-level — both domains in one response)
# ---------------------------------------------------------------------------
@router.get("/api/jobs/aw")
async def jobs_aw(
    response: Response, request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """AdventureWorks job-execution history (delegates to the aw router helper)."""
    from app.routers.aw import _list_jobs
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return _list_jobs(factory, "aw", limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)
@router.get("/api/jobs/wwi")
async def jobs_wwi(
    response: Response, request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """WideWorldImporters job-execution history (delegates to the wwi helper)."""
    from app.routers.wwi import _list_jobs
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return _list_jobs(factory, "wwi", limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)

440
backend/app/routers/wwi.py Normal file
View File

@@ -0,0 +1,440 @@
from __future__ import annotations
import asyncio
import logging
from datetime import datetime, timezone
from typing import Any, Literal
import httpx
from fastapi import APIRouter, Depends, HTTPException, Query, Request, Response
from opentelemetry import propagate, trace
from pydantic import BaseModel, Field
from sqlalchemy.orm import sessionmaker, Session
from app.core.audit import ExportRecord, append_audit, current_span_context
from app.core.config import settings
from app.core.executor import get_executor
from app.core.export import to_pdf_bytes
from app.core.security import FrontendPrincipal, require_frontend_principal
from app.domain.wwi import analytics
# Module logger, tracer, and router for the WideWorldImporters ("wwi") API surface.
LOGGER = logging.getLogger(__name__)
tracer = trace.get_tracer("otel-bi.routers.wwi")
router = APIRouter(prefix="/api/wwi", tags=["wwi"])
# MIME types emitted by the export endpoints below.
_XLSX_MEDIA = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
_PDF_MEDIA = "application/pdf"
class WhatIfRequest(BaseModel):
    """Request body for creating a what-if stock scenario."""
    # Stock item to simulate; must be a positive key.
    stock_item_key: int = Field(..., ge=1)
    # Demand scaling factor applied in the scenario (bounded 0.1–5.0).
    demand_multiplier: float = Field(default=1.0, ge=0.1, le=5.0)
def _trace_headers() -> dict[str, str]:
    """Expose the active trace/span ids as response headers (empty if none)."""
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return {
            "x-trace-id": format(span_ctx.trace_id, "032x"),
            "x-span-id": format(span_ctx.span_id, "016x"),
        }
    return {}
def _propagation_headers() -> dict[str, str]:
    """Build trace-context headers for outbound calls to the analytics service."""
    carrier: dict[str, str] = {}
    propagate.inject(carrier)
    return carrier
async def _get(client: httpx.AsyncClient, path: str, params: dict | None = None) -> Any:
    """GET *path* from the analytics service and return its JSON body.

    Raises:
        HTTPException: 502 when the upstream returns an error status,
            503 when the service cannot be reached at all.
    """
    try:
        r = await client.get(path, params=params, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause (B904) so upstream details survive in tracebacks.
        raise HTTPException(
            status_code=502,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
async def _post(client: httpx.AsyncClient, path: str, json: dict) -> Any:
    """POST *json* to *path* on the analytics service and return its JSON body.

    Raises:
        HTTPException: upstream 404 is passed through as 404 so callers see
            missing resources; other upstream errors map to 502; connectivity
            failures map to 503.
    """
    try:
        r = await client.post(path, json=json, headers=_propagation_headers())
        r.raise_for_status()
        return r.json()
    except httpx.HTTPStatusError as exc:
        # Chain the cause (B904) so upstream details survive in tracebacks.
        raise HTTPException(
            status_code=502 if exc.response.status_code != 404 else 404,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
def _record_export(
    pg_factory: sessionmaker[Session],
    domain: str,
    source_view: str,
    fmt: str,
    filters: dict,
    row_count: int,
    file_size_bytes: int,
    actor_id: str,
    trace_id: str | None,
    span_id: str | None,
) -> None:
    """Persist an ExportRecord row, then append an audit-log entry.

    The record write is best-effort: a failure is logged and swallowed so
    bookkeeping problems never break the export itself.
    """
    try:
        with pg_factory() as session:
            record = ExportRecord(
                domain=domain,
                service="otel-bi-backend",
                source_view=source_view,
                format=fmt,
                filters_applied=filters,
                row_count=row_count,
                file_size_bytes=file_size_bytes,
                actor_id=actor_id,
                trace_id=trace_id,
                span_id=span_id,
            )
            session.add(record)
            session.commit()
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to record export metadata: %s", exc)
    append_audit(
        pg_factory,
        action="export.created",
        actor_type="user",
        actor_id=actor_id,
        domain=domain,
        service="otel-bi-backend",
        entity_type=source_view,
        payload={
            "format": fmt,
            "row_count": row_count,
            "file_size_bytes": file_size_bytes,
            **filters,
        },
    )
async def _proxy_xlsx(
    client: httpx.AsyncClient,
    go_path: str,
    params: dict,
    filename_stem: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Fetch XLSX bytes from Go, write ExportRecord, return response."""
    try:
        r = await client.get(go_path, params=params, headers=_propagation_headers())
        r.raise_for_status()
    except httpx.HTTPStatusError as exc:
        raise HTTPException(
            status_code=502,
            detail=f"Analytics service error: {exc.response.status_code}",
        ) from exc
    except httpx.RequestError as exc:
        raise HTTPException(
            status_code=503, detail=f"Analytics service unavailable: {exc}"
        ) from exc
    content = r.content
    # Row count is reported by the analytics service; default to 0 if absent.
    row_count = int(r.headers.get("X-Row-Count", "0"))
    today = datetime.now(timezone.utc).strftime("%Y%m%d")
    filename = f"{filename_stem}_{today}.xlsx"
    trace_id, span_id = current_span_context()
    # Persist export metadata off the event loop (sync SQLAlchemy session).
    await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: _record_export(pg_factory, domain, source_view, "xlsx", filters,
                               row_count, len(content), actor_id, trace_id, span_id),
    )
    return Response(
        content=content, media_type=_XLSX_MEDIA,
        # BUG FIX: the dated filename was computed but never used — the header
        # previously carried a hard-coded placeholder string.
        headers={"Content-Disposition": f'attachment; filename="{filename}"'},
    )
def _make_pdf(
    data: list[dict],
    filename_stem: str,
    pdf_title: str,
    domain: str,
    source_view: str,
    filters: dict,
    actor_id: str,
    pg_factory: sessionmaker[Session],
) -> Response:
    """Render *data* to PDF bytes, record the export, and build the response.

    Runs synchronously; callers dispatch it to the shared executor.
    """
    with tracer.start_as_current_span(f"export.{domain}.{source_view}") as span:
        span.set_attribute("export.format", "pdf")
        span.set_attribute("export.row_count", len(data))
        content = to_pdf_bytes(data, title=pdf_title)
        span.set_attribute("export.file_size_bytes", len(content))
        today = datetime.now(timezone.utc).strftime("%Y%m%d")
        filename = f"{filename_stem}_{today}.pdf"
        trace_id, span_id = current_span_context()
        _record_export(pg_factory, domain, source_view, "pdf", filters,
                       len(data), len(content), actor_id, trace_id, span_id)
        return Response(
            content=content, media_type=_PDF_MEDIA,
            # BUG FIX: the dated filename was computed but never used — the
            # header previously carried a hard-coded placeholder string.
            headers={"Content-Disposition": f'attachment; filename="{filename}"'},
        )
# ---------------------------------------------------------------------------
# KPIs
# ---------------------------------------------------------------------------
@router.get("/sales/kpis")
async def wwi_sales_kpis(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Proxy WideWorldImporters sales KPIs from the analytics service."""
    for name, value in _trace_headers().items():
        response.headers[name] = value
    client = request.app.state.analytics_client
    return await _get(client, "/wwi/sales/kpis")
# ---------------------------------------------------------------------------
# Stock & reorder
# ---------------------------------------------------------------------------
@router.get("/stock/recommendations")
async def wwi_reorder_recommendations(
    response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch reorder recommendations, then generate events and persist the run."""
    response.headers.update(_trace_headers())
    state = request.app.state
    data = await _get(state.analytics_client, "/wwi/stock/recommendations")

    def _persist() -> None:
        # Order matters: events are generated before the recommendations are stored.
        analytics.generate_stock_events(state.pg_factory, data)
        analytics.persist_reorder_recommendations(
            state.pg_factory, data, "api.stock.recommendations"
        )

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
# ---------------------------------------------------------------------------
# Supplier scores
# ---------------------------------------------------------------------------
@router.get("/suppliers/scores")
async def wwi_supplier_scores(
    response: Response, request: Request,
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Fetch top-N supplier scores and persist the run off the event loop."""
    response.headers.update(_trace_headers())
    state = request.app.state
    data = await _get(state.analytics_client, "/wwi/suppliers/scores", {"top_n": top_n})

    def _persist() -> None:
        analytics.persist_supplier_scores(state.pg_factory, data, top_n,
                                          "api.suppliers.scores")

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return data
# ---------------------------------------------------------------------------
# Business events
# ---------------------------------------------------------------------------
@router.get("/events")
async def wwi_business_events(
    response: Response, request: Request,
    limit: int = Query(default=100, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List stored business events; sync DB work runs on the executor."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory

    def _load() -> list[dict]:
        return analytics.get_business_events(factory, limit=limit)

    return await asyncio.get_running_loop().run_in_executor(get_executor(), _load)
# ---------------------------------------------------------------------------
# What-if scenarios
# ---------------------------------------------------------------------------
@router.post("/scenarios")
async def wwi_create_scenario(
    body: WhatIfRequest, response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Run a what-if scenario on the analytics service and store the result."""
    response.headers.update(_trace_headers())
    state = request.app.state
    payload = {
        "stock_item_key": body.stock_item_key,
        "demand_multiplier": body.demand_multiplier,
    }
    result = await _post(state.analytics_client, "/wwi/scenarios", payload)

    def _persist() -> None:
        analytics.persist_whatif_scenario(state.pg_factory, result)

    await asyncio.get_running_loop().run_in_executor(get_executor(), _persist)
    return result
@router.get("/scenarios")
async def wwi_list_scenarios(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List previously stored what-if scenarios, newest first per storage layer."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.list_whatif_scenarios(factory, limit=limit)
    )
# ---------------------------------------------------------------------------
# Stored records
# ---------------------------------------------------------------------------
@router.get("/records/reorder-recommendations")
async def wwi_records_reorder(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return stored reorder recommendation records from Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.list_reorder_recommendations(factory, limit=limit)
    )
@router.get("/records/supplier-scores")
async def wwi_records_supplier_scores(
    response: Response, request: Request,
    limit: int = Query(default=settings.storage_default_limit, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """Return stored supplier score records from Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: analytics.list_supplier_scores(factory, limit=limit)
    )
# ---------------------------------------------------------------------------
# Exports
# ---------------------------------------------------------------------------
@router.get("/export/stock-recommendations")
async def export_wwi_stock_recommendations(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export stock reorder recommendations as XLSX (proxied from the analytics
    service) or as a PDF rendered locally from the live data."""
    state = request.app.state
    actor_id = principal.subject
    if format == "xlsx":
        # XLSX generation is delegated to the analytics service.
        return await _proxy_xlsx(
            state.analytics_client, "/wwi/export/stock-recommendations", {},
            "wwi_stock_recommendations", "wwi", "stock-recommendations",
            {}, actor_id, state.pg_factory,
        )
    rows = await _get(state.analytics_client, "/wwi/stock/recommendations")
    # PDF rendering is CPU-bound; keep it off the event loop.
    return await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: _make_pdf(
            rows, "wwi_stock_recommendations",
            "WideWorldImporters — Stock Reorder Recommendations",
            "wwi", "stock-recommendations", {}, actor_id, state.pg_factory,
        ),
    )
@router.get("/export/supplier-scores")
async def export_wwi_supplier_scores(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    top_n: int = Query(default=settings.ranking_default_top_n, ge=3, le=100),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export top-N supplier reliability scores as XLSX (proxied from the
    analytics service) or as a PDF rendered locally from the live data."""
    state = request.app.state
    actor_id = principal.subject
    filters = {"top_n": top_n}
    if format == "xlsx":
        # XLSX generation is delegated to the analytics service.
        return await _proxy_xlsx(
            state.analytics_client, "/wwi/export/supplier-scores", filters,
            "wwi_supplier_scores", "wwi", "supplier-scores",
            filters, actor_id, state.pg_factory,
        )
    rows = await _get(state.analytics_client, "/wwi/suppliers/scores", filters)
    # PDF rendering is CPU-bound; keep it off the event loop.
    return await asyncio.get_running_loop().run_in_executor(
        get_executor(),
        lambda: _make_pdf(
            rows, "wwi_supplier_scores",
            "WideWorldImporters — Supplier Reliability Scores",
            "wwi", "supplier-scores", filters, actor_id, state.pg_factory,
        ),
    )
@router.get("/export/business-events")
async def export_wwi_business_events(
    request: Request,
    format: Literal["xlsx", "pdf"] = Query(default="xlsx"),
    limit: int = Query(default=100, ge=1, le=500),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> Response:
    """Export recent WWI business events as a PDF.

    Only PDF output is implemented for this route: events are read straight
    from Postgres, and unlike the other export routes there is no
    analytics-service XLSX endpoint to proxy to. The ``format`` parameter was
    previously declared but ignored, so ``format=xlsx`` silently returned a
    PDF; it is now rejected explicitly.

    Raises:
        HTTPException: 400 when ``format`` is not ``pdf``.
    """
    if format != "pdf":
        # Bug fix: fail loudly instead of returning a PDF under an xlsx request.
        raise HTTPException(
            status_code=400,
            detail="xlsx export is not supported for business events; use format=pdf",
        )
    pg_factory = request.app.state.pg_factory
    actor_id = principal.subject
    filters = {"limit": limit}
    loop = asyncio.get_running_loop()
    # Both the DB read and the PDF rendering block; run each on the executor.
    data = await loop.run_in_executor(
        get_executor(), lambda: analytics.get_business_events(pg_factory, limit=limit)
    )
    return await loop.run_in_executor(
        get_executor(),
        lambda: _make_pdf(data, "wwi_business_events",
                          "WideWorldImporters — Business Events",
                          "wwi", "business-events", filters, actor_id, pg_factory),
    )
# ---------------------------------------------------------------------------
# Job triggers
# ---------------------------------------------------------------------------
@router.post("/jobs/{job_name}/trigger")
async def trigger_wwi_job(
    job_name: str, response: Response, request: Request,
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> dict:
    """Forward a manual trigger for the named scheduled WWI job to the analytics service."""
    response.headers.update(_trace_headers())
    client = request.app.state.analytics_client
    return await _post(client, f"/scheduler/wwi/{job_name}/trigger", {})
@router.get("/jobs")
async def wwi_job_history(
    response: Response, request: Request,
    limit: int = Query(default=50, ge=1, le=200),
    principal: FrontendPrincipal = Depends(require_frontend_principal),
) -> list[dict]:
    """List recent WWI job executions recorded in Postgres."""
    response.headers.update(_trace_headers())
    factory = request.app.state.pg_factory
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        get_executor(), lambda: _list_jobs(factory, "wwi", limit)
    )
def _list_jobs(pg_factory, domain: str, limit: int) -> list[dict]:
    """Read the newest job-execution rows for *domain* and return JSON-ready dicts."""
    # Imported lazily, mirroring how other helpers pull audit models on demand.
    from app.core.audit import JobExecution

    def _serialize(row) -> dict:
        # ISO-format timestamps; completed_at may be NULL for running jobs.
        completed = row.completed_at.isoformat() if row.completed_at else None
        return {
            "id": row.id,
            "job_name": row.job_name,
            "domain": row.domain,
            "status": row.status,
            "started_at": row.started_at.isoformat(),
            "completed_at": completed,
            "duration_ms": row.duration_ms,
            "records_processed": row.records_processed,
            "error_message": row.error_message,
            "trace_id": row.trace_id,
        }

    with pg_factory() as session:
        query = (
            session.query(JobExecution)
            .filter_by(domain=domain)
            .order_by(JobExecution.started_at.desc())
            .limit(limit)
        )
        return [_serialize(row) for row in query.all()]