Push the rest

This commit is contained in:
2026-05-11 10:58:46 +02:00
parent adb5c1a439
commit 0031caf16c
94 changed files with 11777 additions and 3474 deletions

View File

View File

View File

@@ -0,0 +1,258 @@
from __future__ import annotations
import logging
from opentelemetry import metrics, trace
from sqlalchemy.orm import sessionmaker, Session
from app.core.audit import append_audit
from app.domain.aw.models import AWSalesForecast, AWRepScore, AWProductDemand, AWAnomalyRun
# Module-level logger used for best-effort persistence warnings.
LOGGER = logging.getLogger(__name__)
# OpenTelemetry tracer/meter scoped to the AW (AdventureWorks) domain.
tracer = trace.get_tracer("otel-bi.domain.aw")
meter = metrics.get_meter("otel-bi.domain.aw")
# Incremented after each successful PostgreSQL commit, labeled by entity kind.
_persist_counter = meter.create_counter(
    "aw_persist_writes_total",
    description="Number of AW PostgreSQL write operations",
)
def _current_span_context() -> tuple[str | None, str | None]:
    """Return the active span's (trace_id, span_id) as hex strings.

    Returns ``(None, None)`` when no valid span is recording, so callers can
    store the values directly in nullable columns.
    """
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return format(span_ctx.trace_id, "032x"), format(span_ctx.span_id, "016x")
    return None, None
def _actor_type(trigger_source: str) -> str:
return "scheduler" if trigger_source.startswith("scheduler") else "api"
# ---------------------------------------------------------------------------
# Persist functions — called after Go service returns data
# ---------------------------------------------------------------------------
def persist_forecast(
    factory: sessionmaker[Session],
    data: list[dict],
    horizon_days: int,
    trigger_source: str,
) -> None:
    """Store one AW sales-forecast run and append a matching audit entry.

    Persistence is best effort: database errors are logged at WARNING level
    and swallowed. The audit record is appended regardless of the outcome.
    """
    trace_id, span_id = _current_span_context()
    point_count = len(data)
    try:
        with factory() as session:
            row = AWSalesForecast(
                horizon_days=horizon_days,
                point_count=point_count,
                trigger_source=trigger_source,
                trace_id=trace_id,
                span_id=span_id,
                payload=data,
            )
            session.add(row)
            session.commit()
            _persist_counter.add(1, {"entity": "sales_forecast"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist AW forecast: %s", exc)
    append_audit(
        factory,
        action="forecast.generated",
        actor_type=_actor_type(trigger_source),
        actor_id=trigger_source,
        domain="aw",
        service="otel-bi-backend",
        entity_type="sales_forecast",
        payload={"horizon_days": horizon_days, "point_count": point_count},
    )
def persist_rep_scores(
    factory: sessionmaker[Session],
    data: list[dict],
    top_n: int,
    trigger_source: str,
) -> None:
    """Store one AW rep-scoring run and append a matching audit entry.

    Persistence is best effort: database errors are logged at WARNING level
    and swallowed. The audit record is appended regardless of the outcome.
    """
    trace_id, span_id = _current_span_context()
    rep_count = len(data)
    try:
        with factory() as session:
            row = AWRepScore(
                rep_count=rep_count,
                trigger_source=trigger_source,
                trace_id=trace_id,
                span_id=span_id,
                payload=data,
            )
            session.add(row)
            session.commit()
            _persist_counter.add(1, {"entity": "rep_scores"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist AW rep scores: %s", exc)
    append_audit(
        factory,
        action="scores.generated",
        actor_type=_actor_type(trigger_source),
        actor_id=trigger_source,
        domain="aw",
        service="otel-bi-backend",
        entity_type="rep_scores",
        payload={"rep_count": rep_count, "top_n": top_n},
    )
def persist_product_demand(
    factory: sessionmaker[Session],
    data: list[dict],
    top_n: int,
    trigger_source: str,
) -> None:
    """Store one AW product-demand run and append a matching audit entry.

    Persistence is best effort: database errors are logged at WARNING level
    and swallowed. The audit record is appended regardless of the outcome.
    """
    trace_id, span_id = _current_span_context()
    product_count = len(data)
    try:
        with factory() as session:
            row = AWProductDemand(
                product_count=product_count,
                top_n=top_n,
                trigger_source=trigger_source,
                trace_id=trace_id,
                span_id=span_id,
                payload=data,
            )
            session.add(row)
            session.commit()
            _persist_counter.add(1, {"entity": "product_demand"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist AW product demand: %s", exc)
    append_audit(
        factory,
        action="scores.generated",
        actor_type=_actor_type(trigger_source),
        actor_id=trigger_source,
        domain="aw",
        service="otel-bi-backend",
        entity_type="product_demand",
        payload={"product_count": product_count, "top_n": top_n},
    )
def persist_anomaly_run(
    factory: sessionmaker[Session],
    data: list[dict],
    trigger_source: str,
    series_days: int = 365,
    window_days: int = 30,
    threshold_sigma: float = 2.0,
) -> None:
    """Store one AW revenue anomaly-detection run and append an audit entry.

    Args:
        factory: PostgreSQL session factory.
        data: Annotated series points; entries with a truthy ``is_anomaly``
            key count toward ``anomaly_count``.
        trigger_source: Identifier of the caller (scheduler vs API).
        series_days: Length of the analyzed series in days. Previously
            hard-coded to 365; the default preserves existing behavior.
        window_days: Rolling-window size in days (was hard-coded to 30).
        threshold_sigma: Detection threshold in standard deviations
            (was hard-coded to 2.0).

    Persistence is best effort: database errors are logged at WARNING level
    and swallowed. The audit record is appended regardless of the outcome,
    and now reports the same parameters that were stored on the row.
    """
    anomaly_count = sum(1 for point in data if point.get("is_anomaly"))
    trace_id, span_id = _current_span_context()
    try:
        with factory() as session:
            session.add(AWAnomalyRun(
                anomaly_count=anomaly_count,
                series_days=series_days,
                window_days=window_days,
                threshold_sigma=threshold_sigma,
                trigger_source=trigger_source,
                trace_id=trace_id,
                span_id=span_id,
                payload=data,
            ))
            session.commit()
            _persist_counter.add(1, {"entity": "anomaly_run"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist AW anomaly run: %s", exc)
    append_audit(
        factory,
        action="anomaly_detection.ran",
        actor_type=_actor_type(trigger_source),
        actor_id=trigger_source,
        domain="aw",
        service="otel-bi-backend",
        entity_type="anomaly_detection",
        payload={
            "series_days": series_days,
            "window_days": window_days,
            "anomaly_count": anomaly_count,
        },
    )
# ---------------------------------------------------------------------------
# Read functions — query PostgreSQL for stored results
# ---------------------------------------------------------------------------
def list_forecasts(factory: sessionmaker[Session], limit: int = 50) -> list[dict]:
    """Return metadata for the most recent forecast runs, newest first.

    The payload series is intentionally omitted; only scalar summary
    columns are returned.
    """
    with factory() as session:
        recent = (
            session.query(AWSalesForecast)
            .order_by(AWSalesForecast.created_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "created_at": row.created_at.isoformat(),
                "horizon_days": row.horizon_days,
                "point_count": row.point_count,
                "trigger_source": row.trigger_source,
                "trace_id": row.trace_id,
            })
        return summaries
def list_rep_scores(factory: sessionmaker[Session], limit: int = 50) -> list[dict]:
    """Return the most recent rep-scoring runs (including payload), newest first."""
    with factory() as session:
        recent = (
            session.query(AWRepScore)
            .order_by(AWRepScore.computed_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "computed_at": row.computed_at.isoformat(),
                "rep_count": row.rep_count,
                "trigger_source": row.trigger_source,
                "trace_id": row.trace_id,
                "payload": row.payload,
            })
        return summaries
def list_product_demand(factory: sessionmaker[Session], limit: int = 50) -> list[dict]:
    """Return the most recent product-demand runs (including payload), newest first."""
    with factory() as session:
        recent = (
            session.query(AWProductDemand)
            .order_by(AWProductDemand.computed_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "computed_at": row.computed_at.isoformat(),
                "product_count": row.product_count,
                "top_n": row.top_n,
                "trigger_source": row.trigger_source,
                "trace_id": row.trace_id,
                "payload": row.payload,
            })
        return summaries
def list_anomaly_runs(factory: sessionmaker[Session], limit: int = 20) -> list[dict]:
    """Return metadata for the most recent anomaly-detection runs, newest first.

    The annotated series payload is intentionally omitted.
    """
    with factory() as session:
        recent = (
            session.query(AWAnomalyRun)
            .order_by(AWAnomalyRun.detected_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "detected_at": row.detected_at.isoformat(),
                "anomaly_count": row.anomaly_count,
                "series_days": row.series_days,
                "window_days": row.window_days,
                "threshold_sigma": row.threshold_sigma,
                "trigger_source": row.trigger_source,
                "trace_id": row.trace_id,
            })
        return summaries

View File

@@ -0,0 +1,77 @@
from __future__ import annotations
from datetime import datetime, timezone
from uuid import uuid4
from sqlalchemy import JSON, DateTime, Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
def _utcnow() -> datetime:
return datetime.now(timezone.utc)
class AWBase(DeclarativeBase):
    """Shared declarative base for all AW (AdventureWorks) ORM models."""
    pass
class AWSalesForecast(AWBase):
    """Persisted AW sales forecast runs.

    One row per forecast generation; the full point series lives in
    ``payload`` while scalar columns support listing and filtering.
    """
    __tablename__ = "aw_sales_forecasts"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC creation timestamp, indexed for newest-first listings.
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    horizon_days: Mapped[int] = mapped_column(Integer)
    point_count: Mapped[int] = mapped_column(Integer)
    # Who triggered the run (e.g. scheduler vs API caller).
    trigger_source: Mapped[str] = mapped_column(String(64), index=True)
    # 32-hex OTel trace id of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    # 16-hex OTel span id, paired with trace_id.
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full forecast point series stored as JSON.
    payload: Mapped[list[dict]] = mapped_column(JSON, default=list)
class AWRepScore(AWBase):
    """Persisted AW sales rep performance scoring runs.

    One row per scoring run; per-rep scores are stored in ``payload``.
    """
    __tablename__ = "aw_rep_scores"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC computation timestamp, indexed for newest-first listings.
    computed_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    rep_count: Mapped[int] = mapped_column(Integer)
    # Who triggered the run (e.g. scheduler vs API caller).
    trigger_source: Mapped[str] = mapped_column(String(64), index=True)
    # OTel trace/span hex ids of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full per-rep score list stored as JSON.
    payload: Mapped[list[dict]] = mapped_column(JSON, default=list)
class AWProductDemand(AWBase):
    """Persisted AW product demand scoring runs.

    One row per run; per-product results are stored in ``payload``.
    """
    __tablename__ = "aw_product_demand"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC computation timestamp, indexed for newest-first listings.
    computed_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    product_count: Mapped[int] = mapped_column(Integer)
    # Requested result-set size for the run.
    top_n: Mapped[int] = mapped_column(Integer)
    # Who triggered the run (e.g. scheduler vs API caller).
    trigger_source: Mapped[str] = mapped_column(String(64), index=True)
    # OTel trace/span hex ids of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full per-product demand list stored as JSON.
    payload: Mapped[list[dict]] = mapped_column(JSON, default=list)
class AWAnomalyRun(AWBase):
    """Persisted AW revenue anomaly detection runs.

    One row per detection pass; detection parameters are stored alongside
    the count so runs remain comparable if defaults change.
    """
    __tablename__ = "aw_anomaly_runs"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC detection timestamp, indexed for newest-first listings.
    detected_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    # Number of points flagged as anomalous in this run.
    anomaly_count: Mapped[int] = mapped_column(Integer)
    # Length of the analyzed series, in days.
    series_days: Mapped[int] = mapped_column(Integer)
    # Rolling-window size used for the detection bands, in days.
    window_days: Mapped[int] = mapped_column(Integer)
    # Detection threshold in standard deviations; column type inferred from
    # the float annotation.
    threshold_sigma: Mapped[float] = mapped_column(default=2.0)
    # Who triggered the run (e.g. scheduler vs API caller).
    trigger_source: Mapped[str] = mapped_column(String(64), index=True)
    # OTel trace/span hex ids of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full annotated series (date, revenue, rolling_mean, lower_band, upper_band, is_anomaly, z_score)
    payload: Mapped[list[dict]] = mapped_column(JSON, default=list)

View File

@@ -0,0 +1,131 @@
from __future__ import annotations
# ---------------------------------------------------------------------------
# AdventureWorksDW2022 — read-only MSSQL queries
# Each list contains fallback variants tried in order.
# ---------------------------------------------------------------------------
# Daily sales combining FactInternetSales + FactResellerSales
# Daily revenue/cost/quantity/order counts across both fact tables.
# Variants are tried in order; the first that executes wins.
AW_DAILY_SALES: list[str] = [
    # Preferred: internet + reseller sales, keyed through DimDate.
    # The trailing ORDER BY applies to the whole UNION ALL result.
    """
SELECT
CAST(d.FullDateAlternateKey AS date) AS sale_date,
SUM(f.SalesAmount) AS revenue,
SUM(f.TotalProductCost) AS cost,
SUM(f.OrderQuantity) AS quantity,
COUNT_BIG(*) AS orders
FROM dbo.FactInternetSales AS f
INNER JOIN dbo.DimDate AS d ON d.DateKey = f.OrderDateKey
GROUP BY CAST(d.FullDateAlternateKey AS date)
UNION ALL
SELECT
CAST(d.FullDateAlternateKey AS date) AS sale_date,
SUM(r.SalesAmount) AS revenue,
SUM(r.TotalProductCost) AS cost,
SUM(r.OrderQuantity) AS quantity,
COUNT_BIG(*) AS orders
FROM dbo.FactResellerSales AS r
INNER JOIN dbo.DimDate AS d ON d.DateKey = r.OrderDateKey
GROUP BY CAST(d.FullDateAlternateKey AS date)
ORDER BY sale_date;
""",
    # Fallback: internet sales only using OrderDate column directly
    """
SELECT
CAST(OrderDate AS date) AS sale_date,
SUM(SalesAmount) AS revenue,
SUM(TotalProductCost) AS cost,
SUM(OrderQuantity) AS quantity,
COUNT_BIG(*) AS orders
FROM dbo.FactInternetSales
GROUP BY CAST(OrderDate AS date)
ORDER BY sale_date;
""",
]
# Sales rep performance — reseller sales attributed to employees
# Reseller sales aggregated per employee; variants tried in order.
AW_REP_PERFORMANCE: list[str] = [
    # Preferred: restrict to flagged salespeople and attach territory.
    """
SELECT
e.EmployeeKey AS employee_key,
e.FirstName + ' ' + e.LastName AS rep_name,
COALESCE(e.Title, 'Sales Rep') AS rep_title,
COALESCE(st.SalesTerritoryRegion, 'Unknown') AS territory,
SUM(r.SalesAmount) AS revenue,
SUM(r.TotalProductCost) AS cost,
COUNT_BIG(*) AS orders,
AVG(r.SalesAmount) AS avg_deal_size
FROM dbo.FactResellerSales AS r
INNER JOIN dbo.DimEmployee AS e
ON e.EmployeeKey = r.EmployeeKey
INNER JOIN dbo.DimSalesTerritory AS st
ON st.SalesTerritoryKey = r.SalesTerritoryKey
WHERE e.SalesPersonFlag = 1
GROUP BY
e.EmployeeKey,
e.FirstName, e.LastName,
e.Title,
st.SalesTerritoryRegion
ORDER BY revenue DESC;
""",
    # Fallback without SalesPersonFlag filter
    """
SELECT
e.EmployeeKey AS employee_key,
e.FirstName + ' ' + e.LastName AS rep_name,
COALESCE(e.Title, 'Employee') AS rep_title,
'Unknown' AS territory,
SUM(r.SalesAmount) AS revenue,
SUM(r.TotalProductCost) AS cost,
COUNT_BIG(*) AS orders,
AVG(r.SalesAmount) AS avg_deal_size
FROM dbo.FactResellerSales AS r
INNER JOIN dbo.DimEmployee AS e ON e.EmployeeKey = r.EmployeeKey
GROUP BY e.EmployeeKey, e.FirstName, e.LastName, e.Title
ORDER BY revenue DESC;
""",
]
# Product demand — internet sales with full category hierarchy
# Internet sales aggregated per product; variants tried in order.
AW_PRODUCT_DEMAND: list[str] = [
    # Preferred: walk subcategory -> category via LEFT JOINs so products
    # without a category still appear (as 'Unknown').
    """
SELECT
p.ProductAlternateKey AS product_id,
p.EnglishProductName AS product_name,
COALESCE(pc.EnglishProductCategoryName, 'Unknown') AS category,
SUM(f.SalesAmount) AS revenue,
SUM(f.TotalProductCost) AS cost,
SUM(f.OrderQuantity) AS quantity,
COUNT_BIG(*) AS orders
FROM dbo.FactInternetSales AS f
INNER JOIN dbo.DimProduct AS p
ON p.ProductKey = f.ProductKey
LEFT JOIN dbo.DimProductSubcategory AS sc
ON sc.ProductSubcategoryKey = p.ProductSubcategoryKey
LEFT JOIN dbo.DimProductCategory AS pc
ON pc.ProductCategoryKey = sc.ProductCategoryKey
GROUP BY
p.ProductAlternateKey,
p.EnglishProductName,
pc.EnglishProductCategoryName
ORDER BY revenue DESC;
""",
    # Fallback: no category join
    """
SELECT
CAST(f.ProductKey AS nvarchar(50)) AS product_id,
COALESCE(p.EnglishProductName, CAST(f.ProductKey AS nvarchar(50))) AS product_name,
'Unknown' AS category,
SUM(f.SalesAmount) AS revenue,
SUM(f.TotalProductCost) AS cost,
SUM(f.OrderQuantity) AS quantity,
COUNT_BIG(*) AS orders
FROM dbo.FactInternetSales AS f
LEFT JOIN dbo.DimProduct AS p ON p.ProductKey = f.ProductKey
GROUP BY f.ProductKey, p.EnglishProductName
ORDER BY revenue DESC;
""",
]

View File

View File

@@ -0,0 +1,297 @@
from __future__ import annotations
import logging
from datetime import datetime, timedelta, timezone
from opentelemetry import metrics, trace
from sqlalchemy.orm import sessionmaker, Session
from app.core.audit import append_audit
from app.domain.wwi.models import (
WWIReorderRecommendation,
WWISupplierScore,
WWIWhatIfScenario,
WWIBusinessEvent,
)
# Module-level logger used for best-effort persistence warnings.
LOGGER = logging.getLogger(__name__)
# OpenTelemetry tracer/meter scoped to the WWI (WideWorldImporters) domain.
tracer = trace.get_tracer("otel-bi.domain.wwi")
meter = metrics.get_meter("otel-bi.domain.wwi")
# Incremented after each successful PostgreSQL commit, labeled by entity kind.
_persist_counter = meter.create_counter(
    "wwi_persist_writes_total",
    description="Number of WWI PostgreSQL write operations",
)
# Incremented per auto-generated LOW_STOCK (etc.) business event.
_event_counter = meter.create_counter(
    "wwi_business_events_generated_total",
    description="Business events automatically generated",
)
def _current_span_context() -> tuple[str | None, str | None]:
    """Return the active span's (trace_id, span_id) as hex strings.

    Returns ``(None, None)`` when no valid span is recording, so callers can
    store the values directly in nullable columns.
    """
    span_ctx = trace.get_current_span().get_span_context()
    if span_ctx.is_valid:
        return format(span_ctx.trace_id, "032x"), format(span_ctx.span_id, "016x")
    return None, None
def _actor_type(trigger_source: str) -> str:
return "scheduler" if trigger_source.startswith("scheduler") else "api"
# ---------------------------------------------------------------------------
# Persist functions — called after Go service returns data
# ---------------------------------------------------------------------------
def persist_reorder_recommendations(
    factory: sessionmaker[Session],
    data: list[dict],
    trigger_source: str,
) -> None:
    """Store one WWI reorder-recommendation run and append an audit entry.

    Persistence is best effort: database errors are logged at WARNING level
    and swallowed. The audit record is appended regardless of the outcome.
    """
    trace_id, span_id = _current_span_context()
    item_count = len(data)
    try:
        with factory() as session:
            row = WWIReorderRecommendation(
                item_count=item_count,
                trigger_source=trigger_source,
                trace_id=trace_id,
                span_id=span_id,
                payload=data,
            )
            session.add(row)
            session.commit()
            _persist_counter.add(1, {"entity": "reorder_recommendations"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist WWI reorder recommendations: %s", exc)
    append_audit(
        factory,
        action="recommendations.generated",
        actor_type=_actor_type(trigger_source),
        actor_id=trigger_source,
        domain="wwi",
        service="otel-bi-backend",
        entity_type="reorder_recommendations",
        payload={"item_count": item_count},
    )
def persist_supplier_scores(
    factory: sessionmaker[Session],
    data: list[dict],
    top_n: int,
    trigger_source: str,
) -> None:
    """Store one WWI supplier-scoring run and append an audit entry.

    Persistence is best effort: database errors are logged at WARNING level
    and swallowed. The audit record is appended regardless of the outcome.
    """
    trace_id, span_id = _current_span_context()
    supplier_count = len(data)
    try:
        with factory() as session:
            row = WWISupplierScore(
                supplier_count=supplier_count,
                top_n=top_n,
                trigger_source=trigger_source,
                trace_id=trace_id,
                span_id=span_id,
                payload=data,
            )
            session.add(row)
            session.commit()
            _persist_counter.add(1, {"entity": "supplier_scores"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist WWI supplier scores: %s", exc)
    append_audit(
        factory,
        action="scores.generated",
        actor_type=_actor_type(trigger_source),
        actor_id=trigger_source,
        domain="wwi",
        service="otel-bi-backend",
        entity_type="supplier_scores",
        payload={"supplier_count": supplier_count, "top_n": top_n},
    )
def persist_whatif_scenario(
    factory: sessionmaker[Session],
    result: dict,
) -> None:
    """Store one user-submitted what-if simulation result and audit it.

    Scalar fields are denormalized out of ``result`` for filtering; the full
    result dict is kept as JSON. Persistence is best effort: database errors
    are logged at WARNING level and swallowed, and the audit record is
    appended regardless of the outcome (actor is the submitting user).
    """
    trace_id, span_id = _current_span_context()
    try:
        with factory() as session:
            row = WWIWhatIfScenario(
                stock_item_key=result["stock_item_key"],
                stock_item_name=result["stock_item_name"],
                demand_multiplier=result["demand_multiplier"],
                current_stock=result["current_stock"],
                # NOTE: column stores the *adjusted* daily demand from the scenario.
                avg_daily_demand=result["adjusted_daily_demand"],
                projected_days_until_stockout=result.get("projected_days_until_stockout"),
                recommended_order_qty=float(result["recommended_order_qty"]),
                trace_id=trace_id,
                span_id=span_id,
                result=result,
            )
            session.add(row)
            session.commit()
            _persist_counter.add(1, {"entity": "whatif_scenario"})
    except Exception as exc:  # noqa: BLE001
        LOGGER.warning("Failed to persist WWI what-if scenario: %s", exc)
    append_audit(
        factory,
        action="scenario.submitted",
        actor_type="user",
        domain="wwi",
        service="otel-bi-backend",
        entity_type="whatif_scenario",
        payload={
            "stock_item_key": result["stock_item_key"],
            "demand_multiplier": result["demand_multiplier"],
            "projected_days_until_stockout": result.get("projected_days_until_stockout"),
        },
    )
# ---------------------------------------------------------------------------
# Business events — generated from reorder data in Python (PostgreSQL writes)
# ---------------------------------------------------------------------------
def generate_stock_events(
    factory: sessionmaker[Session],
    recommendations: list[dict],
) -> None:
    """Write LOW_STOCK events for HIGH-urgency items, deduplicating within 24h."""
    trace_id, span_id = _current_span_context()
    # Events newer than this cutoff suppress duplicates for the same item.
    cutoff = datetime.now(timezone.utc) - timedelta(hours=24)
    try:
        with factory() as session:
            for item in recommendations:
                # Only HIGH-urgency recommendations become events.
                if item.get("urgency") != "HIGH":
                    continue
                entity_key = str(item["stock_item_key"])
                # Dedup lookup: one query per candidate item (N+1 pattern;
                # acceptable for small recommendation lists).
                existing = (
                    session.query(WWIBusinessEvent)
                    .filter(
                        WWIBusinessEvent.event_type == "LOW_STOCK",
                        WWIBusinessEvent.entity_key == entity_key,
                        WWIBusinessEvent.occurred_at >= cutoff,
                    )
                    .first()
                )
                if existing:
                    continue
                # Human-readable time-to-stockout; a missing/None value means
                # the item is treated as already exhausted.
                days_str = (
                    f"{item['days_until_stockout']:.1f} days"
                    if item.get("days_until_stockout") is not None
                    else "immediately"
                )
                session.add(WWIBusinessEvent(
                    event_type="LOW_STOCK",
                    severity="HIGH",
                    entity_key=entity_key,
                    entity_name=item["stock_item_name"],
                    message=(
                        f"Stock for '{item['stock_item_name']}' will be exhausted in {days_str}. "
                        f"Current stock: {item['current_stock']:.0f} units, "
                        f"daily demand: {item['avg_daily_demand']:.1f} units."
                    ),
                    trace_id=trace_id,
                    span_id=span_id,
                    details={
                        "current_stock": item["current_stock"],
                        "avg_daily_demand": item["avg_daily_demand"],
                        "recommended_reorder_qty": item["recommended_reorder_qty"],
                    },
                ))
                # NOTE(review): the counter increments before the single
                # commit below; a failed commit would still have counted the
                # events — confirm this over-count is acceptable.
                _event_counter.add(1, {"event_type": "LOW_STOCK"})
            # One commit for the whole batch of new events.
            session.commit()
    except Exception as exc:  # noqa: BLE001
        # Best effort: event generation must never break the caller.
        LOGGER.warning("Failed to persist WWI business events: %s", exc)
# ---------------------------------------------------------------------------
# Read functions — query PostgreSQL for stored results
# ---------------------------------------------------------------------------
def get_business_events(factory: sessionmaker[Session], limit: int = 100) -> list[dict]:
    """Return the most recent business events, newest first.

    The read is wrapped in its own span ("wwi.analytics.business_events").
    """
    with tracer.start_as_current_span("wwi.analytics.business_events"):
        with factory() as session:
            events = (
                session.query(WWIBusinessEvent)
                .order_by(WWIBusinessEvent.occurred_at.desc())
                .limit(limit)
                .all()
            )
            results: list[dict] = []
            for event in events:
                results.append({
                    "id": event.id,
                    "occurred_at": event.occurred_at.isoformat(),
                    "event_type": event.event_type,
                    "severity": event.severity,
                    "entity_key": event.entity_key,
                    "entity_name": event.entity_name,
                    "message": event.message,
                    "trace_id": event.trace_id,
                    "details": event.details,
                })
            return results
def list_reorder_recommendations(factory: sessionmaker[Session], limit: int = 50) -> list[dict]:
    """Return metadata for recent reorder-recommendation runs, newest first.

    The payload is intentionally omitted; only scalar summary columns are
    returned.
    """
    with factory() as session:
        recent = (
            session.query(WWIReorderRecommendation)
            .order_by(WWIReorderRecommendation.created_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "created_at": row.created_at.isoformat(),
                "item_count": row.item_count,
                "trigger_source": row.trigger_source,
                "trace_id": row.trace_id,
            })
        return summaries
def list_supplier_scores(factory: sessionmaker[Session], limit: int = 50) -> list[dict]:
    """Return recent supplier-scoring runs (including payload), newest first."""
    with factory() as session:
        recent = (
            session.query(WWISupplierScore)
            .order_by(WWISupplierScore.computed_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "computed_at": row.computed_at.isoformat(),
                "supplier_count": row.supplier_count,
                "top_n": row.top_n,
                "trigger_source": row.trigger_source,
                "trace_id": row.trace_id,
                "payload": row.payload,
            })
        return summaries
def list_whatif_scenarios(factory: sessionmaker[Session], limit: int = 50) -> list[dict]:
    """Return recent what-if scenario results (including full result), newest first."""
    with factory() as session:
        recent = (
            session.query(WWIWhatIfScenario)
            .order_by(WWIWhatIfScenario.created_at.desc())
            .limit(limit)
            .all()
        )
        summaries: list[dict] = []
        for row in recent:
            summaries.append({
                "id": row.id,
                "created_at": row.created_at.isoformat(),
                "stock_item_key": row.stock_item_key,
                "stock_item_name": row.stock_item_name,
                "demand_multiplier": row.demand_multiplier,
                "projected_days_until_stockout": row.projected_days_until_stockout,
                "recommended_order_qty": row.recommended_order_qty,
                "result": row.result,
            })
        return summaries

View File

@@ -0,0 +1,80 @@
from __future__ import annotations
from datetime import datetime, timezone
from uuid import uuid4
from sqlalchemy import JSON, DateTime, Float, Integer, String, Text
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column
def _utcnow() -> datetime:
return datetime.now(timezone.utc)
class WWIBase(DeclarativeBase):
    """Shared declarative base for all WWI (WideWorldImporters) ORM models."""
    pass
class WWIReorderRecommendation(WWIBase):
    """Persisted WWI stock reorder recommendation runs.

    One row per run; per-item recommendations live in ``payload``.
    """
    __tablename__ = "wwi_reorder_recommendations"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC creation timestamp, indexed for newest-first listings.
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    item_count: Mapped[int] = mapped_column(Integer)
    # Who triggered the run (e.g. scheduler vs API caller).
    trigger_source: Mapped[str] = mapped_column(String(64), index=True)
    # OTel trace/span hex ids of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full per-item recommendation list stored as JSON.
    payload: Mapped[list[dict]] = mapped_column(JSON, default=list)
class WWISupplierScore(WWIBase):
    """Persisted WWI supplier reliability scoring runs.

    One row per run; per-supplier scores live in ``payload``.
    """
    __tablename__ = "wwi_supplier_scores"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC computation timestamp, indexed for newest-first listings.
    computed_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    supplier_count: Mapped[int] = mapped_column(Integer)
    # Requested result-set size for the run.
    top_n: Mapped[int] = mapped_column(Integer)
    # Who triggered the run (e.g. scheduler vs API caller).
    trigger_source: Mapped[str] = mapped_column(String(64), index=True)
    # OTel trace/span hex ids of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full per-supplier score list stored as JSON.
    payload: Mapped[list[dict]] = mapped_column(JSON, default=list)
class WWIWhatIfScenario(WWIBase):
    """User-submitted what-if simulation results.

    Scalar inputs/outputs are denormalized into columns for filtering; the
    complete simulation output is kept in ``result``.
    """
    __tablename__ = "wwi_whatif_scenarios"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC submission timestamp, indexed for newest-first listings.
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    # Stock item the scenario simulates (indexed for per-item history).
    stock_item_key: Mapped[int] = mapped_column(Integer, index=True)
    stock_item_name: Mapped[str] = mapped_column(String(200))
    # User-chosen demand scaling factor applied in the simulation.
    demand_multiplier: Mapped[float] = mapped_column(Float)
    current_stock: Mapped[float] = mapped_column(Float)
    # Adjusted (post-multiplier) daily demand used by the scenario.
    avg_daily_demand: Mapped[float] = mapped_column(Float)
    # None when the scenario projects no stockout.
    projected_days_until_stockout: Mapped[float | None] = mapped_column(Float, nullable=True)
    recommended_order_qty: Mapped[float] = mapped_column(Float)
    # OTel trace/span hex ids of the submitting request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Full simulation output dict stored as JSON.
    result: Mapped[dict] = mapped_column(JSON, default=dict)
class WWIBusinessEvent(WWIBase):
    """Automatically generated business alert events.

    Written by event-generation code (e.g. low-stock detection); deduplicated
    by (event_type, entity_key) over a time window at write time.
    """
    __tablename__ = "wwi_business_events"
    # Client-generated UUID primary key.
    id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: str(uuid4()))
    # Aware-UTC event timestamp, indexed for newest-first listings and dedup.
    occurred_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), default=_utcnow, index=True)
    event_type: Mapped[str] = mapped_column(String(50), index=True)  # LOW_STOCK, ORDER_DROP, SUPPLIER_RISK
    severity: Mapped[str] = mapped_column(String(20), index=True)  # HIGH, MEDIUM, LOW
    # Key of the affected entity (e.g. stock item key, as a string).
    entity_key: Mapped[str] = mapped_column(String(100), index=True)
    entity_name: Mapped[str] = mapped_column(String(200))
    # Human-readable alert message.
    message: Mapped[str] = mapped_column(Text)
    # OTel trace/span hex ids of the generating request, if a span was active.
    trace_id: Mapped[str | None] = mapped_column(String(32), nullable=True, index=True)
    span_id: Mapped[str | None] = mapped_column(String(16), nullable=True)
    # Structured context for the event (stock levels, demand, etc.).
    details: Mapped[dict] = mapped_column(JSON, default=dict)

View File

@@ -0,0 +1,171 @@
from __future__ import annotations
# ---------------------------------------------------------------------------
# WideWorldImportersDW — read-only MSSQL queries
#
# Column names in this DW use spaces and require bracket notation.
# Each list contains fallback variants tried in order.
# ---------------------------------------------------------------------------
# Daily sales from Fact.Sale joined to Dimension.Date
# Daily sales from Fact.Sale joined to Dimension.Date.
# Variants are tried in order; bracket notation is required because WWI
# column names contain spaces.
WWI_DAILY_SALES: list[str] = [
    # Preferred: key by Delivery Date Key.
    """
SELECT
d.[Date] AS sale_date,
SUM(s.[Total Excluding Tax]) AS revenue,
SUM(s.[Total Excluding Tax] - s.[Profit]) AS cost,
SUM(CAST(s.[Quantity] AS FLOAT)) AS quantity,
COUNT_BIG(*) AS orders
FROM [Fact].[Sale] AS s
INNER JOIN [Dimension].[Date] AS d
ON d.[Date Key] = s.[Delivery Date Key]
GROUP BY d.[Date]
ORDER BY d.[Date];
""",
    # Fallback: use Invoice Date Key if Delivery Date Key is missing
    """
SELECT
d.[Date] AS sale_date,
SUM(s.[Total Excluding Tax]) AS revenue,
SUM(s.[Total Excluding Tax] - s.[Profit]) AS cost,
SUM(CAST(s.[Quantity] AS FLOAT)) AS quantity,
COUNT_BIG(*) AS orders
FROM [Fact].[Sale] AS s
INNER JOIN [Dimension].[Date] AS d
ON d.[Date Key] = s.[Invoice Date Key]
GROUP BY d.[Date]
ORDER BY d.[Date];
""",
]
# Current stock levels per stock item (net movement quantity)
# Current stock levels per stock item (net movement quantity).
# Variants tried in order; [Stock Item Key] 0 is excluded (unknown member row).
WWI_STOCK_LEVELS: list[str] = [
    # Preferred: sum signed movement quantities per item.
    """
SELECT
si.[Stock Item Key] AS stock_item_key,
si.[Stock Item] AS stock_item_name,
si.[Unit Price] AS unit_price,
si.[Lead Time Days] AS lead_time_days,
SUM(CAST(m.[Quantity] AS FLOAT)) AS current_stock
FROM [Dimension].[Stock Item] AS si
LEFT JOIN [Fact].[Movement] AS m
ON m.[Stock Item Key] = si.[Stock Item Key]
WHERE si.[Stock Item Key] <> 0
GROUP BY
si.[Stock Item Key],
si.[Stock Item],
si.[Unit Price],
si.[Lead Time Days];
""",
    # Fallback: without movement (returns 0 stock)
    """
SELECT
si.[Stock Item Key] AS stock_item_key,
si.[Stock Item] AS stock_item_name,
si.[Unit Price] AS unit_price,
si.[Lead Time Days] AS lead_time_days,
CAST(0 AS FLOAT) AS current_stock
FROM [Dimension].[Stock Item] AS si
WHERE si.[Stock Item Key] <> 0;
""",
]
# 90-day demand velocity per stock item from Fact.Sale
# 90-day demand velocity per stock item from Fact.Sale.
# Variants tried in order: delivery-date keyed first, invoice-date fallback.
WWI_DEMAND_VELOCITY: list[str] = [
    """
SELECT
s.[Stock Item Key] AS stock_item_key,
SUM(CAST(s.[Quantity] AS FLOAT)) AS qty_sold_90d,
COUNT_BIG(DISTINCT s.[WWI Invoice ID]) AS invoice_count_90d
FROM [Fact].[Sale] AS s
INNER JOIN [Dimension].[Date] AS d
ON d.[Date Key] = s.[Delivery Date Key]
WHERE d.[Date] >= DATEADD(day, -90, GETDATE())
AND s.[Stock Item Key] <> 0
GROUP BY s.[Stock Item Key];
""",
    # Fallback keyed by Invoice Date Key.
    """
SELECT
s.[Stock Item Key] AS stock_item_key,
SUM(CAST(s.[Quantity] AS FLOAT)) AS qty_sold_90d,
COUNT_BIG(DISTINCT s.[WWI Invoice ID]) AS invoice_count_90d
FROM [Fact].[Sale] AS s
INNER JOIN [Dimension].[Date] AS d
ON d.[Date Key] = s.[Invoice Date Key]
WHERE d.[Date] >= DATEADD(day, -90, GETDATE())
AND s.[Stock Item Key] <> 0
GROUP BY s.[Stock Item Key];
""",
]
# Supplier reliability data from Fact.Purchase
# Supplier reliability data from Fact.Purchase; variants tried in order.
WWI_SUPPLIER_PERFORMANCE: list[str] = [
    # Preferred: counts finalized orders via the Is Order Finalized flag.
    """
SELECT
sup.[Supplier Key] AS supplier_key,
sup.[Supplier] AS supplier_name,
sup.[Category] AS category,
COUNT_BIG(*) AS total_orders,
SUM(CAST(p.[Ordered Outers] AS FLOAT)) AS total_ordered_outers,
SUM(CAST(p.[Received Outers] AS FLOAT)) AS total_received_outers,
SUM(CASE WHEN p.[Is Order Finalized] = 1 THEN 1 ELSE 0 END) AS finalized_orders
FROM [Dimension].[Supplier] AS sup
INNER JOIN [Fact].[Purchase] AS p
ON p.[Supplier Key] = sup.[Supplier Key]
WHERE sup.[Supplier Key] <> 0
GROUP BY
sup.[Supplier Key],
sup.[Supplier],
sup.[Category]
ORDER BY total_orders DESC;
""",
    # Fallback: without Is Order Finalized
    # (reports every order as finalized, keeping the result shape identical).
    """
SELECT
sup.[Supplier Key] AS supplier_key,
sup.[Supplier] AS supplier_name,
sup.[Category] AS category,
COUNT_BIG(*) AS total_orders,
SUM(CAST(p.[Ordered Outers] AS FLOAT)) AS total_ordered_outers,
SUM(CAST(p.[Received Outers] AS FLOAT)) AS total_received_outers,
COUNT_BIG(*) AS finalized_orders
FROM [Dimension].[Supplier] AS sup
INNER JOIN [Fact].[Purchase] AS p
ON p.[Supplier Key] = sup.[Supplier Key]
WHERE sup.[Supplier Key] <> 0
GROUP BY
sup.[Supplier Key],
sup.[Supplier],
sup.[Category]
ORDER BY total_orders DESC;
""",
]
# Single stock item detail for what-if scenario computation
# Single stock item detail for what-if scenario computation.
# ``:stock_item_key`` is a named bind parameter supplied at execution time.
WWI_STOCK_ITEM_DETAIL = """
SELECT
si.[Stock Item Key] AS stock_item_key,
si.[Stock Item] AS stock_item_name,
si.[Unit Price] AS unit_price,
si.[Lead Time Days] AS lead_time_days,
COALESCE(SUM(CAST(m.[Quantity] AS FLOAT)), 0) AS current_stock
FROM [Dimension].[Stock Item] AS si
LEFT JOIN [Fact].[Movement] AS m
ON m.[Stock Item Key] = si.[Stock Item Key]
WHERE si.[Stock Item Key] = :stock_item_key
GROUP BY
si.[Stock Item Key],
si.[Stock Item],
si.[Unit Price],
si.[Lead Time Days];
"""
# Average daily demand for one stock item over the trailing 90 days.
# ``:stock_item_key`` is a named bind parameter supplied at execution time.
WWI_STOCK_ITEM_DEMAND = """
SELECT
SUM(CAST(s.[Quantity] AS FLOAT)) / NULLIF(90.0, 0) AS avg_daily_demand
FROM [Fact].[Sale] AS s
INNER JOIN [Dimension].[Date] AS d
ON d.[Date Key] = s.[Delivery Date Key]
WHERE s.[Stock Item Key] = :stock_item_key
AND d.[Date] >= DATEADD(day, -90, GETDATE());
"""