from sqlalchemy.orm import Session
from typing import List, Optional
from datetime import datetime, date
from sqlalchemy import func
from . import models
from ..products.models import Product, Location
import logging
import pandas as pd

logger = logging.getLogger(__name__)


# Sales Order Controllers
def get_sales_orders(db: Session, skip: int = 0, limit: int = 100) -> List[models.SalesOrder]:
    """Return a page of sales orders.

    Args:
        db: Active SQLAlchemy session.
        skip: Number of leading rows to skip (offset).
        limit: Maximum number of rows to return.
    """
    orders = db.query(models.SalesOrder)
    return orders.offset(skip).limit(limit).all()


# Sales Order Line Controllers
def get_sales_order_lines(db: Session, skip: int = 0, limit: int = 100) -> List[models.SalesOrderLine]:
    """Return a page of sales order lines.

    Args:
        db: Active SQLAlchemy session.
        skip: Number of leading rows to skip (offset).
        limit: Maximum number of rows to return.
    """
    lines_query = db.query(models.SalesOrderLine).offset(skip).limit(limit)
    return lines_query.all()


# Inventory Batch Controllers
def get_inventory_batches(db: Session, skip: int = 0, limit: int = 100) -> List[models.InventoryBatch]:
    """Return a page of inventory batches.

    Args:
        db: Active SQLAlchemy session.
        skip: Number of leading rows to skip (offset).
        limit: Maximum number of rows to return.
    """
    batch_query = db.query(models.InventoryBatch)
    return batch_query.offset(skip).limit(limit).all()


# Inventory Movement Controllers
def get_inventory_movements(db: Session, skip: int = 0, limit: int = 100) -> List[models.InventoryMovement]:
    """Return a page of inventory movements.

    Args:
        db: Active SQLAlchemy session.
        skip: Number of leading rows to skip (offset).
        limit: Maximum number of rows to return.
    """
    movements = db.query(models.InventoryMovement).offset(skip).limit(limit)
    return movements.all()


# Purchase Order Controllers
def get_purchase_orders(db: Session, skip: int = 0, limit: int = 100) -> List[models.PurchaseOrder]:
    """Return a page of purchase orders.

    Args:
        db: Active SQLAlchemy session.
        skip: Number of leading rows to skip (offset).
        limit: Maximum number of rows to return.
    """
    po_query = db.query(models.PurchaseOrder)
    return po_query.offset(skip).limit(limit).all()


# Purchase Order Line Controllers
def get_purchase_order_lines(db: Session, skip: int = 0, limit: int = 100) -> List[models.PurchaseOrderLine]:
    """Return a page of purchase order lines.

    Args:
        db: Active SQLAlchemy session.
        skip: Number of leading rows to skip (offset).
        limit: Maximum number of rows to return.
    """
    line_query = db.query(models.PurchaseOrderLine).offset(skip).limit(limit)
    return line_query.all()


# Daily Sales Controllers
def get_daily_sales(db: Session, date: Optional[str] = None, company_id: Optional[int] = None, page: int = 1, perpage: int = 10) -> dict:
    """Return paginated daily sales rows joined with product and store info.

    Args:
        db: Active SQLAlchemy session.
        date: Optional YYYY-MM-DD day filter; an unparsable value is
            deliberately ignored and all records are returned.
        company_id: Optional company filter.
        page: 1-based page number.
        perpage: Page size.

    Returns:
        dict with "data" (flat rows), "page", "perpage" and "total".
    """
    base = (
        db.query(
            models.DailySales,
            Product.product_name,
            Product.brand_name,
            Location.location_name,
        )
        .join(Product, models.DailySales.product_id == Product.product_id)
        .outerjoin(Location, models.DailySales.location_id == Location.location_id)
    )

    if date:
        try:
            target_date = datetime.strptime(date, "%Y-%m-%d").date()
        except ValueError:
            # Invalid date strings are intentionally ignored (no filter applied).
            pass
        else:
            # Compare only the calendar-day part of sale_date.
            base = base.filter(func.date(models.DailySales.sale_date) == target_date)

    if company_id is not None:
        base = base.filter(models.DailySales.company_id == company_id)

    # Count before applying offset/limit so "total" reflects the full result set.
    total = base.count()
    rows = base.offset((page - 1) * perpage).limit(perpage).all()

    data = [
        {
            "id": sale.id,
            "company_id": sale.company_id,
            "product_id": sale.product_id,
            "location_id": sale.location_id,
            "sale_date": sale.sale_date,
            "quantity_sold": sale.quantity_sold,
            "total_amount": sale.total_amount,
            "created_at": sale.created_at,
            "updated_at": sale.updated_at,
            "product_name": product_name,
            "brand_name": brand_name,
            "location_name": location_name,
        }
        for sale, product_name, brand_name, location_name in rows
    ]

    return {
        "data": data,
        "page": page,
        "perpage": perpage,
        "total": total,
    }


# Service Level Controllers
def get_service_level(db: Session, date: Optional[str] = None, page: int = 1, perpage: int = 10) -> dict:
    """Return paginated service-level rows joined with product/location info.

    Args:
        db: Active SQLAlchemy session.
        date: Optional YYYY-MM-DD day filter; an unparsable value is
            deliberately ignored and all records are returned.
        page: 1-based page number.
        perpage: Page size.

    Returns:
        dict with "data" (flat rows), "page", "perpage" and "total".
    """
    base = (
        db.query(
            models.ServiceLevelDaily,
            Product.product_name,
            Product.brand_name,
            Location.location_name,
        )
        .join(Product, models.ServiceLevelDaily.product_id == Product.product_id)
        .outerjoin(Location, models.ServiceLevelDaily.location_id == Location.location_id)
    )

    if date:
        try:
            target_date = datetime.strptime(date, "%Y-%m-%d").date()
        except ValueError:
            # Invalid date strings are intentionally ignored (no filter applied).
            pass
        else:
            base = base.filter(models.ServiceLevelDaily.date == target_date)

    # Count before applying offset/limit so "total" reflects the full result set.
    total = base.count()
    rows = base.offset((page - 1) * perpage).limit(perpage).all()

    data = [
        {
            "id": record.id,
            "date": record.date,
            "company_id": record.company_id,
            "location_id": record.location_id,
            "product_id": record.product_id,
            "demand_qty": record.demand_qty,
            "fulfilled_qty": record.fulfilled_qty,
            "lost_sales_qty": record.lost_sales_qty,
            "service_level": record.service_level,
            "created_at": record.created_at,
            "product_name": product_name,
            "brand_name": brand_name,
            "location_name": location_name,
        }
        for record, product_name, brand_name, location_name in rows
    ]

    return {
        "data": data,
        "page": page,
        "perpage": perpage,
        "total": total,
    }


# Inventory Snapshot Controllers
def get_inventory_snapshots(db: Session, date: Optional[str] = None, page: int = 1, perpage: int = 10) -> dict:
    """Return paginated daily inventory snapshots joined with product/location info.

    Args:
        db: Active SQLAlchemy session.
        date: Optional YYYY-MM-DD day filter; an unparsable value is
            deliberately ignored and all records are returned.
        page: 1-based page number.
        perpage: Page size.

    Returns:
        dict with "data" (flat rows), "page", "perpage" and "total".
    """
    base = (
        db.query(
            models.InventorySnapshotDaily,
            Product.product_name,
            Product.brand_name,
            Location.location_name,
        )
        .join(Product, models.InventorySnapshotDaily.product_id == Product.product_id)
        .outerjoin(Location, models.InventorySnapshotDaily.location_id == Location.location_id)
    )

    if date:
        try:
            target_date = datetime.strptime(date, "%Y-%m-%d").date()
        except ValueError:
            # Invalid date strings are intentionally ignored (no filter applied).
            pass
        else:
            base = base.filter(models.InventorySnapshotDaily.snapshot_date == target_date)

    # Count before applying offset/limit so "total" reflects the full result set.
    total = base.count()
    rows = base.offset((page - 1) * perpage).limit(perpage).all()

    data = [
        {
            "id": snap.id,
            "snapshot_date": snap.snapshot_date,
            "company_id": snap.company_id,
            "location_id": snap.location_id,
            "product_id": snap.product_id,
            "on_hand_qty": snap.on_hand_qty,
            "inbound_qty": snap.inbound_qty,
            "outbound_qty": snap.outbound_qty,
            "created_at": snap.created_at,
            "product_name": product_name,
            "brand_name": brand_name,
            "location_name": location_name,
        }
        for snap, product_name, brand_name, location_name in rows
    ]

    return {
        "data": data,
        "page": page,
        "perpage": perpage,
        "total": total,
    }


# Inventory Planning Controllers
def get_inventory_planning(db: Session, page: int = 1, perpage: int = 100, company_id: Optional[int] = None, location_id: Optional[int] = None, product_id: Optional[int] = None, status: Optional[str] = None) -> dict:
    """Get inventory planning data with stock levels, reorder points, and status.

    Args:
        db: Active SQLAlchemy session.
        page: 1-based page number, applied to the ProductLocation base list.
        perpage: Page size.
        company_id: Optional company filter.
        location_id: Optional location filter.
        product_id: Optional product filter.
        status: Optional post-filter on the computed status
            ("in_stock", "low_stock", "out_of_stock", "slow_mover", "dead_stock").

    Returns:
        dict with "data", "page", "perpage" and "total". NOTE: "total" counts
        ProductLocation rows BEFORE the ``status`` post-filter, so a filtered
        page may contain fewer than ``perpage`` items (known trade-off; see
        the note at the bottom).
    """
    from ..products.models import Product, ProductLocation
    from . import models
    from sqlalchemy import func, desc
    
    # 1. Base query: ProductLocation (all active products at locations)
    # We join with Product to get details
    query = db.query(
        ProductLocation.company_id,
        ProductLocation.location_id,
        ProductLocation.product_id,
        Product.product_name,
        # Product.sku, # Assuming SKU exists or we use product_id/name
    ).join(
        Product, ProductLocation.product_id == Product.product_id
    )

    if company_id:
        query = query.filter(ProductLocation.company_id == company_id)
    if location_id:
        query = query.filter(ProductLocation.location_id == location_id)
    if product_id:
        query = query.filter(ProductLocation.product_id == product_id)

    # Pagination for the base list (count taken before offset/limit)
    total = query.count()
    query = query.offset((page - 1) * perpage).limit(perpage)
    product_locs = query.all()

    results = []
    
    # TODO: Optimize this N+1 query pattern if performance becomes an issue.
    # For now, it's safer to query details per item to ensure correct joins.
    
    for pl in product_locs:
        c_id = pl.company_id
        l_id = pl.location_id
        p_id = pl.product_id
        p_name = pl.product_name
        
        # 2. Current on-hand quantity: sum of active batch quantities for this
        # company/location/product; defaults to 0.0 when there are no batches.
        on_hand = db.query(func.sum(models.InventoryBatch.quantity_on_hand)).filter(
            models.InventoryBatch.company_id == c_id,
            models.InventoryBatch.location_id == l_id,
            models.InventoryBatch.product_id == p_id,
            models.InventoryBatch.status == 'active' # assumes 'active' is a valid status value — TODO confirm against the enum
        ).scalar() or 0.0

        # 3. Latest slow-mover snapshot for this item (most recent snapshot_date)
        sm_snapshot = db.query(models.SlowMoverSnapshot).filter(
            models.SlowMoverSnapshot.company_id == c_id,
            models.SlowMoverSnapshot.location_id == l_id,
            models.SlowMoverSnapshot.product_id == p_id
        ).order_by(desc(models.SlowMoverSnapshot.snapshot_date)).first()

        is_slow = False
        severity = None
        if sm_snapshot and sm_snapshot.is_slow_mover:
            is_slow = True
            severity = sm_snapshot.slow_mover_severity

        # 4. Static reorder values (TODO: make dynamic). With reorder_point=0
        # the "low_stock" branch below is currently unreachable (on_hand == 0
        # is caught first).
        reorder_point = 0
        reorder_qty = 0

        # 5. Determine status. Base classification by stock level first...
        item_status = "in_stock"
        
        if on_hand == 0:
            item_status = "out_of_stock"
        elif on_hand <= reorder_point:
            item_status = "low_stock"
        
        # ...then override by slow-mover severity. Effective priority:
        # dead_stock > (out_of_stock / low_stock) > slow_mover > in_stock.
        # A "slow" item with zero stock keeps its out_of_stock status.
        
        if severity == "dead":
            item_status = "dead_stock"
        elif severity == "slow":
             # If it's slow but also out of stock, it's effectively dead/gone.
             # If it's slow and we have stock, it's a slow mover.
             if on_hand > 0:
                 item_status = "slow_mover"
        
        # Post-filter by requested status (applied after pagination; see note below)
        if status and item_status != status:
            continue

        results.append({
            "company_id": c_id,
            "location_id": l_id,
            "product_id": p_id,
            "product_name": p_name,
            "on_hand_qty": on_hand,
            "reorder_point": reorder_point,
            "reorder_qty": reorder_qty,
            "status": item_status,
            "slow_mover_severity": severity
        })

    # Note: Pagination was applied to ProductLocation, but post-filtering by
    # status might reduce the page size. This is a known trade-off with
    # computed fields. For a true paginated filtered view, we'd need a
    # complex SQL query.
    
    return {
        "data": results,
        "page": page,
        "perpage": perpage,
        "total": total # Total ProductLocation rows, not the status-filtered total
    }


# Slow Mover Controllers
def get_slow_movers(db: Session, page: int = 1, perpage: int = 100, date: Optional[str] = None, company_id: Optional[int] = None, location_id: Optional[int] = None, product_id: Optional[int] = None, is_slow_mover: Optional[bool] = None) -> dict:
    """Get slow mover snapshots with pagination and filters.

    Args:
        db: Active SQLAlchemy session.
        page: 1-based page number.
        perpage: Page size.
        date: Snapshot date as YYYY-MM-DD. Defaults to today when missing;
            a malformed value also falls back to today instead of being
            passed to the database as a raw string.
        company_id: Optional company filter.
        location_id: Optional location filter.
        product_id: Optional product filter.
        is_slow_mover: Optional filter on the slow-mover flag.

    Returns:
        dict with "items" (flat rows joined with product/location/company
        names), "page", "perpage" and "total".
    """
    from ..products.models import Product, Location, Company
    from . import models
    from datetime import date as date_type
    
    query = db.query(
        models.SlowMoverSnapshot,
        Product.product_name,
        # product_id doubles as the SKU identifier in the response payload
        Product.product_id.label("product_sku"),
        Location.location_name,
        Company.company_name
    ).join(
        Product, models.SlowMoverSnapshot.product_id == Product.product_id
    ).join(
        Location, models.SlowMoverSnapshot.location_id == Location.location_id
    ).join(
        Company, models.SlowMoverSnapshot.company_id == Company.company_id
    )
    
    # Resolve the snapshot date. Parse the YYYY-MM-DD string (consistent with
    # the other date filters in this module) rather than comparing the raw
    # string against the Date column; default to today when absent or invalid.
    target_date = date_type.today()
    if date:
        try:
            target_date = datetime.strptime(date, "%Y-%m-%d").date()
        except ValueError:
            pass
    query = query.filter(models.SlowMoverSnapshot.snapshot_date == target_date)
    
    if company_id:
        query = query.filter(models.SlowMoverSnapshot.company_id == company_id)
        
    if location_id:
        query = query.filter(models.SlowMoverSnapshot.location_id == location_id)
        
    if product_id:
        query = query.filter(models.SlowMoverSnapshot.product_id == product_id)
        
    if is_slow_mover is not None:
        query = query.filter(models.SlowMoverSnapshot.is_slow_mover == is_slow_mover)
        
    # Total count for pagination (taken before offset/limit)
    total = query.count()
    
    # Pagination
    query = query.offset((page - 1) * perpage).limit(perpage)
    
    results = query.all()
    
    # Return flat dicts; the router/schema layer handles any transformation
    mapped_results = []
    for row in results:
        snapshot, product_name, product_sku, location_name, company_name = row
        
        item_data = {
            "id": snapshot.id,
            "snapshot_date": snapshot.snapshot_date,
            "company_id": snapshot.company_id,
            "company_name": company_name,
            "location_id": snapshot.location_id,
            "location_name": location_name,
            "product_id": snapshot.product_id,
            "product_name": product_name,
            "product_sku": str(product_sku),
            "on_hand_qty": snapshot.on_hand_qty,
            "total_sold_90d": snapshot.total_sold_90d,
            # ads/doh are numeric columns; the API contract exposes them as ints
            "ads_90d": int(snapshot.ads_90d) if snapshot.ads_90d is not None else None,
            "doh_90d": int(snapshot.doh_90d) if snapshot.doh_90d is not None else None,
            "days_since_last_sale": snapshot.days_since_last_sale,
            "is_slow_mover": snapshot.is_slow_mover,
            "slow_mover_severity": snapshot.slow_mover_severity,
            "slow_mover_reason": snapshot.slow_mover_reason,
            "created_at": snapshot.created_at
        }
        
        mapped_results.append(item_data)
    
    return {
        "items": mapped_results,
        "page": page,
        "perpage": perpage,
        "total": total
    }
    
def get_analytics_overview(
    db: Session, 
    date: Optional[str] = None, 
    company_id: Optional[int] = None, 
    location_id: Optional[int] = None, 
    product_id: Optional[int] = None
) -> dict:
    """Return overview counts for the analytics dashboard.

    Counts, for a single snapshot date (defaults to today):
    - slow_mover_count: slow-mover snapshots flagged is_slow_mover
    - over_stock_count / under_stock_count: inventory planning snapshots
      whose stock_status is 'Overstocked' / 'Understocked'

    Args:
        db: Active SQLAlchemy session.
        date: Snapshot date as YYYY-MM-DD string; defaults to today. NOTE:
            the string is compared directly against the date columns —
            assumes the DB coerces ISO date strings; verify per dialect.
        company_id: Optional company filter.
        location_id: Optional location filter.
        product_id: Optional product filter.
    """
    from ..products.models import Product, Location, Company
    from . import models
    from datetime import date as date_class
    
    # Use today's date if not provided
    if not date:
        date = date_class.today().strftime("%Y-%m-%d")
    
    # First query: flagged slow movers for the date
    query = db.query(
        models.SlowMoverSnapshot
    ).filter(
        models.SlowMoverSnapshot.is_slow_mover == True
    )
    
    query = query.filter(models.SlowMoverSnapshot.snapshot_date == date)
    
    if company_id:
        query = query.filter(models.SlowMoverSnapshot.company_id == company_id)
        
    if location_id:
        query = query.filter(models.SlowMoverSnapshot.location_id == location_id)
        
    if product_id:
        query = query.filter(models.SlowMoverSnapshot.product_id == product_id)
        
    slow_mover_count = query.count()
    
    # Second query: planning snapshots for the same date (reuses the
    # `query` name — independent of the slow-mover query above)
    query = db.query(
        models.InventoryPlanningSnapshot
    )
    
    query = query.filter(models.InventoryPlanningSnapshot.snapshot_date == date)
    
    if company_id:
        query = query.filter(models.InventoryPlanningSnapshot.company_id == company_id)
        
    if location_id:
        query = query.filter(models.InventoryPlanningSnapshot.location_id == location_id)
        
    if product_id:
        query = query.filter(models.InventoryPlanningSnapshot.product_id == product_id)
    
    rows = query.all()
    
    # Tally stock statuses in Python (statuses other than these two are ignored)
    over_stock_count = 0
    under_stock_count = 0
    for row in rows:
        if row.stock_status == 'Overstocked':
            over_stock_count += 1
        elif row.stock_status == 'Understocked':
            under_stock_count += 1    
    
    return {
        "slow_mover_count": slow_mover_count,
        "over_stock_count": over_stock_count,
        "under_stock_count": under_stock_count,
    }


def get_kpi_summary(
    db: Session, 
    company_id: int,
    date: Optional[str] = None, 
    location_id: Optional[int] = None, 
) -> dict:
    """
    Get KPI summary for frontend dashboard cards.

    Args:
        db: Active SQLAlchemy session.
        company_id: Company to summarize (required).
        date: Optional YYYY-MM-DD date; if absent or unparsable, each metric
            falls back to the latest date available in its own table.
        location_id: Optional location filter applied to every metric.
    
    Returns:
        - snapshot_date: Effective planning snapshot date used
        - avg_service_level: Average service level across all products/locations
        - stockouts_count: Number of products currently out of stock
        - overstock_count: Number of overstocked products
        - understock_count: Number of understocked products
        - slow_mover_count: Number of slow-moving products
        - total_products_tracked: Total number of products being tracked
        - avg_days_on_hand: Average days of inventory on hand (stock turns indicator)
    """
    from . import models
    from sqlalchemy import func
    
    # Parse date if provided; a malformed string silently falls through to
    # the "latest available data" behavior (target_date stays None).
    target_date = None
    if date:
        try:
            target_date = datetime.strptime(date, "%Y-%m-%d").date()
        except ValueError:
            pass
    
    # 1. Calculate Average Service Level
    service_level_query = db.query(
        func.avg(models.ServiceLevelDaily.service_level)
    ).filter(
        models.ServiceLevelDaily.company_id == company_id
    )
    
    if target_date:
        service_level_query = service_level_query.filter(
            models.ServiceLevelDaily.date == target_date
        )
    else:
        # Get the latest date's data
        latest_date = db.query(func.max(models.ServiceLevelDaily.date)).filter(
            models.ServiceLevelDaily.company_id == company_id
        ).scalar()
        if latest_date:
            service_level_query = service_level_query.filter(
                models.ServiceLevelDaily.date == latest_date
            )
    
    if location_id:
        service_level_query = service_level_query.filter(
            models.ServiceLevelDaily.location_id == location_id
        )
    
    # avg() over zero rows yields NULL/None — treat as 0.0
    avg_service_level = service_level_query.scalar() or 0.0
    
    # 2. Get Inventory Planning Snapshot data for stock status counts
    planning_query = db.query(models.InventoryPlanningSnapshot).filter(
        models.InventoryPlanningSnapshot.company_id == company_id
    )
    
    # Track the effective snapshot date for the response
    latest_snapshot_date = None
    
    if target_date:
        planning_query = planning_query.filter(
            models.InventoryPlanningSnapshot.snapshot_date == target_date
        )
        latest_snapshot_date = target_date
    else:
        # Get the latest snapshot date
        latest_snapshot_date = db.query(
            func.max(models.InventoryPlanningSnapshot.snapshot_date)
        ).filter(
            models.InventoryPlanningSnapshot.company_id == company_id
        ).scalar()
        if latest_snapshot_date:
            planning_query = planning_query.filter(
                models.InventoryPlanningSnapshot.snapshot_date == latest_snapshot_date
            )
    
    if location_id:
        planning_query = planning_query.filter(
            models.InventoryPlanningSnapshot.location_id == location_id
        )
    
    planning_rows = planning_query.all()
    
    overstock_count = 0
    understock_count = 0
    stockouts_count = 0
    total_products_tracked = len(planning_rows)
    
    for row in planning_rows:
        if row.stock_status == 'Overstocked':
            overstock_count += 1
        elif row.stock_status == 'Understocked':
            understock_count += 1
        # Count stockouts (out of stock = on_hand_qty is 0 or below reorder point with no stock)
        if row.on_hand_qty <= 0:
            stockouts_count += 1
    
    # 3. Get Slow Mover count
    slow_mover_query = db.query(func.count(models.SlowMoverSnapshot.id)).filter(
        models.SlowMoverSnapshot.company_id == company_id,
        models.SlowMoverSnapshot.is_slow_mover == True
    )
    
    if target_date:
        slow_mover_query = slow_mover_query.filter(
            models.SlowMoverSnapshot.snapshot_date == target_date
        )
    else:
        # Get the latest snapshot date.
        # NOTE: latest_slow_mover_date is only bound in this branch; section 4
        # below reads it only in its own matching else-branch, so no NameError.
        latest_slow_mover_date = db.query(
            func.max(models.SlowMoverSnapshot.snapshot_date)
        ).filter(
            models.SlowMoverSnapshot.company_id == company_id
        ).scalar()
        if latest_slow_mover_date:
            slow_mover_query = slow_mover_query.filter(
                models.SlowMoverSnapshot.snapshot_date == latest_slow_mover_date
            )
    
    if location_id:
        slow_mover_query = slow_mover_query.filter(
            models.SlowMoverSnapshot.location_id == location_id
        )
    
    slow_mover_count = slow_mover_query.scalar() or 0
    
    # 4. Calculate Average Days on Hand (indicator for stock turns)
    doh_query = db.query(
        func.avg(models.SlowMoverSnapshot.doh_90d)
    ).filter(
        models.SlowMoverSnapshot.company_id == company_id
    )
    
    if target_date:
        doh_query = doh_query.filter(
            models.SlowMoverSnapshot.snapshot_date == target_date
        )
    else:
        # Reuses latest_slow_mover_date computed in section 3 (same else path)
        if latest_slow_mover_date:
            doh_query = doh_query.filter(
                models.SlowMoverSnapshot.snapshot_date == latest_slow_mover_date
            )
    
    if location_id:
        doh_query = doh_query.filter(
            models.SlowMoverSnapshot.location_id == location_id
        )
    
    # May be None when there are no matching snapshot rows
    avg_days_on_hand = doh_query.scalar()
    if avg_days_on_hand is not None:
        avg_days_on_hand = float(avg_days_on_hand)
    
    return {
        "snapshot_date": latest_snapshot_date,
        "avg_service_level": float(avg_service_level),
        "stockouts_count": stockouts_count,
        "overstock_count": overstock_count,
        "understock_count": understock_count,
        "slow_mover_count": slow_mover_count,
        "total_products_tracked": total_products_tracked,
        "avg_days_on_hand": avg_days_on_hand,
    }

    
def get_analytics_stock_vs_demand(
    db: Session, 
    company_id: int,
    location_id: Optional[int] = None, 
    product_id: Optional[int] = None,
    month_from: Optional[str] = None,
    month_to: Optional[str] = None
) -> dict:
    """Get month-wise stock vs demand analysis.
    
    Returns aggregated data by month:
    - avg_daily_demand: Average of avg_daily_demand for all dates in that month
    - available_stock: Average of available_stock for all dates in that month
    
    Args:
    - month_from: Start month in YYYY-MM format (optional, defaults to 11 months ago)
    - month_to: End month in YYYY-MM format (optional, defaults to current month)
    
    Filters:
    - If location_id provided: aggregate only for that location
    - If product_id provided: aggregate only for that product
    - If both provided: aggregate for that specific location+product combination
    - If neither: aggregate across all locations and products

    Raises:
        ValueError: On malformed month_from/month_to or an inverted range.
    """
    from datetime import datetime, timedelta
    from dateutil.relativedelta import relativedelta
    from sqlalchemy import func, extract
    from . import models
    
    today = datetime.now().date()
    
    # Parse month_from or default to 11 months ago (first day of that month)
    if month_from:
        try:
            start_date = datetime.strptime(month_from, "%Y-%m").date().replace(day=1)
        except ValueError:
            raise ValueError("Invalid month_from format. Use YYYY-MM")
    else:
        start_date = (today - relativedelta(months=11)).replace(day=1)
    
    # Parse month_to or default to current month
    if month_to:
        try:
            end_month = datetime.strptime(month_to, "%Y-%m").date()
            # Set end_date to last day of the month (first of next month minus one day)
            end_date = (end_month.replace(day=1) + relativedelta(months=1) - timedelta(days=1))
        except ValueError:
            raise ValueError("Invalid month_to format. Use YYYY-MM")
    else:
        end_date = today
    
    # Validate date range
    if start_date > end_date:
        raise ValueError("month_from cannot be after month_to")
    
    # Build query: per-month averages of demand and stock within the window
    query = db.query(
        func.extract('year', models.InventoryPlanningSnapshot.snapshot_date).label('year'),
        func.extract('month', models.InventoryPlanningSnapshot.snapshot_date).label('month'),
        func.avg(models.InventoryPlanningSnapshot.avg_daily_demand).label('avg_daily_demand'),
        func.avg(models.InventoryPlanningSnapshot.available_stock).label('available_stock'),
        func.count(models.InventoryPlanningSnapshot.id).label('data_points')
    ).filter(
        models.InventoryPlanningSnapshot.company_id == company_id,
        models.InventoryPlanningSnapshot.snapshot_date >= start_date,
        models.InventoryPlanningSnapshot.snapshot_date <= end_date
    )
    
    # Apply optional filters
    if location_id is not None:
        query = query.filter(models.InventoryPlanningSnapshot.location_id == location_id)
        
    if product_id is not None:
        query = query.filter(models.InventoryPlanningSnapshot.product_id == product_id)
    
    # Group (and order) by year and month; extract expressions are repeated
    # because some dialects do not allow referencing select labels here
    query = query.group_by(
        func.extract('year', models.InventoryPlanningSnapshot.snapshot_date),
        func.extract('month', models.InventoryPlanningSnapshot.snapshot_date)
    ).order_by(
        func.extract('year', models.InventoryPlanningSnapshot.snapshot_date),
        func.extract('month', models.InventoryPlanningSnapshot.snapshot_date)
    )
    
    results = query.all()
    
    # Index results by "YYYY-MM" key; averages are rounded to ints for the API
    data_by_month = {}
    for row in results:
        year, month, avg_demand, avg_stock, data_points = row
        month_date = datetime(int(year), int(month), 1)
        month_key = month_date.strftime("%Y-%m")
        data_by_month[month_key] = {
            "avg_daily_demand": int(round(float(avg_demand or 0))),
            "available_stock": int(round(float(avg_stock or 0)))
        }
    
    # Total number of calendar months in the inclusive range
    total_months = (end_date.year - start_date.year) * 12 + (end_date.month - start_date.month) + 1
    
    # Generate every month in the range so the series has no gaps
    monthly_data = []
    current_month = start_date.replace(day=1)
    for _ in range(total_months):
        month_key = current_month.strftime("%Y-%m")
        
        # Use actual data if available, otherwise zero-fill the month
        if month_key in data_by_month:
            monthly_data.append({
                "month": month_key,
                "avg_daily_demand": data_by_month[month_key]["avg_daily_demand"],
                "available_stock": data_by_month[month_key]["available_stock"]
            })
        else:
            monthly_data.append({
                "month": month_key,
                "avg_daily_demand": 0,
                "available_stock": 0
            })
        
        # Move to next month
        current_month = current_month + relativedelta(months=1)
    
    return {
        "company_id": company_id,
        "location_id": location_id,
        "product_id": product_id,
        "month_from": start_date.strftime("%Y-%m"),
        "month_to": end_date.strftime("%Y-%m"),
        "monthly_data": monthly_data
    }


def get_analytics_service_level_vs_demand(
    db: Session,
    company_id: int,
    location_id: Optional[int] = None,
    product_id: Optional[int] = None,
    month_from: Optional[str] = None,
    month_to: Optional[str] = None
) -> dict:
    """Aggregate ServiceLevelDaily rows into month-wise service level vs demand.

    Args:
        db: Database session
        company_id: Company ID (required)
        location_id: Filter by location (optional)
        product_id: Filter by product (optional)
        month_from: Start month "YYYY-MM" (optional; defaults to 11 months ago)
        month_to: End month "YYYY-MM" (optional; defaults to current month)

    Returns:
        Dict echoing the filters plus "monthly_data" covering every month in
        the range; months without rows are filled with zero demand and a
        service level of 1.0.

    Raises:
        ValueError: On a malformed month string or month_to < month_from.
    """
    from datetime import datetime as dt
    from dateutil.relativedelta import relativedelta
    from sqlalchemy import func
    from . import models

    def _parse_month(raw, field):
        # "YYYY-MM" -> date pinned to the first of the month.
        try:
            return dt.strptime(raw, "%Y-%m").date().replace(day=1)
        except ValueError:
            raise ValueError(f"Invalid {field} format. Use YYYY-MM")

    current_month_start = dt.now().date().replace(day=1)
    window_start = (
        _parse_month(month_from, "month_from")
        if month_from
        else current_month_start - relativedelta(months=11)
    )
    window_end = _parse_month(month_to, "month_to") if month_to else current_month_start

    if window_end < window_start:
        raise ValueError("month_to must be >= month_from")

    # Hoist the repeated EXTRACT expressions used in SELECT/GROUP BY/ORDER BY.
    year_col = func.extract('year', models.ServiceLevelDaily.date)
    month_col = func.extract('month', models.ServiceLevelDaily.date)
    # Inclusive upper bound: last calendar day of the end month.
    upper_bound = (window_end + relativedelta(months=1)) - relativedelta(days=1)

    stmt = db.query(
        year_col.label('year'),
        month_col.label('month'),
        func.coalesce(func.sum(models.ServiceLevelDaily.demand_qty), 0.0).label("demand_qty"),
        func.coalesce(func.sum(models.ServiceLevelDaily.fulfilled_qty), 0.0).label("fulfilled_qty"),
        func.coalesce(func.sum(models.ServiceLevelDaily.lost_sales_qty), 0.0).label("lost_sales_qty"),
    ).filter(
        models.ServiceLevelDaily.company_id == company_id,
        models.ServiceLevelDaily.date >= window_start,
        models.ServiceLevelDaily.date <= upper_bound,
    )

    if location_id is not None:
        stmt = stmt.filter(models.ServiceLevelDaily.location_id == location_id)
    if product_id is not None:
        stmt = stmt.filter(models.ServiceLevelDaily.product_id == product_id)

    stmt = stmt.group_by(year_col, month_col).order_by(year_col.asc(), month_col.asc())

    # Index the aggregated rows by "YYYY-MM".
    by_month = {}
    for row in stmt.all():
        key = f"{int(row.year)}-{int(row.month):02d}"
        demand = float(row.demand_qty or 0.0)
        fulfilled = float(row.fulfilled_qty or 0.0)
        lost = float(row.lost_sales_qty or 0.0)
        # Zero demand counts as a perfect service level.
        level = 1.0 if demand <= 0 else fulfilled / demand
        by_month[key] = {
            "demand_qty": int(round(demand)),
            "fulfilled_qty": int(round(fulfilled)),
            "lost_sales_qty": int(round(lost)),
            "service_level": round(level, 4),
        }

    # Default entry for months with no rows at all.
    empty_month = {
        "demand_qty": 0,
        "fulfilled_qty": 0,
        "lost_sales_qty": 0,
        "service_level": 1.0,
    }

    # Walk every month in [window_start, window_end], zero-filling gaps.
    monthly_data = []
    cursor = window_start
    while cursor <= window_end:
        key = cursor.strftime("%Y-%m")
        monthly_data.append({"month": key, **by_month.get(key, empty_month)})
        cursor = cursor + relativedelta(months=1)

    return {
        "company_id": company_id,
        "location_id": location_id,
        "product_id": product_id,
        "month_from": window_start.strftime("%Y-%m"),
        "month_to": window_end.strftime("%Y-%m"),
        "monthly_data": monthly_data
    }


def get_inventory_details(
    db: Session,
    page: int = 1,
    perpage: int = 100,
    company_id: Optional[int] = None,
    location_id: Optional[int] = None,
    product_id: Optional[int] = None,
    status_filter: Optional[str] = None
) -> dict:
    """Get comprehensive inventory details with joins across multiple tables.

    Starts from ProductLocation (so products with zero stock are still
    included), aggregates active InventoryBatch rows per
    (company, location, product), then enriches each paginated row with the
    latest snapshot, slow-mover, planning and service-level records.

    Args:
        db: Database session
        page: 1-based page number
        perpage: Page size
        company_id: Filter by company (optional)
        location_id: Filter by location (optional)
        product_id: Filter by product (optional)
        status_filter: Keep only items whose derived status matches. Applied
            AFTER pagination, so a page may contain fewer than perpage items
            and "total" does not reflect this filter (known limitation).

    Returns:
        Dict with "items", "page", "perpage", "total".
    """
    from ..products.models import Product, Location, Company, Category, ProductLocation
    from . import models
    from sqlalchemy import func, desc

    # Start with ProductLocation to include all products that should have
    # inventory — this captures products even if they have zero stock.
    base_query = db.query(
        # IDs
        ProductLocation.company_id,
        ProductLocation.location_id,
        ProductLocation.product_id,

        # Product info
        Product.product_name,
        Product.brand_name,
        Category.category_name,
        Product.is_perishable,

        # Location info
        Location.location_name,

        # Company info
        Company.company_name,

        # Aggregated inventory data — COALESCE handles products with no batches
        func.coalesce(func.sum(models.InventoryBatch.quantity_on_hand), 0).label('current_stock'),
        func.count(models.InventoryBatch.id).label('active_batches_count'),
        func.min(models.InventoryBatch.expiry_date).label('earliest_expiry'),
    ).join(
        Product, ProductLocation.product_id == Product.product_id
    ).join(
        Location, ProductLocation.location_id == Location.location_id
    ).join(
        Company, ProductLocation.company_id == Company.company_id
    ).outerjoin(
        Category, Product.fk_product_category_id == Category.category_id
    ).outerjoin(
        # LEFT JOIN so products with no active inventory batches still appear
        models.InventoryBatch,
        (models.InventoryBatch.product_id == ProductLocation.product_id) &
        (models.InventoryBatch.location_id == ProductLocation.location_id) &
        (models.InventoryBatch.company_id == ProductLocation.company_id) &
        (models.InventoryBatch.status == 'active')
    ).group_by(
        ProductLocation.company_id,
        ProductLocation.location_id,
        ProductLocation.product_id,
        Product.product_name,
        Product.brand_name,
        Category.category_name,
        Product.is_perishable,
        Location.location_name,
        Company.company_name
    )

    # Apply optional ID filters
    if company_id is not None:
        base_query = base_query.filter(ProductLocation.company_id == company_id)

    if location_id is not None:
        base_query = base_query.filter(ProductLocation.location_id == location_id)

    if product_id is not None:
        base_query = base_query.filter(ProductLocation.product_id == product_id)

    # Paginate the grouped base query first
    offset = (page - 1) * perpage
    base_results = base_query.offset(offset).limit(perpage).all()

    # Total count of the same (unpaginated) grouped query
    total = base_query.count()

    # If no results, return empty but valid response
    if not base_results:
        return {
            "items": [],
            "page": page,
            "perpage": perpage,
            "total": total
        }

    # Enrich each page row with the latest auxiliary records.
    # NOTE(review): this issues one query per row per auxiliary table (N+1);
    # acceptable at perpage<=100 but a candidate for batching if this gets hot.
    enriched_results = []

    for result in base_results:
        company_id_val = result.company_id
        location_id_val = result.location_id
        product_id_val = result.product_id

        # Latest daily inventory snapshot
        latest_snapshot = db.query(models.InventorySnapshotDaily).filter(
            models.InventorySnapshotDaily.company_id == company_id_val,
            models.InventorySnapshotDaily.location_id == location_id_val,
            models.InventorySnapshotDaily.product_id == product_id_val
        ).order_by(desc(models.InventorySnapshotDaily.snapshot_date)).first()

        # Latest slow-mover snapshot
        latest_slow_mover = db.query(models.SlowMoverSnapshot).filter(
            models.SlowMoverSnapshot.company_id == company_id_val,
            models.SlowMoverSnapshot.location_id == location_id_val,
            models.SlowMoverSnapshot.product_id == product_id_val
        ).order_by(desc(models.SlowMoverSnapshot.snapshot_date)).first()

        # Latest inventory planning snapshot
        latest_planning = db.query(models.InventoryPlanningSnapshot).filter(
            models.InventoryPlanningSnapshot.company_id == company_id_val,
            models.InventoryPlanningSnapshot.location_id == location_id_val,
            models.InventoryPlanningSnapshot.product_id == product_id_val
        ).order_by(desc(models.InventoryPlanningSnapshot.snapshot_date)).first()

        # Latest service level record
        latest_service = db.query(models.ServiceLevelDaily).filter(
            models.ServiceLevelDaily.company_id == company_id_val,
            models.ServiceLevelDaily.location_id == location_id_val,
            models.ServiceLevelDaily.product_id == product_id_val
        ).order_by(desc(models.ServiceLevelDaily.date)).first()

        # Derive overall status: out_of_stock < low_stock < in_stock baseline,
        # then slow-mover severity may override.
        current_stock_val = float(result.current_stock or 0)
        status = "in_stock"

        if current_stock_val == 0:
            status = "out_of_stock"
        elif latest_planning and latest_planning.current_reorder_point:
            if current_stock_val <= latest_planning.current_reorder_point:
                status = "low_stock"

        # Override with slow mover status if applicable
        if latest_slow_mover and latest_slow_mover.is_slow_mover:
            if latest_slow_mover.slow_mover_severity == "dead":
                status = "dead_stock"
            elif latest_slow_mover.slow_mover_severity == "slow" and current_stock_val > 0:
                status = "slow_mover"

        # Post-pagination status filter — skip non-matching items
        if status_filter and status != status_filter:
            continue

        # Fix: planning/snapshot numeric columns are guarded with `or 0.0`
        # (or `is not None`) below — the unguarded float(...) calls previously
        # raised TypeError whenever one of these nullable columns was NULL,
        # matching the guards already used for reorder_point/safety_stock.
        enriched_item = {
            "company_id": company_id_val,
            "company_name": result.company_name,
            "location_id": location_id_val,
            "location_name": result.location_name,
            "product_id": product_id_val,
            "product_name": result.product_name,
            "brand_name": result.brand_name,
            "category_name": result.category_name,
            "is_perishable": result.is_perishable,

            # Stock information
            "current_stock": current_stock_val,
            "active_batches_count": result.active_batches_count,
            "earliest_expiry": result.earliest_expiry,
            "status": status,

            # Snapshot data
            "snapshot_stock": float(latest_snapshot.on_hand_qty) if latest_snapshot and latest_snapshot.on_hand_qty is not None else None,
            "snapshot_date": latest_snapshot.snapshot_date if latest_snapshot else None,

            # Slow mover data
            "is_slow_mover": latest_slow_mover.is_slow_mover if latest_slow_mover else False,
            "slow_mover_severity": latest_slow_mover.slow_mover_severity if latest_slow_mover else None,
            "avg_daily_sales_90d": float(latest_slow_mover.ads_90d) if latest_slow_mover and latest_slow_mover.ads_90d else 0.0,
            "days_on_hand_90d": float(latest_slow_mover.doh_90d) if latest_slow_mover and latest_slow_mover.doh_90d else 0.0,
            "days_since_last_sale": latest_slow_mover.days_since_last_sale if latest_slow_mover else None,

            # Planning data
            "reorder_point": float(latest_planning.current_reorder_point) if latest_planning and latest_planning.current_reorder_point else 0.0,
            "safety_stock": float(latest_planning.current_safety_stock) if latest_planning and latest_planning.current_safety_stock else 0.0,
            "should_reorder": latest_planning.should_reorder if latest_planning else False,
            "recommended_order_qty": float(latest_planning.recommended_order_qty) if latest_planning and latest_planning.recommended_order_qty else 0.0,

            # Additional planning data from InventoryPlanningSnapshot
            "planning_snapshot_date": latest_planning.snapshot_date if latest_planning else None,
            "avg_daily_demand": float(latest_planning.avg_daily_demand or 0.0) if latest_planning else 0.0,
            "sigma_daily_demand": float(latest_planning.sigma_daily_demand or 0.0) if latest_planning else 0.0,
            "lead_time_days": (latest_planning.lead_time_days or 0) if latest_planning else 0,
            "review_period_days": (latest_planning.review_period_days or 0) if latest_planning else 0,
            # `is not None` keeps an explicit 0 target while defaulting NULL to 0.95
            "service_level_target": float(latest_planning.service_level_target) if latest_planning and latest_planning.service_level_target is not None else 0.95,
            "forecast_avg_daily_demand_90d": float(latest_planning.forecast_avg_daily_demand_90d or 0.0) if latest_planning else 0.0,
            "forecast_safety_stock_90d": float(latest_planning.forecast_safety_stock_90d or 0.0) if latest_planning else 0.0,
            "forecasted_reorder_point_90d": float(latest_planning.forecasted_reorder_point_90d or 0.0) if latest_planning else 0.0,
            "planning_on_hand_qty": float(latest_planning.on_hand_qty or 0.0) if latest_planning else 0.0,
            "inbound_qty": float(latest_planning.inbound_qty or 0.0) if latest_planning else 0.0,
            "available_stock": float(latest_planning.available_stock or 0.0) if latest_planning else 0.0,
            "min_target": float(latest_planning.min_target or 0.0) if latest_planning else 0.0,
            "max_target": float(latest_planning.max_target or 0.0) if latest_planning else 0.0,
            "stock_status": latest_planning.stock_status if latest_planning else None,

            # Service level
            "latest_service_level": float(latest_service.service_level) if latest_service and latest_service.service_level else None,
            "service_level_date": latest_service.date if latest_service else None,
        }

        enriched_results.append(enriched_item)

    # Note: when status filtering is applied, the returned count may be less
    # than perpage — a known limitation of post-query filtering.
    return {
        "items": enriched_results,
        "page": page,
        "perpage": perpage,
        "total": total
    }


# Inventory Planning Snapshot Controllers
def get_inventory_planning_snapshots(db: Session, date: Optional[str] = None, page: int = 1, perpage: int = 100, company_id: Optional[int] = None, location_id: Optional[int] = None, product_id: Optional[int] = None) -> dict:
    """Get inventory planning snapshot data with pagination.

    Args:
        db: Database session
        date: Snapshot date "YYYY-MM-DD"; an unparseable value is silently
            ignored and all dates are returned (existing API behavior)
        page: 1-based page number
        perpage: Page size
        company_id: Filter by company (optional)
        location_id: Filter by location (optional)
        product_id: Filter by product (optional)

    Returns:
        Dict with "data" (rows serialized to plain dicts), "page", "perpage",
        "total".
    """
    # Create query (raw data only)
    query = db.query(models.InventoryPlanningSnapshot)

    if date:
        try:
            # Parse the date string (YYYY-MM-DD format)
            target_date = datetime.strptime(date, "%Y-%m-%d").date()
            query = query.filter(models.InventoryPlanningSnapshot.snapshot_date == target_date)
        except ValueError:
            # Invalid date: keep the unfiltered query (documented behavior).
            pass

    if company_id is not None:
        query = query.filter(models.InventoryPlanningSnapshot.company_id == company_id)

    if location_id is not None:
        query = query.filter(models.InventoryPlanningSnapshot.location_id == location_id)

    if product_id is not None:
        query = query.filter(models.InventoryPlanningSnapshot.product_id == product_id)

    # Get total count (ordering does not affect it)
    total = query.count()

    # Fix: explicit ORDER BY so OFFSET/LIMIT pagination is deterministic —
    # without it SQL is free to return rows in a different order per request,
    # producing duplicated/skipped rows across pages.
    query = query.order_by(
        models.InventoryPlanningSnapshot.snapshot_date,
        models.InventoryPlanningSnapshot.company_id,
        models.InventoryPlanningSnapshot.location_id,
        models.InventoryPlanningSnapshot.product_id,
    )

    # Apply pagination
    offset = (page - 1) * perpage
    results = query.offset(offset).limit(perpage).all()

    # Serialize ORM rows to plain dicts (all mapped columns)
    data = [
        {c.name: getattr(snapshot, c.name) for c in snapshot.__table__.columns}
        for snapshot in results
    ]

    return {
        "data": data,
        "page": page,
        "perpage": perpage,
        "total": total
    }


def save_demand_forecast(
    db: Session,
    company_id: int,
    location_id: int,
    product_id: int,
    df_predictions: pd.DataFrame,
    model_version: str = "v1"
) -> int:
    """
    Persist demand forecast predictions, replacing any overlapping rows.

    Existing DemandForecast rows for the same company/location/product whose
    target_date falls inside the prediction date range are deleted, then the
    new rows are inserted. Delete and insert share one transaction, so the
    operation is all-or-nothing.

    Args:
        db: Database session
        company_id: Company ID
        location_id: Location ID
        product_id: Product ID
        df_predictions: DataFrame with columns: date, forecast_qty
        model_version: Model version identifier

    Returns:
        Number of forecast records saved

    Raises:
        ValueError: If predictions DataFrame is empty or invalid
        Exception: If database operation fails
    """
    # Validate input before touching the database
    if df_predictions is None or df_predictions.empty:
        raise ValueError("Predictions DataFrame is empty")

    if "date" not in df_predictions.columns or "forecast_qty" not in df_predictions.columns:
        raise ValueError("Predictions DataFrame must contain 'date' and 'forecast_qty' columns")

    def _as_date(value):
        # Normalize pandas Timestamp / datetime values to a plain date.
        return value.date() if hasattr(value, 'date') else value

    run_date = date.today()

    try:
        # Determine the target-date window covered by the predictions
        normalized_dates = [_as_date(v) for v in df_predictions["date"]]
        window_start = min(normalized_dates)
        window_end = max(normalized_dates)

        # Remove forecasts already stored for this window — same transaction
        # as the insert below.
        removed = db.query(models.DemandForecast).filter(
            models.DemandForecast.company_id == company_id,
            models.DemandForecast.location_id == location_id,
            models.DemandForecast.product_id == product_id,
            models.DemandForecast.target_date >= window_start,
            models.DemandForecast.target_date <= window_end
        ).delete(synchronize_session=False)

        if removed > 0:
            logger.info(
                f"Deleted {removed} existing forecasts for company={company_id}, "
                f"location={location_id}, product={product_id}, date_range={window_start} to {window_end}"
            )

        # Build the replacement rows
        new_rows = [
            models.DemandForecast(
                company_id=company_id,
                location_id=location_id,
                product_id=product_id,
                forecast_date=run_date,
                target_date=_as_date(row["date"]),
                forecast_qty=float(row["forecast_qty"]),
                model_version=model_version,
            )
            for _, row in df_predictions.iterrows()
        ]

        # Bulk insert, then commit delete + insert together
        db.bulk_save_objects(new_rows)
        db.commit()

        logger.info(
            f"Saved {len(new_rows)} new forecasts for company={company_id}, "
            f"location={location_id}, product={product_id}"
        )

        return len(new_rows)

    except Exception as e:
        # Roll back so the previous state is fully restored on any failure
        db.rollback()
        logger.error(
            f"Failed to save forecasts for company={company_id}, "
            f"location={location_id}, product={product_id}: {str(e)}"
        )
        raise


def get_fastest_moving_products(
    db: Session,
    company_id: int,
    location_id: Optional[int] = None,
    category_id: Optional[int] = None,
    limit: int = 10
) -> dict:
    """
    Get fastest moving products based on highest sales velocity (ads_30d).

    Uses the latest SlowMoverSnapshot date for the company (and location, if
    given) and ranks products by 30-day average daily sales, descending.
    Returns raw data that will be transformed by FastAPI response_model.

    Args:
        db: Database session
        company_id: Company ID (required)
        location_id: Filter by location (optional)
        category_id: Filter by category (optional)
        limit: Maximum number of products to return (1-100)

    Returns:
        Dict with "success", "data" (ranked entries), "message".

    Raises:
        ValueError: If limit is outside 1-100.
    """
    from ..products.models import Product, Location, Category, Vendor, ProductVendor
    from .models import SlowMoverSnapshot
    from sqlalchemy import func, desc

    # Validate limit
    if limit < 1 or limit > 100:
        raise ValueError("Limit must be between 1 and 100")

    # Latest snapshot date for this company (optionally narrowed to a location)
    latest_date_query = db.query(func.max(SlowMoverSnapshot.snapshot_date)).filter(
        SlowMoverSnapshot.company_id == company_id
    )

    # Fix: `is not None` instead of truthiness, consistent with the other
    # controllers, so a legitimate id of 0 is not silently ignored.
    if location_id is not None:
        latest_date_query = latest_date_query.filter(
            SlowMoverSnapshot.location_id == location_id
        )

    latest_date = latest_date_query.scalar()

    if not latest_date:
        return {
            "success": True,
            "data": [],
            "message": "No snapshot data available for the specified filters"
        }

    # NOTE(review): the outer joins through ProductVendor yield one row per
    # vendor for multi-vendor products, so such products may occupy several
    # ranks — confirm whether distinct-per-product results are wanted here.
    query = db.query(
        SlowMoverSnapshot,
        Product,
        Location,
        Category,
        Vendor
    ).join(
        Product, SlowMoverSnapshot.product_id == Product.product_id
    ).join(
        Location, SlowMoverSnapshot.location_id == Location.location_id
    ).outerjoin(
        Category, Product.fk_product_category_id == Category.category_id
    ).outerjoin(
        ProductVendor, Product.product_id == ProductVendor.product_id
    ).outerjoin(
        Vendor, ProductVendor.vendor_id == Vendor.vendor_id
    ).filter(
        SlowMoverSnapshot.company_id == company_id,
        SlowMoverSnapshot.snapshot_date == latest_date
    )

    # Apply optional filters
    if location_id is not None:
        query = query.filter(SlowMoverSnapshot.location_id == location_id)

    if category_id is not None:
        query = query.filter(Product.fk_product_category_id == category_id)

    # Highest 30-day average daily sales first = fastest moving
    query = query.order_by(desc(SlowMoverSnapshot.ads_30d)).limit(limit)

    results = query.all()

    # Return raw data - let FastAPI/Pydantic handle transformation
    fastest_moving = []
    for rank, (snapshot, product, location, category, vendor) in enumerate(results, start=1):
        fastest_moving.append({
            "rank": rank,
            "product": {
                "id": product.product_id,
                "name": product.product_name,
                "sku": product.short_name
            },
            "location": {
                "id": location.location_id,
                "name": location.location_name
            },
            "category": {
                "id": category.category_id,
                "name": category.category_name
            } if category else None,
            "vendor": {
                "id": vendor.vendor_id if vendor else None,
                "name": vendor.vendor_name if vendor else None,
                "code": vendor.vendor_code if vendor else None
            } if vendor else None,
            # Fix: guard nullable numeric columns with `or 0` — sibling code
            # treats SlowMoverSnapshot fields as nullable, and float(None)
            # would raise TypeError here.
            "on_hand_qty": round(float(snapshot.on_hand_qty or 0), 2),
            "total_sold_7d": round(float(snapshot.total_sold_7d or 0), 2),
            "total_sold_30d": round(float(snapshot.total_sold_30d or 0), 2),
            "total_sold_90d": round(float(snapshot.total_sold_90d or 0), 2),
            "ads_7d": round(float(snapshot.ads_7d or 0), 2),
            "ads_30d": round(float(snapshot.ads_30d or 0), 2),
            "ads_90d": round(float(snapshot.ads_90d or 0), 2),
            "snapshot_date": snapshot.snapshot_date
        })

    return {
        "success": True,
        "data": fastest_moving,
        "message": f"Top {len(fastest_moving)} fastest moving products retrieved successfully"
    }


def get_demand_forecasts(
    db: Session,
    company_id: int,
    location_id: Optional[int] = None,
    product_id: Optional[int] = None,
    date_from: Optional[str] = None,
    date_to: Optional[str] = None,
    page: int = 1,
    perpage: int = 100
) -> dict:
    """
    Get demand forecast data with pagination and filters.

    Args:
        db: Database session
        company_id: Company ID (required)
        location_id: Filter by location (optional)
        product_id: Filter by product (optional)
        date_from: Lower bound for target_date, "YYYY-MM-DD" (optional)
        date_to: Upper bound for target_date, "YYYY-MM-DD" (optional)
        page: Page number (default: 1)
        perpage: Items per page (default: 100)

    Returns:
        Dict with "data" (forecast rows joined to product/location names),
        "page", "perpage", "total".

    Raises:
        ValueError: On a malformed date_from/date_to string.
    """
    from ..products.models import Product, Location
    from . import models
    from datetime import datetime

    def _parse_day(raw, field):
        # "YYYY-MM-DD" -> date, with the caller-facing error message intact.
        try:
            return datetime.strptime(raw, "%Y-%m-%d").date()
        except ValueError:
            raise ValueError(f"Invalid {field} format. Use YYYY-MM-DD")

    # Collect all filter criteria, then apply them in one shot.
    criteria = [models.DemandForecast.company_id == company_id]
    if location_id is not None:
        criteria.append(models.DemandForecast.location_id == location_id)
    if product_id is not None:
        criteria.append(models.DemandForecast.product_id == product_id)
    if date_from:
        criteria.append(models.DemandForecast.target_date >= _parse_day(date_from, "date_from"))
    if date_to:
        criteria.append(models.DemandForecast.target_date <= _parse_day(date_to, "date_to"))

    query = (
        db.query(
            models.DemandForecast,
            Product.product_name,
            Location.location_name
        )
        .join(Product, models.DemandForecast.product_id == Product.product_id)
        .join(Location, models.DemandForecast.location_id == Location.location_id)
        .filter(*criteria)
        .order_by(
            models.DemandForecast.target_date.asc(),
            models.DemandForecast.forecast_date.desc()
        )
    )

    total = query.count()

    # Paginate
    rows = query.offset((page - 1) * perpage).limit(perpage).all()

    forecast_items = [
        {
            "id": fc.id,
            "company_id": fc.company_id,
            "location_id": fc.location_id,
            "location_name": loc_name,
            "product_id": fc.product_id,
            "product_name": prod_name,
            "forecast_date": fc.forecast_date,
            "target_date": fc.target_date,
            "forecast_qty": float(fc.forecast_qty),
            "created_at": fc.created_at
        }
        for fc, prod_name, loc_name in rows
    ]

    return {
        "data": forecast_items,
        "page": page,
        "perpage": perpage,
        "total": total
    }


def get_most_stagnant_products(
    db: Session,
    company_id: int,
    location_id: Optional[int] = None,
    category_id: Optional[int] = None,
    limit: int = 10
) -> dict:
    """
    Get most stagnant (dead/slow moving) products based on lowest sales velocity.

    Uses the latest SlowMoverSnapshot date for the company (and location, if
    given) and ranks products by 90-day average daily sales, ascending.
    Returns raw data that will be transformed by FastAPI response_model.

    Args:
        db: Database session
        company_id: Company ID (required)
        location_id: Filter by location (optional)
        category_id: Filter by category (optional)
        limit: Maximum number of products to return (1-100)

    Returns:
        Dict with "success", "data" (ranked entries), "message".

    Raises:
        ValueError: If limit is outside 1-100.
    """
    from ..products.models import Product, Location, Category, Vendor, ProductVendor
    from .models import SlowMoverSnapshot, ReorderPolicy, InventoryPlanningSnapshot
    from sqlalchemy import func

    # Validate limit
    if limit < 1 or limit > 100:
        raise ValueError("Limit must be between 1 and 100")

    # Latest snapshot date for this company (optionally narrowed to a location)
    latest_date_query = db.query(func.max(SlowMoverSnapshot.snapshot_date)).filter(
        SlowMoverSnapshot.company_id == company_id
    )

    # Fix: `is not None` instead of truthiness, consistent with the other
    # controllers, so a legitimate id of 0 is not silently ignored.
    if location_id is not None:
        latest_date_query = latest_date_query.filter(
            SlowMoverSnapshot.location_id == location_id
        )

    latest_date = latest_date_query.scalar()

    if not latest_date:
        return {
            "success": True,
            "data": [],
            "message": "No snapshot data available for the specified filters"
        }

    # NOTE(review): the outer joins through ProductVendor can fan out to one
    # row per vendor for multi-vendor products, so such products may occupy
    # several ranks — confirm whether distinct-per-product results are wanted.
    query = db.query(
        SlowMoverSnapshot,
        Product,
        Location,
        Category,
        Vendor,
        ReorderPolicy,
        InventoryPlanningSnapshot
    ).join(
        Product, SlowMoverSnapshot.product_id == Product.product_id
    ).join(
        Location, SlowMoverSnapshot.location_id == Location.location_id
    ).outerjoin(
        Category, Product.fk_product_category_id == Category.category_id
    ).outerjoin(
        ProductVendor, Product.product_id == ProductVendor.product_id
    ).outerjoin(
        Vendor, ProductVendor.vendor_id == Vendor.vendor_id
    ).outerjoin(
        ReorderPolicy,
        (ReorderPolicy.company_id == SlowMoverSnapshot.company_id) &
        (ReorderPolicy.location_id == SlowMoverSnapshot.location_id) &
        (ReorderPolicy.product_id == SlowMoverSnapshot.product_id)
    ).outerjoin(
        InventoryPlanningSnapshot,
        (InventoryPlanningSnapshot.company_id == SlowMoverSnapshot.company_id) &
        (InventoryPlanningSnapshot.location_id == SlowMoverSnapshot.location_id) &
        (InventoryPlanningSnapshot.product_id == SlowMoverSnapshot.product_id) &
        (InventoryPlanningSnapshot.snapshot_date == SlowMoverSnapshot.snapshot_date)
    ).filter(
        SlowMoverSnapshot.company_id == company_id,
        SlowMoverSnapshot.snapshot_date == latest_date
    )

    # Apply optional filters
    if location_id is not None:
        query = query.filter(SlowMoverSnapshot.location_id == location_id)

    if category_id is not None:
        query = query.filter(Product.fk_product_category_id == category_id)

    # Lowest 90-day average daily sales first = most stagnant
    query = query.order_by(SlowMoverSnapshot.ads_90d.asc()).limit(limit)

    results = query.all()

    # Return raw data - let FastAPI/Pydantic handle transformation
    stagnant_products = []
    for rank, (snapshot, product, location, category, vendor, reorder_policy, planning_snapshot) in enumerate(results, start=1):
        # Lead time: prefer the planning snapshot (most current), then the
        # reorder policy, then a 7-day default. Fix: `or 7` also covers a
        # policy row whose lead_time_days is NULL, which previously poisoned
        # the multiplication below with None.
        if planning_snapshot and planning_snapshot.lead_time_days:
            lead_time_days = planning_snapshot.lead_time_days
        elif reorder_policy:
            lead_time_days = reorder_policy.lead_time_days or 7
        else:
            lead_time_days = 7

        # Fix: guard nullable numeric columns before arithmetic — this model's
        # days_since_last_sale/doh_90d are already treated as nullable below,
        # and `None * int` / `None - x` raise TypeError.
        ads_90d = snapshot.ads_90d or 0
        on_hand = snapshot.on_hand_qty or 0
        lead_time_demand = ads_90d * lead_time_days

        # Excess stock baseline: reorder point from the planning snapshot if
        # available, otherwise 2x lead-time demand.
        if planning_snapshot and planning_snapshot.current_reorder_point:
            reorder_point = planning_snapshot.current_reorder_point
        else:
            reorder_point = lead_time_demand * 2

        excess_stock_level = max(0, on_hand - reorder_point)

        stagnant_products.append({
            "rank": rank,
            "product": {
                "id": product.product_id,
                "name": product.product_name
            },
            "location": {
                "id": location.location_id,
                "name": location.location_name
            },
            "category": {
                "id": category.category_id,
                "name": category.category_name
            } if category else None,
            "vendor": {
                "id": vendor.vendor_id if vendor else None,
                "name": vendor.vendor_name if vendor else None,
                "code": vendor.vendor_code if vendor else None
            } if vendor else None,
            "on_hand_qty": round(float(on_hand), 2),
            "lead_time_demand": round(float(lead_time_demand), 2),
            "ads_90d": round(float(ads_90d), 2),
            "excess_stock_level": round(float(excess_stock_level), 2),
            "days_since_last_sale": round(float(snapshot.days_since_last_sale), 2) if snapshot.days_since_last_sale is not None else None,
            "doh_90d": round(float(snapshot.doh_90d), 2) if snapshot.doh_90d is not None else None,
            "snapshot_date": snapshot.snapshot_date
        })

    return {
        "success": True,
        "data": stagnant_products,
        "message": f"Top {len(stagnant_products)} most stagnant products retrieved successfully"
    }


def get_most_urgent_products(
    db: Session,
    company_id: int,
    location_id: Optional[int] = None,
    category_id: Optional[int] = None,
    limit: int = 10
) -> dict:
    """
    Get the most urgent order requirements based on urgency score.

    Looks at the latest InventoryPlanningSnapshot for the company (optionally
    narrowed to one location), keeps only rows flagged urgent with a positive
    recommended order quantity, and returns up to ``limit`` of them ranked by
    ``urgency_score`` descending.

    Args:
        db: Active SQLAlchemy session.
        company_id: Company whose planning snapshots are queried.
        location_id: Optional location filter; applied both when resolving the
            latest snapshot date and in the main query.
        category_id: Optional product-category filter.
        limit: Maximum number of rows to return (must be 1-100).

    Returns:
        dict with keys ``success`` (bool), ``data`` (list of ranked product
        dicts with product/location/category/vendor info and planning metrics)
        and ``message`` (str).

    Raises:
        ValueError: If ``limit`` is outside the 1-100 range.
    """
    from ..products.models import Product, Location, Category, Vendor, ProductVendor
    from .models import InventoryPlanningSnapshot
    from sqlalchemy import func, desc
    
    # Validate limit
    if limit < 1 or limit > 100:
        raise ValueError("Limit must be between 1 and 100")
    
    # Resolve the most recent snapshot date for this company (and location,
    # when given) so the ranking only mixes rows from one consistent snapshot.
    latest_date_query = db.query(func.max(InventoryPlanningSnapshot.snapshot_date)).filter(
        InventoryPlanningSnapshot.company_id == company_id
    )
    
    # "is not None" (not truthiness) so a legitimate id of 0 is not ignored.
    if location_id is not None:
        latest_date_query = latest_date_query.filter(
            InventoryPlanningSnapshot.location_id == location_id
        )
    
    latest_date = latest_date_query.scalar()
    
    if not latest_date:
        return {
            "success": True,
            "data": [],
            "message": "No inventory planning data available for the specified filters"
        }
    
    # Query for most urgent products.
    # NOTE(review): the ProductVendor outer join can yield one row per vendor
    # for multi-vendor products, which may duplicate a product in the ranking
    # (same pattern as the stagnant-products query) — confirm whether products
    # are expected to have at most one vendor here.
    query = db.query(
        InventoryPlanningSnapshot,
        Product,
        Location,
        Category,
        Vendor
    ).join(
        Product, InventoryPlanningSnapshot.product_id == Product.product_id
    ).join(
        Location, InventoryPlanningSnapshot.location_id == Location.location_id
    ).outerjoin(
        Category, Product.fk_product_category_id == Category.category_id
    ).outerjoin(
        ProductVendor, Product.product_id == ProductVendor.product_id
    ).outerjoin(
        Vendor, ProductVendor.vendor_id == Vendor.vendor_id
    ).filter(
        InventoryPlanningSnapshot.company_id == company_id,
        InventoryPlanningSnapshot.snapshot_date == latest_date,
        InventoryPlanningSnapshot.is_urgent.is_(True),  # only urgent items (idiomatic boolean test)
        InventoryPlanningSnapshot.recommended_order_qty > 0  # only items that need ordering
    )
    
    # Apply optional filters ("is not None" so id 0 still filters).
    if location_id is not None:
        query = query.filter(InventoryPlanningSnapshot.location_id == location_id)
    
    if category_id is not None:
        query = query.filter(Product.fk_product_category_id == category_id)
    
    # Highest urgency first, then cap at the requested number of rows.
    query = query.order_by(desc(InventoryPlanningSnapshot.urgency_score))
    query = query.limit(limit)
    
    results = query.all()
    
    # Serialize to plain dicts - let FastAPI/Pydantic handle transformation.
    urgent_products = []
    for rank, (planning, product, location, category, vendor) in enumerate(results, start=1):
        urgent_products.append({
            "rank": rank,
            "product": {
                "id": product.product_id,
                "name": product.product_name,
                "sku": product.short_name
            },
            "location": {
                "id": location.location_id,
                "name": location.location_name
            },
            "category": {
                "id": category.category_id,
                "name": category.category_name
            } if category else None,
            "vendor": {
                "id": vendor.vendor_id if vendor else None,
                "name": vendor.vendor_name if vendor else None,
                "code": vendor.vendor_code if vendor else None
            } if vendor else None,
            "on_hand_qty": round(float(planning.on_hand_qty), 2),
            "inbound_qty": round(float(planning.inbound_qty), 2),
            # days_of_cover defaults to 0.0 when NULL (original behavior).
            "days_of_cover": round(float(planning.days_of_cover), 2) if planning.days_of_cover is not None else 0.0,
            "recommended_order_qty": round(float(planning.recommended_order_qty), 2),
            "urgency_score": round(float(planning.urgency_score), 2),
            "snapshot_date": planning.snapshot_date
        })
    
    return {
        "success": True,
        "data": urgent_products,
        "message": f"Top {len(urgent_products)} most urgent order requirements retrieved successfully"
    }