import pandas as pd
import io
import logging
import os
from fastapi import UploadFile, HTTPException
from sqlalchemy.orm import Session
from datetime import datetime
from typing import Dict, Any, List, Set

from src.smart_inventory.apps.inventory.models import (
    PurchaseOrder, PurchaseOrderLine, PurchaseOrderReceiveLine, PurchaseOrderStatus, 
    PurchaseReceiveOrder, PurchaseReturnOrder, PurchaseOrderReturnLine,
    SalesOrder, SalesOrderLine, SalesReturnOrder, SalesReturnOrderLine,
    InventoryBatch, InventoryMovement, MovementType
)
from src.smart_inventory.apps.products.models import Category, Company, Location, Product, ProductLocation, ProductVendor, Vendor
from src.smart_inventory.apps.data_import.models import CSVUploadLog
from src.smart_inventory.utils.task_orchestrator import trigger_tasks_for_affected_tables
from src.smart_inventory.utils.csv_task_orchestrator import (
    trigger_csv_processing_task,
    save_uploaded_file,
)
from sqlalchemy import desc

logger = logging.getLogger(__name__)


# CSV type to affected database tables mapping.
# Used to determine which Celery tasks should be triggered after CSV processing.
CSV_TYPE_TO_AFFECTED_TABLES: Dict[str, List[str]] = {
    # Types below are currently disabled (no downstream task processing yet):
    # "product": ["products", "categories", "locations", "vendors", "product_locations", "product_vendors"],
    # "purchase_order": ["purchase_orders", "purchase_order_lines"],
    "purchase_receive": ["purchase_order_receive", "purchase_order_receive_lines", "inventory_batches", "inventory_movements"],
    "sales_order": ["sales_orders", "sales_order_lines", "inventory_batches", "inventory_movements"],
    "sales_return": ["sales_return_orders", "sales_return_order_lines", "inventory_batches", "inventory_movements"],
    "purchase_return": ["purchase_order_return", "purchase_order_return_lines", "inventory_batches", "inventory_movements"],
    "stock_transfer": ["inventory_movements", "inventory_batches"],
}

# Date column mapping for each CSV type (used to extract unique dates for task triggering).
# NOTE: These are CSV column names (not DB field names).
CSV_DATE_COLUMNS: Dict[str, List[str]] = {
    # "product": [],  # Products don't have date-based processing
    # "purchase_order": ["purchase_order_date"],  # CSV column -> DB: PurchaseOrder.order_date
    "purchase_receive": ["receive_order_date"],  # CSV column -> DB: PurchaseReceiveOrder.received_date
    "sales_order": ["order_date"],  # CSV column -> DB: SalesOrder.sold_date
    "sales_return": ["return_date"],  # CSV column -> DB: SalesReturnOrder.returned_date
    "purchase_return": ["return_date"],  # CSV column -> DB: PurchaseReturnOrder.returned_date
    "stock_transfer": ["transfer_date"]  # transfer CSVs carry their own date column
}


def get_affected_tables_for_csv_type(csv_type: str) -> List[str]:
    """
    Look up the database tables touched by a given CSV upload type.

    Args:
        csv_type: One of: product, purchase_order, purchase_receive,
                  sales_order, sales_return, purchase_return, stock_transfer

    Returns:
        The affected table names, or an empty list for unknown types.
    """
    try:
        return CSV_TYPE_TO_AFFECTED_TABLES[csv_type]
    except KeyError:
        return []


def extract_unique_dates_from_csv(df: pd.DataFrame, upload_type: str) -> List[str]:
    """
    Collect the distinct dates present in a CSV's date column(s).

    The relevant column names come from CSV_DATE_COLUMNS; columns missing
    from the frame are ignored, and unparseable cells are dropped
    (pd.to_datetime with errors='coerce').

    Args:
        df: DataFrame containing CSV data
        upload_type: Type of CSV upload (a CSV_DATE_COLUMNS key)

    Returns:
        Sorted list of unique date strings in YYYY-MM-DD format
    """
    unique_dates: Set[str] = set()

    for column in CSV_DATE_COLUMNS.get(upload_type, []):
        if column not in df.columns:
            continue
        try:
            parsed = pd.to_datetime(df[column], errors='coerce').dropna()
            unique_dates.update(
                day.strftime("%Y-%m-%d") for day in parsed.dt.date.unique()
            )
        except Exception as e:
            logger.warning(f"Could not parse dates from column {column}: {e}")

    return sorted(unique_dates)


async def process_csv_file(
    db: Session,
    file: UploadFile,
    company_id: int,
    upload_type: str
) -> Dict[str, Any]:
    """
    Process CSV file upload by saving to disk and triggering async Celery task.

    All CSV processing is done asynchronously via Celery tasks.
    This function:
    1. Validates the file, company and upload type
    2. Parses the CSV and (for product uploads) checks required columns
    3. Saves the file to the uploads directory
    4. Triggers the appropriate Celery task (or sets to HOLD if dependencies not met)

    Args:
        db: Active SQLAlchemy session.
        file: Uploaded file; must have a ``.csv`` filename.
        company_id: Company the upload belongs to (must exist).
        upload_type: One of the supported CSV types (see ``valid_types``).

    Returns:
        Dict containing:
        - task_id: Celery task ID for tracking
        - task_name: Unique task name
        - status: "pending" or "hold"
        - file_path: Path to saved CSV file
        - message: Human-readable status summary

    Raises:
        HTTPException: 400 for invalid file/type/CSV content, 404 for an
        unknown company, 500 for save/queue failures.
    """
    # UploadFile.filename may be None for malformed requests; reject that too
    # instead of raising AttributeError on .endswith.
    if not file.filename or not file.filename.endswith(".csv"):
        raise HTTPException(status_code=400, detail="Invalid file type. Only CSV files are accepted.")

    company = db.query(Company).filter(Company.id == company_id).first()
    if not company:
        raise HTTPException(status_code=404, detail="Company not found")

    # Validate upload_type
    valid_types = {"product", "purchase_order", "purchase_receive", "sales_order", "sales_return", "purchase_return", "stock_transfer"}
    if upload_type not in valid_types:
        raise HTTPException(status_code=400, detail=f"Invalid upload type. Must be one of: {', '.join(valid_types)}")

    # Read file contents
    contents = file.file.read()

    # Basic CSV validation - try to parse it
    try:
        df = pd.read_csv(io.BytesIO(contents))
        if df.empty:
            raise HTTPException(status_code=400, detail="CSV file is empty")
    except HTTPException:
        # Bug fix: previously the "CSV file is empty" HTTPException raised
        # above was caught by the generic handler below and re-wrapped with a
        # misleading "Invalid CSV file: ..." detail. Propagate it unchanged.
        raise
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Invalid CSV file: {str(e)}")

    # Validate required columns for product upload
    if upload_type == "product":
        expected_columns = {
            "product_name",
            "short_name",
            "description",
            "brand_name",
            "sku",
            "category_name",
            "location_name",
            "vendor_name"
        }
        if not expected_columns.issubset(set(df.columns)):
            missing = expected_columns - set(df.columns)
            raise HTTPException(
                status_code=400,
                detail=f"CSV missing required columns: {missing}"
            )

    # Save file to disk
    try:
        file_path = save_uploaded_file(contents, upload_type, company_id)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to save file: {str(e)}")

    # Trigger async Celery task
    try:
        task_result = trigger_csv_processing_task(
            db=db,
            company_id=company_id,
            upload_type=upload_type,
            file_path=file_path
        )

        logger.info(
            f"CSV upload queued: {task_result['task_name']} "
            f"(status: {task_result['status']}, file: {file_path})"
        )

        return {
            "task_id": task_result["task_id"],
            "task_name": task_result["task_name"],
            "status": task_result["status"],
            "file_path": task_result["file_path"],
            "message": f"CSV upload queued for processing. Status: {task_result['status']}"
        }

    except Exception as e:
        logger.error(f"Failed to queue CSV processing task: {e}")
        raise HTTPException(status_code=500, detail=f"Failed to queue processing task: {str(e)}")

async def upload_products_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """
    Upsert products (with categories, locations and vendors) from CSV rows.

    For each row: the category/location/vendor records are created on demand,
    the product is created or updated by (product_name, company_id), and the
    product-location / product-vendor link tables are populated. Multi-valued
    location_name / vendor_name cells use '|' as the separator.

    Bug fix vs. the previous version: pandas represents empty CSV cells as
    NaN, which is truthy and not a string — rows with a missing
    location_name/vendor_name used to crash on .split('|'), NaN names could
    create bogus category rows, and NaN could overwrite existing product
    fields. Missing cells are now normalized to None and skipped.

    Args:
        db: Active SQLAlchemy session (commits per created/updated record).
        company_id: Company that owns all records.
        data: Parsed product CSV (see expected_columns in process_csv_file).

    Returns:
        Number of rows processed (rows without a product_name are skipped).
    """

    def _clean(value: Any) -> Any:
        # Normalize pandas missing values (NaN/NaT/None) to None.
        return None if value is None or pd.isna(value) else value

    inserted_rows = 0
    for _, row in data.iterrows():
        product_name = _clean(row["product_name"])
        if not product_name:
            continue  # Skip rows without product_name

        category = None
        category_name = _clean(row["category_name"])
        if category_name:
            category = db.query(Category).filter(Category.category_name == category_name, Category.company_id == company_id).first()
            if not category:
                category = Category(
                    category_name=category_name,
                    company_id=company_id
                )
                db.add(category)
                db.commit()
                db.refresh(category)

        product = db.query(Product).filter(Product.product_name == product_name, Product.company_id == company_id).first()
        if not product:
            product = Product(
                company_id=company_id,
                fk_product_category_id=category.id if category else None,
                product_name=product_name,
                short_name=_clean(row.get("short_name")),
                description=_clean(row.get("description")),
                brand_name=_clean(row.get("brand_name")),
                sku=_clean(row.get("sku")),
                # Missing/NaN eligible_for_return defaults to False.
                eligible_for_return=_clean(row.get("eligible_for_return", False)) or False
            )
            db.add(product)
            db.commit()
            db.refresh(product)
        else:
            # Update existing product; only overwrite fields that carry a value.
            product.fk_product_category_id = category.id if category else product.fk_product_category_id
            product.short_name = _clean(row.get("short_name")) or product.short_name
            product.description = _clean(row.get("description")) or product.description
            product.brand_name = _clean(row.get("brand_name")) or product.brand_name
            product.sku = _clean(row.get("sku")) or product.sku

            db.add(product)
            db.commit()
            db.refresh(product)

        location_names = _clean(row["location_name"])
        if location_names:
            for loc_name in str(location_names).split('|'):
                location = db.query(Location).filter(Location.location_name == loc_name, Location.company_id == company_id).first()
                if not location:
                    location = Location(location_name=loc_name, company_id=company_id)
                    db.add(location)
                    db.commit()
                    db.refresh(location)

                product_location = db.query(ProductLocation).filter(
                    ProductLocation.company_id == company_id,
                    ProductLocation.product_id == product.id,
                    ProductLocation.location_id == location.id
                ).first()
                if not product_location:
                    product_location = ProductLocation(
                        company_id=company_id,
                        product_id=product.id,
                        location_id=location.id
                    )
                    db.add(product_location)
                    db.commit()
                    db.refresh(product_location)

        vendor_names = _clean(row["vendor_name"])
        if vendor_names:
            for vendor_name in str(vendor_names).split('|'):
                vendor = db.query(Vendor).filter(Vendor.vendor_name == vendor_name, Vendor.company_id == company_id).first()
                if not vendor:
                    vendor = Vendor(vendor_name=vendor_name, company_id=company_id)
                    db.add(vendor)
                    db.commit()
                    db.refresh(vendor)
                product_vendor = db.query(ProductVendor).filter(
                    ProductVendor.company_id == company_id,
                    ProductVendor.product_id == product.id,
                    ProductVendor.vendor_id == vendor.id
                ).first()
                if not product_vendor:
                    product_vendor = ProductVendor(
                        company_id=company_id,
                        product_id=product.id,
                        vendor_id=vendor.id
                    )
                    db.add(product_vendor)
                    db.commit()
                    db.refresh(product_vendor)

        inserted_rows += 1

    return inserted_rows

# purchase order
async def process_purchase_orders_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """
    Import purchase orders and their line items from a CSV DataFrame.

    Each row must reference an existing product by name; rows whose product
    is not found are skipped. A PurchaseOrder is created per unique
    purchase_order_ref_no (status SENT), and one PurchaseOrderLine per
    (order, product) pair. Already-existing orders and lines are left
    untouched, which makes re-uploads idempotent.

    Args:
        db: Active SQLAlchemy session; commits happen per created record.
        company_id: Company that owns all created records.
        data: Parsed CSV rows; expected columns include product_name,
              purchase_order_ref_no, vendor_name, location_name,
              purchase_order_date, order_qty and optionally unit_cost.

    Returns:
        Number of rows processed (skipped rows are not counted).
    """
    inserted_rows = 0
    for _, row in data.iterrows():
        # Resolve the product first; a row without a known product is unusable.
        product = db.query(Product).filter(Product.product_name == row["product_name"], Product.company_id == company_id).first()
        if not product:
            continue  # Skip if product not found
        # One PurchaseOrder per ref number; create it on first sight.
        purchase_order = db.query(PurchaseOrder).filter(PurchaseOrder.ref_number == row["purchase_order_ref_no"], PurchaseOrder.company_id == company_id).first()
        if not purchase_order:
            # Vendor/location are optional: unresolved names become NULL FKs.
            vendor = db.query(Vendor).filter(Vendor.vendor_name == row["vendor_name"], Vendor.company_id == company_id).first()
            location = db.query(Location).filter(Location.location_name == row["location_name"], Location.company_id == company_id).first()
            purchase_order = PurchaseOrder(
                company_id=company_id,
                ref_number=row["purchase_order_ref_no"],
                supplier_id=vendor.id if vendor else None,
                location_id=location.id if location else None,
                order_date=row["purchase_order_date"],
                status=PurchaseOrderStatus.SENT,
            )
            db.add(purchase_order)
            # Commit + refresh immediately so purchase_order.id is available
            # for the line insert below.
            db.commit()
            db.refresh(purchase_order)
            
        # One line per (order, product); duplicates in the CSV are ignored.
        purchase_order_item = db.query(PurchaseOrderLine).filter(
            PurchaseOrderLine.company_id == company_id,
            PurchaseOrderLine.purchase_order_id == purchase_order.id,
            PurchaseOrderLine.product_id == product.id
        ).first()
        if not purchase_order_item:
            purchase_order_item = PurchaseOrderLine(
                company_id=company_id,
                purchase_order_id=purchase_order.id,
                product_id=product.id,
                ordered_qty=row["order_qty"],
                unit_cost=row.get("unit_cost", 0.0)
            )
            db.add(purchase_order_item)
            db.commit()
            db.refresh(purchase_order_item)
        inserted_rows += 1

    return inserted_rows

# purchase receive order
async def process_purchase_receive_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """
    Import purchase receive orders from CSV and apply their stock effects.

    For each row linked to an existing PurchaseOrder by purchase_order_ref_no:
    creates the PurchaseReceiveOrder / PurchaseOrderReceiveLine records
    (idempotently, keyed by ref numbers), rolls the received quantity up into
    the matching PurchaseOrderLine, marks the PO RECEIVED once every line is
    fully received, and finally increments (or creates) the InventoryBatch
    and records a RECEIPT InventoryMovement.

    Args:
        db: Active SQLAlchemy session (commits once per processed row).
        company_id: Company that owns all records.
        data: Parsed CSV; expected columns include purchase_order_ref_no,
              receive_order_ref_no, receive_order_date, product_name,
              vendor_name, location_name, receive_qty, optional unit_cost.

    Returns:
        Number of rows processed (rows with an unknown PO/product are skipped).
    """
    inserted_rows = 0
    for _, row in data.iterrows():
        linked_purchase_order = db.query(PurchaseOrder).filter(PurchaseOrder.ref_number == row["purchase_order_ref_no"], PurchaseOrder.company_id == company_id).first()
        if not linked_purchase_order:
            continue  # Skip if linked purchase order not found
        product = db.query(Product).filter(Product.product_name == row["product_name"], Product.company_id == company_id).first()
        if not product:
            continue  # Skip if product not found

        purchase_receive = db.query(PurchaseReceiveOrder).filter(
            PurchaseReceiveOrder.company_id == company_id,
            PurchaseReceiveOrder.linked_ref_number == row["purchase_order_ref_no"],
            PurchaseReceiveOrder.ref_number == row["receive_order_ref_no"],
        ).first()

        # Always query location (needed for the InventoryBatch lookup below).
        location = db.query(Location).filter(Location.location_name == row["location_name"], Location.company_id == company_id).first()

        if not purchase_receive:
            vendor = db.query(Vendor).filter(Vendor.vendor_name == row["vendor_name"], Vendor.company_id == company_id).first()
            purchase_receive = PurchaseReceiveOrder(
                company_id=company_id,
                linked_ref_number=row["purchase_order_ref_no"],
                ref_number=row["receive_order_ref_no"],
                received_date=row["receive_order_date"],
                purchase_order_id=linked_purchase_order.id,
                supplier_id=vendor.id if vendor else None,
                location_id=location.id if location else None,
                status=PurchaseOrderStatus.RECEIVED,
            )
            db.add(purchase_receive)
            db.commit()
            db.refresh(purchase_receive)

        receive_order_item = db.query(PurchaseOrderReceiveLine).filter(
            PurchaseOrderReceiveLine.company_id == company_id,
            PurchaseOrderReceiveLine.purchase_order_receive_id == purchase_receive.id,
            PurchaseOrderReceiveLine.product_id == product.id
        ).first()
        if not receive_order_item:
            receive_order_item = PurchaseOrderReceiveLine(
                company_id=company_id,
                purchase_order_receive_id=purchase_receive.id,
                product_id=product.id,
                received_qty=row["receive_qty"],
                unit_cost=row.get("unit_cost", 0.0)
            )
            db.add(receive_order_item)
            db.commit()
            db.refresh(receive_order_item)

        # Update received_qty in PurchaseOrderLine. Lines created by the
        # purchase-order import never set received_qty, so guard NULL here.
        po_line = db.query(PurchaseOrderLine).filter(
            PurchaseOrderLine.purchase_order_id == linked_purchase_order.id,
            PurchaseOrderLine.product_id == product.id
        ).first()
        if po_line:
            po_line.received_qty = (po_line.received_qty or 0) + row["receive_qty"]
            db.add(po_line)

        # Check if all PO lines are fully received, then update PO status.
        all_po_lines = db.query(PurchaseOrderLine).filter(
            PurchaseOrderLine.purchase_order_id == linked_purchase_order.id
        ).all()
        all_received = all((line.received_qty or 0) >= line.ordered_qty for line in all_po_lines)
        if all_received:
            linked_purchase_order.status = PurchaseOrderStatus.RECEIVED
            db.add(linked_purchase_order)

        # Update InventoryBatch and create InventoryMovement.
        # Bug fix: the conditional must be parenthesized. The previous
        # `InventoryBatch.location_id == location.id if location else None`
        # parsed as `(cmp) if location else None`, so the whole criterion
        # became None (an invalid filter argument) whenever the location was
        # missing, instead of an IS NULL comparison.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand + row["receive_qty"]
            currentstock.received_date = row["receive_order_date"]
            currentstock.updated_at = datetime.now()
        else:
            currentstock = InventoryBatch(
                company_id=company_id,
                product_id=product.id,
                location_id=location.id if location else None,
                # setting batch_ref to purchase order ref for now
                batch_ref=row["purchase_order_ref_no"],
                quantity_on_hand=row["receive_qty"],
                received_date=row["receive_order_date"],
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
        db.add(currentstock)

        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=row["receive_qty"],
            movement_type=MovementType.RECEIPT,
            reference=str(purchase_receive.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

    return inserted_rows

# sales order
async def process_sales_orders_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """
    Import sales orders from CSV and apply their stock effects.

    Creates a SalesOrder per unique sales_order_ref_no (channel "store") and
    one SalesOrderLine per (order, product) pair, then decrements the
    matching InventoryBatch and records a SALE InventoryMovement with a
    negative quantity delta.

    Args:
        db: Active SQLAlchemy session (commits once per processed row).
        company_id: Company that owns all records.
        data: Parsed CSV; expected columns include product_name,
              location_name, sales_order_ref_no, order_date, quantity and
              optionally unit_price.

    Returns:
        Number of rows processed (rows with unknown products are skipped).
    """
    inserted_rows = 0
    for _, row in data.iterrows():
        product = db.query(Product).filter(Product.product_name == row["product_name"], Product.company_id == company_id).first()
        if not product:
            continue  # Skip if product not found

        # Always query location (needed for the InventoryBatch lookup below).
        location = db.query(Location).filter(Location.location_name == row["location_name"], Location.company_id == company_id).first()

        salesorder = db.query(SalesOrder).filter(SalesOrder.ref_number == row["sales_order_ref_no"], SalesOrder.company_id == company_id).first()
        if not salesorder:
            salesorder = SalesOrder(
                company_id=company_id,
                ref_number=row["sales_order_ref_no"],
                location_id=location.id if location else None,
                sold_date=row["order_date"],
                channel="store",
            )
            db.add(salesorder)
            db.commit()
            db.refresh(salesorder)

        # One line per (order, product); duplicates in the CSV are ignored.
        sales_order_item = db.query(SalesOrderLine).filter(
            SalesOrderLine.company_id == company_id,
            SalesOrderLine.sales_order_id == salesorder.id,
            SalesOrderLine.product_id == product.id
        ).first()
        if not sales_order_item:
            sales_order_item = SalesOrderLine(
                company_id=company_id,
                sales_order_id=salesorder.id,
                product_id=product.id,
                quantity=row["quantity"],
                unit_price=row.get("unit_price", 0.0)
            )
            db.add(sales_order_item)
            db.commit()
            db.refresh(sales_order_item)

        # Update InventoryBatch and create InventoryMovement for the sale.
        # Bug fix: parenthesize the conditional — previously the whole
        # criterion evaluated to None (invalid filter) when location was
        # missing, instead of an IS NULL comparison.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand - row["quantity"]
            currentstock.updated_at = datetime.now()
            db.add(currentstock)

        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=(row["quantity"] * -1),
            movement_type=MovementType.SALE,
            reference=str(salesorder.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

    return inserted_rows

# sales order return
async def process_sales_return_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """
    Process sales return CSV upload.

    For each row linked to an existing SalesOrder by sales_order_ref_no:
    creates the SalesReturnOrder / SalesReturnOrderLine records (idempotently,
    keyed by ref numbers), restocks the matching InventoryBatch and records a
    SALE_RETURN InventoryMovement with a positive quantity delta.

    Args:
        db: Active SQLAlchemy session (commits once per processed row).
        company_id: Company that owns all records.
        data: Parsed CSV; expected columns include sales_order_ref_no,
              return_order_ref_no, return_date, product_name, location_name,
              quantity.

    Returns:
        Number of rows processed (rows with an unknown sales order/product
        are skipped).
    """
    inserted_rows = 0
    for _, row in data.iterrows():
        # Find the linked sales order
        linked_sales_order = db.query(SalesOrder).filter(
            SalesOrder.ref_number == row["sales_order_ref_no"],
            SalesOrder.company_id == company_id
        ).first()
        if not linked_sales_order:
            continue  # Skip if linked sales order not found

        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue  # Skip if product not found

        # Location is needed in every path below, so resolve it once
        # (previously this identical query was duplicated in both branches).
        location = db.query(Location).filter(
            Location.location_name == row["location_name"],
            Location.company_id == company_id
        ).first()

        # Check if sales return order already exists
        sales_return = db.query(SalesReturnOrder).filter(
            SalesReturnOrder.company_id == company_id,
            SalesReturnOrder.linked_ref_number == row["sales_order_ref_no"],
            SalesReturnOrder.ref_number == row["return_order_ref_no"],
        ).first()

        if not sales_return:
            sales_return = SalesReturnOrder(
                company_id=company_id,
                sales_order_id=linked_sales_order.id,
                ref_number=row["return_order_ref_no"],
                linked_ref_number=row["sales_order_ref_no"],
                location_id=location.id if location else None,
                returned_date=row["return_date"],
                channel="store",
            )
            db.add(sales_return)
            db.commit()
            db.refresh(sales_return)

        # Check if sales return line already exists
        return_order_item = db.query(SalesReturnOrderLine).filter(
            SalesReturnOrderLine.company_id == company_id,
            SalesReturnOrderLine.sales_return_order_id == sales_return.id,
            SalesReturnOrderLine.product_id == product.id
        ).first()

        if not return_order_item:
            return_order_item = SalesReturnOrderLine(
                company_id=company_id,
                sales_return_order_id=sales_return.id,
                product_id=product.id,
                quantity=row["quantity"],
            )
            db.add(return_order_item)
            db.commit()
            db.refresh(return_order_item)

        # Update InventoryBatch (increase stock on return).
        # Bug fix: parenthesize the conditional — previously the whole
        # criterion evaluated to None (invalid filter) when location was
        # missing, instead of an IS NULL comparison.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand + row["quantity"]
            currentstock.received_date = row["return_date"]
            currentstock.updated_at = datetime.now()
            db.add(currentstock)

        # Create InventoryMovement for return
        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=row["quantity"],
            movement_type=MovementType.SALE_RETURN,
            reference=str(sales_return.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

    return inserted_rows

# purchase return
async def process_purchase_return_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """
    Process purchase return (vendor return) CSV upload.

    For each row linked to an existing PurchaseReceiveOrder by
    receive_order_ref_no: creates the PurchaseReturnOrder /
    PurchaseOrderReturnLine records (idempotently, keyed by ref numbers),
    decrements the matching InventoryBatch and records a PURCHASE_RETURN
    InventoryMovement with a negative quantity delta.

    Args:
        db: Active SQLAlchemy session (commits once per processed row).
        company_id: Company that owns all records.
        data: Parsed CSV; expected columns include receive_order_ref_no,
              return_order_ref_no, return_date, product_name, vendor_name,
              location_name, return_qty.

    Returns:
        Number of rows processed (rows with an unknown receive order/product
        are skipped).
    """
    inserted_rows = 0
    for _, row in data.iterrows():
        # Find the linked purchase receive order
        linked_receive_order = db.query(PurchaseReceiveOrder).filter(
            PurchaseReceiveOrder.ref_number == row["receive_order_ref_no"],
            PurchaseReceiveOrder.company_id == company_id
        ).first()
        if not linked_receive_order:
            continue  # Skip if linked purchase receive order not found

        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue  # Skip if product not found

        # Location is needed in every path below, so resolve it once
        # (previously this identical query was duplicated in both branches).
        location = db.query(Location).filter(
            Location.location_name == row["location_name"],
            Location.company_id == company_id
        ).first()

        # Check if purchase return order already exists
        purchase_return = db.query(PurchaseReturnOrder).filter(
            PurchaseReturnOrder.company_id == company_id,
            PurchaseReturnOrder.linked_ref_number == row["receive_order_ref_no"],
            PurchaseReturnOrder.ref_number == row["return_order_ref_no"],
        ).first()

        if not purchase_return:
            vendor = db.query(Vendor).filter(
                Vendor.vendor_name == row["vendor_name"],
                Vendor.company_id == company_id
            ).first()
            purchase_return = PurchaseReturnOrder(
                company_id=company_id,
                purchase_receive_order_id=linked_receive_order.id,
                ref_number=row["return_order_ref_no"],
                linked_ref_number=row["receive_order_ref_no"],
                supplier_id=vendor.id if vendor else None,
                location_id=location.id if location else None,
                returned_date=row["return_date"],
                status=PurchaseOrderStatus.RETURNED,
            )
            db.add(purchase_return)
            db.commit()
            db.refresh(purchase_return)

        # Check if purchase return line already exists
        return_order_item = db.query(PurchaseOrderReturnLine).filter(
            PurchaseOrderReturnLine.company_id == company_id,
            PurchaseOrderReturnLine.purchase_order_return_id == purchase_return.id,
            PurchaseOrderReturnLine.product_id == product.id
        ).first()

        if not return_order_item:
            return_order_item = PurchaseOrderReturnLine(
                company_id=company_id,
                purchase_order_return_id=purchase_return.id,
                product_id=product.id,
                return_qty=row["return_qty"],
            )
            db.add(return_order_item)
            db.commit()
            db.refresh(return_order_item)

        # Update InventoryBatch (decrease stock on vendor return).
        # Bug fix: parenthesize the conditional — previously the whole
        # criterion evaluated to None (invalid filter) when location was
        # missing, instead of an IS NULL comparison.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand - row["return_qty"]
            currentstock.updated_at = datetime.now()
            db.add(currentstock)

        # Create InventoryMovement for purchase return
        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=(row["return_qty"] * -1),
            movement_type=MovementType.PURCHASE_RETURN,
            reference=str(purchase_return.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

    return inserted_rows

# stock transfer
async def process_stock_transfer_csv(
    db: Session,
    company_id: int,
    data: pd.DataFrame
) -> int:
    """Process a stock transfer CSV upload.

    For each row: decrements on-hand quantity at the source location,
    increments it at the destination location, and records a paired
    TRANSFER_OUT / TRANSFER_IN InventoryMovement. Rows whose product is
    not found for the company are skipped.

    Args:
        db: Database session.
        company_id: Company the transfer rows belong to.
        data: Parsed CSV with columns product_name, from_location_name,
            to_location_name, quantity, transfer_date and (optionally)
            transfer_ref_no.

    Returns:
        Number of rows successfully processed.
    """
    inserted_rows = 0
    for _, row in data.iterrows():
        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue  # Skip if product not found
        
        from_location = db.query(Location).filter(
            Location.location_name == row["from_location_name"],
            Location.company_id == company_id
        ).first()
        
        to_location = db.query(Location).filter(
            Location.location_name == row["to_location_name"],
            Location.company_id == company_id
        ).first()
        
        transfer_qty = row["quantity"]
        transfer_ref = row.get("transfer_ref_no", "")
        
        # Update source location inventory (decrease).
        # BUGFIX: the conditional must be parenthesized. The original
        # `InventoryBatch.location_id == from_location.id if from_location else None`
        # parsed as `(col == id) if from_location else None`, so a missing
        # location passed a bare None to filter() (a SQLAlchemy ArgumentError)
        # instead of comparing location_id against NULL.
        currentstock_from = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (from_location.id if from_location else None),
        ).first()
        
        if currentstock_from:
            currentstock_from.quantity_on_hand = currentstock_from.quantity_on_hand - transfer_qty
            currentstock_from.updated_at = datetime.now()
            db.add(currentstock_from)
        
        # Record the outbound half of the transfer.
        transfer_out_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=from_location.id if from_location else None,
            batch_id=currentstock_from.id if currentstock_from else None,
            quantity_delta=(transfer_qty * -1),
            movement_type=MovementType.TRANSFER_OUT,
            reference=transfer_ref,
        )
        db.add(transfer_out_move)
        db.commit()
        
        # Update destination location inventory (increase) — same
        # parenthesization fix as for the source lookup above.
        currentstock_to = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (to_location.id if to_location else None),
        ).first()
        
        if currentstock_to:
            currentstock_to.quantity_on_hand = currentstock_to.quantity_on_hand + transfer_qty
            currentstock_to.received_date = row["transfer_date"]
            currentstock_to.updated_at = datetime.now()
            db.add(currentstock_to)
        db.commit()
        
        # Record the inbound half of the transfer.
        transfer_in_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=to_location.id if to_location else None,
            batch_id=currentstock_to.id if currentstock_to else None,
            quantity_delta=transfer_qty,
            movement_type=MovementType.TRANSFER_IN,
            reference=transfer_ref,
        )
        db.add(transfer_in_move)
        db.commit()
        
        inserted_rows += 1

    return inserted_rows


def get_csv_upload_logs(
    db: Session,
    company_id: int,
    page: int = 1,
    perpage: int = 10,
    csv_type: str = None,
    processing_status: str = None
) -> dict:
    """
    Fetch CSV upload logs with pagination and optional filtering.
    Returns logs ordered by upload_date descending (latest first).
    
    Args:
        db: Database session
        company_id: Company ID to filter logs
        page: Page number (1-indexed)
        perpage: Number of items per page
        csv_type: Optional filter by CSV type (product, purchase_order, etc.)
        processing_status: Optional filter by status (succeeded, failed, partially_succeeded)
    
    Returns:
        dict with success, data (list of logs), page, perpage, total, message.
        On any error, success is False, data is empty and message carries the
        error text (the exception is logged, not re-raised).
    """
    try:
        # Offset for 1-indexed pages.
        offset = (page - 1) * perpage
        
        # Base query scoped to the company.
        query = db.query(CSVUploadLog).filter(
            CSVUploadLog.company_id == company_id
        )
        
        # Apply optional filters (falsy values mean "no filter").
        if csv_type:
            query = query.filter(CSVUploadLog.csv_type == csv_type)
        if processing_status:
            query = query.filter(CSVUploadLog.processing_status == processing_status)
        
        # Count before attaching ORDER BY: ordering is irrelevant to the
        # count and would otherwise end up inside the count subquery.
        total = query.count()
        
        # Latest uploads first, then the requested page slice.
        query = query.order_by(desc(CSVUploadLog.upload_date))
        logs = query.offset(offset).limit(perpage).all()
        
        return {
            "success": True,
            "data": logs,
            "page": page,
            "perpage": perpage,
            "total": total,
            "message": "CSV upload logs retrieved successfully"
        }
        
    except Exception as e:
        # logger.exception records the traceback; lazy %s args avoid
        # formatting work unless the record is emitted.
        logger.exception("Error fetching CSV upload logs: %s", e)
        return {
            "success": False,
            "data": [],
            "page": page,
            "perpage": perpage,
            "total": 0,
            "message": f"Error fetching CSV upload logs: {str(e)}"
        }