"""
CSV Processing Celery Tasks
============================

Celery tasks for processing CSV file uploads asynchronously.
Each task reads the CSV file, processes it using the existing processor functions,
creates upload logs, and triggers snapshot tasks.

Task Pattern:
1. Read CSV file from file_path
2. Call existing processor function from controller.py
3. Create CSV upload log entry
4. Trigger snapshot tasks via trigger_tasks_for_affected_tables()
5. Update task tracker status
"""

import logging
import pandas as pd
from datetime import datetime
from pathlib import Path
from typing import Dict, Any, Optional, List, Set

from src.utils.celery_worker import celery_app

logger = logging.getLogger(__name__)


# =============================================================================
# HELPER FUNCTIONS
# =============================================================================

def _update_tracker(task_id: str, status: str, error_message: Optional[str] = None):
    """Best-effort update of the CSV task tracker; logs and swallows any failure."""
    try:
        from src.smart_inventory.utils.csv_task_orchestrator import update_csv_task_status
        update_csv_task_status(task_id, status, error_message)
    except Exception as exc:
        # Tracker updates are advisory — never let them break CSV processing.
        logger.warning(f"Could not update CSV task tracker: {exc}")


def _create_upload_log(db, company_id: int, csv_type: str, file_path: str, 
                       processing_status: str, rows_processed: Optional[int] = None, 
                       rows_total: Optional[int] = None, error_message: Optional[str] = None):
    """Create a CSVUploadLog row recording the outcome of one CSV upload.

    Best-effort: any failure is logged and the session rolled back so the
    caller's flow is never interrupted by logging itself.

    Args:
        db: SQLAlchemy session used for the insert.
        company_id: Company the upload belongs to.
        csv_type: Upload type label (e.g. "product", "sales_order").
        file_path: Path of the processed CSV file.
        processing_status: Outcome label (e.g. "succeeded", "failed").
        rows_processed: Rows successfully processed, if known.
        rows_total: Total rows in the file, if known.
        error_message: Failure detail when processing_status indicates an error.
    """
    try:
        from src.smart_inventory.apps.data_import.models import CSVUploadLog
        log = CSVUploadLog(
            company_id=company_id,
            csv_type=csv_type,
            file_path=file_path,
            processing_status=processing_status,
            rows_processed=rows_processed,
            rows_total=rows_total,
            error_message=error_message
        )
        db.add(log)
        db.commit()
        logger.info(f"Created CSV upload log: {csv_type} - {processing_status}")
    except Exception as e:
        # Roll back so the session stays usable for the caller.
        logger.warning(f"Could not create CSV upload log: {e}")
        db.rollback()


def _extract_unique_dates(df: pd.DataFrame, upload_type: str) -> List[str]:
    """
    Extract unique dates from CSV data based on upload type.
    """
    # Date column mapping for each CSV type
    CSV_DATE_COLUMNS = {
        "purchase_receive": ["receive_order_date"],
        "sales_order": ["order_date"],
        "sales_return": ["return_date"],
        "purchase_return": ["return_date"],
        "stock_transfer": ["transfer_date"]
    }
    
    date_columns = CSV_DATE_COLUMNS.get(upload_type, [])
    unique_dates: Set[str] = set()
    
    for col in date_columns:
        if col in df.columns:
            try:
                dates = pd.to_datetime(df[col], errors='coerce').dropna()
                for d in dates.dt.date.unique():
                    unique_dates.add(d.strftime("%Y-%m-%d"))
            except Exception as e:
                logger.warning(f"Could not parse dates from column {col}: {e}")
    
    return sorted(list(unique_dates))


def _trigger_snapshot_tasks(
    db,
    company_id: int,
    upload_type: str,
    unique_dates: List[str]
) -> Dict[str, Any]:
    """
    Trigger snapshot tasks for the tables affected by a CSV upload.

    Args:
        db: Database session passed through to the task orchestrator.
        company_id: Company scope for the snapshot tasks.
        upload_type: CSV type; determines which tables are considered affected.
        unique_dates: "YYYY-MM-DD" strings extracted from the upload; today's
            date is always included in the dates sent to the orchestrator.

    Returns:
        The orchestrator's result dict (batch_id / tasks_triggered /
        tasks_on_hold), or a zeroed dict when nothing was triggered or the
        orchestrator raised (with "error" set in the latter case).
    """
    from src.smart_inventory.utils.task_orchestrator import trigger_tasks_for_affected_tables

    # CSV type to affected tables mapping
    CSV_TYPE_TO_AFFECTED_TABLES = {
        "purchase_receive": ["purchase_order_receive", "purchase_order_receive_lines", "inventory_batches", "inventory_movements"],
        "sales_order": ["sales_orders", "sales_order_lines", "inventory_batches", "inventory_movements"],
        "sales_return": ["sales_return_orders", "sales_return_order_lines", "inventory_batches", "inventory_movements"],
        "purchase_return": ["purchase_order_return", "purchase_order_return_lines", "inventory_batches", "inventory_movements"],
        "stock_transfer": ["inventory_movements", "inventory_batches"],
    }

    affected_tables = CSV_TYPE_TO_AFFECTED_TABLES.get(upload_type, [])

    if not affected_tables:
        return {"batch_id": None, "tasks_triggered": 0, "tasks_on_hold": 0}

    # Ensure today's date is included. Fix: the previous check only looked at
    # the LAST element, so today was duplicated (or mis-detected) whenever it
    # was present but not last — e.g. when the upload contains future-dated
    # rows. Use membership instead, and copy rather than mutate the caller's
    # list.
    today_str = datetime.now().strftime("%Y-%m-%d")
    if today_str not in unique_dates:
        unique_dates = unique_dates + [today_str]

    try:
        result = trigger_tasks_for_affected_tables(
            db=db,
            affected_tables=affected_tables,
            company_id=company_id,
            unique_dates=unique_dates
        )
        logger.info(
            f"Triggered {result['tasks_triggered']} snapshot tasks, "
            f"{result['tasks_on_hold']} on hold for batch {result['batch_id']}"
        )
        return result
    except Exception as e:
        logger.error(f"Failed to trigger snapshot tasks: {e}")
        return {"batch_id": None, "tasks_triggered": 0, "tasks_on_hold": 0, "error": str(e)}


# =============================================================================
# BATCH PROCESSING UTILITIES
# =============================================================================

# NOTE(review): despite the name, the row-level processors below commit per
# row and use this value only to gate debug progress logs; _commit_batch is
# the helper that would commit at this granularity — confirm intended usage.
BATCH_SIZE = 100


def _commit_batch(db, batch: list) -> int:
    """Commit a batch of objects to the database."""
    if batch:
        db.add_all(batch)
        db.commit()
        return len(batch)
    return 0


# =============================================================================
# CSV PROCESSOR FUNCTIONS (Sync versions for Celery)
# =============================================================================

def _process_product_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import product rows: get-or-create category, product, and the
    pipe-separated location/vendor associations for each row.

    Rows without a product_name are skipped. Entities are committed as they
    are created/updated (per row); BATCH_SIZE only gates progress logging.

    Args:
        db: SQLAlchemy session.
        company_id: Company scope for all lookups and inserts.
        df: Parsed product CSV.

    Returns:
        Number of rows processed (skipped rows are not counted).
    """
    from src.smart_inventory.apps.products.models import (
        Category, Product, Location, ProductLocation, Vendor, ProductVendor
    )

    inserted_rows = 0

    for idx, row in df.iterrows():
        if not row.get("product_name"):
            continue

        # Get-or-create the category referenced by this row (if any).
        category = None
        if row.get("category_name"):
            category = db.query(Category).filter(
                Category.category_name == row["category_name"],
                Category.company_id == company_id
            ).first()
            if not category:
                category = Category(
                    category_name=row["category_name"],
                    company_id=company_id
                )
                db.add(category)
                db.commit()
                db.refresh(category)

        # Get-or-create the product; existing products are updated in place.
        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()

        if not product:
            product = Product(
                company_id=company_id,
                fk_product_category_id=category.id if category else None,
                product_name=row["product_name"],
                short_name=row.get("short_name"),
                description=row.get("description"),
                brand_name=row.get("brand_name"),
                sku=row.get("sku"),
                eligible_for_return=row.get("eligible_for_return", False)
            )
            db.add(product)
            db.commit()
            db.refresh(product)
        else:
            # NOTE(review): row.get(...) returns NaN for present-but-empty CSV
            # cells, which would overwrite existing values — confirm desired.
            product.fk_product_category_id = category.id if category else product.fk_product_category_id
            product.short_name = row.get("short_name", product.short_name)
            product.description = row.get("description", product.description)
            product.brand_name = row.get("brand_name", product.brand_name)
            product.sku = row.get("sku", product.sku)
            db.add(product)
            db.commit()
            db.refresh(product)

        # Locations: pipe-separated list; link each via ProductLocation.
        if row.get("location_name"):
            locations = str(row["location_name"]).split('|')
            for loc_name in locations:
                location = db.query(Location).filter(
                    Location.location_name == loc_name,
                    Location.company_id == company_id
                ).first()
                if not location:
                    location = Location(location_name=loc_name, company_id=company_id)
                    db.add(location)
                    db.commit()
                    db.refresh(location)

                product_location = db.query(ProductLocation).filter(
                    ProductLocation.company_id == company_id,
                    ProductLocation.product_id == product.id,
                    ProductLocation.location_id == location.id
                ).first()
                if not product_location:
                    product_location = ProductLocation(
                        company_id=company_id,
                        product_id=product.id,
                        location_id=location.id
                    )
                    db.add(product_location)
                    db.commit()

        # Vendors: pipe-separated list; link each via ProductVendor.
        if row.get("vendor_name"):
            vendors = str(row["vendor_name"]).split('|')
            for vendor_name in vendors:
                vendor = db.query(Vendor).filter(
                    Vendor.vendor_name == vendor_name,
                    Vendor.company_id == company_id
                ).first()
                if not vendor:
                    vendor = Vendor(vendor_name=vendor_name, company_id=company_id)
                    db.add(vendor)
                    db.commit()
                    db.refresh(vendor)

                product_vendor = db.query(ProductVendor).filter(
                    ProductVendor.company_id == company_id,
                    ProductVendor.product_id == product.id,
                    ProductVendor.vendor_id == vendor.id
                ).first()
                if not product_vendor:
                    product_vendor = ProductVendor(
                        company_id=company_id,
                        product_id=product.id,
                        vendor_id=vendor.id
                    )
                    db.add(product_vendor)
                    db.commit()

        inserted_rows += 1

        # Progress log every BATCH_SIZE rows (commits already happened above).
        if inserted_rows % BATCH_SIZE == 0:
            logger.debug(f"Processed {inserted_rows} product rows")

    return inserted_rows


def _process_purchase_order_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import purchase-order rows, creating headers and lines idempotently.

    Rows whose product cannot be resolved for this company are skipped.
    Returns the number of rows processed.
    """
    from src.smart_inventory.apps.inventory.models import (
        PurchaseOrder, PurchaseOrderLine, PurchaseOrderStatus
    )
    from src.smart_inventory.apps.products.models import Product, Location, Vendor

    processed = 0

    for _, rec in df.iterrows():
        # Resolve product; unknown products skip the row.
        prod = db.query(Product).filter(
            Product.product_name == rec["product_name"],
            Product.company_id == company_id
        ).first()
        if prod is None:
            continue

        # Find or create the purchase-order header keyed by its ref number.
        po = db.query(PurchaseOrder).filter(
            PurchaseOrder.ref_number == rec["purchase_order_ref_no"],
            PurchaseOrder.company_id == company_id
        ).first()

        if po is None:
            supplier = db.query(Vendor).filter(
                Vendor.vendor_name == rec["vendor_name"],
                Vendor.company_id == company_id
            ).first()
            loc = db.query(Location).filter(
                Location.location_name == rec["location_name"],
                Location.company_id == company_id
            ).first()

            po = PurchaseOrder(
                company_id=company_id,
                ref_number=rec["purchase_order_ref_no"],
                supplier_id=supplier.id if supplier else None,
                location_id=loc.id if loc else None,
                order_date=rec["purchase_order_date"],
                status=PurchaseOrderStatus.SENT,
            )
            db.add(po)
            db.commit()
            db.refresh(po)

        # One line per (order, product): only insert when missing.
        existing_line = db.query(PurchaseOrderLine).filter(
            PurchaseOrderLine.company_id == company_id,
            PurchaseOrderLine.purchase_order_id == po.id,
            PurchaseOrderLine.product_id == prod.id
        ).first()

        if existing_line is None:
            db.add(PurchaseOrderLine(
                company_id=company_id,
                purchase_order_id=po.id,
                product_id=prod.id,
                ordered_qty=rec["order_qty"],
                unit_cost=rec.get("unit_cost", 0.0)
            ))
            db.commit()

        processed += 1

        if processed % BATCH_SIZE == 0:
            logger.debug(f"Processed {processed} purchase order rows")

    return processed


def _process_purchase_receive_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import purchase-receive rows: create receive headers/lines, roll the
    received quantities up to the linked PO, and adjust inventory.

    Rows are skipped when the linked purchase order or the product cannot be
    resolved. Commits happen per row; BATCH_SIZE only gates progress logging.

    Returns:
        Number of rows processed.
    """
    from src.smart_inventory.apps.inventory.models import (
        PurchaseOrder, PurchaseOrderLine, PurchaseOrderStatus,
        PurchaseReceiveOrder, PurchaseOrderReceiveLine,
        InventoryBatch, InventoryMovement, MovementType
    )
    from src.smart_inventory.apps.products.models import Product, Location, Vendor

    inserted_rows = 0

    for idx, row in df.iterrows():
        linked_purchase_order = db.query(PurchaseOrder).filter(
            PurchaseOrder.ref_number == row["purchase_order_ref_no"],
            PurchaseOrder.company_id == company_id
        ).first()
        if not linked_purchase_order:
            continue

        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue

        location = db.query(Location).filter(
            Location.location_name == row["location_name"],
            Location.company_id == company_id
        ).first()

        # Get-or-create the receive-order header for this (PO ref, receive ref).
        purchase_receive = db.query(PurchaseReceiveOrder).filter(
            PurchaseReceiveOrder.company_id == company_id,
            PurchaseReceiveOrder.linked_ref_number == row["purchase_order_ref_no"],
            PurchaseReceiveOrder.ref_number == row["receive_order_ref_no"],
        ).first()

        if not purchase_receive:
            vendor = db.query(Vendor).filter(
                Vendor.vendor_name == row["vendor_name"],
                Vendor.company_id == company_id
            ).first()

            purchase_receive = PurchaseReceiveOrder(
                company_id=company_id,
                linked_ref_number=row["purchase_order_ref_no"],
                ref_number=row["receive_order_ref_no"],
                received_date=row["receive_order_date"],
                purchase_order_id=linked_purchase_order.id,
                supplier_id=vendor.id if vendor else None,
                location_id=location.id if location else None,
                status=PurchaseOrderStatus.RECEIVED,
            )
            db.add(purchase_receive)
            db.commit()
            db.refresh(purchase_receive)

        receive_order_item = db.query(PurchaseOrderReceiveLine).filter(
            PurchaseOrderReceiveLine.company_id == company_id,
            PurchaseOrderReceiveLine.purchase_order_receive_id == purchase_receive.id,
            PurchaseOrderReceiveLine.product_id == product.id
        ).first()

        if not receive_order_item:
            receive_order_item = PurchaseOrderReceiveLine(
                company_id=company_id,
                purchase_order_receive_id=purchase_receive.id,
                product_id=product.id,
                received_qty=row["receive_qty"],
                unit_cost=row.get("unit_cost", 0.0)
            )
            db.add(receive_order_item)
            db.commit()

        # Roll the received quantity up to the matching PO line.
        po_line = db.query(PurchaseOrderLine).filter(
            PurchaseOrderLine.purchase_order_id == linked_purchase_order.id,
            PurchaseOrderLine.product_id == product.id
        ).first()
        if po_line:
            # NOTE(review): assumes received_qty is never None — confirm model default.
            po_line.received_qty = po_line.received_qty + row["receive_qty"]
            db.add(po_line)

        # Flip the PO to RECEIVED once every line is fully received.
        all_po_lines = db.query(PurchaseOrderLine).filter(
            PurchaseOrderLine.purchase_order_id == linked_purchase_order.id
        ).all()
        all_received = all(line.received_qty >= line.ordered_qty for line in all_po_lines)
        if all_received:
            linked_purchase_order.status = PurchaseOrderStatus.RECEIVED
            db.add(linked_purchase_order)

        # Update inventory. Fix: the location condition must be parenthesized —
        # without parens `a == b if c else None` evaluates to None when the
        # location is missing, passing None to filter() instead of the intended
        # `location_id IS NULL` comparison.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand + row["receive_qty"]
            currentstock.received_date = row["receive_order_date"]
            currentstock.updated_at = datetime.now()
        else:
            currentstock = InventoryBatch(
                company_id=company_id,
                product_id=product.id,
                location_id=location.id if location else None,
                batch_ref=row["purchase_order_ref_no"],
                quantity_on_hand=row["receive_qty"],
                received_date=row["receive_order_date"],
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
        db.add(currentstock)

        # NOTE(review): a freshly-created batch may not have an id before
        # flush/commit — batch_id could be None here depending on session
        # autoflush settings; confirm.
        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=row["receive_qty"],
            movement_type=MovementType.RECEIPT,
            reference=str(purchase_receive.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

        # Progress log every BATCH_SIZE rows (commits already happened above).
        if inserted_rows % BATCH_SIZE == 0:
            logger.debug(f"Processed {inserted_rows} purchase receive rows")

    return inserted_rows


def _process_sales_order_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import sales-order rows: create headers/lines idempotently and deduct
    sold quantities from inventory.

    Rows are skipped when the product cannot be resolved. Commits happen per
    row; BATCH_SIZE only gates progress logging.

    Returns:
        Number of rows processed.
    """
    from src.smart_inventory.apps.inventory.models import (
        SalesOrder, SalesOrderLine,
        InventoryBatch, InventoryMovement, MovementType
    )
    from src.smart_inventory.apps.products.models import Product, Location

    inserted_rows = 0

    for idx, row in df.iterrows():
        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue

        location = db.query(Location).filter(
            Location.location_name == row["location_name"],
            Location.company_id == company_id
        ).first()

        # Get-or-create the sales-order header keyed by its ref number.
        salesorder = db.query(SalesOrder).filter(
            SalesOrder.ref_number == row["sales_order_ref_no"],
            SalesOrder.company_id == company_id
        ).first()

        if not salesorder:
            salesorder = SalesOrder(
                company_id=company_id,
                ref_number=row["sales_order_ref_no"],
                location_id=location.id if location else None,
                sold_date=row["order_date"],
                channel="store",
            )
            db.add(salesorder)
            db.commit()
            db.refresh(salesorder)

        sales_order_item = db.query(SalesOrderLine).filter(
            SalesOrderLine.company_id == company_id,
            SalesOrderLine.sales_order_id == salesorder.id,
            SalesOrderLine.product_id == product.id
        ).first()

        if not sales_order_item:
            sales_order_item = SalesOrderLine(
                company_id=company_id,
                sales_order_id=salesorder.id,
                product_id=product.id,
                quantity=row["quantity"],
                unit_price=row.get("unit_price", 0.0)
            )
            db.add(sales_order_item)
            db.commit()

        # Update inventory. Fix: parenthesize the location condition — without
        # parens `a == b if c else None` passes None to filter() when the
        # location is missing, instead of comparing location_id with NULL.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand - row["quantity"]
            currentstock.updated_at = datetime.now()
            db.add(currentstock)

        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=(row["quantity"] * -1),
            movement_type=MovementType.SALE,
            reference=str(salesorder.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

        # Progress log every BATCH_SIZE rows (commits already happened above).
        if inserted_rows % BATCH_SIZE == 0:
            logger.debug(f"Processed {inserted_rows} sales order rows")

    return inserted_rows


def _process_sales_return_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import sales-return rows: create return headers/lines idempotently and
    add returned quantities back to inventory.

    Rows are skipped when the linked sales order or the product cannot be
    resolved. Commits happen per row; BATCH_SIZE only gates progress logging.

    Returns:
        Number of rows processed.
    """
    from src.smart_inventory.apps.inventory.models import (
        SalesOrder, SalesReturnOrder, SalesReturnOrderLine,
        InventoryBatch, InventoryMovement, MovementType
    )
    from src.smart_inventory.apps.products.models import Product, Location

    inserted_rows = 0

    for idx, row in df.iterrows():
        linked_sales_order = db.query(SalesOrder).filter(
            SalesOrder.ref_number == row["sales_order_ref_no"],
            SalesOrder.company_id == company_id
        ).first()
        if not linked_sales_order:
            continue

        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue

        location = db.query(Location).filter(
            Location.location_name == row["location_name"],
            Location.company_id == company_id
        ).first()

        # Get-or-create the return header for this (sales ref, return ref).
        sales_return = db.query(SalesReturnOrder).filter(
            SalesReturnOrder.company_id == company_id,
            SalesReturnOrder.linked_ref_number == row["sales_order_ref_no"],
            SalesReturnOrder.ref_number == row["return_order_ref_no"],
        ).first()

        if not sales_return:
            sales_return = SalesReturnOrder(
                company_id=company_id,
                sales_order_id=linked_sales_order.id,
                ref_number=row["return_order_ref_no"],
                linked_ref_number=row["sales_order_ref_no"],
                location_id=location.id if location else None,
                returned_date=row["return_date"],
                channel="store",
            )
            db.add(sales_return)
            db.commit()
            db.refresh(sales_return)

        return_order_item = db.query(SalesReturnOrderLine).filter(
            SalesReturnOrderLine.company_id == company_id,
            SalesReturnOrderLine.sales_return_order_id == sales_return.id,
            SalesReturnOrderLine.product_id == product.id
        ).first()

        if not return_order_item:
            return_order_item = SalesReturnOrderLine(
                company_id=company_id,
                sales_return_order_id=sales_return.id,
                product_id=product.id,
                quantity=row["quantity"],
            )
            db.add(return_order_item)
            db.commit()

        # Update inventory (increase on return). Fix: parenthesize the
        # location condition — without parens `a == b if c else None` passes
        # None to filter() when the location is missing, instead of comparing
        # location_id with NULL.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand + row["quantity"]
            currentstock.received_date = row["return_date"]
            currentstock.updated_at = datetime.now()
            db.add(currentstock)

        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=row["quantity"],
            movement_type=MovementType.SALE_RETURN,
            reference=str(sales_return.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

        # Progress log every BATCH_SIZE rows (commits already happened above).
        if inserted_rows % BATCH_SIZE == 0:
            logger.debug(f"Processed {inserted_rows} sales return rows")

    return inserted_rows


def _process_purchase_return_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import purchase-return rows: create return headers/lines idempotently
    and deduct returned-to-vendor quantities from inventory.

    Rows are skipped when the linked receive order or the product cannot be
    resolved. Commits happen per row; BATCH_SIZE only gates progress logging.

    Returns:
        Number of rows processed.
    """
    from src.smart_inventory.apps.inventory.models import (
        PurchaseReceiveOrder, PurchaseReturnOrder, PurchaseOrderReturnLine, PurchaseOrderStatus,
        InventoryBatch, InventoryMovement, MovementType
    )
    from src.smart_inventory.apps.products.models import Product, Location, Vendor

    inserted_rows = 0

    for idx, row in df.iterrows():
        linked_receive_order = db.query(PurchaseReceiveOrder).filter(
            PurchaseReceiveOrder.ref_number == row["receive_order_ref_no"],
            PurchaseReceiveOrder.company_id == company_id
        ).first()
        if not linked_receive_order:
            continue

        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue

        location = db.query(Location).filter(
            Location.location_name == row["location_name"],
            Location.company_id == company_id
        ).first()

        # Get-or-create the return header for this (receive ref, return ref).
        purchase_return = db.query(PurchaseReturnOrder).filter(
            PurchaseReturnOrder.company_id == company_id,
            PurchaseReturnOrder.linked_ref_number == row["receive_order_ref_no"],
            PurchaseReturnOrder.ref_number == row["return_order_ref_no"],
        ).first()

        if not purchase_return:
            vendor = db.query(Vendor).filter(
                Vendor.vendor_name == row["vendor_name"],
                Vendor.company_id == company_id
            ).first()

            purchase_return = PurchaseReturnOrder(
                company_id=company_id,
                purchase_receive_order_id=linked_receive_order.id,
                ref_number=row["return_order_ref_no"],
                linked_ref_number=row["receive_order_ref_no"],
                supplier_id=vendor.id if vendor else None,
                location_id=location.id if location else None,
                returned_date=row["return_date"],
                status=PurchaseOrderStatus.RETURNED,
            )
            db.add(purchase_return)
            db.commit()
            db.refresh(purchase_return)

        return_order_item = db.query(PurchaseOrderReturnLine).filter(
            PurchaseOrderReturnLine.company_id == company_id,
            PurchaseOrderReturnLine.purchase_order_return_id == purchase_return.id,
            PurchaseOrderReturnLine.product_id == product.id
        ).first()

        if not return_order_item:
            return_order_item = PurchaseOrderReturnLine(
                company_id=company_id,
                purchase_order_return_id=purchase_return.id,
                product_id=product.id,
                return_qty=row["return_qty"],
            )
            db.add(return_order_item)
            db.commit()

        # Update inventory (decrease on return to vendor). Fix: parenthesize
        # the location condition — without parens `a == b if c else None`
        # passes None to filter() when the location is missing, instead of
        # comparing location_id with NULL.
        currentstock = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (location.id if location else None),
        ).first()

        if currentstock:
            currentstock.quantity_on_hand = currentstock.quantity_on_hand - row["return_qty"]
            currentstock.updated_at = datetime.now()
            db.add(currentstock)

        inventory_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=location.id if location else None,
            batch_id=currentstock.id if currentstock else None,
            quantity_delta=(row["return_qty"] * -1),
            movement_type=MovementType.PURCHASE_RETURN,
            reference=str(purchase_return.id),
        )
        db.add(inventory_move)
        db.commit()

        inserted_rows += 1

        # Progress log every BATCH_SIZE rows (commits already happened above).
        if inserted_rows % BATCH_SIZE == 0:
            logger.debug(f"Processed {inserted_rows} purchase return rows")

    return inserted_rows


def _process_stock_transfer_csv_sync(db, company_id: int, df: pd.DataFrame) -> int:
    """Import stock-transfer rows: for each row, deduct the quantity from the
    source location's batch and add it to the destination's, recording a
    TRANSFER_OUT and a TRANSFER_IN movement.

    Rows are skipped when the product cannot be resolved. Commits happen per
    row; BATCH_SIZE only gates progress logging.

    Returns:
        Number of rows processed.
    """
    from src.smart_inventory.apps.inventory.models import (
        InventoryBatch, InventoryMovement, MovementType
    )
    from src.smart_inventory.apps.products.models import Product, Location

    inserted_rows = 0

    for idx, row in df.iterrows():
        product = db.query(Product).filter(
            Product.product_name == row["product_name"],
            Product.company_id == company_id
        ).first()
        if not product:
            continue

        from_location = db.query(Location).filter(
            Location.location_name == row["from_location_name"],
            Location.company_id == company_id
        ).first()

        to_location = db.query(Location).filter(
            Location.location_name == row["to_location_name"],
            Location.company_id == company_id
        ).first()

        transfer_qty = row["quantity"]
        transfer_ref = row.get("transfer_ref_no", "")

        # Decrease source location. Fix: parenthesize the location condition —
        # without parens `a == b if c else None` passes None to filter() when
        # the location is missing, instead of comparing location_id with NULL.
        currentstock_from = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (from_location.id if from_location else None),
        ).first()

        if currentstock_from:
            currentstock_from.quantity_on_hand = currentstock_from.quantity_on_hand - transfer_qty
            currentstock_from.updated_at = datetime.now()
            db.add(currentstock_from)

        transfer_out_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=from_location.id if from_location else None,
            batch_id=currentstock_from.id if currentstock_from else None,
            quantity_delta=(transfer_qty * -1),
            movement_type=MovementType.TRANSFER_OUT,
            reference=transfer_ref,
        )
        db.add(transfer_out_move)
        db.commit()

        # Increase destination location (same precedence fix as above).
        currentstock_to = db.query(InventoryBatch).filter(
            InventoryBatch.company_id == company_id,
            InventoryBatch.product_id == product.id,
            InventoryBatch.location_id == (to_location.id if to_location else None),
        ).first()

        if currentstock_to:
            currentstock_to.quantity_on_hand = currentstock_to.quantity_on_hand + transfer_qty
            currentstock_to.received_date = row["transfer_date"]
            currentstock_to.updated_at = datetime.now()
            db.add(currentstock_to)
        db.commit()

        transfer_in_move = InventoryMovement(
            company_id=company_id,
            product_id=product.id,
            location_id=to_location.id if to_location else None,
            batch_id=currentstock_to.id if currentstock_to else None,
            quantity_delta=transfer_qty,
            movement_type=MovementType.TRANSFER_IN,
            reference=transfer_ref,
        )
        db.add(transfer_in_move)
        db.commit()

        inserted_rows += 1

        # Progress log every BATCH_SIZE rows (commits already happened above).
        if inserted_rows % BATCH_SIZE == 0:
            logger.debug(f"Processed {inserted_rows} stock transfer rows")

    return inserted_rows


# =============================================================================
# CELERY TASKS
# =============================================================================

def _run_csv_task(task, company_id: int, file_path: str, csv_type: str,
                  processor, trigger_snapshots: bool = False) -> Dict[str, Any]:
    """Shared driver for all CSV-processing Celery tasks.

    Implements the common lifecycle previously duplicated across the seven
    per-type tasks:

    1. Mark the task tracker as ``started``.
    2. Read the CSV at ``file_path`` into a DataFrame.
    3. Run ``processor`` (one of the ``_process_*_csv_sync`` functions).
    4. Optionally trigger snapshot tasks for the dates found in the upload.
    5. Record a CSV upload log entry and the final tracker status.

    Args:
        task: Bound Celery task instance (supplies ``task.request.id``).
        company_id: Company the upload belongs to.
        file_path: Path of the uploaded CSV file.
        csv_type: Logical CSV type name used in log messages, the upload
            log row, and (when enabled) snapshot triggering.
        processor: Callable ``(db, company_id, df) -> int`` returning the
            number of rows processed.
        trigger_snapshots: When True, extract the unique dates from the
            DataFrame and enqueue snapshot tasks for them. Product and
            purchase-order uploads leave this False.

    Returns:
        ``{"success": True, "rows_processed": n, "upload_type": csv_type}``
        on success, or ``{"success": False, "error": msg}`` on failure.
    """
    try:
        from src.utils.db import get_db_session
    except ImportError as e:
        # No DB session factory means nothing below can run; record failure.
        _update_tracker(task.request.id, "failure", str(e))
        return {"success": False, "error": str(e)}

    db = get_db_session()
    rows_total = None  # stays None if reading the CSV itself fails
    try:
        _update_tracker(task.request.id, "started")

        df = pd.read_csv(file_path)
        rows_total = len(df)
        logger.info(f"Processing {csv_type} CSV: {rows_total} rows")

        rows_processed = processor(db, company_id, df)

        if trigger_snapshots:
            # Recompute snapshots only for the dates present in this upload.
            unique_dates = _extract_unique_dates(df, csv_type)
            _trigger_snapshot_tasks(db, company_id, csv_type, unique_dates)

        _create_upload_log(db, company_id, csv_type, file_path, "succeeded",
                           rows_processed, rows_total)
        _update_tracker(task.request.id, "success")

        return {
            "success": True,
            "rows_processed": rows_processed,
            "upload_type": csv_type
        }

    except Exception as e:
        db.rollback()
        logger.error(f"Error processing {csv_type} CSV: {e}", exc_info=True)
        # Best-effort failure bookkeeping; _create_upload_log swallows its
        # own errors so this cannot mask the original exception.
        _create_upload_log(db, company_id, csv_type, file_path, "failed",
                           0, rows_total, str(e))
        _update_tracker(task.request.id, "failure", str(e))
        return {"success": False, "error": str(e)}

    finally:
        db.close()


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_product_csv')
def process_product_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process product CSV file."""
    # Product uploads do not affect inventory levels, so no snapshots.
    return _run_csv_task(self, company_id, file_path, "product",
                         _process_product_csv_sync)


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_purchase_order_csv')
def process_purchase_order_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process purchase order CSV file."""
    # Orders are not yet received stock, so no snapshots are triggered.
    return _run_csv_task(self, company_id, file_path, "purchase_order",
                         _process_purchase_order_csv_sync)


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_purchase_receive_csv')
def process_purchase_receive_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process purchase receive CSV file."""
    return _run_csv_task(self, company_id, file_path, "purchase_receive",
                         _process_purchase_receive_csv_sync,
                         trigger_snapshots=True)


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_sales_order_csv')
def process_sales_order_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process sales order CSV file."""
    return _run_csv_task(self, company_id, file_path, "sales_order",
                         _process_sales_order_csv_sync,
                         trigger_snapshots=True)


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_sales_return_csv')
def process_sales_return_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process sales return CSV file."""
    return _run_csv_task(self, company_id, file_path, "sales_return",
                         _process_sales_return_csv_sync,
                         trigger_snapshots=True)


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_purchase_return_csv')
def process_purchase_return_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process purchase return CSV file."""
    return _run_csv_task(self, company_id, file_path, "purchase_return",
                         _process_purchase_return_csv_sync,
                         trigger_snapshots=True)


@celery_app.task(bind=True, name='src.smart_inventory.tasks.csv_processing_task.process_stock_transfer_csv')
def process_stock_transfer_csv(self, company_id: int, file_path: str) -> Dict[str, Any]:
    """Process stock transfer CSV file."""
    return _run_csv_task(self, company_id, file_path, "stock_transfer",
                         _process_stock_transfer_csv_sync,
                         trigger_snapshots=True)
