"""Celery worker for the review sentiment-analysis pipeline.

Parent tasks split CSV and Bright Data review imports into batches, batch
tasks run sentiment analysis and persist results, and Pusher events report
import progress to the frontend on the "import-update" channel.
"""
from celery import Celery
from src.utils.db import SessionLocal
from src.utils.tasks import create_task_entry, update_task_status, extract_review_details, generate_recommendations_task
from src.apps.feedback.services import store_reviews
from src.core.sentiment_analysis import analyze_batch_sentiment
import time
import pusher
from src.utils.settings import settings
from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Pusher credentials and batch size, resolved through application settings
app_id = settings.app_id
key = settings.key
secret = settings.secret
cluster = settings.cluster
batch_size = settings.batch_size

# Configure the Pusher client used for real-time progress notifications
pusher_client = pusher.Pusher(
    app_id=app_id,
    key=key,
    secret=secret,
    cluster=cluster,
    ssl=True
)

# Celery configuration with Redis settings from environment
redis_url = f"redis://{settings.REDIS_HOST}:{settings.REDIS_PORT}/{settings.REDIS_DB}"

celery_app = Celery(
    "celery_worker",
    broker=redis_url,
    backend=redis_url
)

# Configure Celery settings
celery_app.conf.update(
    task_serializer='json',
    accept_content=['json'],
    result_serializer='json',
    timezone='UTC',
    enable_utc=True,
    task_track_started=True,
    task_time_limit=30 * 60,  # 30 minutes
    task_soft_time_limit=25 * 60,  # 25 minutes
    worker_prefetch_multiplier=1,  # fetch one task at a time; batch tasks are long-running
    worker_max_tasks_per_child=1000,  # recycle worker processes to bound memory growth
)

# Configure Celery to discover tasks from all modules
celery_app.autodiscover_tasks([
    "src.marketing.tasks",
    "src.menu_design.tasks",
    "src.smart_inventory.tasks",
    "src.utils"
])

@celery_app.task(bind=True)
def analyze_review_batch(self, reviews, metadata):
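    """Analyze sentiment for one batch of reviews and persist the results.

    Expected shapes, inferred from usage in this module:
        reviews:  list of dicts, each with at least a "comment" key.
        metadata: dict with store_id, branch_id, snapshot_id, datasource_id,
            datasource_source, and optionally total_records / batch_index.

    Progress and failures are broadcast over Pusher on the "import-update" channel.
    """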
    db = SessionLocal()
    try:
        print(f"[{self.request.id}] Processing batch of {len(reviews)} reviews...")

        results = analyze_batch_sentiment(
            [review["comment"] for review in reviews],
            metadata["store_id"]
        )

        if len(results) == len(reviews):
            
            for i, result in enumerate(results):
                if isinstance(result, tuple):
                    result = result[0]

                reviews[i]["sentiment_result"] = result

                print(f"[{self.request.id}] Review {i+1} Sentiment: {result['sentiment']}")
                print(f"[{self.request.id}] Topics: {list(result['relevant_topics_sentiment'].keys())}")

            batch_payload = {
                "store_id": metadata["store_id"],
                "branch_id": metadata["branch_id"],
                "snapshot_id": metadata["snapshot_id"],
                "datasource_id": metadata["datasource_id"],
                "datasource_source": metadata["datasource_source"],
                "reviews": reviews,
            }

            store_reviews(db, batch_payload)

            # 🔔 Pusher notification
            try:
                if metadata.get("total_records"):
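                    # processed = reviews from all completed batches plus this one, capped at the total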
                    batch_index = max(metadata.get("batch_index", 1), 1)
                    processed = min(((batch_index - 1) * batch_size + len(reviews)), metadata["total_records"])
                    pct = round((processed / metadata["total_records"]) * 100)
                    pct = min(pct, 100)
                else:
                    processed = pct = 0

                channel_name = "import-update"
                pusher_client.trigger(
                    channel_name,
                    event_name="progress-update",
                    data={
                        "datasource_id": metadata["datasource_id"],
                        "processed": processed,
                        "total": metadata["total_records"],
                        "percent": pct,
                        "status": "processing" if pct < 100 else "completed",
                        "status_msg": f"Analysing {processed} of {metadata['total_records']} reviews" if  pct < 100 else "Completed",
                    }
                )
                print(f"[{self.request.id}] Pusher notification sent: {channel_name} - {pct}%")
            except Exception as e:
                print(f"[{self.request.id}] Pusher error: {str(e)}")

            print(f"[{self.request.id}] Batch stored successfully.")
            
        else:
            print(f"[{self.request.id}] ❌ Mismatch: {len(results)} results vs {len(reviews)} reviews")
            
    except Exception as e:
        print(f"[{self.request.id}] Error during batch processing: {str(e)}")
        db.rollback()

        # 🔔 Send Pusher failure update
        try:
            pusher_client.trigger(
                "import-update",
                event_name="progress-update",
                data={
                    "datasource_id": metadata.get("datasource_id"),
                    "processed": 0,
                    "total": metadata.get("total_records", 0),
                    "percent": 0,
                    "status": "failed",
                    "status_msg": "Batch failed"
                }
            )
        except Exception as pusher_err:
            print(f"[{self.request.id}] Pusher error in failure notification: {str(pusher_err)}")

    finally:
        db.close()
        print(f"[{self.request.id}] DB session closed.")


@celery_app.task(bind=True)
def process_csv_review_data(self, payload):
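    """Parent task for CSV imports: record the task, then analyze reviews batch by batch.

    Expected payload shape, inferred from the keys read below:
        {"store_id": ..., "branch_id": ..., "snapshot_id": ...,
         "datasource_id": ..., "source_type": ..., "reviews": [{"comment": ...}, ...]}

    Batches run sequentially with a rate-limit delay between them; on success,
    recommendation generation is triggered for the branch.
    """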
    db = SessionLocal()
    try:
        print(f"[{self.request.id}] Creating parent task entry in DB...")
        create_task_entry(
            db, self.request.id,
            payload["store_id"],
            payload["branch_id"],
            payload["snapshot_id"],
            payload["datasource_id"]
        )
        db.commit()
        print(f"[{self.request.id}] Parent task entry created.")

        metadata = {
            "store_id": payload["store_id"],
            "branch_id": payload["branch_id"],
            "snapshot_id": payload["snapshot_id"],
            "datasource_id": payload["datasource_id"],
            "datasource_source": payload["source_type"],
            "total_records": len(payload["reviews"])
        }

        reviews = payload["reviews"]
        total_batches = (len(reviews) + batch_size - 1) // batch_size  # ceiling division
        print(f"[{self.request.id}] Sequentially processing {len(reviews)} reviews in {total_batches} batches...")

        for batch_index, i in enumerate(range(0, len(reviews), batch_size)):
            batch = reviews[i:i+batch_size]
            print(f"[{self.request.id}] Processing batch {batch_index + 1} of {total_batches}...")

            # Pass batch_index to metadata so it's available for Pusher calc
            batch_metadata = {**metadata, "batch_index": batch_index+1}

            # Run the batch analyzer inline (synchronously) in this worker process
            # instead of dispatching it as a separate Celery task
            analyze_review_batch(batch, batch_metadata)

            # Throttle between batches: scale the delay with the longest comment (5s floor, 20s cap)
            max_comment_len = max(len(r["comment"]) for r in batch)
            delay = 5 if max_comment_len <= 200 else min(20, round(max_comment_len / 20))

            print(f"[{self.request.id}] Sleeping {delay}s before next batch to avoid rate limits...")
            time.sleep(delay)

        update_task_status(self.request.id, "completed")
        db.commit()

        print(f"[{self.request.id}] All batches processed. Generating recommendations...")
        generate_recommendations_task(payload["branch_id"])

    except Exception as e:
        print(f"[{self.request.id}] Error in parent task: {str(e)}")
        update_task_status(self.request.id, "failed")
        db.rollback()

        # 🔔 Pusher failure notification
        try:
            pusher_client.trigger(
                "import-update",
                event_name="progress-update",
                data={
                    "datasource_id": payload.get("datasource_id"),
                    "processed": 0,
                    "total": len(payload.get("reviews", [])),
                    "percent": 0,
                    "status": "failed",
                    "status_msg": "Processing failed"
                }
            )
        except Exception as pusher_err:
            print(f"[{self.request.id}] Pusher error in parent failure notification: {str(pusher_err)}")

    finally:
        db.close()
        print(f"[{self.request.id}] Parent task DB session closed.")



@celery_app.task(bind=True)
def analyze_bright_review_batch(self, reviews, metadata):
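    """Analyze sentiment for one batch of Bright Data reviews and persist the results.

    Behaves like analyze_review_batch, but raises ValueError on a count mismatch
    between reviews and sentiment results so the failure path runs.
    """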
    db = SessionLocal()
    try:
        print(f"[{self.request.id}] Processing Bright Data batch of {len(reviews)} reviews...")

        results = analyze_batch_sentiment(
            [review["comment"] for review in reviews],
            metadata["store_id"]
        )

        if len(results) != len(reviews):
            print(f"[{self.request.id}] ❌ Mismatch: {len(results)} results vs {len(reviews)} reviews")
            for i, review in enumerate(reviews):
                print(f"[{self.request.id}] Review {i+1}: {review.get('comment')[:50]}")
            raise ValueError("Mismatch in number of reviews and sentiment results.")

        for i, result in enumerate(results):
            if isinstance(result, tuple):
                result = result[0]
            reviews[i]["sentiment_result"] = result
            print(f"[{self.request.id}] Review {i + 1} Sentiment: {result['sentiment']}")
            print(f"[{self.request.id}] Topics: {list(result['relevant_topics_sentiment'].keys())}")

        batch_payload = {
            "store_id": metadata["store_id"],
            "branch_id": metadata["branch_id"],
            "snapshot_id": metadata["snapshot_id"],
            "datasource_id": metadata["datasource_id"],
            "datasource_source": metadata["datasource_source"],
            "reviews": reviews,
        }

        store_reviews(db, batch_payload)

        # 🔔 Updated Pusher progress update
        try:
            if metadata.get("total_records"):
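                # processed = reviews from all completed batches plus this one, capped at the total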
                batch_index = max(metadata.get("batch_index", 1), 1)
                processed = min(((batch_index - 1) * batch_size + len(reviews)), metadata["total_records"])
                pct = round((processed / metadata["total_records"]) * 100)
                pct = min(pct, 100)
            else:
                processed = pct = 0

            channel_name = "import-update"
            pusher_client.trigger(
                channel_name,
                event_name="progress-update",
                data={
                    "datasource_id": metadata["datasource_id"],
                    "processed": processed,
                    "total": metadata["total_records"],
                    "percent": pct,
                    "status": "processing" if pct < 100 else "completed",
                    "status_msg": f"Analyzing {processed} of {metadata['total_records']} reviews" if pct < 100 else "Completed",
                }
            )
            print(f"[{self.request.id}] Pusher notification sent: {channel_name} - {pct}%")
        except Exception as e:
            print(f"[{self.request.id}] Pusher error: {str(e)}")

        print(f"[{self.request.id}] Batch stored successfully.")

    except Exception as e:
        print(f"[{self.request.id}] Error during Bright Data batch processing: {str(e)}")
        db.rollback()

        # 🔔 Send Pusher failure update
        try:
            pusher_client.trigger(
                "import-update",
                event_name="progress-update",
                data={
                    "datasource_id": metadata.get("datasource_id"),
                    "processed": 0,
                    "total": metadata.get("total_records", 0),
                    "percent": 0,
                    "status": "failed",
                    "status_msg": "Batch failed"
                }
            )
        except Exception as pusher_err:
            print(f"[{self.request.id}] Pusher error in failure notification: {str(pusher_err)}")

    finally:
        db.close()
        print(f"[{self.request.id}] DB session closed.")


@celery_app.task(bind=True)
def process_bright_review_data(self, bright_data):
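    """Parent task for Bright Data imports.

    The raw bright_data payload is normalized via extract_review_details, then
    reviews are analyzed batch by batch; on success, recommendation generation
    is triggered for the branch.
    """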
    db = SessionLocal()
    payload = {}
    try:
        print(f"[{self.request.id}] Starting Bright Data review processing...")

        # Extract payload and review list
        payload = extract_review_details(bright_data)
        store_id = payload["store_id"]
        branch_id = payload["branch_id"]
        snapshot_id = payload["snapshot_id"]
        datasource_id = payload["datasource_id"]
        datasource_source = payload["datasource_source"]

        print(f"[{self.request.id}] Creating parent task entry in DB...")
        create_task_entry(db, self.request.id, store_id, branch_id, snapshot_id, datasource_id)
        db.commit()
        print(f"[{self.request.id}] Task entry created.")

        reviews = payload["reviews"]
        total_records = len(reviews)
        total_batches = (total_records + batch_size - 1) // batch_size  # ceiling division

        print(f"[{self.request.id}] Processing {total_records} Bright Data reviews in {total_batches} batches...")

        base_metadata = {
            "store_id": store_id,
            "branch_id": branch_id,
            "snapshot_id": snapshot_id,
            "datasource_id": datasource_id,
            "datasource_source": datasource_source,
            "total_records": total_records,
        }

        for batch_index, i in enumerate(range(0, total_records, batch_size)):
            batch = reviews[i:i + batch_size]
            print(f"[{self.request.id}] Processing batch {batch_index + 1} of {total_batches}...")

            batch_metadata = {**base_metadata, "batch_index": batch_index+1}

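            # Run the batch analyzer inline (synchronously), mirroring the CSV flow above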
            analyze_bright_review_batch(batch, batch_metadata)

            # Throttle between batches: scale the delay with the longest comment (5s floor, 20s cap)
            max_comment_len = max(len(r["comment"]) for r in batch)
            delay = 5 if max_comment_len <= 200 else min(20, round(max_comment_len / 20))
            print(f"[{self.request.id}] Sleeping {delay}s before next batch...")
            time.sleep(delay)

        update_task_status(self.request.id, "completed")
        db.commit()

        print(f"[{self.request.id}] All Bright Data batches processed. Generating recommendations...")
        generate_recommendations_task(branch_id)

    except Exception as e:
        print(f"[{self.request.id}] Error in Bright Data parent task: {str(e)}")
        update_task_status(self.request.id, "failed")
        db.rollback()

        # 🔔 Pusher failure notification
        try:
            pusher_client.trigger(
                "import-update",
                event_name="progress-update",
                data={
                    "datasource_id": payload.get("datasource_id"),
                    "processed": 0,
                    "total": len(payload.get("reviews", [])),
                    "percent": 0,
                    "status": "failed",
                    "status_msg": "Processing failed"
                }
            )
        except Exception as pusher_err:
            print(f"[{self.request.id}] Pusher error in parent failure notification: {str(pusher_err)}")

    finally:
        db.close()
        print(f"[{self.request.id}] Parent task DB session closed.")