# app/analytics/service.py
from __future__ import annotations

import json
import logging
from datetime import date, datetime, timedelta
from typing import Any, Dict, List, Optional

import pandas as pd
from sqlalchemy import and_, case, desc, func, or_
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Session

from .repository import AnalyticsRepository
from src.marketing.apps.Account.model import ConnectedAccount, MasterAccount
from src.marketing.apps.Analytics import schema
from src.marketing.apps.Analytics.model import PostMetricsDaily, AccountMetricsDaily
from src.marketing.apps.Analytics.twitter_analytics import provider_for
from src.marketing.apps.post.model import CalendarPostType
from src.marketing.core.Analytics.providers.base import OAuthToken, DateRange

# Configure logging
# NOTE(review): calling logging.basicConfig at import time is a module-level
# side effect that can clobber the host application's logging configuration;
# prefer configuring handlers at the application entry point and keeping only
# getLogger(__name__) here.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class AnalyticsIngestionService:
    """Fetch per-day analytics from a social-network provider and persist them.

    One call to :meth:`ingest_account_for_day` performs: a best-effort OAuth
    token refresh, an account-level metric upsert, paged post-level metric
    upserts, then a single session commit.
    """

    def __init__(self, session: AsyncSession):
        # NOTE(review): every repository call and session.commit() in this
        # class is awaited, so the session must be async; the previous
        # `Session` annotation was misleading.
        self.session = session
        self.repo = AnalyticsRepository(session)

    @staticmethod
    def _to_date(d: str | date) -> date:
        """Coerce an ISO ``YYYY-MM-DD`` string to a ``date``; dates pass through.

        Raises:
            ValueError: if ``d`` is a string not formatted as ``YYYY-MM-DD``.
        """
        # datetime instances are date subclasses and pass through unchanged.
        if isinstance(d, date):
            return d
        return date.fromisoformat(d)

    @staticmethod
    def _refresh_token(prov, tok: OAuthToken) -> OAuthToken:
        """Best-effort token refresh; fall back to the original token.

        Refresh is a no-op for some providers. A failure here must not abort
        ingestion, but it is logged instead of being silently swallowed.
        """
        try:
            return prov.refresh(tok)
        except Exception:
            logger.warning(
                "Token refresh failed; continuing with existing token",
                exc_info=True,
            )
            return tok

    async def _ingest_account_metrics(
        self,
        prov,
        tok: OAuthToken,
        account_ref: Dict[str, Any],
        dr: DateRange,
        connected_account_id: int,
        d: date,
    ) -> None:
        """Fetch account-level metrics for one day and upsert them."""
        # Provider may return a falsy result; treat it as "no metrics today".
        acct = prov.fetch_account_metrics(tok, account_ref, dr) or {}
        await self.repo.upsert_account_metrics(
            connected_account_id,
            d,
            {
                "followers": acct.get("followers"),
                "new_followers": acct.get("new_followers"),
                "impressions": acct.get("impressions"),
                "profile_visits": acct.get("profile_visits"),
                "messages_received": acct.get("messages_received"),
                "median_response_minutes": acct.get("median_response_minutes"),
            },
        )

    async def _ingest_post_metrics(
        self,
        prov,
        tok: OAuthToken,
        account_ref: Dict[str, Any],
        dr: DateRange,
        connected_account_id: int,
        d: date,
    ) -> int:
        """Fetch post-level metrics page by page; return the rows written."""
        total_posts = 0
        pagination = None
        while True:
            posts, pagination = prov.fetch_post_metrics(
                tok, account_ref, dr, pagination=pagination
            )
            if not posts:
                break

            ext_ids = [
                p.get("external_post_id") for p in posts if p.get("external_post_id")
            ]
            mapping = await self.repo.map_external_posts_to_post_types(
                connected_account_id, ext_ids
            )

            batch: List[Dict] = []
            for p in posts:
                ext_id = p.get("external_post_id")
                pt_id = mapping.get(ext_id)
                if not pt_id:
                    # Unmapped external posts are skipped; auto-creating a
                    # PostType here would be the place to do it (optional).
                    continue
                batch.append({
                    "post_type_id": pt_id,
                    "metric_date": d,
                    "metrics": p.get("metrics", {}),
                    "external_post_id": ext_id,
                    "permalink": p.get("permalink"),
                })

            if batch:
                total_posts += await self.repo.upsert_post_metrics_batch(
                    connected_account_id, batch
                )

            if not pagination:
                break
        return total_posts

    async def ingest_account_for_day(
        self,
        connected_account_id: int,
        network: str,
        metric_date: str | date,
        token_json: Dict[str, Any],
        account_ref: Dict[str, Any],
    ) -> Dict[str, Any]:
        """Fetch account metrics & posts for the given day and upsert via ORM.

        Args:
            connected_account_id: PK of the ConnectedAccount being ingested.
            network: Provider key passed to ``provider_for``.
            metric_date: ``YYYY-MM-DD`` string or ``date`` for the target day.
            token_json: Serialized OAuth token (kwargs for ``OAuthToken``).
            account_ref: Provider-specific account reference payload.

        Returns:
            ``{"status": "skipped", ...}`` when the account is missing or
            soft-deleted, else ``{"status": "ok", "posts_written": <int>}``.
        """
        d = self._to_date(metric_date)
        conn = await self.repo.load_connected_account(connected_account_id)
        if not conn or conn.is_deleted is True:
            return {"status": "skipped", "reason": "account missing or deleted"}

        prov = provider_for(network)
        tok = self._refresh_token(prov, OAuthToken(**token_json))

        # Single-day range: start == end.
        day = d.isoformat()
        dr = DateRange(start=day, end=day)

        await self._ingest_account_metrics(
            prov, tok, account_ref, dr, connected_account_id, d
        )
        total_posts = await self._ingest_post_metrics(
            prov, tok, account_ref, dr, connected_account_id, d
        )

        await self.session.commit()
        return {"status": "ok", "posts_written": total_posts}

