"""Prior-art search + benzerlik analizi tek endpoint'te.

POST /api/v1/prior-art/search:
- Input: query (doğal dil) + invention_summary + kaynak seçimi
- EPO ve/veya Google Patents paralel arama
- Birleştir, dedup (patent_no bazlı), limit'le
- Claude Haiku 4.5 ile her hit için similarity_score + rationale
- similarity_score'a göre DESC rank'lenmiş sonuç
"""

from __future__ import annotations

import asyncio

from fastapi import APIRouter
from pydantic import BaseModel, Field

from app.config import get_settings
from app.services.llm import LLMNotConfiguredError
from app.services.prior_art import (
    AnalysisError,
    AnalysisItem,
    EspacenetAuthError,
    EspacenetClient,
    EspacenetError,
    EspacenetNotConfigured,
    GooglePatentsClient,
    GooglePatentsError,
    PriorArtHit,
    analyze_prior_art,
)

# Prior-art endpoints; the /api/v1 part of the path is added where this router is mounted.
router = APIRouter(prefix="/prior-art", tags=["prior-art"])


class PriorArtSearchRequest(BaseModel):
    """Request body for POST /prior-art/search.

    Selects which sources to query and carries the invention summary
    consumed by the optional AI similarity-scoring step.
    """

    # Natural-language search query forwarded to each enabled source.
    query: str = Field(..., min_length=3, max_length=500)
    # Short summary of the invention; input for AI similarity scoring.
    # (Field description stays in Turkish — it is user-facing OpenAPI text.)
    invention_summary: str = Field(
        ...,
        min_length=30,
        max_length=5000,
        description="Buluşun kısa özeti — AI skorlama için.",
    )
    # Source toggles — each source can be disabled independently.
    include_epo: bool = Field(default=True)
    include_google: bool = Field(default=True)
    # Maximum hits requested from EACH source, before merge/dedup.
    limit_per_source: int = Field(default=10, ge=1, le=25)


class PriorArtSearchHit(BaseModel):
    """Search hit merged with its (optional) AI analysis — one object for the frontend."""

    # --- PriorArtHit fields (raw search result) ---
    source: str  # originating source, "epo" or "google"
    patent_no: str  # publication/patent number; dedup key across sources
    title: str | None = None
    abstract: str | None = None
    applicant: str | None = None
    inventors: list[str] | None = None
    filing_date: str | None = None
    publication_date: str | None = None
    cpc_classes: list[str] | None = None
    url: str | None = None
    # --- Analysis fields — populated only when the AI step ran ---
    similarity_score: int | None = None
    risk_level: str | None = None
    rationale: str | None = None


class PriorArtSearchResponse(BaseModel):
    """Response for POST /prior-art/search."""

    query: str  # echo of the request query
    # Per-source hit counts; "epo_error"/"google_error" keys (value 0) flag source failures.
    source_counts: dict[str, int]
    total_hits: int  # hit count after merge + dedup
    analyzed: bool  # True when AI similarity scoring completed successfully
    hits: list[PriorArtSearchHit]  # ranked by similarity_score DESC, unscored hits last


def _merge_and_dedup(
    *hit_lists: list[PriorArtHit],
) -> list[PriorArtHit]:
    """Birden fazla kaynağın hit'lerini patent_no bazlı dedup eder.

    Aynı patent hem EPO hem Google'dan gelirse EPO kaydı tutulur (daha güvenilir).
    """
    seen: dict[str, PriorArtHit] = {}
    for hits in hit_lists:
        for hit in hits:
            key = hit.patent_no.upper()
            if key not in seen:
                seen[key] = hit
            elif seen[key].source == "google" and hit.source == "epo":
                # EPO kaydını tercih et
                seen[key] = hit
    return list(seen.values())


async def _epo_search(settings, query: str, limit: int) -> list[PriorArtHit]:
    """Run an Espacenet (EPO OPS) search, always releasing the HTTP client."""
    client = EspacenetClient(settings.epo_ops_key, settings.epo_ops_secret)
    try:
        return await client.search(query, limit=limit)
    finally:
        await client.close()


async def _google_search(query: str, limit: int) -> list[PriorArtHit]:
    """Run a Google Patents search, always releasing the HTTP client."""
    client = GooglePatentsClient()
    try:
        return await client.search(query, limit=limit)
    finally:
        await client.close()


@router.post("/search", response_model=PriorArtSearchResponse)
async def prior_art_search(request: PriorArtSearchRequest) -> PriorArtSearchResponse:
    """Prior-art search + AI similarity scoring.

    Steps:
      1. Query the selected sources (EPO / Google Patents) in parallel.
      2. Merge and dedup hits by patent number.
      3. Optionally score each hit with the LLM (needs ANTHROPIC_API_KEY).
      4. Return hits ranked by similarity_score DESC (unscored hits last).

    Known source errors are reported via ``source_counts`` ("epo_error" /
    "google_error" keys) while other sources continue; unexpected
    exceptions are re-raised instead of being silently swallowed.
    """
    settings = get_settings()

    # ---------- 1. Parallel search ----------
    epo_hits: list[PriorArtHit] = []
    google_hits: list[PriorArtHit] = []
    source_counts: dict[str, int] = {}

    # Label each task with its source so gather() results can be mapped
    # back without fragile index bookkeeping.
    labeled_tasks: list[tuple[str, asyncio.Task]] = []

    epo_configured = bool(settings.epo_ops_key and settings.epo_ops_secret)
    if request.include_epo and epo_configured:
        labeled_tasks.append(
            (
                "epo",
                asyncio.create_task(
                    _epo_search(settings, request.query, request.limit_per_source)
                ),
            )
        )
    elif request.include_epo:
        source_counts["epo_error"] = 0  # EPO requested but not configured

    if request.include_google:
        labeled_tasks.append(
            (
                "google",
                asyncio.create_task(
                    _google_search(request.query, request.limit_per_source)
                ),
            )
        )

    if labeled_tasks:
        results = await asyncio.gather(
            *(task for _, task in labeled_tasks), return_exceptions=True
        )
        for (label, _), result in zip(labeled_tasks, results):
            if isinstance(result, list):
                if label == "epo":
                    epo_hits = result
                else:
                    google_hits = result
            elif label == "epo" and isinstance(
                result, (EspacenetNotConfigured, EspacenetAuthError, EspacenetError)
            ):
                # Credentials/API problem — flag it for the caller but keep
                # going with the other sources.
                source_counts["epo_error"] = 0
            elif label == "google" and isinstance(result, GooglePatentsError):
                source_counts["google_error"] = 0
            elif isinstance(result, BaseException):
                # Unexpected failure: propagate instead of silently returning
                # partial results (the old code dropped these on the floor).
                raise result

    source_counts["epo"] = len(epo_hits)
    source_counts["google"] = len(google_hits)

    # ---------- 2. Merge + dedup ----------
    merged = _merge_and_dedup(epo_hits, google_hits)

    # ---------- 3. AI analysis (optional, only with ANTHROPIC_API_KEY) ----------
    analyses_by_patent: dict[str, AnalysisItem] = {}
    analyzed = False

    if merged and settings.anthropic_api_key:
        try:
            analyses = await analyze_prior_art(request.invention_summary, merged)
            analyses_by_patent = {a.patent_no: a for a in analyses}
            analyzed = True
        except (LLMNotConfiguredError, AnalysisError):
            # Analysis failed — return the search results without scores.
            analyzed = False

    # ---------- 4. Combine hits + rank ----------
    combined: list[PriorArtSearchHit] = []
    for hit in merged:
        analysis = analyses_by_patent.get(hit.patent_no)
        combined.append(
            PriorArtSearchHit(
                source=hit.source,
                patent_no=hit.patent_no,
                title=hit.title,
                abstract=hit.abstract,
                applicant=hit.applicant,
                inventors=hit.inventors,
                filing_date=hit.filing_date,
                publication_date=hit.publication_date,
                cpc_classes=hit.cpc_classes,
                url=hit.url,
                similarity_score=analysis.similarity_score if analysis else None,
                risk_level=analysis.risk_level if analysis else None,
                rationale=analysis.rationale if analysis else None,
            )
        )

    # similarity_score DESC; unscored hits (None) sort last.
    combined.sort(key=lambda h: (h.similarity_score is None, -(h.similarity_score or 0)))

    return PriorArtSearchResponse(
        query=request.query,
        source_counts=source_counts,
        total_hits=len(combined),
        analyzed=analyzed,
        hits=combined,
    )
