"""
PottSEO – Automated SEO Audit Pipeline
Backend für Multi-Client Dashboard

Verbindet: Google Search Console API + Google Analytics 4 API + DataForSEO API
Erzeugt: clients_data.json → wird vom Dashboard geladen

Setup:
  1. pip install google-auth google-auth-oauthlib google-api-python-client google-analytics-data requests
  2. config.json ausfüllen (siehe unten)
  3. python seo_pipeline.py

Scheduling (Cronjob):
  0 6 * * 1  cd /pfad/zum/projekt && python seo_pipeline.py >> logs/pipeline.log 2>&1
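
CLI options (see the argparse block at the bottom of this file):
  python seo_pipeline.py --days 7    # one 7-day window
  python seo_pipeline.py --all       # all windows at once (1, 7, 30, 365 days)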
"""

import json
import os
import sys
import time
import base64
import logging
from datetime import datetime, timedelta
from pathlib import Path

# ─── LOGGING ───
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler("pipeline.log", encoding="utf-8")
    ]
)
log = logging.getLogger("seo_pipeline")

# ─── CONFIG ───
CONFIG_FILE = "config.json"
OUTPUT_FILE = "clients_data.json"
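
# Shape of the generated JSON (built in run_pipeline, loaded by the dashboard):
# {
#   "generated_at": "<ISO timestamp>",
#   "range_days": 30, "range_label": "30d",
#   "date_range": {"start": "...", "end": "..."},
#   "comparison_range": {"start": "...", "end": "..."},
#   "clients": [{"name": ..., "domain": ..., "score": ..., "status": ..., ...}]
# }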

DEFAULT_CONFIG = {
    "_comment": "PottSEO Pipeline Konfiguration – Fülle die Felder aus und starte seo_pipeline.py",
    
    "google": {
        "service_account_key_file": "service-account-key.json",
        "_help": "Erstelle einen Service Account in Google Cloud Console und lade den JSON-Key herunter"
    },
    
    "dataforseo": {
        "login": "DEIN_DATAFORSEO_LOGIN",
        "password": "DEIN_DATAFORSEO_PASSWORT",
        "_help": "Erstelle einen Account auf app.dataforseo.com – Startguthaben: $1"
    },
    
    "settings": {
        "date_range_days": 30,
        "comparison_range_days": 30,
        "gsc_row_limit": 5000,
        "dataforseo_max_crawl_pages": 300,
        "location_code": 2276,
        "language_code": "de",
        "_help_location": "2276 = Deutschland. Andere Codes: https://api.dataforseo.com/v3/appendix/locations"
    },
    
    "clients": [
        {
            "name": "Beispiel Zahnarzt",
            "domain": "zahnarzt-muelheim.de",
            "gsc_property": "https://zahnarzt-muelheim.de/",
            "ga4_property_id": "properties/123456789",
            "city": "Mülheim",
            "industry": "Zahnarzt",
            "target_keywords": ["zahnarzt mülheim", "zahnarzt mülheim ruhr", "zahnreinigung mülheim"],
            "active": True
        },
        {
            "name": "Beispiel Steuerberater",
            "domain": "steuerberater-essen.de",
            "gsc_property": "https://steuerberater-essen.de/",
            "ga4_property_id": "properties/987654321",
            "city": "Essen",
            "industry": "Steuerberater",
            "target_keywords": ["steuerberater essen", "steuererklärung essen", "steuerberatung essen"],
            "active": True
        }
    ]
}


def load_config():
    """Lädt config.json oder erstellt eine Vorlage."""
    if not Path(CONFIG_FILE).exists():
        with open(CONFIG_FILE, "w", encoding="utf-8") as f:
            json.dump(DEFAULT_CONFIG, f, indent=2, ensure_ascii=False)
        log.info(f"✅ {CONFIG_FILE} erstellt – bitte ausfüllen und erneut starten.")
        sys.exit(0)
    
    with open(CONFIG_FILE, "r", encoding="utf-8") as f:
        return json.load(f)


# ══════════════════════════════════════════════════════════════
# GOOGLE SEARCH CONSOLE
# ══════════════════════════════════════════════════════════════

class GSCClient:
    def __init__(self, key_file):
        self.service = None
        try:
            from google.oauth2 import service_account
            from googleapiclient.discovery import build

            creds = service_account.Credentials.from_service_account_file(
                key_file,
                scopes=["https://www.googleapis.com/auth/webmasters.readonly"]
            )
            self.service = build("searchconsole", "v1", credentials=creds)
            # Keep the credentials so _execute_with_timeout can build a
            # per-request AuthorizedHttp with a longer timeout.
            self.creds = creds
            log.info("✅ GSC API connected")
        except FileNotFoundError:
            log.warning(f"⚠️  GSC key file not found: {key_file}")
        except Exception as e:
            log.warning(f"⚠️  GSC connection failed: {e}")
    
    def _execute_with_timeout(self, request, timeout=300):
        """Executes an API request with an increased timeout (default: 60 s)."""
        import httplib2
        import google_auth_httplib2
        http = httplib2.Http(timeout=timeout)
        authed_http = google_auth_httplib2.AuthorizedHttp(self.creds, http=http)
        return request.execute(http=authed_http)
    
    def fetch_performance(self, site_url, start_date, end_date, row_limit=5000):
        """Fetches clicks, impressions, CTR, and position per query + page."""
        if not self.service:
            return None
        
        try:
            request = self.service.searchanalytics().query(
                siteUrl=site_url,
                body={
                    "startDate": start_date,
                    "endDate": end_date,
                    "dimensions": ["query", "page"],
                    "rowLimit": row_limit,
                    "dataState": "final"
                }
            )
            response = self._execute_with_timeout(request)
            return response.get("rows", [])
        except Exception as e:
            log.error(f"  GSC Fehler für {site_url}: {e}")
            return None
    
    def fetch_daily_clicks(self, site_url, start_date, end_date):
        """Fetches daily clicks for the sparkline trend."""
        if not self.service:
            return None
        
        try:
            request = self.service.searchanalytics().query(
                siteUrl=site_url,
                body={
                    "startDate": start_date,
                    "endDate": end_date,
                    "dimensions": ["date"],
                    "rowLimit": 60
                }
            )
            response = self._execute_with_timeout(request)
            return response.get("rows", [])
        except Exception as e:
            log.error(f"  GSC daily Fehler: {e}")
            return None
    
    def fetch_index_status(self, site_url):
        """Lists submitted sitemaps (a rough proxy for index status)."""
        if not self.service:
            return None
        try:
            request = self.service.sitemaps().list(siteUrl=site_url)
            response = self._execute_with_timeout(request)
            return response.get("sitemap", [])
        except Exception as e:
            log.error(f"  GSC Index Fehler: {e}")
            return None


# ══════════════════════════════════════════════════════════════
# GOOGLE ANALYTICS 4
# ══════════════════════════════════════════════════════════════

class GA4Client:
    def __init__(self, key_file):
        self.client = None
        try:
            from google.analytics.data_v1beta import BetaAnalyticsDataClient
            
            os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = key_file
            self.client = BetaAnalyticsDataClient()
            log.info("✅ GA4 API verbunden")
        except Exception as e:
            log.warning(f"⚠️  GA4 Verbindung fehlgeschlagen: {e}")
    
    def fetch_organic_performance(self, property_id, start_date, end_date):
        """Holt organischen Traffic, Conversions, Engagement."""
        if not self.client:
            return None
        
        try:
            from google.analytics.data_v1beta.types import (
                RunReportRequest, Dimension, Metric, DateRange, FilterExpression, Filter
            )
            
            request = RunReportRequest(
                property=property_id,
                dimensions=[
                    Dimension(name="landingPage"),
                ],
                metrics=[
                    Metric(name="sessions"),
                    Metric(name="conversions"),
                    Metric(name="engagedSessions"),
                    Metric(name="bounceRate"),
                    Metric(name="averageSessionDuration"),
                    Metric(name="engagementRate"),
                ],
                date_ranges=[DateRange(start_date=start_date, end_date=end_date)],
                dimension_filter=FilterExpression(
                    filter=Filter(
                        field_name="sessionDefaultChannelGroup",
                        string_filter=Filter.StringFilter(value="Organic Search")
                    )
                ),
                limit=500
            )
            
            response = self.client.run_report(request)
            
            results = []
            for row in response.rows:
                results.append({
                    "page": row.dimension_values[0].value,
                    "sessions": int(row.metric_values[0].value),
                    "conversions": int(float(row.metric_values[1].value)),
                    "engaged_sessions": int(row.metric_values[2].value),
                    "bounce_rate": float(row.metric_values[3].value),
                    "avg_duration": float(row.metric_values[4].value),
                    "engagement_rate": float(row.metric_values[5].value),
                })
            return results
        except Exception as e:
            log.error(f"  GA4 Fehler für {property_id}: {e}")
            return None
    
    def fetch_conversion_total(self, property_id, start_date, end_date):
        """Holt Gesamt-Conversions aus organischem Traffic."""
        if not self.client:
            return None
        
        try:
            from google.analytics.data_v1beta.types import (
                RunReportRequest, Metric, DateRange, FilterExpression, Filter
            )
            
            request = RunReportRequest(
                property=property_id,
                metrics=[
                    Metric(name="sessions"),
                    Metric(name="conversions"),
                    Metric(name="totalRevenue"),
                ],
                date_ranges=[DateRange(start_date=start_date, end_date=end_date)],
                dimension_filter=FilterExpression(
                    filter=Filter(
                        field_name="sessionDefaultChannelGroup",
                        string_filter=Filter.StringFilter(value="Organic Search")
                    )
                )
            )
            
            response = self.client.run_report(request)
            if response.rows:
                row = response.rows[0]
                return {
                    "sessions": int(row.metric_values[0].value),
                    "conversions": int(float(row.metric_values[1].value)),
                    "revenue": float(row.metric_values[2].value)
                }
            return {"sessions": 0, "conversions": 0, "revenue": 0}
        except Exception as e:
            log.error(f"  GA4 Conversion Fehler: {e}")
            return None


# ══════════════════════════════════════════════════════════════
# DATAFORSEO
# ══════════════════════════════════════════════════════════════

class DataForSEOClient:
    BASE = "https://api.dataforseo.com/v3"
    
    def __init__(self, login, password):
        self.headers = {
            "Authorization": "Basic " + base64.b64encode(f"{login}:{password}".encode()).decode(),
            "Content-Type": "application/json"
        }
        self.connected = False
        
        if login == "YOUR_DATAFORSEO_LOGIN":
            log.warning("⚠️  DataForSEO login not configured")
            return
        
        # Test the connection
        try:
            import requests
            r = requests.get(f"{self.BASE}/appendix/user_data", headers=self.headers, timeout=10)
            if r.status_code == 200:
                data = r.json()
                balance = data.get("tasks", [{}])[0].get("result", [{}])[0].get("money", {}).get("balance", 0)
                log.info(f"✅ DataForSEO verbunden – Guthaben: ${balance:.2f}")
                self.connected = True
            else:
                log.warning(f"⚠️  DataForSEO Auth fehlgeschlagen: {r.status_code}")
        except Exception as e:
            log.warning(f"⚠️  DataForSEO Verbindung fehlgeschlagen: {e}")
    
    def _post(self, endpoint, payload):
        import requests
        try:
            r = requests.post(f"{self.BASE}{endpoint}", headers=self.headers, json=payload, timeout=60)
            return r.json() if r.status_code == 200 else None
        except Exception as e:
            log.error(f"  DFSEO error {endpoint}: {e}")
            return None

    def _get(self, endpoint):
        import requests
        try:
            r = requests.get(f"{self.BASE}{endpoint}", headers=self.headers, timeout=60)
            return r.json() if r.status_code == 200 else None
        except Exception as e:
            log.error(f"  DFSEO error {endpoint}: {e}")
            return None
    
    def fetch_keyword_volumes(self, keywords, location_code=2276, language_code="de"):
        """Fetches search volume, CPC, and competition for keywords."""
        if not self.connected:
            return None
        
        payload = [{
            "keywords": keywords[:700],  # Max 700 pro Request
            "location_code": location_code,
            "language_code": language_code
        }]
        
        result = self._post("/keywords_data/google_ads/search_volume/live", payload)
        if not result:
            return None
        
        volumes = {}
        try:
            for task in result.get("tasks", []):
                for item in task.get("result", []):
                    volumes[item["keyword"]] = {
                        "volume": item.get("search_volume", 0),
                        "cpc": item.get("cpc", 0),
                        "competition": item.get("competition_level", ""),
                    }
        except (KeyError, TypeError):
            pass
        return volumes
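
    # fetch_keyword_volumes returns a mapping like (illustrative numbers):
    #   {"zahnarzt mülheim": {"volume": 480, "cpc": 1.2, "competition": "LOW"}}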
    
    def start_onpage_audit(self, domain, max_pages=300):
        """Starts an On-Page audit (asynchronous)."""
        if not self.connected:
            return None
        
        payload = [{
            "target": domain,
            "max_crawl_pages": max_pages,
            "load_resources": True,
            "enable_javascript": True,
            "check_spell": False
        }]
        
        result = self._post("/on_page/task_post", payload)
        if result and result.get("tasks"):
            task_id = result["tasks"][0].get("id")
            log.info(f"  On-Page Audit gestartet: {task_id}")
            return task_id
        return None
    
    def get_onpage_summary(self, task_id):
        """Fetches the result of an On-Page audit."""
        if not self.connected or not task_id:
            return None

        # The summary endpoint is a GET endpoint (unlike task_post).
        result = self._get(f"/on_page/summary/{task_id}")
        if not result:
            return None
        
        try:
            task = result["tasks"][0]["result"][0]
            crawl = task.get("crawl_progress", "")
            if crawl != "finished":
                return {"status": "crawling", "progress": crawl}
            
            pages = task.get("crawl_status", {})
            checks = task.get("page_metrics", {}).get("checks", {})
            
            return {
                "status": "finished",
                "pages_crawled": pages.get("pages_crawled", 0),
                "pages_with_issues": sum(1 for v in checks.values() if v > 0),
                "no_title": checks.get("no_title", 0),
                "no_description": checks.get("no_description", 0),
                "duplicate_title": checks.get("duplicate_title", 0),
                "duplicate_description": checks.get("duplicate_description", 0),
                "no_h1": checks.get("no_h1_tag", 0),
                "duplicate_h1": checks.get("duplicate_h1_tag", 0),
                "no_image_alt": checks.get("no_image_alt", 0),
                "broken_links": checks.get("is_broken", 0),
                "redirect_chains": checks.get("is_redirect", 0),
                "no_favicon": checks.get("no_favicon", 0),
                "large_page_size": checks.get("large_page_size", 0),
                "slow_loading": checks.get("slow_loading", 0),
            }
        except (KeyError, TypeError, IndexError) as e:
            log.error(f"  On-Page Parse Fehler: {e}")
            return None
    
    def fetch_serp_rankings(self, keywords, location_code=2276, language_code="de"):
        """Checks SERP rankings for the given keywords."""
        if not self.connected:
            return None
        
        results = {}
        for kw in keywords[:10]:  # cap at 10 keywords to keep costs down
            payload = [{
                "keyword": kw,
                "location_code": location_code,
                "language_code": language_code,
                "depth": 50
            }]
            
            data = self._post("/serp/google/organic/live/regular", payload)
            if data:
                try:
                    items = data["tasks"][0]["result"][0].get("items", [])
                    results[kw] = items[:50]
                except (KeyError, TypeError, IndexError):
                    pass
            
            time.sleep(0.5)  # Rate limiting
        
        return results
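
    # Note: fetch_serp_rankings (like GSCClient.fetch_index_status and
    # GA4Client.fetch_organic_performance) is not wired into run_pipeline yet;
    # it is available for ad-hoc checks.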
    
    def fetch_backlinks_summary(self, domain):
        """Fetches a backlink summary."""
        if not self.connected:
            return None
        
        payload = [{"target": domain, "internal_list_limit": 0, "external_list_limit": 0}]
        result = self._post("/backlinks/summary/live", payload)
        
        if not result:
            return None
        
        try:
            data = result["tasks"][0]["result"][0]
            # The summary exposes nofollow counts via referring_links_attributes;
            # there is no direct dofollow field, so we approximate it as
            # total backlinks minus nofollow links.
            nofollow = (data.get("referring_links_attributes") or {}).get("nofollow", 0)
            return {
                "total_backlinks": data.get("total_backlinks", 0),
                "referring_domains": data.get("referring_domains", 0),
                "domain_rank": data.get("rank", 0),
                "dofollow": max(data.get("total_backlinks", 0) - nofollow, 0),
                "nofollow": nofollow,
            }
        except (KeyError, TypeError, IndexError):
            return None


# ══════════════════════════════════════════════════════════════
# ANALYSIS ENGINE
# ══════════════════════════════════════════════════════════════
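
# A GSC Search Analytics row (dimensions ["query", "page"]) has the shape:
#   {"keys": ["<query>", "<page URL>"], "clicks": 12, "impressions": 340,
#    "ctr": 0.035, "position": 8.2}   # illustrative numbers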

def analyze_gsc_data(current_rows, previous_rows):
    """Analyzes GSC data: clicks, impressions, CTR, position, decay, quick wins, cannibalization."""
    
    if not current_rows:
        return {}
    
    total_clicks = sum(r.get("clicks", 0) for r in current_rows)
    total_impressions = sum(r.get("impressions", 0) for r in current_rows)
    avg_ctr = round((total_clicks / max(total_impressions, 1)) * 100, 2)
    
    # Impression-weighted average position
    weighted_pos = sum(r.get("position", 0) * r.get("impressions", 0) for r in current_rows)
    total_impr_for_pos = sum(r.get("impressions", 0) for r in current_rows)
    avg_position = round(weighted_pos / max(total_impr_for_pos, 1), 1)
    
    # Compare with the previous period
    prev_clicks = sum(r.get("clicks", 0) for r in (previous_rows or []))
    clicks_delta = round(((total_clicks - prev_clicks) / max(prev_clicks, 1)) * 100, 1) if previous_rows else 0
    
    prev_impressions = sum(r.get("impressions", 0) for r in (previous_rows or []))
    impressions_delta = round(((total_impressions - prev_impressions) / max(prev_impressions, 1)) * 100, 1) if previous_rows else 0
    
    # Aggregate top keywords
    kw_data = {}
    for row in current_rows:
        query = row["keys"][0]
        if query not in kw_data:
            kw_data[query] = {"clicks": 0, "impressions": 0, "position": 0, "count": 0}
        kw_data[query]["clicks"] += row.get("clicks", 0)
        kw_data[query]["impressions"] += row.get("impressions", 0)
        kw_data[query]["position"] += row.get("position", 0)
        kw_data[query]["count"] += 1
    
    for q in kw_data:
        kw_data[q]["position"] = round(kw_data[q]["position"] / kw_data[q]["count"], 1)
    
    # Quick wins: position 5-15 with high impressions
    quick_wins = [
        {"keyword": q, **d}
        for q, d in kw_data.items()
        if 5 <= d["position"] <= 15 and d["impressions"] > 50
    ]
    quick_wins.sort(key=lambda x: x["impressions"], reverse=True)
    
    # Content decay: pages with declining clicks
    decay_pages = []
    if previous_rows:
        prev_page_clicks = {}
        for row in previous_rows:
            page = row["keys"][1] if len(row.get("keys", [])) > 1 else ""
            prev_page_clicks[page] = prev_page_clicks.get(page, 0) + row.get("clicks", 0)
        
        curr_page_clicks = {}
        for row in current_rows:
            page = row["keys"][1] if len(row.get("keys", [])) > 1 else ""
            curr_page_clicks[page] = curr_page_clicks.get(page, 0) + row.get("clicks", 0)
        
        for page, prev_c in prev_page_clicks.items():
            curr_c = curr_page_clicks.get(page, 0)
            if prev_c > 10 and curr_c < prev_c * 0.7:  # >30% drop
                decay_pages.append({
                    "page": page,
                    "prev_clicks": prev_c,
                    "curr_clicks": curr_c,
                    "drop_pct": round(((curr_c - prev_c) / prev_c) * 100, 1)
                })
        decay_pages.sort(key=lambda x: x["drop_pct"])
    
    # Keyword cannibalization: several pages ranking for the same keyword
    kw_pages = {}
    for row in current_rows:
        query = row["keys"][0]
        page = row["keys"][1] if len(row.get("keys", [])) > 1 else ""
        if query not in kw_pages:
            kw_pages[query] = []
        kw_pages[query].append({"page": page, "position": row.get("position", 0), "clicks": row.get("clicks", 0)})
    
    cannibalization = [
        {"keyword": q, "pages": pages}
        for q, pages in kw_pages.items()
        if len(pages) > 1 and any(p["clicks"] > 5 for p in pages)
    ]
    
    # Keyword movement vs. the previous period
    kw_up = 0
    kw_down = 0
    if previous_rows:
        prev_kw = {}
        for row in previous_rows:
            q = row["keys"][0]
            if q not in prev_kw or row.get("position", 100) < prev_kw[q]:
                prev_kw[q] = row.get("position", 100)
        
        for q, d in kw_data.items():
            if q in prev_kw:
                if d["position"] < prev_kw[q] - 1:
                    kw_up += 1
                elif d["position"] > prev_kw[q] + 1:
                    kw_down += 1
    
    return {
        "clicks": total_clicks,
        "clicks_delta": clicks_delta,
        "impressions": total_impressions,
        "impressions_delta": impressions_delta,
        "ctr": avg_ctr,
        "position": avg_position,
        "kw_up": kw_up,
        "kw_down": kw_down,
        "quick_wins": quick_wins[:15],
        "decay_pages": decay_pages[:10],
        "cannibalization": cannibalization[:10],
        "top_keywords": sorted(kw_data.items(), key=lambda x: x[1]["clicks"], reverse=True)[:20]
    }


def calculate_seo_score(gsc_data, ga4_data, tech_data, backlink_data):
    """Berechnet einen gewichteten SEO-Score von 0-100."""
    score = 50  # Basis
    
    if gsc_data:
        # CTR bonus
        if gsc_data.get("ctr", 0) > 3.5:
            score += 8
        elif gsc_data.get("ctr", 0) > 2.5:
            score += 4
        elif gsc_data.get("ctr", 0) < 1.5:
            score -= 5
        
        # Position bonus
        if gsc_data.get("position", 50) < 10:
            score += 10
        elif gsc_data.get("position", 50) < 20:
            score += 5
        
        # Click growth
        if gsc_data.get("clicks_delta", 0) > 20:
            score += 8
        elif gsc_data.get("clicks_delta", 0) > 5:
            score += 4
        elif gsc_data.get("clicks_delta", 0) < -10:
            score -= 8
        
        # Decay penalty
        score -= min(len(gsc_data.get("decay_pages", [])) * 2, 10)
        
        # Cannibalization penalty
        score -= min(len(gsc_data.get("cannibalization", [])) * 2, 8)
    
    if tech_data:
        # Technical issues
        issues = sum(v for k, v in tech_data.items() if isinstance(v, int) and k not in ["pages_crawled", "pages_with_issues"])
        if issues > 50:
            score -= 10
        elif issues > 20:
            score -= 5
    
    if backlink_data:
        dr = backlink_data.get("domain_rank", 0)
        if dr > 50:
            score += 8
        elif dr > 30:
            score += 4
    
    if ga4_data:
        conv = ga4_data.get("conversions", 0)
        if conv > 50:
            score += 6
        elif conv > 10:
            score += 3
    
    return max(0, min(100, score))
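
# Worked example: CTR 4.0 (+8), avg. position 8 (+10), clicks_delta +25% (+8),
# no decay/cannibalization pages, 10 technical issues (no penalty),
# domain rank 40 (+4), 20 conversions (+3) → 50 + 8 + 10 + 8 + 4 + 3 = 83.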


# ══════════════════════════════════════════════════════════════
# MAIN PIPELINE
# ══════════════════════════════════════════════════════════════

def run_pipeline(days=None):
    log.info("=" * 60)
    log.info("🚀 PottSEO Pipeline gestartet")
    log.info("=" * 60)
    
    config = load_config()
    settings = config["settings"]
    
    # Date ranges (can be overridden via CLI)
    range_days = days if days else settings["date_range_days"]
    end_date = datetime.now().date()
    start_date = end_date - timedelta(days=range_days)
    prev_end = start_date - timedelta(days=1)
    prev_start = prev_end - timedelta(days=range_days)
    
    log.info(f"📅 Zeitraum: {start_date} – {end_date} ({range_days} Tage)")
    log.info(f"📅 Vergleich: {prev_start} – {prev_end}")
    
    # Initialize API clients
    gsc = GSCClient(config["google"]["service_account_key_file"])
    ga4 = GA4Client(config["google"]["service_account_key_file"])
    dfseo = DataForSEOClient(config["dataforseo"]["login"], config["dataforseo"]["password"])
    
    # Collect results
    all_clients = []
    active_clients = [c for c in config["clients"] if c.get("active", True)]
    
    log.info(f"\n📋 {len(active_clients)} aktive Kunden gefunden\n")
    
    for i, client in enumerate(active_clients):
        log.info(f"─── [{i+1}/{len(active_clients)}] {client['name']} ({client['domain']}) ───")
        
        result = {
            "id": i,
            "name": client["name"],
            "domain": client["domain"],
            "city": client.get("city", ""),
            "industry": client.get("industry", ""),
        }
        
        # ── GSC data ──
        log.info("  📊 GSC: fetching performance...")
        current_rows = gsc.fetch_performance(
            client["gsc_property"],
            str(start_date), str(end_date),
            settings["gsc_row_limit"]
        )
        
        previous_rows = gsc.fetch_performance(
            client["gsc_property"],
            str(prev_start), str(prev_end),
            settings["gsc_row_limit"]
        )
        
        gsc_analysis = analyze_gsc_data(current_rows, previous_rows)
        result.update({
            "clicks": gsc_analysis.get("clicks", 0),
            "clicks_delta": gsc_analysis.get("clicks_delta", 0),
            "impressions": gsc_analysis.get("impressions", 0),
            "impressions_delta": gsc_analysis.get("impressions_delta", 0),
            "ctr": gsc_analysis.get("ctr", 0),
            "position": gsc_analysis.get("position", 0),
            "kw_up": gsc_analysis.get("kw_up", 0),
            "kw_down": gsc_analysis.get("kw_down", 0),
            "quick_wins_count": len(gsc_analysis.get("quick_wins", [])),
            "decay_count": len(gsc_analysis.get("decay_pages", [])),
            "cannibal_count": len(gsc_analysis.get("cannibalization", [])),
            "top_keywords": [
                {"keyword": kw, "clicks": d["clicks"], "impressions": d["impressions"], "position": d["position"]}
                for kw, d in gsc_analysis.get("top_keywords", [])
            ],
            "quick_wins": gsc_analysis.get("quick_wins", [])[:10],
            "decay_pages": gsc_analysis.get("decay_pages", [])[:10],
            "cannibalization": gsc_analysis.get("cannibalization", [])[:10],
        })
        
        # Daily clicks for the sparkline
        daily = gsc.fetch_daily_clicks(client["gsc_property"], str(start_date), str(end_date))
        if daily:
            result["spark_clicks"] = [r.get("clicks", 0) for r in daily[-14:]]
        
        # ── GA4 data ──
        log.info("  🎯 GA4: fetching conversions...")
        ga4_totals = ga4.fetch_conversion_total(
            client.get("ga4_property_id", ""),
            str(start_date), str(end_date)
        )
        
        ga4_prev = ga4.fetch_conversion_total(
            client.get("ga4_property_id", ""),
            str(prev_start), str(prev_end)
        )
        
        if ga4_totals:
            conv = ga4_totals.get("conversions", 0)
            prev_conv = ga4_prev.get("conversions", 0) if ga4_prev else 0
            conv_delta = round(((conv - prev_conv) / max(prev_conv, 1)) * 100, 1) if prev_conv else 0
            
            result.update({
                "conv": conv,
                "conv_delta": conv_delta,
                "revenue": ga4_totals.get("revenue", 0),
            })
        else:
            result.update({"conv": 0, "conv_delta": 0, "revenue": 0})
        
        # ── DataForSEO ──
        log.info("  ⚙️  DataForSEO: technical audit...")
        
        # Search volume for target keywords
        if client.get("target_keywords"):
            volumes = dfseo.fetch_keyword_volumes(
                client["target_keywords"],
                settings["location_code"],
                settings["language_code"]
            )
            if volumes:
                for kw in result.get("top_keywords", []):
                    vol_data = volumes.get(kw["keyword"], {})
                    kw["volume"] = vol_data.get("volume", 0)
        
        # Start the On-Page audit (async – the result is picked up on the next run)
        audit_id_file = f".audit_task_{client['domain'].replace('.', '_')}.json"
        
        if Path(audit_id_file).exists():
            with open(audit_id_file) as f:
                audit_info = json.load(f)
            
            tech_data = dfseo.get_onpage_summary(audit_info.get("task_id"))
            if tech_data and tech_data.get("status") == "finished":
                result["tech_issues"] = tech_data
                result["tech_issues_count"] = sum(
                    v for k, v in tech_data.items()
                    if isinstance(v, int) and k not in ["pages_crawled", "pages_with_issues"]
                )
                os.remove(audit_id_file)
                log.info(f"  ✅ Audit abgeschlossen: {tech_data.get('pages_crawled', 0)} Seiten")
            else:
                log.info(f"  ⏳ Audit läuft noch...")
                result["tech_issues"] = audit_info.get("last_result", {})
                result["tech_issues_count"] = audit_info.get("last_issues_count", 0)
        else:
            task_id = dfseo.start_onpage_audit(client["domain"], settings["dataforseo_max_crawl_pages"])
            if task_id:
                with open(audit_id_file, "w") as f:
                    json.dump({"task_id": task_id, "started": str(datetime.now())}, f)
            result["tech_issues"] = {}
            result["tech_issues_count"] = 0
        
        # Backlinks
        log.info("  🔗 DataForSEO: Backlinks...")
        backlink_data = dfseo.fetch_backlinks_summary(client["domain"])
        if backlink_data:
            result["backlinks"] = backlink_data
        
        # ── Compute the SEO score ──
        result["score"] = calculate_seo_score(
            gsc_analysis,
            ga4_totals,
            result.get("tech_issues", {}),
            backlink_data
        )
        
        # Derive status
        if result["score"] >= 70:
            result["status"] = "good"
        elif result["score"] >= 45:
            result["status"] = "warn"
        else:
            result["status"] = "bad"
        
        all_clients.append(result)
        log.info(f"  ✅ Score: {result['score']}/100 ({result['status']})")
        log.info("")
    
    # ── Output ──
    # File name is based on the period
    range_label = f"{range_days}d"
    output_file = f"clients_data_{range_label}.json"
    
    output = {
        "generated_at": datetime.now().isoformat(),
        "range_days": range_days,
        "range_label": range_label,
        "date_range": {"start": str(start_date), "end": str(end_date)},
        "comparison_range": {"start": str(prev_start), "end": str(prev_end)},
        "clients": all_clients
    }
    
    with open(output_file, "w", encoding="utf-8") as f:
        json.dump(output, f, indent=2, ensure_ascii=False, default=str)
    
    # Also write the default file for the dashboard
    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        json.dump(output, f, indent=2, ensure_ascii=False, default=str)
    
    log.info("=" * 60)
    log.info(f"✅ Pipeline abgeschlossen – {len(all_clients)} Kunden verarbeitet")
    log.info(f"📄 Output: {OUTPUT_FILE}")
    log.info("=" * 60)
    
    # Summary
    good = sum(1 for c in all_clients if c["status"] == "good")
    warn = sum(1 for c in all_clients if c["status"] == "warn")
    bad = sum(1 for c in all_clients if c["status"] == "bad")
    log.info(f"   ✅ Gut: {good}  ⚠️ Achtung: {warn}  🔴 Kritisch: {bad}")
    
    return output


if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="PottSEO Automated SEO Audit Pipeline")
    parser.add_argument("--days", type=int, help="Zeitraum in Tagen (1=heute, 7=letzte Woche, 30=letzter Monat, 365=letztes Jahr)")
    parser.add_argument("--all", action="store_true", help="Alle Zeiträume auf einmal generieren (1, 7, 30, 365 Tage)")
    args = parser.parse_args()
    
    if args.all:
        for d in [1, 7, 30, 365]:
            log.info(f"\n{'='*60}\n⏱  Starte Zeitraum: {d} Tage\n{'='*60}")
            run_pipeline(days=d)
    else:
        run_pipeline(days=args.days)
