# uploads_debug.py with analytics table and debug/analysis extension
import sqlite3
import uuid
import argparse
from datetime import datetime, timedelta
import os
from apify_client import ApifyClient

DB_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'database.sqlite')
APIFY_TOKEN = os.environ.get('APIFY_TOKEN')  # set your Apify token as an environment variable
ACTOR_ID = "xMc5Ga1oCONPmWJIa"


def get_connection():
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    return conn


def create_tables():
    """
    Erstellt die Tabellen uploads und analytics, falls sie nicht existieren
    """
    conn = get_connection()
    with conn:
        # uploads-Tabelle
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS uploads (
                batch_id TEXT PRIMARY KEY,
                account TEXT NOT NULL,
                upload_count INTEGER DEFAULT 0,
                created_at TEXT NOT NULL,
                expires_at TEXT NOT NULL,
                status TEXT NOT NULL DEFAULT 'waiting'
            )
            """
        )
        # analytics-Tabelle
        conn.execute(
            """
            CREATE TABLE IF NOT EXISTS analytics (
                account TEXT NOT NULL,
                datum TEXT NOT NULL,
                views INTEGER DEFAULT 0,
                likes INTEGER DEFAULT 0,
                posts INTEGER DEFAULT 0,
                sales INTEGER DEFAULT 0,
                clicks INTEGER DEFAULT 0,
                PRIMARY KEY (account, datum)
            )
            """
        )
    conn.close()


def create_batch(account: str) -> str:
    batch_id = uuid.uuid4().hex
    now = datetime.now()
    created_at = now.strftime('%Y-%m-%d %H:%M:%S')
    expires_at = (now + timedelta(hours=24)).strftime('%Y-%m-%d %H:%M:%S')
    conn = get_connection()
    with conn:
        conn.execute(
            "INSERT INTO uploads (batch_id, account, upload_count, created_at, expires_at, status) VALUES (?, ?, 0, ?, ?, 'waiting')",
            (batch_id, account, created_at, expires_at),
        )
    conn.close()
    return batch_id
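
# Example (id shown is illustrative): create_batch('sinkmate1') returns a
# 32-character hex batch id; the bot stores it and calls increment_count(id)
# after each successful upload.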


def increment_count(batch_id: str):
    conn = get_connection()
    with conn:
        conn.execute(
            "UPDATE uploads SET upload_count = upload_count + 1 WHERE batch_id = ?",
            (batch_id,),
        )
    conn.close()


def parse_timestamp(item: dict) -> datetime:
    """Extracts the post timestamp; items without a parseable one sort last."""
    ts = item.get('timestamp') or item.get('createdAt') or item.get('crawlTime')
    if not ts:
        return datetime.max
    # Strip a trailing 'Z' so fromisoformat() accepts the string; the result
    # stays naive, matching the naive datetime.max fallback used for sorting.
    ts_clean = ts.rstrip('Z')
    try:
        return datetime.fromisoformat(ts_clean)
    except ValueError:
        return datetime.max
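
# Illustrative behaviour (values are examples, not from a real API response):
#   parse_timestamp({'timestamp': '2024-05-01T10:00:00Z'}) -> datetime(2024, 5, 1, 10, 0)
#   parse_timestamp({})                                    -> datetime.max (sorts last)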


def save_analytics(account: str, datum: str, views: int, likes: int, posts: int):
    """
    Speichert oder aktualisiert die Analytics-Zeile für account+datum
    """
    conn = get_connection()
    with conn:
        cur = conn.execute(
            "SELECT views, likes, posts FROM analytics WHERE account = ? AND datum = ?",
            (account, datum)
        )
        row = cur.fetchone()
        if row:
            conn.execute(
                "UPDATE analytics SET views = views + ?, likes = likes + ?, posts = posts + ? WHERE account = ? AND datum = ?",
                (views, likes, posts, account, datum)
            )
        else:
            conn.execute(
                "INSERT INTO analytics (account, datum, views, likes, posts, sales, clicks) VALUES (?, ?, ?, ?, ?, 0, 0)",
                (account, datum, views, likes, posts)
            )
    conn.close()
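
# Note: on SQLite 3.24+ the SELECT/UPDATE/INSERT above could be collapsed into
# a single UPSERT; a minimal sketch (same columns as the schema above):
#   INSERT INTO analytics (account, datum, views, likes, posts, sales, clicks)
#   VALUES (?, ?, ?, ?, ?, 0, 0)
#   ON CONFLICT(account, datum) DO UPDATE SET
#       views = views + excluded.views,
#       likes = likes + excluded.likes,
#       posts = posts + excluded.posts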


def debug_batches():
    if not APIFY_TOKEN:
        print("Fehler: APIFY_TOKEN nicht gesetzt.")
        return
    client = ApifyClient(APIFY_TOKEN)

    conn = get_connection()
    cur = conn.cursor()
    now_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    today = datetime.now().strftime('%Y-%m-%d')

    # Mark 'waiting' batches as 'expired' once their expiry time has passed
    with conn:
        conn.execute(
            "UPDATE uploads SET status = 'expired' WHERE status = 'waiting' AND datetime(expires_at) <= datetime(?)",
            (now_str,)
        )
    # Per-account totals, considering only status 'expired' and 'waiting'
    cur.execute("SELECT account, upload_count, status FROM uploads WHERE status IN ('expired', 'waiting')")
    rows = cur.fetchall()
    conn.close()

    accounts = {}
    for row in rows:
        acc = row['account']
        accounts.setdefault(acc, {'expired': 0, 'active': 0})
        if row['status'] == 'expired':
            accounts[acc]['expired'] += row['upload_count']
        else:
            accounts[acc]['active'] += row['upload_count']

    print(f"--- DEBUG Batches per Account @ {now_str} ---")
    for acc, counts in accounts.items():
        exp_cnt = counts['expired']
        act_cnt = counts['active']
        print(f"Account: {acc}")
        print(f"  Abgelaufen (gesamt count): {exp_cnt}")
        print(f"  Aktiv (gesamt count):   {act_cnt}")

        # No expired batches means no API request is needed
        if exp_cnt == 0:
            print("  No expired batches, skipping API request.")
            continue
            continue

        # Fetch enough of the account's most recent posts to cover both the
        # expired and the still-active batches
        total = exp_cnt + act_cnt
        run_input = {"username": [acc], "resultsLimit": total}
        run = client.actor(ACTOR_ID).call(run_input=run_input)
        items = list(client.dataset(run['defaultDatasetId']).iterate_items())

        # Oldest first: the oldest exp_cnt posts belong to the expired batches
        sorted_items = sorted(items, key=parse_timestamp)
        expired_items = sorted_items[:exp_cnt]

        # 'or 0' guards against None values in the API response
        total_likes = sum(item.get('likesCount') or 0 for item in expired_items)
        total_plays = sum(item.get('videoPlayCount') or 0 for item in expired_items)
        posts = exp_cnt  # only expired posts are counted

        print(f"  -> Gefundene abgelaufene Items (älteste {exp_cnt}): {len(expired_items)}")
        for item in expired_items:
            ts_val = parse_timestamp(item)
            print(
                f"     - id={item.get('id')} timestamp={ts_val} "
                f"likes={item.get('likesCount')} plays={item.get('videoPlayCount')}"
            )
        print(f"  -> Summen: likesCount={total_likes}, videoPlayCount={total_plays}\n")

        save_analytics(acc, today, total_plays, total_likes, posts)

    # Set only 'expired' batches to 'analysed'; 'waiting' ones remain
    conn = get_connection()
    with conn:
        conn.execute("UPDATE uploads SET status = 'analysed' WHERE status = 'expired'")
    conn.close()
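
# After a debug run the analytics table holds one row per account and day,
# e.g. (values illustrative):
#   account   | datum      | views | likes | posts | sales | clicks
#   sinkmate1 | 2024-05-01 |  1234 |    56 |     3 |     0 |      0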


def main():
    parser = argparse.ArgumentParser(description="Manage uploads with expiry logic, debug, analytics, and per-account Apify calls")
    sub = parser.add_subparsers(dest="cmd")
    sub.add_parser("init", help="create tables")
    pc = sub.add_parser("create", help="create new batch")
    pc.add_argument("account", help="account name oder id")
    pi = sub.add_parser("increment", help="increment upload_count")
    pi.add_argument("batch_id", help="batch id")
    sub.add_parser("debug", help="zeige und speichere Analytics nach Debug per Account")
    args = parser.parse_args()

    if args.cmd == "init":
        create_tables()
    elif args.cmd == "create":
        create_tables()
        print("Created batch", create_batch(args.account))
    elif args.cmd == "increment":
        increment_count(args.batch_id)
        print("Incremented", args.batch_id)
    elif args.cmd == "debug":
        debug_batches()
    else:
        parser.print_help()

if __name__ == "__main__":
    main()


# Script explanation:
# The bot script first creates a batch:  python uploads_debug.py create sinkmate1
# It stores the returned batch id and, each time an upload succeeds, runs e.g.:
#   python uploads_debug.py increment aeb20943e8954b1d8b9979d632122c56
# Then run debug periodically (e.g. every 5 minutes); it stores the results in
# the database, which the PHP website then reads.
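#
# A minimal scheduling sketch, assuming cron and paths you would adjust for
# your setup (the debug job needs APIFY_TOKEN in its environment):
#   */5 * * * * APIFY_TOKEN=<your token> /usr/bin/python3 /path/to/uploads_debug.py debug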