"""
SQLite storage backend — drop-in replacement for JSON file I/O.

Exposes load_json_file / save_json_file with identical signatures to the
previous file-based helpers, so request handlers need no change.

- WAL mode: concurrent reads while a writer is active
- timeout=30s: writers queue instead of failing under contention
- ACID: no partial writes; crash-safe
- Auto-migrate: first run imports existing *.json files from BASE_DIR
"""

import json
import os
import sqlite3
import threading

# Resolve paths relative to this module so the DB file lives next to the code.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
DB_PATH = os.path.join(BASE_DIR, "database.db")

# Serializes writers within this process; cross-process contention is handled
# by SQLite's own locking plus the 30s busy timeout set in _get_conn().
_write_lock = threading.Lock()
# One-time initialization guard, double-checked in _ensure_init().
_init_done = False
_init_lock = threading.Lock()

# Legacy JSON files imported into kv_store on first run (see _auto_migrate).
# Each file "name.json" becomes the row keyed "name".
MIGRATABLE_FILES = [
    "products.json",
    "customers.json",
    "cert_history.json",
    "dictionary.json",
    "visual_template_configs.json",
    "cert_config.json",
    "template_fields.json",
    "fastener_specs.json",
    "packing.json",
    "field_mapping.json",
]


def _get_conn():
    """Open a new, fully configured connection to the shared database file.

    Each caller gets its own connection (the module never shares one across
    threads), configured for concurrent use:
    - timeout=30: writers wait up to 30s for a lock instead of raising
    - isolation_level=None: autocommit, so each statement commits immediately
    - WAL journal + synchronous=NORMAL: readers proceed while a writer is active

    Returns:
        sqlite3.Connection: an open connection the caller must close.
    """
    conn = sqlite3.connect(DB_PATH, timeout=30, isolation_level=None)
    try:
        conn.execute("PRAGMA journal_mode=WAL")
        conn.execute("PRAGMA synchronous=NORMAL")
        conn.execute("PRAGMA foreign_keys=ON")
    except Exception:
        # Fix: don't leak the handle if a PRAGMA fails (e.g. the journal-mode
        # switch times out while another process holds the lock).
        conn.close()
        raise
    return conn


def _ensure_init():
    """Create the kv_store table on first use (double-checked locking).

    Safe to call from any thread on every request; only the first caller
    pays the setup cost. An empty table triggers a one-time import of the
    legacy JSON files via _auto_migrate().
    """
    global _init_done
    # Fast path: lock-free read once initialization has completed.
    if _init_done:
        return
    with _init_lock:
        # Re-check under the lock: another thread may have finished
        # initializing while we were waiting to acquire it.
        if _init_done:
            return
        conn = _get_conn()
        try:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS kv_store (
                    key TEXT PRIMARY KEY,
                    value TEXT NOT NULL,
                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)
            (count,) = conn.execute("SELECT COUNT(*) FROM kv_store").fetchone()
            if count == 0:
                _auto_migrate(conn)
        finally:
            conn.close()
        # Only flip the flag after everything above succeeded, so a failed
        # init is retried on the next call.
        _init_done = True


def _auto_migrate(conn):
    """Best-effort import of the legacy *.json files into kv_store.

    Runs once, against an empty table. Missing files are silently skipped;
    files that fail to parse or insert are reported and skipped so a single
    bad file cannot block the rest of the migration.
    """
    imported = []
    for fn in MIGRATABLE_FILES:
        src = os.path.join(BASE_DIR, fn)
        if not os.path.exists(src):
            continue
        try:
            with open(src, "r", encoding="utf-8") as fh:
                payload = json.load(fh)
            # "name.json" is stored under the key "name"; the value is kept
            # as pretty-printed JSON text, matching save_json_file's format.
            conn.execute(
                "INSERT OR REPLACE INTO kv_store (key, value) VALUES (?, ?)",
                (fn[:-5], json.dumps(payload, ensure_ascii=False, indent=2)),
            )
            imported.append(fn)
        except Exception as e:
            print(f"  [migrate] skip {fn}: {e}")
    if imported:
        print(f"  [migrate] imported {len(imported)} files into SQLite: {', '.join(imported)}")


def load_json_file(filename):
    """Drop-in replacement for the old file-based loader.

    Accepts either "name.json" or a bare "name" key. Returns [] when the
    key is absent, matching the old helper's behavior for a missing file.
    """
    _ensure_init()
    if filename.endswith(".json"):
        key = filename[:-5]
    else:
        key = filename
    conn = _get_conn()
    try:
        hit = conn.execute("SELECT value FROM kv_store WHERE key = ?", (key,)).fetchone()
    finally:
        conn.close()
    if hit is None:
        return []
    return json.loads(hit[0])


def save_json_file(filename, data):
    """Drop-in replacement for the old file-based saver.

    Serializes *data* to pretty-printed JSON and UPSERTs it under the
    derived key. The module-wide lock serializes writers in this process;
    SQLite's busy timeout (see _get_conn) covers other processes. The
    single autocommit statement makes the write atomic.
    """
    _ensure_init()
    if filename.endswith(".json"):
        key = filename[:-5]
    else:
        key = filename
    payload = json.dumps(data, ensure_ascii=False, indent=2)
    with _write_lock:
        conn = _get_conn()
        try:
            conn.execute(
                "INSERT INTO kv_store (key, value, updated_at) VALUES (?, ?, CURRENT_TIMESTAMP) "
                "ON CONFLICT(key) DO UPDATE SET value = excluded.value, updated_at = CURRENT_TIMESTAMP",
                (key, payload),
            )
        finally:
            conn.close()


def list_keys():
    """Debug helper: return every key in kv_store, sorted ascending."""
    _ensure_init()
    conn = _get_conn()
    try:
        rows = conn.execute("SELECT key FROM kv_store ORDER BY key").fetchall()
        return [key for (key,) in rows]
    finally:
        conn.close()


if __name__ == "__main__":
    # Manual smoke test: initialize the database (creating/migrating it if
    # needed) and dump its location and contents.
    _ensure_init()
    print("DB:", DB_PATH)
    print("Keys:", list_keys())
