from dotenv import load_dotenv
from pathlib import Path

# Project root = the directory containing this file; uploads are stored beside it.
ROOT_DIR = Path(__file__).parent
LOCAL_UPLOAD_ROOT = ROOT_DIR / "local_uploads"
# Load .env before anything below reads os.environ (JWT_SECRET, MYSQL_*, ...).
load_dotenv(ROOT_DIR / '.env')

import os
from contextlib import asynccontextmanager
import uuid
import jwt
import bcrypt
import logging
import json
from datetime import datetime, timezone, timedelta
from typing import List, Optional, Dict, Any, Literal

from fastapi import FastAPI, APIRouter, HTTPException, Request, Response, Depends, Query, UploadFile, File, Header
from fastapi.responses import RedirectResponse
from starlette.middleware.cors import CORSMiddleware
import aiomysql
from pydantic import BaseModel, Field, EmailStr, ConfigDict
import requests

# ---------- App (FastAPI instance created after seed helpers; see lifespan below) ----------
# All endpoints are mounted under /api on this router.
api_router = APIRouter(prefix="/api")

# Symmetric signing for both access and refresh JWTs.
JWT_ALGORITHM = "HS256"

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# ---------- MySQL (env: MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASSWORD, MYSQL_DATABASE or DB_NAME) ----------
# Process-wide connection pool; created lazily by mysql_get_pool().
_mysql_pool: Optional[aiomysql.Pool] = None

# Whitelist of generic JSON-document tables. The mysql_doc_* helpers interpolate
# the table name into SQL, so membership here is the injection guard.
DOCUMENT_TABLES = frozenset({
    "vehicles", "bookings", "customers", "drivers", "branches", "maintenance",
    "payments", "promotions", "reviews", "blogs", "tickets", "notifications",
    "cms_pages", "vehicle_inspections",
})


def _mysql_db_name() -> str:
    return os.environ.get("MYSQL_DATABASE") or os.environ.get("DB_NAME", "carrental")


def _json_dumps(obj: Any) -> str:
    return json.dumps(obj, default=str)


def _json_loads(raw: Any) -> Dict[str, Any]:
    if raw is None:
        return {}
    if isinstance(raw, dict):
        return dict(raw)
    if isinstance(raw, (bytes, bytearray)):
        raw = raw.decode("utf-8")
    if isinstance(raw, str):
        return json.loads(raw)
    return json.loads(str(raw))


def _iso_to_naive_utc(ts: str) -> datetime:
    s = str(ts).replace(" ", "T")
    if s.endswith("Z"):
        s = s[:-1] + "+00:00"
    dt = datetime.fromisoformat(s)
    if dt.tzinfo is not None:
        dt = dt.astimezone(timezone.utc).replace(tzinfo=None)
    return dt


async def mysql_get_pool() -> aiomysql.Pool:
    """Return the process-wide aiomysql pool, creating it lazily on first use."""
    global _mysql_pool
    if _mysql_pool is not None:
        return _mysql_pool
    _mysql_pool = await aiomysql.create_pool(
        host=os.environ["MYSQL_HOST"],
        port=int(os.environ.get("MYSQL_PORT", "3306")),
        user=os.environ["MYSQL_USER"],
        password=os.environ["MYSQL_PASSWORD"],
        db=_mysql_db_name(),
        charset="utf8mb4",
        autocommit=True,  # every statement commits immediately; no explicit txns
        minsize=1,
        maxsize=16,
    )
    return _mysql_pool


async def mysql_init_schema() -> None:
    """Create all tables; idempotent (CREATE TABLE IF NOT EXISTS).

    Fixed tables: users, activity_logs, files, settings. Every name in
    DOCUMENT_TABLES additionally gets a generic JSON-document table with
    (id, document, created_at, updated_at) plus an index on created_at.
    """
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            # Auth accounts; email is unique (ux_users_email).
            await cur.execute("""
                CREATE TABLE IF NOT EXISTS users (
                    id VARCHAR(36) NOT NULL PRIMARY KEY,
                    email VARCHAR(255) NOT NULL,
                    password_hash VARCHAR(255) NOT NULL,
                    name VARCHAR(255) NOT NULL,
                    role VARCHAR(64) NOT NULL,
                    created_at VARCHAR(64) NOT NULL,
                    UNIQUE KEY ux_users_email (email)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)
            # Audit trail: `timestamp` keeps the original ISO string while
            # ts_sort is a real DATETIME(6) used for indexed ORDER BY.
            await cur.execute("""
                CREATE TABLE IF NOT EXISTS activity_logs (
                    id VARCHAR(36) NOT NULL PRIMARY KEY,
                    actor VARCHAR(255) NOT NULL,
                    action VARCHAR(255) NOT NULL,
                    target TEXT,
                    meta JSON NULL,
                    timestamp VARCHAR(64) NOT NULL,
                    ts_sort DATETIME(6) NOT NULL,
                    INDEX idx_activity_ts (ts_sort)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)
            # Uploaded-file metadata; bytes live on disk or in object storage.
            await cur.execute("""
                CREATE TABLE IF NOT EXISTS files (
                    id VARCHAR(36) NOT NULL PRIMARY KEY,
                    storage_path VARCHAR(512) NOT NULL,
                    original_filename VARCHAR(512) NULL,
                    content_type VARCHAR(128) NULL,
                    size BIGINT NULL,
                    uploaded_by VARCHAR(255) NULL,
                    is_deleted TINYINT(1) NOT NULL DEFAULT 0,
                    created_at VARCHAR(64) NOT NULL,
                    INDEX idx_files_path (storage_path)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)
            # Single-row settings document keyed by id = "global".
            await cur.execute("""
                CREATE TABLE IF NOT EXISTS settings (
                    id VARCHAR(36) NOT NULL PRIMARY KEY,
                    document JSON NOT NULL
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
            """)
            for tbl in DOCUMENT_TABLES:
                # MySQL caps identifier length at 64 characters.
                idx_name = ("idx_" + tbl + "_created")[:64]
                await cur.execute(f"""
                    CREATE TABLE IF NOT EXISTS `{tbl}` (
                        id VARCHAR(36) NOT NULL PRIMARY KEY,
                        document JSON NOT NULL,
                        created_at DATETIME(6) NOT NULL,
                        updated_at DATETIME(6) NOT NULL,
                        INDEX `{idx_name}` (created_at)
                    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
                """)


async def mysql_close() -> None:
    """Close the global pool (if open) and forget it so it can be recreated."""
    global _mysql_pool
    pool = _mysql_pool
    if pool is None:
        return
    pool.close()
    await pool.wait_closed()
    _mysql_pool = None


async def mysql_user_find_by_id(user_id: str, exclude_password: bool = False) -> Optional[Dict[str, Any]]:
    """Fetch one user row by primary key; optionally strip the password hash."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(
                "SELECT id, email, password_hash, name, role, created_at FROM users WHERE id = %s",
                (user_id,),
            )
            row = await cur.fetchone()
    if not row:
        return None
    user = dict(row)
    if exclude_password:
        user.pop("password_hash", None)
    return user


async def mysql_user_find_by_email(email: str, exclude_password: bool = False) -> Optional[Dict[str, Any]]:
    """Fetch one user row by email (unique column); optionally strip the password hash."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(
                "SELECT id, email, password_hash, name, role, created_at FROM users WHERE email = %s",
                (email,),
            )
            row = await cur.fetchone()
    if not row:
        return None
    user = dict(row)
    if exclude_password:
        user.pop("password_hash", None)
    return user


async def mysql_user_insert(doc: Dict[str, Any]) -> None:
    """Insert a new user row; raises on duplicate email (ux_users_email)."""
    values = (doc["id"], doc["email"], doc["password_hash"], doc["name"], doc["role"], doc["created_at"])
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(
                """INSERT INTO users (id, email, password_hash, name, role, created_at)
                   VALUES (%s, %s, %s, %s, %s, %s)""",
                values,
            )


async def mysql_user_update_password(email: str, password_hash: str) -> None:
    """Overwrite the stored password hash for *email* (no-op if email is absent)."""
    sql = "UPDATE users SET password_hash = %s WHERE email = %s"
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(sql, (password_hash, email))


async def mysql_users_list(limit: int = 500) -> List[Dict[str, Any]]:
    """List users (password hash excluded), ordered by email, up to *limit* rows."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(
                "SELECT id, email, name, role, created_at FROM users ORDER BY email LIMIT %s",
                (limit,),
            )
            rows = await cur.fetchall()
    return list(map(dict, rows))


async def mysql_activity_insert(actor: str, action: str, target: str, meta: Dict[str, Any], ts: str) -> None:
    """Append one audit row; ts is stored verbatim and also as a sortable DATETIME."""
    row = (
        str(uuid.uuid4()),          # new log id
        actor,
        action,
        target,
        _json_dumps(meta),
        ts,                         # original ISO string, kept as-is
        _iso_to_naive_utc(ts),      # naive-UTC copy for the ts_sort index
    )
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(
                """INSERT INTO activity_logs (id, actor, action, target, meta, timestamp, ts_sort)
                   VALUES (%s, %s, %s, %s, %s, %s, %s)""",
                row,
            )


async def mysql_activity_list(limit: int) -> List[Dict[str, Any]]:
    """Return the newest *limit* audit rows with meta normalized to a dict."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(
                "SELECT id, actor, action, target, meta, timestamp FROM activity_logs ORDER BY ts_sort DESC LIMIT %s",
                (limit,),
            )
            rows = await cur.fetchall()
    results: List[Dict[str, Any]] = []
    for row in rows:
        entry = dict(row)
        meta = entry.get("meta")
        # Drivers may hand back JSON columns as str/bytes or already-decoded dicts.
        if meta is None:
            entry["meta"] = {}
        elif not isinstance(meta, dict):
            entry["meta"] = _json_loads(meta)
        results.append(entry)
    return results


async def mysql_doc_insert(table: str, doc: Dict[str, Any]) -> None:
    """Insert one JSON document; *table* must be in DOCUMENT_TABLES.

    Raises ValueError for unknown tables (the name is interpolated into SQL).
    """
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    row = (
        doc["id"],
        _json_dumps(doc),
        _iso_to_naive_utc(doc["created_at"]),
        _iso_to_naive_utc(doc["updated_at"]),
    )
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(
                f"INSERT INTO `{table}` (id, document, created_at, updated_at) VALUES (%s, %s, %s, %s)",
                row,
            )


async def mysql_doc_insert_many(table: str, docs: List[Dict[str, Any]]) -> None:
    """Bulk-insert JSON documents into *table*; no-op for an empty list.

    Fix: an invalid table now raises ValueError, consistent with every other
    mysql_doc_* helper — previously it was silently swallowed together with
    the empty-docs case, hiding programming errors.
    """
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    if not docs:
        return
    pool = await mysql_get_pool()
    rows = [
        (d["id"], _json_dumps(d), _iso_to_naive_utc(d["created_at"]), _iso_to_naive_utc(d["updated_at"]))
        for d in docs
    ]
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.executemany(
                f"INSERT INTO `{table}` (id, document, created_at, updated_at) VALUES (%s, %s, %s, %s)",
                rows,
            )


async def mysql_doc_find_one(table: str, item_id: str) -> Optional[Dict[str, Any]]:
    """Fetch one document by id, or None; raises ValueError for unknown tables."""
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(f"SELECT document FROM `{table}` WHERE id = %s", (item_id,))
            row = await cur.fetchone()
    return _json_loads(row[0]) if row else None


async def mysql_doc_list(
    table: str,
    limit: int,
    status: Optional[str] = None,
    eq_filters: Optional[Dict[str, str]] = None,
) -> List[Dict[str, Any]]:
    """List documents from *table*, newest first, with optional equality filters.

    Args:
        table: must be in DOCUMENT_TABLES (name is interpolated into SQL).
        limit: maximum rows returned.
        status: if given, matched against document.status (NULL-safe `<=>`).
        eq_filters: extra top-level-key equality filters; values compared as strings.

    Raises:
        ValueError: for an unknown table, or for a filter key that is not a
            simple identifier. Fix: keys are interpolated into the JSON path
            f-string, so they must be restricted to [A-Za-z0-9_] to keep the
            query injection-safe (a key containing a quote could previously
            break out of the path literal).
    """
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    pool = await mysql_get_pool()
    parts: List[str] = []
    params: List[Any] = []
    if status:
        parts.append("JSON_UNQUOTE(JSON_EXTRACT(document, '$.status')) <=> %s")
        params.append(status)
    if eq_filters:
        for k, v in eq_filters.items():
            if not k or not all(ch.isalnum() or ch == "_" for ch in k):
                raise ValueError("invalid filter key")
            parts.append(f"JSON_UNQUOTE(JSON_EXTRACT(document, '$.{k}')) <=> %s")
            params.append(str(v))
    clause = (" WHERE " + " AND ".join(parts)) if parts else ""
    sql = f"SELECT document FROM `{table}`{clause} ORDER BY created_at DESC LIMIT %s"
    params.append(limit)
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(sql, tuple(params))
            fetched = await cur.fetchall()
    return [_json_loads(r[0]) for r in fetched]


async def mysql_doc_count(table: str) -> int:
    """Total row count of *table*; raises ValueError for unknown tables."""
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(f"SELECT COUNT(*) FROM `{table}`")
            (count,) = await cur.fetchone()
    return int(count)


async def mysql_doc_update(table: str, item_id: str, doc: Dict[str, Any]) -> int:
    """Replace the stored document for *item_id*; returns affected-row count."""
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    payload = (_json_dumps(doc), _iso_to_naive_utc(doc["updated_at"]), item_id)
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(
                f"UPDATE `{table}` SET document = %s, updated_at = %s WHERE id = %s",
                payload,
            )
            return cur.rowcount


async def mysql_doc_delete(table: str, item_id: str) -> int:
    """Hard-delete one document; returns affected-row count (0 if absent)."""
    if table not in DOCUMENT_TABLES:
        raise ValueError("invalid table")
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(f"DELETE FROM `{table}` WHERE id = %s", (item_id,))
            deleted = cur.rowcount
    return deleted


async def mysql_file_insert(doc: Dict[str, Any]) -> None:
    """Insert one file-metadata row (bytes are stored elsewhere).

    Fix: content_type is now read with .get() — the column is declared NULLable
    and the other optional fields (original_filename, size, uploaded_by) already
    use .get(), so a missing key no longer raises KeyError.
    """
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(
                """INSERT INTO files (id, storage_path, original_filename, content_type, size, uploaded_by, is_deleted, created_at)
                   VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
                (
                    doc["id"],
                    doc["storage_path"],
                    doc.get("original_filename"),
                    doc.get("content_type"),
                    doc.get("size"),
                    doc.get("uploaded_by"),
                    1 if doc.get("is_deleted") else 0,  # normalize to TINYINT
                    doc["created_at"],
                ),
            )


async def mysql_file_find_by_path(path: str) -> Optional[Dict[str, Any]]:
    """Look up a non-deleted file row by its storage path; None if absent."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor(aiomysql.DictCursor) as cur:
            await cur.execute(
                """SELECT id, storage_path, original_filename, content_type, size, uploaded_by, is_deleted, created_at
                   FROM files WHERE storage_path = %s AND is_deleted = 0""",
                (path,),
            )
            row = await cur.fetchone()
    if not row:
        return None
    return dict(row)


async def mysql_file_soft_delete(file_id: str) -> int:
    """Mark a file row deleted (bytes untouched); returns affected-row count."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute("UPDATE files SET is_deleted = 1 WHERE id = %s", (file_id,))
            affected = cur.rowcount
    return affected


async def mysql_settings_get() -> Optional[Dict[str, Any]]:
    """Load the single 'global' settings document, or None if never saved."""
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute("SELECT document FROM settings WHERE id = %s", ("global",))
            row = await cur.fetchone()
    return _json_loads(row[0]) if row else None


async def mysql_settings_upsert(doc: Dict[str, Any]) -> None:
    """Insert or replace the single 'global' settings document.

    NOTE(review): VALUES() in ON DUPLICATE KEY UPDATE is deprecated in
    MySQL 8.0.20+ (alias syntax preferred) but still works — confirm the
    target server version before migrating.
    """
    serialized = _json_dumps(doc)
    pool = await mysql_get_pool()
    async with pool.acquire() as conn:
        async with conn.cursor() as cur:
            await cur.execute(
                """INSERT INTO settings (id, document) VALUES (%s, %s)
                   ON DUPLICATE KEY UPDATE document = VALUES(document)""",
                ("global", serialized),
            )


# ---------- Helpers ----------
def now_iso() -> str:
    """Current UTC time as an ISO-8601 string (timezone-aware, '+00:00' suffix)."""
    current = datetime.now(timezone.utc)
    return current.isoformat()

def hash_password(password: str) -> str:
    """Bcrypt-hash *password* with a fresh random salt; returns the hash as text."""
    digest = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())
    return digest.decode("utf-8")

def verify_password(plain: str, hashed: str) -> bool:
    """True iff *plain* matches the bcrypt *hashed* value.

    Malformed/legacy hashes make bcrypt raise; that is treated as a failed
    login rather than a server error.
    """
    try:
        matched = bcrypt.checkpw(plain.encode("utf-8"), hashed.encode("utf-8"))
    except Exception:
        return False
    return matched

def get_jwt_secret() -> str:
    """JWT signing secret; KeyError if JWT_SECRET is unset (fail fast, no default)."""
    secret = os.environ["JWT_SECRET"]
    return secret

def create_access_token(user_id: str, email: str) -> str:
    """Mint a 12-hour HS256 access JWT (claims: sub, email, exp, type)."""
    expires = datetime.now(timezone.utc) + timedelta(hours=12)
    claims = {"sub": user_id, "email": email, "exp": expires, "type": "access"}
    return jwt.encode(claims, get_jwt_secret(), algorithm=JWT_ALGORITHM)

def create_refresh_token(user_id: str) -> str:
    """Mint a 7-day HS256 refresh JWT (claims: sub, exp, type)."""
    expires = datetime.now(timezone.utc) + timedelta(days=7)
    claims = {"sub": user_id, "exp": expires, "type": "refresh"}
    return jwt.encode(claims, get_jwt_secret(), algorithm=JWT_ALGORITHM)

def set_auth_cookies(response: Response, access: str, refresh: str):
    """Attach both JWTs as httponly cookies (12 h access, 7 d refresh).

    NOTE(review): secure=False lets cookies travel over plain HTTP —
    presumably for local development; confirm before production use.
    """
    for cookie_name, value, ttl in (
        ("access_token", access, 43200),
        ("refresh_token", refresh, 604800),
    ):
        response.set_cookie(cookie_name, value, httponly=True, secure=False, samesite="lax", max_age=ttl, path="/")

async def get_current_user(request: Request) -> dict:
    """Resolve the authenticated user from the access-token cookie or Bearer header.

    Raises HTTPException 401 when the token is missing, expired, malformed,
    of the wrong type, or refers to a user that no longer exists.
    """
    token = request.cookies.get("access_token")
    if not token:
        bearer = request.headers.get("Authorization", "")
        token = bearer[7:] if bearer.startswith("Bearer ") else None
    if not token:
        raise HTTPException(status_code=401, detail="Not authenticated")
    # Decode first; only JWT errors are translated here (ExpiredSignatureError
    # must be checked before its parent InvalidTokenError).
    try:
        payload = jwt.decode(token, get_jwt_secret(), algorithms=[JWT_ALGORITHM])
    except jwt.ExpiredSignatureError:
        raise HTTPException(status_code=401, detail="Token expired")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid token")
    if payload.get("type") != "access":
        raise HTTPException(status_code=401, detail="Invalid token type")
    user = await mysql_user_find_by_id(payload["sub"], exclude_password=True)
    if not user:
        raise HTTPException(status_code=401, detail="User not found")
    return user

async def log_activity(actor: str, action: str, target: str = "", meta: Optional[dict] = None):
    """Append one activity_logs row stamped with the current UTC time."""
    payload = meta if meta else {}
    await mysql_activity_insert(actor, action, target, payload, now_iso())

# ---------- Pydantic Models ----------
class LoginIn(BaseModel):
    """Request body for POST /auth/login."""
    email: EmailStr
    password: str

class RegisterIn(BaseModel):
    """Request body for POST /auth/register."""
    email: EmailStr
    password: str
    name: str

class UserOut(BaseModel):
    """Public user shape returned by auth endpoints.

    extra="ignore" drops password_hash/created_at when built via UserOut(**row).
    """
    model_config = ConfigDict(extra="ignore")
    id: str
    email: str
    name: str
    role: str

# Generic CRUD models use dict for flexibility
class VehicleIn(BaseModel):
    """Fleet vehicle payload for the /vehicles CRUD endpoints.

    ([] defaults are safe: pydantic copies field defaults per instance.)
    """
    name: str
    brand: str
    model_name: str
    category: str = "Sedan"
    registration_number: str
    year: int = 2024
    fuel_type: str = "Petrol"
    transmission: str = "Automatic"
    seats: int = 5
    daily_rate: float = 50.0
    discount_rate: Optional[float] = None
    status: str = "available"  # available|reserved|rented|maintenance|out_of_service
    mileage: float = 0.0
    image_url: Optional[str] = None
    insurance_expiry: Optional[str] = None
    branch_id: Optional[str] = None
    features: List[str] = []
    condition: str = "Excellent"
    gps_enabled: bool = False
    gallery_images: List[str] = []  # list of storage paths

class VehicleInspectionIn(BaseModel):
    """Inspection report payload for /vehicle_inspections."""
    vehicle_id: str
    inspector: str
    inspection_date: str
    overall_condition: str = "Good"  # Excellent|Good|Fair|Poor
    mileage: float = 0.0
    notes: str = ""
    issues_found: List[str] = []
    photo_paths: List[str] = []
    next_inspection_date: Optional[str] = None

class BookingIn(BaseModel):
    """Rental booking payload for /bookings (dates are ISO strings)."""
    customer_id: str
    vehicle_id: str
    driver_id: Optional[str] = None
    pickup_date: str
    return_date: str
    pickup_location: str
    return_location: str
    total_amount: float = 0.0
    status: str = "pending"  # pending|confirmed|ongoing|completed|cancelled
    payment_status: str = "pending"
    notes: Optional[str] = None

class CustomerIn(BaseModel):
    """Customer record payload for /customers."""
    full_name: str
    email: EmailStr
    phone: str
    license_number: Optional[str] = None
    address: Optional[str] = None
    city: Optional[str] = None
    country: Optional[str] = None
    emergency_contact: Optional[str] = None
    is_blacklisted: bool = False
    loyalty_points: int = 0
    wallet_balance: float = 0.0

class DriverIn(BaseModel):
    """Company driver payload for /drivers."""
    full_name: str
    phone: str
    email: Optional[EmailStr] = None
    license_number: str
    is_available: bool = True
    salary: float = 0.0
    rating: float = 4.5
    trips_completed: int = 0
    branch_id: Optional[str] = None

class BranchIn(BaseModel):
    """Branch/location payload for /branches (lat/lng optional map coordinates)."""
    name: str
    city: str
    address: str
    manager: Optional[str] = None
    phone: Optional[str] = None
    email: Optional[str] = None
    is_pickup: bool = True
    is_return: bool = True
    lat: Optional[float] = None
    lng: Optional[float] = None

class MaintenanceIn(BaseModel):
    """Vehicle maintenance job payload for /maintenance."""
    vehicle_id: str
    type: str  # oil_change|tire|repair|inspection|other
    description: str
    cost: float = 0.0
    scheduled_date: str
    completed_date: Optional[str] = None
    status: str = "scheduled"
    garage: Optional[str] = None

class PaymentIn(BaseModel):
    """Payment record payload for /payments."""
    booking_id: Optional[str] = None
    customer_id: Optional[str] = None
    amount: float
    method: str = "card"  # card|cash|bank_transfer|mobile_money
    type: str = "payment"  # payment|deposit|refund
    status: str = "completed"
    reference: Optional[str] = None

class PromotionIn(BaseModel):
    """Discount code payload for /promotions (percent and flat amount both supported)."""
    code: str
    description: str
    discount_percent: float = 0.0
    discount_amount: float = 0.0
    valid_from: str
    valid_until: str
    is_active: bool = True
    usage_limit: int = 100
    used_count: int = 0

class ReviewIn(BaseModel):
    """Customer review payload for /reviews (moderation via is_approved/is_hidden)."""
    customer_id: str
    vehicle_id: Optional[str] = None
    rating: float
    comment: str
    is_approved: bool = False
    is_hidden: bool = False

class BlogIn(BaseModel):
    """Blog post payload for /blogs."""
    title: str
    slug: str
    category: str = "General"
    tags: List[str] = []
    content: str
    seo_title: Optional[str] = None
    seo_description: Optional[str] = None
    cover_image: Optional[str] = None
    is_published: bool = False
    author: str = "Admin"

class TicketIn(BaseModel):
    """Support ticket payload for /tickets.

    NOTE(review): customer_email is a plain str (not EmailStr) — presumably
    deliberate to accept legacy data; confirm before tightening validation.
    """
    subject: str
    customer_email: str
    description: str
    priority: str = "medium"  # low|medium|high
    status: str = "open"  # open|in_progress|resolved|closed

class NotificationIn(BaseModel):
    """Notification payload for /notifications."""
    type: str  # booking|payment|maintenance|system
    title: str
    message: str
    channel: str = "in_app"  # in_app|email|sms|whatsapp
    is_read: bool = False

class CMSPageIn(BaseModel):
    """CMS page payload for /cms_pages; `key` identifies the site slot."""
    key: str  # about|services|contact|faq|privacy|terms|home_hero
    title: str
    content: str
    seo_title: Optional[str] = None
    seo_description: Optional[str] = None
    is_published: bool = True

class SettingsIn(BaseModel):
    """Global app settings document (stored as the single 'global' settings row)."""
    company_name: str = "NovaCar Admin"
    logo_url: Optional[str] = None
    currency: Literal["USD", "RWF"] = "USD"
    timezone: str = "UTC"
    language: str = "en"
    deposit_rule: str = "20% of total"
    late_return_charge_per_hour: float = 10.0
    fuel_policy: str = "Return with full tank"
    smtp_host: Optional[str] = None
    smtp_user: Optional[str] = None
    payment_gateway: str = "Stripe"
    sms_provider: str = "Twilio"

# ---------- Auth Endpoints ----------
@api_router.post("/auth/login")
async def login(payload: LoginIn, response: Response):
    """Verify credentials, set auth cookies, and return the user plus both tokens."""
    email = payload.email.lower()
    user = await mysql_user_find_by_email(email, exclude_password=False)
    # Single generic message for both unknown email and wrong password.
    if not user or not verify_password(payload.password, user["password_hash"]):
        raise HTTPException(status_code=401, detail="Invalid email or password")
    access = create_access_token(user["id"], email)
    refresh = create_refresh_token(user["id"])
    set_auth_cookies(response, access, refresh)
    await log_activity(user["email"], "login")
    return {
        "user": UserOut(**user).model_dump(),
        "access_token": access,
        "refresh_token": refresh,
    }

@api_router.post("/auth/register")
async def register(payload: RegisterIn, response: Response):
    """Create an account and log the new user in via cookies.

    NOTE(review): every registrant is granted the "super_admin" role —
    presumably a first-run/bootstrap flow; confirm this endpoint is not
    exposed on a public deployment.
    NOTE(review): the email-exists check and the INSERT are not atomic; a
    concurrent duplicate hits the ux_users_email unique key and surfaces as
    an unhandled driver error (HTTP 500) instead of the 400 below.
    """
    email = payload.email.lower()
    if await mysql_user_find_by_email(email, exclude_password=False):
        raise HTTPException(status_code=400, detail="Email already registered")
    user_id = str(uuid.uuid4())
    doc = {
        "id": user_id,
        "email": email,
        "name": payload.name,
        "password_hash": hash_password(payload.password),
        "role": "super_admin",
        "created_at": now_iso(),
    }
    await mysql_user_insert(doc)
    access = create_access_token(user_id, email)
    refresh = create_refresh_token(user_id)
    set_auth_cookies(response, access, refresh)
    return {"user": UserOut(id=user_id, email=email, name=payload.name, role="super_admin").model_dump(), "access_token": access, "refresh_token": refresh}

@api_router.post("/auth/logout")
async def logout(response: Response):
    """Clear both auth cookies; the JWTs themselves remain valid until expiry."""
    for cookie_name in ("access_token", "refresh_token"):
        response.delete_cookie(cookie_name, path="/")
    return {"ok": True}

@api_router.get("/auth/me")
async def me(user=Depends(get_current_user)):
    """Return the authenticated user's profile (password hash already stripped)."""
    return user

@api_router.post("/auth/refresh")
async def refresh_token(request: Request, response: Response):
    """Mint a fresh access-token cookie from a valid refresh-token cookie."""
    rt = request.cookies.get("refresh_token")
    if not rt:
        raise HTTPException(status_code=401, detail="No refresh token")
    # Expired tokens also raise InvalidTokenError (its subclass), so one
    # handler covers both cases — same observable behavior as before.
    try:
        payload = jwt.decode(rt, get_jwt_secret(), algorithms=[JWT_ALGORITHM])
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid refresh token")
    if payload.get("type") != "refresh":
        raise HTTPException(status_code=401, detail="Invalid token type")
    user = await mysql_user_find_by_id(payload["sub"], exclude_password=False)
    if not user:
        raise HTTPException(status_code=401, detail="User not found")
    new_access = create_access_token(user["id"], user["email"])
    response.set_cookie("access_token", new_access, httponly=True, secure=False, samesite="lax", max_age=43200, path="/")
    return {"ok": True}

# ---------- Generic CRUD Factory ----------
def make_crud_routes(name: str, collection: str, model_cls):
    """Register the five standard CRUD endpoints for one document collection.

    Args:
        name: URL segment (e.g. "vehicles" -> /api/vehicles).
        collection: table name; must be in DOCUMENT_TABLES.
        model_cls: pydantic model validating create/update bodies.

    All endpoints require an authenticated user via get_current_user.
    Returns the closures (kept alive by the router; return value is unused).
    """
    @api_router.get(f"/{name}")
    async def list_items(user=Depends(get_current_user), q: Optional[str] = None, status: Optional[str] = None, limit: int = 500):
        # `q` is an in-memory substring search over the already-fetched page,
        # applied after the SQL status filter and LIMIT.
        items = await mysql_doc_list(collection, limit, status=status, eq_filters=None)
        if q:
            ql = q.lower()
            items = [i for i in items if any(ql in str(v).lower() for v in i.values())]
        return items

    @api_router.post(f"/{name}")
    async def create_item(payload: model_cls, user=Depends(get_current_user)):
        doc = payload.model_dump()
        doc["id"] = str(uuid.uuid4())
        doc["created_at"] = now_iso()
        doc["updated_at"] = now_iso()
        await mysql_doc_insert(collection, doc)
        await log_activity(user["email"], f"create_{name}", doc["id"])
        # NOTE(review): "_id" pop looks like a MongoDB-era leftover; the doc
        # built above never contains that key.
        doc.pop("_id", None)
        return doc

    @api_router.get(f"/{name}/{{item_id}}")
    async def get_item(item_id: str, user=Depends(get_current_user)):
        item = await mysql_doc_find_one(collection, item_id)
        if not item:
            raise HTTPException(status_code=404, detail="Not found")
        return item

    @api_router.put(f"/{name}/{{item_id}}")
    async def update_item(item_id: str, payload: model_cls, user=Depends(get_current_user)):
        # Full-body PUT: payload fields overwrite the stored document; keys the
        # model doesn't define (id, created_at, ...) are preserved from existing.
        existing = await mysql_doc_find_one(collection, item_id)
        if not existing:
            raise HTTPException(status_code=404, detail="Not found")
        doc = {**existing, **payload.model_dump()}
        doc["updated_at"] = now_iso()
        await mysql_doc_update(collection, item_id, doc)
        await log_activity(user["email"], f"update_{name}", item_id)
        updated = await mysql_doc_find_one(collection, item_id)
        return updated

    @api_router.delete(f"/{name}/{{item_id}}")
    async def delete_item(item_id: str, user=Depends(get_current_user)):
        n = await mysql_doc_delete(collection, item_id)
        if n == 0:
            raise HTTPException(status_code=404, detail="Not found")
        await log_activity(user["email"], f"delete_{name}", item_id)
        return {"ok": True}

    return list_items, create_item, get_item, update_item, delete_item

# Register CRUD endpoints — one call per collection in DOCUMENT_TABLES;
# first arg is the URL segment, second the table name, third the input model.
make_crud_routes("vehicles", "vehicles", VehicleIn)
make_crud_routes("bookings", "bookings", BookingIn)
make_crud_routes("customers", "customers", CustomerIn)
make_crud_routes("drivers", "drivers", DriverIn)
make_crud_routes("branches", "branches", BranchIn)
make_crud_routes("maintenance", "maintenance", MaintenanceIn)
make_crud_routes("payments", "payments", PaymentIn)
make_crud_routes("promotions", "promotions", PromotionIn)
make_crud_routes("reviews", "reviews", ReviewIn)
make_crud_routes("blogs", "blogs", BlogIn)
make_crud_routes("tickets", "tickets", TicketIn)
make_crud_routes("notifications", "notifications", NotificationIn)
make_crud_routes("cms_pages", "cms_pages", CMSPageIn)
make_crud_routes("vehicle_inspections", "vehicle_inspections", VehicleInspectionIn)

# ---------- Object Storage ----------
STORAGE_URL = "https://integrations.emergentagent.com/objstore/api/v1/storage"
APP_NAME = "carrental"
# Cached service key obtained by init_storage(); reset to None to force re-init.
storage_key: Optional[str] = None
MAX_FILE_SIZE = 10 * 1024 * 1024  # 10 MB
# Upload content-type whitelist (images + PDF only).
ALLOWED_CONTENT_TYPES = {
    "image/jpeg", "image/png", "image/webp", "image/gif", "application/pdf",
}


def _safe_local_upload_path(storage_path: str) -> Path:
    """Resolve *storage_path* under LOCAL_UPLOAD_ROOT; HTTP 400 on traversal attempts."""
    cleaned = (storage_path or "").replace("\\", "/").strip().lstrip("/")
    if not cleaned:
        raise HTTPException(status_code=400, detail="Invalid path")
    root = LOCAL_UPLOAD_ROOT.resolve()
    candidate = (root / cleaned).resolve()
    # relative_to raises when the resolved path escaped the upload root.
    try:
        candidate.relative_to(root)
    except ValueError:
        raise HTTPException(status_code=400, detail="Invalid path")
    return candidate


def try_read_local_upload(storage_path: str) -> Optional[tuple[bytes, str]]:
    """Return (bytes, content_type) if the file exists under local_uploads/, else None."""
    try:
        resolved = _safe_local_upload_path(storage_path)
    except HTTPException:
        return None
    if not resolved.is_file():
        return None
    # Content type guessed from the extension; unknown -> octet-stream.
    content_types = {
        ".jpg": "image/jpeg",
        ".jpeg": "image/jpeg",
        ".png": "image/png",
        ".gif": "image/gif",
        ".webp": "image/webp",
        ".pdf": "application/pdf",
    }
    mime = content_types.get(resolved.suffix.lower(), "application/octet-stream")
    return resolved.read_bytes(), mime


def save_local_upload(storage_path: str, content: bytes) -> None:
    """Write *content* under local_uploads/ (fallback when object storage is down)."""
    target = _safe_local_upload_path(storage_path)
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_bytes(content)


def fetch_file_bytes(storage_path: str) -> tuple[bytes, str]:
    """Load a file from local disk if present, otherwise from remote object storage."""
    cached = try_read_local_upload(storage_path)
    if cached:
        return cached
    return get_object(storage_path)


def init_storage() -> Optional[str]:
    """Obtain (and cache in the module-global) a storage key from the service.

    Returns None when EMERGENT_LLM_KEY is unset or the init request fails.
    """
    global storage_key
    if storage_key:
        return storage_key
    emergent_key = os.environ.get("EMERGENT_LLM_KEY")
    if not emergent_key:
        logger.warning("EMERGENT_LLM_KEY not set; object storage disabled")
        return None
    try:
        resp = requests.post(f"{STORAGE_URL}/init", json={"emergent_key": emergent_key}, timeout=30)
        resp.raise_for_status()
        storage_key = resp.json()["storage_key"]
    except Exception as e:
        logger.error(f"Storage init failed: {e}")
        return None
    logger.info("Object storage initialized")
    return storage_key

def put_object(path: str, data: bytes, content_type: str) -> dict:
    """Upload *data* to remote object storage at *path* and return the storage service's JSON reply.

    A 403 response triggers one key refresh + retry. Raises HTTPException(503)
    when storage is not configured; other HTTP failures raise via
    ``raise_for_status``.
    """
    key = init_storage()
    if not key:
        raise HTTPException(status_code=503, detail="Storage not configured")
    resp = requests.put(
        f"{STORAGE_URL}/objects/{path}",
        headers={"X-Storage-Key": key, "Content-Type": content_type},
        data=data, timeout=120,
    )
    if resp.status_code == 403:
        # Key likely expired: refresh it and retry exactly once.
        global storage_key
        storage_key = None
        key = init_storage()
        if not key:
            # Bug fix: a failed refresh previously sent a None-valued
            # X-Storage-Key header, making `requests` raise an obscure error
            # instead of a clean 503.
            raise HTTPException(status_code=503, detail="Storage not configured")
        resp = requests.put(
            f"{STORAGE_URL}/objects/{path}",
            headers={"X-Storage-Key": key, "Content-Type": content_type},
            data=data, timeout=120,
        )
    resp.raise_for_status()
    return resp.json()

def get_object(path: str) -> tuple:
    """Download an object from remote storage; returns (bytes, content_type).

    A 403 response triggers one key refresh + retry. Raises
    HTTPException(503) when storage is not configured, HTTPException(404)
    when the object does not exist; other HTTP failures raise via
    ``raise_for_status``.
    """
    key = init_storage()
    if not key:
        raise HTTPException(status_code=503, detail="Storage not configured")
    resp = requests.get(
        f"{STORAGE_URL}/objects/{path}",
        headers={"X-Storage-Key": key}, timeout=60,
    )
    if resp.status_code == 403:
        # Key likely expired: refresh it and retry exactly once.
        global storage_key
        storage_key = None
        key = init_storage()
        if not key:
            # Bug fix: a failed refresh previously sent a None-valued
            # X-Storage-Key header, making `requests` raise an obscure error
            # instead of a clean 503.
            raise HTTPException(status_code=503, detail="Storage not configured")
        resp = requests.get(
            f"{STORAGE_URL}/objects/{path}",
            headers={"X-Storage-Key": key}, timeout=60,
        )
    if resp.status_code == 404:
        raise HTTPException(status_code=404, detail="File not found")
    resp.raise_for_status()
    return resp.content, resp.headers.get("Content-Type", "application/octet-stream")

@api_router.post("/uploads")
async def upload_file(
    file: UploadFile = File(...),
    folder: str = Query("misc", description="Sub-folder, e.g. vehicles/{id}, inspections, customers"),
    user=Depends(get_current_user),
):
    """Store an uploaded file (remote object storage with local-disk fallback) and record it in MySQL.

    Enforces MAX_FILE_SIZE (413) and ALLOWED_CONTENT_TYPES (415). Returns
    the file record id, storage path, and a /api/files/ URL for retrieval.
    """
    content = await file.read()
    if len(content) > MAX_FILE_SIZE:
        raise HTTPException(status_code=413, detail=f"File too large (max {MAX_FILE_SIZE // 1024 // 1024} MB)")
    ct = file.content_type or "application/octet-stream"
    if ct not in ALLOWED_CONTENT_TYPES:
        raise HTTPException(status_code=415, detail=f"Content type '{ct}' not allowed")
    raw_name = file.filename or ""
    ext = raw_name.rsplit(".", 1)[-1].lower() if "." in raw_name else "bin"
    # Bug fix: the extension is client-controlled and could contain path
    # separators (e.g. filename "a.b/c" -> ext "b/c"), creating unintended
    # nested storage paths. Keep alphanumerics only, capped at 10 chars.
    ext = "".join(ch for ch in ext if ch.isalnum())[:10] or "bin"
    safe_folder = folder.strip("/").replace("..", "")
    path = f"{APP_NAME}/{safe_folder}/{uuid.uuid4()}.{ext}"
    file_id = str(uuid.uuid4())
    try:
        result = put_object(path, content, ct)
        storage_path = result["path"]
    except Exception as e:
        # Best-effort fallback: keep the upload on local disk so the app
        # remains usable when remote object storage is down.
        logger.warning(
            "Object storage upload failed (%s); saving to local_uploads/ under backend/ (path still in MySQL files + blog document).",
            e,
        )
        save_local_upload(path, content)
        storage_path = path
    await mysql_file_insert({
        "id": file_id,
        "storage_path": storage_path,
        "original_filename": file.filename,
        "content_type": ct,
        "size": len(content),
        "uploaded_by": user["email"],
        "is_deleted": False,
        "created_at": now_iso(),
    })
    return {
        "id": file_id,
        "path": storage_path,
        "url": f"/api/files/{storage_path}",
        "content_type": ct,
        "size": len(content),
        "original_filename": file.filename,
    }

@api_router.get("/files/{path:path}")
async def serve_file(path: str, request: Request, auth: Optional[str] = Query(None)):
    """Stream a stored file to an authenticated caller.

    The access token may arrive three ways (checked in this order): the
    ?auth= query param (so plain <img src> tags work), an Authorization
    Bearer header, or the access_token cookie.
    """
    bearer = request.headers.get("Authorization", "")
    if auth:
        token = auth
    elif bearer.startswith("Bearer "):
        token = bearer[len("Bearer "):]
    else:
        token = request.cookies.get("access_token")
    if not token:
        raise HTTPException(status_code=401, detail="Not authenticated")
    try:
        payload = jwt.decode(token, get_jwt_secret(), algorithms=[JWT_ALGORITHM])
        # Refresh tokens must not grant file access.
        if payload.get("type") != "access":
            raise HTTPException(status_code=401, detail="Invalid token")
    except jwt.InvalidTokenError:
        raise HTTPException(status_code=401, detail="Invalid token")

    record = await mysql_file_find_by_path(path)
    if not record:
        raise HTTPException(status_code=404, detail="File not found")
    data, ct = fetch_file_bytes(path)
    return Response(content=data, media_type=record.get("content_type", ct))

@api_router.delete("/uploads/{file_id}")
async def delete_file(file_id: str, user=Depends(get_current_user)):
    """Soft-delete an upload record; 404 when no row was affected."""
    affected = await mysql_file_soft_delete(file_id)
    if not affected:
        raise HTTPException(status_code=404, detail="Not found")
    await log_activity(user["email"], "delete_file", file_id)
    return {"ok": True}

# ---------- Dashboard Stats ----------
@api_router.get("/dashboard/stats")
async def dashboard_stats(user=Depends(get_current_user)):
    """Aggregate KPIs and chart series for the admin dashboard.

    Returns fleet/booking/customer/driver counts, revenue totals, a
    six-month revenue series, booking-status and category distributions,
    the most-rented vehicle, and recent activity entries.
    """
    vehicles = await mysql_doc_list("vehicles", 2000)
    bookings = await mysql_doc_list("bookings", 5000)
    customers_count = await mysql_doc_count("customers")
    drivers = await mysql_doc_list("drivers", 1000)
    payments = await mysql_doc_list("payments", 5000)

    # Perf fix: index vehicles by id once. The previous code re-scanned the
    # whole vehicle list per booking (O(vehicles * bookings)).
    veh_by_id = {v["id"]: v for v in vehicles}

    total_cars = len(vehicles)
    available = sum(1 for v in vehicles if v.get("status") == "available")
    rented = sum(1 for v in vehicles if v.get("status") == "rented")
    maintenance = sum(1 for v in vehicles if v.get("status") == "maintenance")

    active_bookings = sum(1 for b in bookings if b.get("status") in ("confirmed", "ongoing"))
    completed_rentals = sum(1 for b in bookings if b.get("status") == "completed")
    pending_requests = sum(1 for b in bookings if b.get("status") == "pending")

    total_revenue = sum(p.get("amount", 0) for p in payments if p.get("type") == "payment" and p.get("status") == "completed")
    pending_payments = sum(1 for p in payments if p.get("status") == "pending")

    available_drivers = sum(1 for d in drivers if d.get("is_available"))

    # Revenue by month (last 6), keyed by the "YYYY-MM" prefix of created_at.
    revenue_by_month: Dict[str, float] = {}
    for p in payments:
        if p.get("type") != "payment" or p.get("status") != "completed":
            continue
        ts = p.get("created_at", "")
        if len(ts) >= 7:
            key = ts[:7]
            revenue_by_month[key] = revenue_by_month.get(key, 0) + float(p.get("amount", 0))
    revenue_chart = sorted([{"month": k, "revenue": round(v, 2)} for k, v in revenue_by_month.items()], key=lambda x: x["month"])[-6:]

    # Bookings by status (fixed display order).
    booking_status_chart = []
    for st in ["pending", "confirmed", "ongoing", "completed", "cancelled"]:
        booking_status_chart.append({"status": st.capitalize(), "count": sum(1 for b in bookings if b.get("status") == st)})

    # Most rented vehicle (by booking count).
    veh_count: Dict[str, int] = {}
    for b in bookings:
        vid = b.get("vehicle_id")
        if vid:
            veh_count[vid] = veh_count.get(vid, 0) + 1
    most_rented = None
    if veh_count:
        top_id = max(veh_count, key=veh_count.get)
        v = veh_by_id.get(top_id)
        if v:
            most_rented = {"name": v["name"], "bookings": veh_count[top_id]}

    # Category distribution across bookings.
    cat_chart: Dict[str, int] = {}
    for b in bookings:
        v = veh_by_id.get(b.get("vehicle_id"))
        if v:
            cat = v.get("category", "Other")
            cat_chart[cat] = cat_chart.get(cat, 0) + 1
    category_chart = [{"category": k, "count": v} for k, v in cat_chart.items()]

    # Recent activity
    activities = await mysql_activity_list(20)

    return {
        "total_cars": total_cars,
        "available_cars": available,
        "rented_cars": rented,
        "maintenance_cars": maintenance,
        "total_customers": customers_count,
        "active_bookings": active_bookings,
        "completed_rentals": completed_rentals,
        "pending_requests": pending_requests,
        "total_revenue": round(total_revenue, 2),
        "pending_payments": pending_payments,
        "available_drivers": available_drivers,
        "total_drivers": len(drivers),
        "website_visitors": 12483,  # placeholder traffic counter
        "most_rented": most_rented,
        "revenue_chart": revenue_chart,
        "booking_status_chart": booking_status_chart,
        "category_chart": category_chart,
        "activities": activities,
    }

# ---------- Activity Logs / Settings ----------
@api_router.get("/activity-logs")
async def get_activity_logs(user=Depends(get_current_user), limit: int = 100):
    """Return up to *limit* activity-log entries for the admin UI."""
    return await mysql_activity_list(limit)


def _settings_default_dict() -> Dict[str, Any]:
    """Default settings document (SettingsIn defaults) keyed by the fixed id 'global'."""
    return {**SettingsIn().model_dump(), "id": "global"}


async def _effective_settings_doc() -> Dict[str, Any]:
    """Stored settings merged over defaults, with currency coerced to USD when invalid."""
    stored = await mysql_settings_get()
    if not stored:
        return {**_settings_default_dict(), "updated_at": now_iso()}
    merged = {**_settings_default_dict(), **stored}
    if merged.get("currency") not in ("USD", "RWF"):
        merged["currency"] = "USD"
    return merged


@api_router.get("/branding")
async def get_public_branding():
    """Public: company name / logo ref / currency for login & shell (no auth)."""
    settings_doc = await _effective_settings_doc()
    branding = {
        "company_name": settings_doc.get("company_name") or "NovaCar Admin",
        "logo_url": settings_doc.get("logo_url"),
        "currency": settings_doc.get("currency") or "USD",
        "updated_at": settings_doc.get("updated_at"),
    }
    return branding


def _resolved_branding_storage_path(logo_raw: str) -> Optional[str]:
    """Return object-store path under {APP_NAME}/branding/ or None if not an internal logo.

    External http(s) URLs and paths outside the branding allowlist yield None.
    A leading "/api/files/" segment (as produced by /uploads) is stripped.
    """
    candidate = (logo_raw or "").strip()
    if not candidate or candidate.startswith(("http://", "https://")):
        return None
    marker = "/api/files/"
    if marker in candidate:
        candidate = candidate.split(marker, 1)[1]
    candidate = candidate.lstrip("/")
    if candidate.startswith(f"{APP_NAME}/branding/"):
        return candidate
    return None


@api_router.get("/public/branding-asset")
async def public_branding_asset():
    """Serve uploaded company logo without auth (path allowlist: carrental/branding/*). External URLs redirect."""
    settings_doc = await _effective_settings_doc()
    logo = (settings_doc.get("logo_url") or "").strip()
    if not logo:
        raise HTTPException(status_code=404, detail="No logo configured")
    if logo.startswith(("http://", "https://")):
        return RedirectResponse(url=logo, status_code=302)
    storage_path = _resolved_branding_storage_path(logo)
    if not storage_path:
        raise HTTPException(status_code=404, detail="Invalid logo path")
    record = await mysql_file_find_by_path(storage_path)
    if not record:
        raise HTTPException(status_code=404, detail="Logo file not found")
    data, ct = fetch_file_bytes(storage_path)
    return Response(content=data, media_type=record.get("content_type", ct or "application/octet-stream"))


@api_router.get("/settings")
async def get_settings(user=Depends(get_current_user)):
    """Return the global settings document, creating the default row on first access."""
    stored = await mysql_settings_get()
    if not stored:
        fresh = {**_settings_default_dict(), "updated_at": now_iso()}
        await mysql_settings_upsert(fresh)
        fresh.pop("_id", None)  # defensive: never leak a storage-internal id
        return fresh
    merged = {**_settings_default_dict(), **stored}
    merged.pop("_id", None)
    if merged.get("currency") not in ("USD", "RWF"):
        merged["currency"] = "USD"
    return merged

@api_router.put("/settings")
async def update_settings(payload: SettingsIn, user=Depends(get_current_user)):
    """Persist the settings payload under the fixed id 'global' and log the change."""
    doc = {**payload.model_dump(), "id": "global", "updated_at": now_iso()}
    await mysql_settings_upsert(doc)
    await log_activity(user["email"], "update_settings")
    return doc

@api_router.get("/users")
async def list_users(user=Depends(get_current_user)):
    """Return up to 500 admin-panel user accounts."""
    return await mysql_users_list(500)

# ---------- Reports ----------
@api_router.get("/reports/financial")
async def financial_report(user=Depends(get_current_user)):
    """Revenue, refund, and deposit totals over the most recent payments.

    Robustness fix: use .get("amount", 0) — a payment row missing the
    "amount" field previously raised KeyError (the dashboard endpoint
    already guards against this the same way).
    """
    payments = await mysql_doc_list("payments", 5000)
    total_in = sum(p.get("amount", 0) for p in payments if p.get("type") == "payment" and p.get("status") == "completed")
    total_refunds = sum(p.get("amount", 0) for p in payments if p.get("type") == "refund")
    deposits = sum(p.get("amount", 0) for p in payments if p.get("type") == "deposit")
    return {
        "total_revenue": round(total_in, 2),
        "total_refunds": round(total_refunds, 2),
        "deposits": round(deposits, 2),
        "net": round(total_in - total_refunds, 2),
        "tax_estimate": round(total_in * 0.18, 2),  # flat 18% estimate on revenue
    }

@api_router.get("/reports/operational")
async def operational_report(user=Depends(get_current_user)):
    """Fleet utilization percentage and average booking value."""
    bookings = await mysql_doc_list("bookings", 5000)
    vehicles = await mysql_doc_list("vehicles", 2000)
    rented = sum(1 for v in vehicles if v.get("status") == "rented")
    booking_total = sum(b.get("total_amount", 0) for b in bookings)
    # max(..., 1) guards the empty-table division.
    return {
        "total_bookings": len(bookings),
        "vehicle_utilization_pct": round((rented / max(len(vehicles), 1)) * 100, 2),
        "avg_booking_value": round(booking_total / max(len(bookings), 1), 2),
    }

# ---------- Exports (CSV / Excel / PDF) ----------
import io
import csv as _csv
from fastapi.responses import StreamingResponse
from openpyxl import Workbook
from reportlab.lib.pagesizes import letter
from reportlab.lib import colors as rl_colors
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph, Spacer
from reportlab.lib.styles import getSampleStyleSheet

# Whitelist of exportable resources -> ordered column names used by /exports.
# Keys must match document-table names; values are the doc fields emitted
# (in column order) for CSV/XLSX/PDF output.
EXPORTABLE = {
    "vehicles": ["name", "brand", "category", "registration_number", "year", "fuel_type", "transmission", "daily_rate", "status", "mileage"],
    "bookings": ["id", "customer_id", "vehicle_id", "pickup_date", "return_date", "total_amount", "status", "payment_status"],
    "customers": ["full_name", "email", "phone", "license_number", "city", "country", "loyalty_points", "wallet_balance", "is_blacklisted"],
    "drivers": ["full_name", "phone", "email", "license_number", "is_available", "salary", "rating", "trips_completed"],
    "payments": ["reference", "booking_id", "amount", "method", "type", "status", "created_at"],
    "maintenance": ["vehicle_id", "type", "description", "cost", "scheduled_date", "status", "garage"],
    "promotions": ["code", "description", "discount_percent", "valid_from", "valid_until", "is_active", "used_count", "usage_limit"],
    "reviews": ["customer_id", "vehicle_id", "rating", "comment", "is_approved"],
    "branches": ["name", "city", "address", "manager", "phone", "email"],
    "tickets": ["subject", "customer_email", "priority", "status", "created_at"],
}

@api_router.get("/exports/{resource}.{fmt}")
async def export_resource(resource: str, fmt: str, user=Depends(get_current_user)):
    """Export a whitelisted resource as a CSV, XLSX, or PDF attachment.

    Bug fix: the Content-Disposition headers previously used the literal
    string "(unknown)" as the download name even though a timestamped
    `filename` was computed; downloads now use that computed name.
    """
    if resource not in EXPORTABLE:
        raise HTTPException(status_code=400, detail=f"Resource '{resource}' is not exportable")
    if fmt not in ("csv", "xlsx", "pdf"):
        raise HTTPException(status_code=400, detail="Format must be csv, xlsx, or pdf")

    company_name = (await _effective_settings_doc()).get("company_name") or "NovaCar Admin"

    cols = EXPORTABLE[resource]
    items = await mysql_doc_list(resource, 5000)
    # Truncate each cell to 200 chars so one huge field can't blow up the export.
    rows = [[str(i.get(c, ""))[:200] for c in cols] for i in items]

    filename = f"{resource}_export_{datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S')}"
    header_row = [c.replace("_", " ").upper() for c in cols]

    if fmt == "csv":
        buf = io.StringIO()
        writer = _csv.writer(buf)
        writer.writerow(header_row)
        writer.writerows(rows)
        await log_activity(user["email"], f"export_{resource}_csv")
        return StreamingResponse(
            iter([buf.getvalue()]),
            media_type="text/csv",
            headers={"Content-Disposition": f'attachment; filename="{filename}.csv"'},
        )

    if fmt == "xlsx":
        wb = Workbook()
        ws = wb.active
        ws.title = resource[:31]  # Excel caps sheet names at 31 chars
        ws.append(header_row)
        for row in rows:
            ws.append(row)
        # chr(64 + i) yields 'A'..'Z'; fine while no resource exports > 26 columns.
        for i, _ in enumerate(cols, 1):
            ws.column_dimensions[chr(64 + i)].width = 18
        out = io.BytesIO()
        wb.save(out)
        out.seek(0)
        await log_activity(user["email"], f"export_{resource}_xlsx")
        return StreamingResponse(
            iter([out.getvalue()]),
            media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
            headers={"Content-Disposition": f'attachment; filename="{filename}.xlsx"'},
        )

    # PDF
    out = io.BytesIO()
    doc = SimpleDocTemplate(out, pagesize=letter, leftMargin=24, rightMargin=24, topMargin=32, bottomMargin=24)
    styles = getSampleStyleSheet()
    story = [
        Paragraph(f"<b>{company_name} — {resource.replace('_', ' ').title()} Report</b>", styles["Title"]),
        Paragraph(f"Generated: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M UTC')} · {len(items)} records", styles["Normal"]),
        Spacer(1, 12),
    ]
    table_data = [header_row] + rows
    tbl = Table(table_data, repeatRows=1)
    tbl.setStyle(TableStyle([
        ("BACKGROUND", (0, 0), (-1, 0), rl_colors.HexColor("#E60000")),
        ("TEXTCOLOR", (0, 0), (-1, 0), rl_colors.white),
        ("FONTNAME", (0, 0), (-1, 0), "Helvetica-Bold"),
        ("FONTSIZE", (0, 0), (-1, -1), 7),
        ("BOTTOMPADDING", (0, 0), (-1, 0), 8),
        ("GRID", (0, 0), (-1, -1), 0.25, rl_colors.HexColor("#CCCCCC")),
        ("ROWBACKGROUNDS", (0, 1), (-1, -1), [rl_colors.white, rl_colors.HexColor("#FAFAFA")]),
        ("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
    ]))
    story.append(tbl)
    doc.build(story)
    out.seek(0)
    await log_activity(user["email"], f"export_{resource}_pdf")
    return StreamingResponse(
        iter([out.getvalue()]),
        media_type="application/pdf",
        headers={"Content-Disposition": f'attachment; filename="{filename}.pdf"'},
    )

# ---------- AI Smart Features ----------
# Optional dependency: the AI endpoints below check for None and return 503
# when the proprietary emergentintegrations package is not installed.
try:
    from emergentintegrations.llm.chat import LlmChat, UserMessage
except ImportError:
    LlmChat = None  # type: ignore[misc, assignment]
    UserMessage = None  # type: ignore[misc, assignment]

# Key used for LLM calls (also read by init_storage via os.environ).
EMERGENT_LLM_KEY = os.environ.get("EMERGENT_LLM_KEY", "")

# Request body for POST /ai/recommend (no docstring: pydantic would surface
# it as the OpenAPI schema description).
class AIRecommendIn(BaseModel):
    customer_id: Optional[str] = None  # optional existing customer to personalize for
    preferences: Optional[str] = None  # free-text "I need an SUV for 5 days, family trip"

@api_router.post("/ai/recommend")
async def ai_recommend(payload: AIRecommendIn, user=Depends(get_current_user)):
    """LLM-backed vehicle recommendation drawn from the currently available fleet."""
    if LlmChat is None or UserMessage is None:
        raise HTTPException(
            status_code=503,
            detail="AI features require the proprietary emergentintegrations package (not published on PyPI).",
        )
    if not EMERGENT_LLM_KEY:
        raise HTTPException(status_code=503, detail="LLM key not configured")
    vehicles = await mysql_doc_list("vehicles", 50, status="available")
    fleet_lines = [
        f"- {v['name']} | {v.get('category')} | seats {v.get('seats')} | {v.get('fuel_type')} | ${v.get('daily_rate')}/day | {v.get('transmission')}"
        for v in vehicles[:25]
    ]
    veh_text = "\n".join(fleet_lines)
    customer_ctx = ""
    if payload.customer_id:
        customer = await mysql_doc_find_one("customers", payload.customer_id)
        if customer:
            past = await mysql_doc_list("bookings", 20, eq_filters={"customer_id": payload.customer_id})
            customer_ctx = f"Customer: {customer.get('full_name')}, loyalty {customer.get('loyalty_points')} pts, past bookings: {len(past)}"

    chat = LlmChat(
        api_key=EMERGENT_LLM_KEY,
        session_id=f"recommend-{payload.customer_id or 'guest'}",
        system_message="You are a car rental concierge. Recommend the top 3 vehicles from the provided fleet that best match the customer needs. Reply in concise plain text with: 1) Top pick (name + 1 sentence reason), 2) Alternative (name + 1 sentence reason), 3) Budget option (name + 1 sentence reason). No markdown, no greeting."
    ).with_model("anthropic", "claude-sonnet-4-5-20250929")

    prompt = UserMessage(text=f"AVAILABLE FLEET:\n{veh_text}\n\n{customer_ctx}\n\nCUSTOMER PREFERENCES: {payload.preferences or 'No specific preferences provided'}")
    try:
        answer = await chat.send_message(prompt)
        await log_activity(user["email"], "ai_recommend")
        return {"recommendation": answer, "available_count": len(vehicles)}
    except Exception as e:
        logger.exception("ai_recommend failed")
        raise HTTPException(status_code=500, detail=f"AI request failed: {str(e)}")

@api_router.post("/ai/dynamic-pricing")
async def ai_dynamic_pricing(user=Depends(get_current_user)):
    """Ask the LLM for price-multiplier suggestions based on per-vehicle booking counts."""
    if LlmChat is None or UserMessage is None:
        raise HTTPException(
            status_code=503,
            detail="AI features require the proprietary emergentintegrations package (not published on PyPI).",
        )
    if not EMERGENT_LLM_KEY:
        raise HTTPException(status_code=503, detail="LLM key not configured")
    vehicles = await mysql_doc_list("vehicles", 50)
    bookings = await mysql_doc_list("bookings", 500)

    utilization_lines = []
    for v in vehicles[:15]:
        booked = sum(1 for b in bookings if b.get("vehicle_id") == v["id"])
        utilization_lines.append(
            f"- {v['name']} | category {v.get('category')} | base ${v.get('daily_rate')}/day | status {v.get('status')} | bookings: {booked}"
        )
    fleet_block = "\n".join(utilization_lines)

    chat = LlmChat(
        api_key=EMERGENT_LLM_KEY,
        session_id="pricing-advisor",
        system_message="You are a revenue management AI for a car rental company. Analyze the fleet utilization and recommend dynamic price adjustments. Output ONLY a plain-text list with format: 'Vehicle Name → suggested multiplier (e.g., 1.15x or 0.90x) → 1-line reason'. Cover top 5 highest-impact recommendations."
    ).with_model("anthropic", "claude-sonnet-4-5-20250929")

    prompt = UserMessage(text=f"FLEET UTILIZATION DATA:\n{fleet_block}\n\nTotal bookings: {len(bookings)}\nCurrent month: {datetime.now(timezone.utc).strftime('%B %Y')}")
    try:
        suggestions = await chat.send_message(prompt)
        await log_activity(user["email"], "ai_dynamic_pricing")
        return {"suggestions": suggestions, "vehicles_analyzed": len(vehicles)}
    except Exception as e:
        logger.exception("ai_dynamic_pricing failed")
        raise HTTPException(status_code=500, detail=f"AI request failed: {str(e)}")

@api_router.post("/ai/demand-prediction")
async def ai_demand_prediction(user=Depends(get_current_user)):
    """Ask the LLM for a 30-day demand forecast from historical bookings by category."""
    if LlmChat is None or UserMessage is None:
        raise HTTPException(
            status_code=503,
            detail="AI features require the proprietary emergentintegrations package (not published on PyPI).",
        )
    if not EMERGENT_LLM_KEY:
        raise HTTPException(status_code=503, detail="LLM key not configured")
    bookings = await mysql_doc_list("bookings", 1000)
    vehicles = await mysql_doc_list("vehicles", 100)

    # Tally bookings per vehicle category.
    cat_counts: Dict[str, int] = {}
    for b in bookings:
        match = next((x for x in vehicles if x["id"] == b.get("vehicle_id")), None)
        if match is None:
            continue
        category = match.get("category", "Other")
        cat_counts[category] = cat_counts.get(category, 0) + 1
    breakdown = "\n".join(f"- {k}: {v} bookings" for k, v in cat_counts.items())

    chat = LlmChat(
        api_key=EMERGENT_LLM_KEY,
        session_id="demand-predictor",
        system_message="You are a demand forecasting AI for a car rental business. Provide a brief forecast (4-6 short sentences) covering: expected demand trend next 30 days, top categories to stock up, any oversupply risks. Plain text only."
    ).with_model("anthropic", "claude-sonnet-4-5-20250929")

    prompt = UserMessage(text=f"HISTORICAL BOOKINGS BY CATEGORY:\n{breakdown}\n\nTotal fleet: {len(vehicles)} vehicles\nTotal bookings: {len(bookings)}\nCurrent month: {datetime.now(timezone.utc).strftime('%B %Y')}")
    try:
        forecast = await chat.send_message(prompt)
        await log_activity(user["email"], "ai_demand_prediction")
        return {"forecast": forecast}
    except Exception as e:
        logger.exception("ai_demand_prediction failed")
        raise HTTPException(status_code=500, detail=f"AI request failed: {str(e)}")

# ---------- Seed ----------
async def seed_admin():
    """Ensure the super-admin account exists and matches ADMIN_EMAIL / ADMIN_PASSWORD."""
    admin_email = os.environ.get("ADMIN_EMAIL", "admin@carrental.com").lower()
    admin_password = os.environ.get("ADMIN_PASSWORD", "Admin@123")
    existing = await mysql_user_find_by_email(admin_email, exclude_password=False)
    if existing:
        # Re-sync the stored hash whenever the configured password changed.
        if not verify_password(admin_password, existing["password_hash"]):
            await mysql_user_update_password(admin_email, hash_password(admin_password))
        return
    await mysql_user_insert({
        "id": str(uuid.uuid4()),
        "email": admin_email,
        "name": "Super Admin",
        "password_hash": hash_password(admin_password),
        "role": "super_admin",
        "created_at": now_iso(),
    })
    logger.info(f"Seeded admin: {admin_email}")

async def seed_demo_data():
    if await mysql_doc_count("vehicles") > 0:
        return
    logger.info("Seeding demo data...")

    # Branches
    branches = [
        {"name": "Downtown Hub", "city": "New York", "address": "120 Park Ave", "manager": "James Carter", "phone": "+1-212-555-0101", "email": "ny@eliteride.com", "is_pickup": True, "is_return": True, "lat": 40.7549, "lng": -73.9840},
        {"name": "Airport Branch", "city": "Los Angeles", "address": "1 World Way, LAX", "manager": "Sarah Lin", "phone": "+1-310-555-0188", "email": "lax@eliteride.com", "is_pickup": True, "is_return": True, "lat": 33.9416, "lng": -118.4085},
        {"name": "Beach Office", "city": "Miami", "address": "1500 Ocean Drive", "manager": "Carlos Vega", "phone": "+1-305-555-0142", "email": "mia@eliteride.com", "is_pickup": True, "is_return": True, "lat": 25.7825, "lng": -80.1340},
    ]
    branch_ids = []
    for b in branches:
        b["id"] = str(uuid.uuid4())
        b["created_at"] = now_iso()
        b["updated_at"] = now_iso()
        branch_ids.append(b["id"])
    await mysql_doc_insert_many("branches", branches)

    # Vehicles
    vehicle_data = [
        ("Tesla Model S", "Tesla", "Model S Plaid", "Luxury", "NY-2024-001", 2024, "Electric", "Automatic", 5, 220, "available", 8500, "https://images.unsplash.com/photo-1755880107456-8becf9a985bb?w=600"),
        ("Range Rover Sport", "Land Rover", "Sport HSE", "SUV", "LA-2023-014", 2023, "Petrol", "Automatic", 7, 180, "rented", 12400, "https://images.unsplash.com/photo-1763443536611-6fa59a9a4ed9?w=600"),
        ("Toyota Camry", "Toyota", "Camry XSE", "Sedan", "MIA-2024-007", 2024, "Hybrid", "Automatic", 5, 75, "available", 5200, None),
        ("BMW X5", "BMW", "X5 xDrive40i", "SUV", "NY-2023-022", 2023, "Petrol", "Automatic", 7, 150, "available", 9100, None),
        ("Mercedes E-Class", "Mercedes", "E350", "Luxury", "LA-2024-031", 2024, "Petrol", "Automatic", 5, 165, "maintenance", 6800, None),
        ("Honda Civic", "Honda", "Civic Touring", "Sedan", "MIA-2023-019", 2023, "Petrol", "Automatic", 5, 55, "available", 14200, None),
        ("Ford Mustang GT", "Ford", "Mustang GT", "Sports", "NY-2024-008", 2024, "Petrol", "Manual", 4, 140, "rented", 3400, None),
        ("Audi Q7", "Audi", "Q7 Premium", "SUV", "LA-2023-027", 2023, "Petrol", "Automatic", 7, 175, "available", 11500, None),
        ("Jeep Wrangler", "Jeep", "Wrangler Rubicon", "SUV", "MIA-2024-012", 2024, "Petrol", "Automatic", 4, 120, "out_of_service", 7800, None),
        ("Hyundai Elantra", "Hyundai", "Elantra SEL", "Sedan", "NY-2024-018", 2024, "Petrol", "Automatic", 5, 50, "available", 2900, None),
        ("Porsche 911", "Porsche", "911 Carrera", "Sports", "LA-2024-002", 2024, "Petrol", "Automatic", 4, 350, "reserved", 1800, None),
        ("Chevrolet Tahoe", "Chevrolet", "Tahoe LT", "SUV", "MIA-2023-033", 2023, "Petrol", "Automatic", 8, 130, "available", 10200, None),
    ]
    vehicles = []
    for name, brand, model_name, cat, reg, yr, fuel, trans, seats, rate, status, mile, img in vehicle_data:
        vehicles.append({
            "id": str(uuid.uuid4()),
            "name": name, "brand": brand, "model_name": model_name, "category": cat,
            "registration_number": reg, "year": yr, "fuel_type": fuel, "transmission": trans,
            "seats": seats, "daily_rate": rate, "discount_rate": None, "status": status,
            "mileage": mile, "image_url": img,
            "insurance_expiry": (datetime.now(timezone.utc) + timedelta(days=180)).isoformat(),
            "branch_id": branch_ids[hash(reg) % 3],
            "features": ["GPS", "Bluetooth", "AC", "Sunroof"] if cat != "Sedan" else ["Bluetooth", "AC"],
            "condition": "Excellent", "gps_enabled": True,
            "created_at": now_iso(), "updated_at": now_iso(),
        })
    await mysql_doc_insert_many("vehicles", vehicles)

    # Customers
    customer_data = [
        ("Emily Johnson", "emily.j@example.com", "+1-555-0101", "DL-NY-9821", "USA"),
        ("Michael Chen", "m.chen@example.com", "+1-555-0102", "DL-CA-4412", "USA"),
        ("Olivia Martinez", "olivia.m@example.com", "+1-555-0103", "DL-FL-7733", "USA"),
        ("Daniel Smith", "d.smith@example.com", "+1-555-0104", "DL-NY-2256", "USA"),
        ("Ava Williams", "ava.w@example.com", "+1-555-0105", "DL-CA-9087", "USA"),
        ("Liam Garcia", "liam.g@example.com", "+1-555-0106", "DL-FL-1199", "USA"),
        ("Sophia Brown", "sophia.b@example.com", "+1-555-0107", "DL-NY-3344", "USA"),
        ("Noah Davis", "noah.d@example.com", "+1-555-0108", "DL-CA-5678", "USA"),
    ]
    customers = []
    for name, email, phone, dl, country in customer_data:
        customers.append({
            "id": str(uuid.uuid4()),
            "full_name": name, "email": email, "phone": phone, "license_number": dl,
            "address": "123 Demo Street", "city": "New York", "country": country,
            "emergency_contact": "+1-555-9999", "is_blacklisted": False,
            "loyalty_points": (hash(email) % 500) + 50, "wallet_balance": float((hash(email) % 200)),
            "created_at": now_iso(), "updated_at": now_iso(),
        })
    await mysql_doc_insert_many("customers", customers)

    # Drivers
    drivers = []
    for n in ["Robert King", "Patricia Lee", "Henry Wright", "Linda Scott", "Patrick O'Brien"]:
        drivers.append({
            "id": str(uuid.uuid4()),
            "full_name": n, "phone": f"+1-555-0{(hash(n) % 900):03d}",
            "email": f"{n.split()[0].lower()}@eliteride.com",
            "license_number": f"DR-{hash(n) % 99999}",
            "is_available": (hash(n) % 2 == 0),
            "salary": 3500.0, "rating": round(4 + (hash(n) % 100) / 100, 1),
            "trips_completed": hash(n) % 200, "branch_id": branch_ids[hash(n) % 3],
            "created_at": now_iso(), "updated_at": now_iso(),
        })
    await mysql_doc_insert_many("drivers", drivers)

    # Bookings + Payments (last 6 months)
    bookings = []
    payments = []
    statuses = ["pending", "confirmed", "ongoing", "completed", "completed", "completed", "cancelled"]
    for i in range(40):
        v = vehicles[i % len(vehicles)]
        c = customers[i % len(customers)]
        st = statuses[i % len(statuses)]
        days = (i % 7) + 1
        amount = v["daily_rate"] * days
        booked_days_ago = (i * 5) % 180
        ts = (datetime.now(timezone.utc) - timedelta(days=booked_days_ago)).isoformat()
        bookings.append({
            "id": str(uuid.uuid4()),
            "customer_id": c["id"], "vehicle_id": v["id"], "driver_id": None,
            "pickup_date": ts, "return_date": ts,
            "pickup_location": "Downtown Hub", "return_location": "Downtown Hub",
            "total_amount": amount, "status": st, "payment_status": "paid" if st == "completed" else "pending",
            "notes": "", "created_at": ts, "updated_at": ts,
        })
        if st == "completed":
            payments.append({
                "id": str(uuid.uuid4()),
                "booking_id": bookings[-1]["id"], "customer_id": c["id"],
                "amount": amount, "method": ["card", "cash", "bank_transfer", "mobile_money"][i % 4],
                "type": "payment", "status": "completed",
                "reference": f"TX-{1000 + i}",
                "created_at": ts, "updated_at": ts,
            })
    await mysql_doc_insert_many("bookings", bookings)
    if payments:
        await mysql_doc_insert_many("payments", payments)

    # Reviews
    reviews = []
    for i, c in enumerate(customers[:5]):
        reviews.append({
            "id": str(uuid.uuid4()),
            "customer_id": c["id"], "vehicle_id": vehicles[i]["id"],
            "rating": [5, 4, 5, 3, 4][i], "comment": ["Outstanding!", "Great car, smooth ride.", "Loved the experience.", "Decent value.", "Will rent again."][i],
            "is_approved": i != 3, "is_hidden": False,
            "created_at": now_iso(), "updated_at": now_iso(),
        })
    await mysql_doc_insert_many("reviews", reviews)

    # Promotions
    promos = [
        {"code": "SUMMER25", "description": "25% off summer rentals", "discount_percent": 25, "discount_amount": 0,
         "valid_from": now_iso(), "valid_until": (datetime.now(timezone.utc) + timedelta(days=60)).isoformat(),
         "is_active": True, "usage_limit": 200, "used_count": 47},
        {"code": "WELCOME10", "description": "First-time customer 10% off", "discount_percent": 10, "discount_amount": 0,
         "valid_from": now_iso(), "valid_until": (datetime.now(timezone.utc) + timedelta(days=365)).isoformat(),
         "is_active": True, "usage_limit": 1000, "used_count": 312},
    ]
    for p in promos:
        p["id"] = str(uuid.uuid4()); p["created_at"] = now_iso(); p["updated_at"] = now_iso()
    await mysql_doc_insert_many("promotions", promos)

    # Maintenance
    maint = []
    for i, v in enumerate(vehicles[:5]):
        maint.append({
            "id": str(uuid.uuid4()),
            "vehicle_id": v["id"],
            "type": ["oil_change", "tire", "repair", "inspection", "oil_change"][i],
            "description": ["Routine oil change", "Front tire replacement", "Brake pad repair", "Annual inspection", "Oil & filter"][i],
            "cost": [80, 320, 450, 120, 90][i],
            "scheduled_date": (datetime.now(timezone.utc) + timedelta(days=i * 7)).isoformat(),
            "completed_date": None,
            "status": ["scheduled", "in_progress", "completed", "scheduled", "scheduled"][i],
            "garage": "EliteRide Garage",
            "created_at": now_iso(), "updated_at": now_iso(),
        })
    await mysql_doc_insert_many("maintenance", maint)

    # Tickets
    tickets = [
        {"subject": "Late return penalty inquiry", "customer_email": "emily.j@example.com", "description": "I returned the car 30 minutes late and was charged...", "priority": "medium", "status": "open"},
        {"subject": "Refund request for cancelled booking", "customer_email": "m.chen@example.com", "description": "Booking #4421 was cancelled due to weather.", "priority": "high", "status": "in_progress"},
        {"subject": "Damaged seat reported", "customer_email": "olivia.m@example.com", "description": "Found a scratch on the rear seat after pickup.", "priority": "low", "status": "resolved"},
    ]
    for t in tickets:
        t["id"] = str(uuid.uuid4()); t["created_at"] = now_iso(); t["updated_at"] = now_iso()
    await mysql_doc_insert_many("tickets", tickets)

    # Notifications
    notes = [
        {"type": "booking", "title": "New booking request", "message": "Emily Johnson booked Tesla Model S", "channel": "in_app", "is_read": False},
        {"type": "payment", "title": "Payment received", "message": "$650 received from Michael Chen", "channel": "in_app", "is_read": False},
        {"type": "maintenance", "title": "Oil change due", "message": "Range Rover Sport needs oil change in 3 days", "channel": "in_app", "is_read": True},
        {"type": "system", "title": "Insurance expiring soon", "message": "BMW X5 insurance expires in 14 days", "channel": "in_app", "is_read": False},
    ]
    for n in notes:
        n["id"] = str(uuid.uuid4()); n["created_at"] = now_iso(); n["updated_at"] = now_iso()
    await mysql_doc_insert_many("notifications", notes)

    # Blogs
    blogs = [
        {"title": "Top 10 Road Trips in the USA", "slug": "top-10-road-trips-usa", "category": "Travel",
         "tags": ["roadtrip", "usa", "travel"], "content": "Discover stunning routes...",
         "seo_title": "Top 10 USA Road Trips", "seo_description": "Best road trips to take in the USA",
         "cover_image": None, "is_published": True, "author": "Admin"},
        {"title": "How to Choose the Right Rental Car", "slug": "choose-rental-car", "category": "Guides",
         "tags": ["rental", "guide"], "content": "When choosing a rental car, consider...",
         "seo_title": "Rental Car Buying Guide", "seo_description": "Complete guide for choosing the right rental car",
         "cover_image": None, "is_published": True, "author": "Admin"},
    ]
    for b in blogs:
        b["id"] = str(uuid.uuid4()); b["created_at"] = now_iso(); b["updated_at"] = now_iso()
    await mysql_doc_insert_many("blogs", blogs)

    # CMS Pages
    cms = [
        {"key": "about", "title": "About EliteRide", "content": "EliteRide is a premium car rental service...", "is_published": True},
        {"key": "services", "title": "Our Services", "content": "We offer luxury sedans, SUVs, sports cars...", "is_published": True},
        {"key": "contact", "title": "Contact Us", "content": "Reach us at hello@eliteride.com", "is_published": True},
        {"key": "faq", "title": "Frequently Asked Questions", "content": "Q: What documents do I need? A: A valid driver's license...", "is_published": True},
        {"key": "privacy", "title": "Privacy Policy", "content": "We respect your privacy...", "is_published": True},
        {"key": "terms", "title": "Terms & Conditions", "content": "By using EliteRide, you agree...", "is_published": True},
    ]
    for c in cms:
        c["id"] = str(uuid.uuid4()); c["seo_title"] = None; c["seo_description"] = None
        c["created_at"] = now_iso(); c["updated_at"] = now_iso()
    await mysql_doc_insert_many("cms_pages", cms)

    logger.info("Demo data seeded.")

@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan: run startup tasks before serving, tear down after.

    Order matters here: the MySQL schema must exist before the admin account
    or any demo documents are inserted.
    """
    await mysql_init_schema()  # create tables/indexes if missing
    await seed_admin()         # ensure the admin login exists
    await seed_demo_data()     # demo fixtures -- presumably skips if already seeded; TODO confirm
    init_storage()             # NOTE(review): likely prepares LOCAL_UPLOAD_ROOT -- verify in its definition
    yield                      # application serves requests while suspended here
    await mysql_close()        # release the aiomysql pool on shutdown

app = FastAPI(title="Car Rental Super Admin API", lifespan=lifespan)


@app.get("/", include_in_schema=False)
async def root():
    """Landing payload for the bare origin.

    A browser opening http://127.0.0.1:8000/ gets pointers to the docs and
    the API prefix instead of a blind 404 (all real routes live under /api).
    """
    return dict(
        service="Car Rental Super Admin API",
        openapi="/openapi.json",
        docs="/docs",
        redoc="/redoc",
        api="/api",
    )


@app.get("/favicon.ico", include_in_schema=False)
async def favicon_placeholder():
    """Answer browser favicon probes with an empty 204 to keep logs quiet."""
    no_content = Response(status_code=204)
    return no_content


@app.get("/.well-known/appspecific/com.chrome.devtools.json", include_in_schema=False)
async def chrome_devtools_wellknown():
    """Chrome DevTools probes this well-known path; reply with an empty 204."""
    empty_reply = Response(status_code=204)
    return empty_reply


# ---------- Mount ----------
# Mount all /api routes, then wrap them with CORS.
app.include_router(api_router)

# CORS_ORIGINS is a comma-separated list (e.g. "https://a.com, https://b.com").
# Strip whitespace and drop empty entries so values with spaces or a trailing
# comma still produce origins that match; fall back to the permissive default.
# NOTE(review): browsers reject "Access-Control-Allow-Origin: *" on
# credentialed requests -- set explicit origins in production if cookies or
# Authorization headers must cross origins.
_cors_origins = [
    origin.strip()
    for origin in os.environ.get('CORS_ORIGINS', '*').split(',')
    if origin.strip()
] or ["*"]

app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=_cors_origins,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Dev entry point: run the ASGI app directly with uvicorn (imported lazily so
# the module stays importable without uvicorn installed).
if __name__ == "__main__":
    import uvicorn

    listen_port = int(os.getenv("PORT", "8000"))
    logger.info("Starting API at http://0.0.0.0:%s (API routes under /api)", listen_port)
    uvicorn.run("server:app", host="0.0.0.0", port=listen_port)
