

from __future__ import annotations

import json
import logging
import os
import sys
import time
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Dict, List, Tuple

import mysql.connector
from mysql.connector import pooling
import requests
from zoneinfo import ZoneInfo

# ---------------------------------------------------------------------------
# General configuration: timezone, polling cadence, API paging/filters, and
# the on-disk layout for JSON caches and per-category log directories.
# ---------------------------------------------------------------------------
TZ = ZoneInfo("America/Fortaleza")
POLL_INTERVAL = 300        # seconds between sync cycles
PAGE_SIZE = 50             # API page size for /Reservations
STATUSES = [1, 2, 3, 4]    # Confirmed, Cancelled, Processed, Blocked
TYPES = [1, 2]             # 1 = insertion, 2 = update (0 = check-in if desired)
FULL_SYNC_START = "2025-05-01T00:00:00Z"  # lower bound of the very first window

# Resolve paths next to the frozen executable (PyInstaller) or this script.
BASE_DIR  = Path(sys.executable if getattr(sys, "frozen", False) else __file__).resolve().parent
DATA_DIR  = BASE_DIR / "data"
LOG_ROOT  = BASE_DIR / "logs"
FLAG_FILE = DATA_DIR / "flag_fullsync.ok"  # presence marks that the initial full sync already ran

for p in (DATA_DIR, LOG_ROOT):
    p.mkdir(parents=True, exist_ok=True)

# One sub-directory per log category.
LOG_DIRS = {n: LOG_ROOT / n for n in
            ["errors", "updates", "inserts", "deletes", "operations", "api"]}
for p in LOG_DIRS.values():
    p.mkdir(exist_ok=True)

# Shared formatter for every logger created below.
FMT = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s", "%Y-%m-%d %H:%M:%S")

def _logger(name: str, path: Path, level=logging.INFO):
    """Create (or return the already-configured) named logger.

    Attaches one UTF-8 file handler writing to *path* and one stream
    handler, both using the shared FMT formatter.  Propagation is
    disabled so records do not bubble up to the root logger.
    """
    logger = logging.getLogger(name)
    logger.propagate = False
    if logger.handlers:
        # Configured on a previous call — reuse as-is.
        return logger
    logger.setLevel(level)
    for handler in (logging.FileHandler(path, encoding="utf-8"),
                    logging.StreamHandler()):
        handler.setFormatter(FMT)
        logger.addHandler(handler)
    logger.info("%s logger iniciado (%s)", name, path)
    return logger

# Category loggers, one per directory created above.
log_ops = _logger("operations", LOG_DIRS["operations"] / "operations.log")
log_err = _logger("errors",     LOG_DIRS["errors"]     / "errors.log", level=logging.ERROR)
log_upd = _logger("updates",    LOG_DIRS["updates"]    / "updates.log")
log_ins = _logger("inserts",    LOG_DIRS["inserts"]    / "inserts.log")
log_del = _logger("deletes",    LOG_DIRS["deletes"]    / "deletes.log")
log_api = _logger("api",        LOG_DIRS["api"]        / "api.log")

# ---------------------------------------------------------------------------
# MySQL connection pool — credentials come from the environment.
# NOTE: autocommit is off, so callers must commit/rollback explicitly.
# ---------------------------------------------------------------------------
POOL = pooling.MySQLConnectionPool(
    pool_name   = "hits_pool",
    pool_size   = 6,
    host        = os.getenv("DB_HOST", "localhost"),
    user        = os.getenv("DB_USER", "root"),
    password    = os.getenv("DB_PASS", ""),
    database    = os.getenv("DB_NAME", "bihits"),
    charset     = "utf8mb4",
    autocommit  = False,
)

def db_conn():
    """Borrow a connection from the pool (caller must close() to return it)."""
    return POOL.get_connection()

# ---------------------------------------------------------------------------
# Date / cache helpers
# ---------------------------------------------------------------------------
def parse_iso(dt: str | None):
    if not dt:
        return None
    if dt.endswith("Z"):
        dt = dt[:-1] + "+00:00"
    return datetime.fromisoformat(dt).astimezone(timezone.utc).replace(tzinfo=None)

def load_cache(path: Path):
    """Load a JSON cache file into a dict keyed by (reservation, entity).

    The entity part of the key prefers 'idEntity' and falls back to
    'idEntityCompany' (detail payloads use the latter).  Returns an empty
    dict when the file does not exist.
    """
    if not path.is_file():
        return {}
    items = json.loads(path.read_text(encoding="utf-8"))
    cache = {}
    for item in items:
        entity = item.get("idEntity") or item.get("idEntityCompany")
        cache[(item["idReservation"], entity)] = item
    return cache

def save_cache(path: Path, data: Dict[Tuple[int, int], dict]):
    """Persist the cache values as a pretty-printed JSON array.

    Only the values are stored — the keys can be reconstructed from the
    records themselves (see load_cache).
    """
    payload = json.dumps(list(data.values()), ensure_ascii=False, indent=2)
    with path.open("w", encoding="utf-8") as fh:
        fh.write(payload)

# ---------------------------------------------------------------------------
# API helpers
# ---------------------------------------------------------------------------
SESSION  = requests.Session()  # shared session → connection re-use across calls
API_BASE = "https://api.hitspms.net/Datashare/WebCheckinOut"

def make_headers(token: str):
    """Return the fixed HTTP header set required by the HITS API, carrying
    *token* as the Bearer credential."""
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
    }
    headers["Authorization"] = "Bearer " + token
    return headers

def get_token():
    """Fetch an API bearer token from the auth endpoint.

    Credentials come from the API_USER / API_PASS environment variables.
    The endpoint may answer either with plain text ("Token obtido: <tk>")
    or with a JSON body containing a "token" field.  Returns None on any
    failure (logged to the errors logger).
    """
    url = "https://bric-investment.com/BI-HITS/dados/Hits/token/token.php"
    try:
        payload = {"username": os.getenv("API_USER"),
                   "password": os.getenv("API_PASS")}
        resp = SESSION.post(url, data=payload, timeout=30)
        resp.raise_for_status()
        body = resp.text.strip()
        if body.startswith("Token obtido:"):
            return body.split(":", 1)[1].strip()
        return resp.json().get("token")
    except Exception as exc:
        log_err.error("Erro ao obter token: %s", exc)
        return None

def safe_get(url: str, headers: dict, params: dict | None = None, timeout: int = 30):
    """GET with up to 6 attempts, exponential back-off, 401 token refresh
    and 429 Retry-After handling.

    Mutates *headers* in place when the token is refreshed, so the
    caller's header dict stays valid for subsequent requests.  Returns
    the Response on success, or None when all attempts are exhausted or
    the token cannot be renewed.
    """
    delay = 5
    for _ in range(6):
        try:
            r = SESSION.get(url, headers=headers, params=params, timeout=timeout)
            log_api.info("GET %s → %s", r.url, r.status_code)
        except requests.RequestException as exc:
            log_err.error("RequestException: %s", exc)
            time.sleep(delay); delay = min(delay * 2, 60)
            continue
        # 401 – refresh the bearer token and retry
        if r.status_code == 401:
            new_tk = get_token()
            if new_tk:
                headers["Authorization"] = f"Bearer {new_tk}"
                continue
            return None
        # 429 – honour Retry-After (falls back to the current delay)
        if r.status_code == 429:
            wait = int(r.headers.get("Retry-After", delay))
            time.sleep(wait)
            continue
        if r.ok:
            return r
        # Other non-2xx: wait and retry.  NOTE(review): delay is only
        # doubled on connection errors, not on this path — confirm intent.
        time.sleep(delay)
    return None

# ---------------------------------------------------------------------------
# Fetchers
# ---------------------------------------------------------------------------
def fetch_reservations(token: str, start: str, end: str, t: int, st: int):
    """Page through /Reservations for one (type, status) pair inside the
    [start, end] window and return the accumulated shallow records."""
    url = f"{API_BASE}/Reservations"
    headers = make_headers(token)
    results: List[dict] = []
    page = 0
    while True:
        params = {
            "Type"       : t,
            "Status"     : st,
            "InitialDate": start,
            "FinalDate"  : end,
            "Page"       : page,
            "Size"       : PAGE_SIZE,
        }
        resp = safe_get(url, headers, params)
        if not resp:
            break
        batch = resp.json()
        if not batch:
            break
        results.extend(batch)
        if len(batch) < PAGE_SIZE:
            # Short page — no more data to fetch.
            break
        page += 1
    return results

def fetch_detail(token: str, rid: int):
    """Fetch the full detail payload for one reservation id (None on failure)."""
    response = safe_get(f"{API_BASE}/Reservation/{rid}", make_headers(token))
    if not response:
        return None
    return response.json()

# ---------------------------------------------------------------------------
# SQL statements
# ---------------------------------------------------------------------------
# Shallow reservation upsert keyed on (idReservation, idEntity); every
# mutable column is refreshed on duplicate key (dateAdd is kept).
SQL_INS_RES = (
    "INSERT INTO reservations "
    "(idReservation,idEntity,name,phone,main,mail,zipCode,addressDetails,address,neighborhood,number,"
    "city,country,stateName,stateCode,federalRegistrationNumber,documentType,checkIn,checkOut,"
    "dateAdd,dateUp,status,integrator,channel,raw_json) "
    "VALUES (%(idReservation)s,%(idEntity)s,%(name)s,%(phone)s,%(main)s,%(mail)s,%(zipCode)s,"
    "%(addressDetails)s,%(address)s,%(neighborhood)s,%(number)s,%(city)s,%(country)s,%(stateName)s,"
    "%(stateCode)s,%(federalRegistrationNumber)s,%(documentType)s,%(checkIn)s,%(checkOut)s,"
    "%(dateAdd)s,%(dateUp)s,%(status)s,%(integrator)s,%(channel)s,%(raw_json)s) "
    "ON DUPLICATE KEY UPDATE "
    "name=VALUES(name),phone=VALUES(phone),main=VALUES(main),mail=VALUES(mail),zipCode=VALUES(zipCode),"
    "addressDetails=VALUES(addressDetails),address=VALUES(address),neighborhood=VALUES(neighborhood),"
    "number=VALUES(number),city=VALUES(city),country=VALUES(country),stateName=VALUES(stateName),"
    "stateCode=VALUES(stateCode),federalRegistrationNumber=VALUES(federalRegistrationNumber),"
    "documentType=VALUES(documentType),checkIn=VALUES(checkIn),checkOut=VALUES(checkOut),"
    "dateUp=VALUES(dateUp),status=VALUES(status),integrator=VALUES(integrator),"
    "channel=VALUES(channel),raw_json=VALUES(raw_json)"
)

# Detail-header upsert keyed on (idReservation, idEntityCompany).
SQL_INS_DET_MAIN = (
    "INSERT INTO reservation_details "
    "(idReservation,idEntityCompany,companyName,idRequesterCompany,requesterCompanyName,groupName,"
    "contactName,contact1,contact2,dateAdd,dateUp,creditState) "
    "VALUES (%(idReservation)s,%(idEntityCompany)s,%(companyName)s,%(idRequesterCompany)s,"
    "%(requesterCompanyName)s,%(groupName)s,%(contactName)s,%(contact1)s,%(contact2)s,"
    "%(dateAdd)s,%(dateUp)s,%(creditState)s) "
    "ON DUPLICATE KEY UPDATE "
    "companyName=VALUES(companyName),idRequesterCompany=VALUES(idRequesterCompany),"
    "requesterCompanyName=VALUES(requesterCompanyName),groupName=VALUES(groupName),"
    "contactName=VALUES(contactName),contact1=VALUES(contact1),contact2=VALUES(contact2),"
    "dateUp=VALUES(dateUp),creditState=VALUES(creditState)"
)

# Child-table inserts.  Child rows are deleted and fully re-inserted on each
# detail refresh (see upsert_reservation_details), so these are plain INSERTs.
SQL_NOTE  = "INSERT INTO reservation_notes (idReservation,idEntityCompany,note_index,note) VALUES (%s,%s,%s,%s)"
SQL_ROOM  = ("INSERT INTO reservation_rooms "
             "(idReservation,idEntityCompany,room_index,idRoom,code,checkIn,checkOut,idRoomType,"
             "roomTypeName,amount,status,pax,chd1,chd2,chd3,"
             "ratePlanId,ratePlanName,mealPlanDesc) "
             "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
SQL_REQ   = "INSERT INTO reservation_room_requirements (idReservation,idEntityCompany,room_index,req_index,requirement) VALUES (%s,%s,%s,%s,%s)"
SQL_GUEST = ("INSERT INTO reservation_guests "
             "(idReservation,idEntityCompany,guest_index,idEntity,name,idRoom,contactMail,contactPhone,main,"
             "federalRegistrationNumber,documentType,gender,birthDate,mainDocType,docCpfCnpjPassport,"
             "addressZipCode,addressCountry,addressStateCode,addressStateName,addressCity,addressNeighborhood,"
             "addressAddress,addressDetails,addressNumber) "
             "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
SQL_GNOTE = "INSERT INTO reservation_guest_notes (idReservation,idEntityCompany,guest_index,note_index,note_text) VALUES (%s,%s,%s,%s,%s)"
SQL_COMM  = "INSERT INTO reservation_commissions (idReservation,idEntityCompany,comm_index,idEntity,percentage,value) VALUES (%s,%s,%s,%s,%s,%s)"
SQL_REV   = ("INSERT INTO reservation_revenue "
             "(idReservation,idEntityCompany,marketSegmentId,marketSegmentName,idReservationChannel,nameChannel,"
             "purposeStayId,purpose,managerID,managerName) "
             "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")

# ---------------------------------------------------------------------------
# Upsert functions
# ---------------------------------------------------------------------------
def get_existing_reservations_map():
    """Return {(idReservation, idEntity): dateUp} for every stored reservation."""
    conn = db_conn()
    cur = conn.cursor()
    cur.execute("SELECT idReservation,idEntity,dateUp FROM reservations")
    mapping = {(rid, ent): date_up for rid, ent, date_up in cur.fetchall()}
    cur.close()
    conn.close()
    return mapping

def get_existing_details_map():
    """Return {(idReservation, idEntityCompany): dateUp} for stored details."""
    conn = db_conn()
    cur = conn.cursor()
    cur.execute("SELECT idReservation,idEntityCompany,dateUp FROM reservation_details")
    mapping = {(rid, ent): date_up for rid, ent, date_up in cur.fetchall()}
    cur.close()
    conn.close()
    return mapping

def upsert_reservations(recs: List[dict]):
    """Insert or refresh shallow reservation rows.

    A row is written only when it is new or its dateUp is strictly newer
    than the stored value; writes go through SQL_INS_RES
    (INSERT ... ON DUPLICATE KEY UPDATE) and are committed as one batch.

    Fix: the freshness comparison is now None-safe.  Previously a
    missing/None dateUp on either side raised TypeError (comparing None
    with datetime); such rows are now conservatively re-written.

    Raises: re-raises any DB error after rollback (logged to errors).
    """
    if not recs:
        return
    existing = get_existing_reservations_map()
    rows = []
    for r in recs:
        key = (r["idReservation"], r["idEntity"])
        is_new = key not in existing
        if not is_new:
            new_dt = parse_iso(r.get("dateUp"))
            old_dt = existing[key]
            # Skip only when both timestamps are known and the API record
            # is not strictly newer; unknown timestamps force a re-write.
            if new_dt is not None and old_dt is not None and new_dt <= old_dt:
                continue
        rows.append({
            "idReservation": r["idReservation"],
            "idEntity":      r["idEntity"],
            "name":          r.get("name"),
            "phone":         r.get("phone"),
            "main":          r.get("main"),
            "mail":          r.get("mail"),
            "zipCode":       r.get("zipCode"),
            "addressDetails":r.get("addressDetails"),
            "address":       r.get("address"),
            "neighborhood":  r.get("neighborhood"),
            "number":        r.get("number"),
            "city":          r.get("city"),
            "country":       r.get("country"),
            "stateName":     r.get("stateName"),
            "stateCode":     r.get("stateCode"),
            "federalRegistrationNumber": r.get("federalRegistrationNumber"),
            "documentType":  r.get("documentType"),
            "checkIn":       r.get("checkIn"),
            "checkOut":      r.get("checkOut"),
            "dateAdd":       r.get("dateAdd"),
            "dateUp":        r.get("dateUp"),
            "status":        r.get("status"),
            "integrator":    r.get("integrator"),
            "channel":       r.get("channel"),
            "raw_json":      json.dumps(r, ensure_ascii=False),
        })
        (log_ins if is_new else log_upd).info("reservations %s", key)
    if not rows:
        return
    conn = db_conn(); cur = conn.cursor()
    try:
        cur.executemany(SQL_INS_RES, rows)
        conn.commit()
    except Exception:
        conn.rollback(); log_err.exception("upsert_reservations")
        raise
    finally:
        cur.close(); conn.close()

def upsert_reservation_details(details: List[dict]):
    """Upsert detail payloads and rebuild all of their child tables.

    For each detail (keyed by idReservation + idEntityCompany): skip it
    when the stored dateUp is already >= the incoming one, otherwise
    upsert the main row, DELETE every child row, and re-insert children
    (notes, rooms, room requirements, guests, guest notes, commissions,
    revenue) from the payload.  The whole batch runs in a single
    transaction: any failure rolls back and re-raises.
    """
    if not details:
        return
    existing = get_existing_details_map()
    conn = db_conn(); cur = conn.cursor()
    try:
        conn.start_transaction()
        for det in details:
            key  = (det["idReservation"], det["idEntityCompany"])
            dt_a = parse_iso(det["dateUp"])
            if key in existing and dt_a <= existing[key]:
                continue
            # upsert the main detail record
            cur.execute(SQL_INS_DET_MAIN, {
                "idReservation"      : det["idReservation"],
                "idEntityCompany"    : det["idEntityCompany"],
                "companyName"        : det.get("companyName"),
                "idRequesterCompany" : det.get("idRequesterCompany"),
                "requesterCompanyName":det.get("requesterCompanyName"),
                "groupName"          : det.get("groupName"),
                "contactName"        : det.get("contactName"),
                "contact1"           : det.get("contact1"),
                "contact2"           : det.get("contact2"),
                "dateAdd"            : det.get("dateAdd"),
                "dateUp"             : det.get("dateUp"),
                "creditState"        : det.get("creditState"),
            })
            # wipe child rows — they are fully rebuilt from the payload below
            for tbl in ["reservation_notes","reservation_room_requirements",
                        "reservation_rooms","reservation_guest_notes",
                        "reservation_guests","reservation_commissions",
                        "reservation_revenue"]:
                cur.execute(f"DELETE FROM {tbl} WHERE idReservation=%s AND idEntityCompany=%s", key)
            # notes
            cur.executemany(SQL_NOTE, [
                (det["idReservation"], det["idEntityCompany"], i, n.get("note"))
                for i,n in enumerate(det.get("notes", []))
            ])
            # rooms + per-room requirements
            room_rows, req_rows = [], []
            for r_idx, room in enumerate(det.get("rooms", [])):
                room_rows.append((
                    det["idReservation"], det["idEntityCompany"], r_idx,
                    room.get("idRoom"), room.get("code"), room.get("checkIn"), room.get("checkOut"),
                    room.get("idRoomType"), room.get("roomTypeName"), room.get("amount"),
                    room.get("status"), room.get("pax"), room.get("chd1"), room.get("chd2"),
                    room.get("chd3"), room.get("ratePlanId"), room.get("ratePlanName"),
                    room.get("mealPlanDesc"),
                ))
                # requirements are stored as raw JSON blobs, one row each
                req_rows.extend([
                    (det["idReservation"], det["idEntityCompany"], r_idx, q_idx,
                     json.dumps(req, ensure_ascii=False))
                    for q_idx, req in enumerate(room.get("requirementReservation", []))
                ])
            if room_rows: cur.executemany(SQL_ROOM, room_rows)
            if req_rows:  cur.executemany(SQL_REQ,  req_rows)
            # guests + their notes
            g_rows, gn_rows = [], []
            for g_idx, g in enumerate(det.get("guests", [])):
                g_rows.append((
                    det["idReservation"], det["idEntityCompany"], g_idx,
                    g.get("idEntity"), g.get("name"), g.get("idRoom"), g.get("contactMail"),
                    g.get("contactPhone"), g.get("main"), g.get("federalRegistrationNumber"),
                    g.get("documentType"), g.get("gender"), g.get("birthDate"), g.get("mainDocType"),
                    g.get("docCpfCnpjPassport"), g.get("addressZipCode"), g.get("addressCountry"),
                    g.get("addressStateCode"), g.get("addressStateName"), g.get("addressCity"),
                    g.get("addressNeighborhood"), g.get("addressAddress"), g.get("addressDetails"),
                    g.get("addressNumber"),
                ))
                gn_rows.extend([
                    (det["idReservation"], det["idEntityCompany"], g_idx, n_idx, n.get("note"))
                    for n_idx, n in enumerate(g.get("notes", []))
                ])
            if g_rows:  cur.executemany(SQL_GUEST, g_rows)
            if gn_rows: cur.executemany(SQL_GNOTE, gn_rows)
            # commissions
            c_rows = [
                (det["idReservation"], det["idEntityCompany"], i, c.get("idEntity"),
                 c.get("percentage"), c.get("value"))
                for i,c in enumerate(det.get("commissions", []))
            ]
            if c_rows: cur.executemany(SQL_COMM, c_rows)
            # revenue management (single optional sub-object)
            rm = det.get("revenueManagement", {})
            if rm:
                cur.execute(SQL_REV, (
                    det["idReservation"], det["idEntityCompany"],
                    rm.get("marketSegmentId"), rm.get("marketSegmentName"),
                    rm.get("idReservationChannel"), rm.get("nameChannel"),
                    rm.get("purposeStayId"), rm.get("purpose"),
                    rm.get("managerID"), rm.get("managerName"),
                ))
            (log_ins if key not in existing else log_upd).info("reservation_details %s", key)
        conn.commit()
    except Exception:
        conn.rollback(); log_err.exception("upsert_reservation_details")
        raise
    finally:
        cur.close(); conn.close()

# ---------------------------------------------------------------------------
# Truncation (initial full sync)
# ---------------------------------------------------------------------------
def truncate_tables():
    """Empty all reservation tables ahead of the initial full sync.

    FK checks are disabled for the duration so truncation order does not
    matter (children are still listed first for clarity).

    Fix: wrapped in try/finally — previously a failed TRUNCATE left the
    pooled connection with FOREIGN_KEY_CHECKS=0 (a session-scoped setting
    that would leak back into the pool) and never closed the connection.
    """
    conn = db_conn()
    cur = conn.cursor()
    try:
        cur.execute("SET FOREIGN_KEY_CHECKS=0")
        try:
            for tbl in ["reservation_room_requirements","reservation_rooms",
                        "reservation_guest_notes","reservation_guests",
                        "reservation_commissions","reservation_revenue",
                        "reservation_notes","reservation_details","reservations"]:
                cur.execute(f"TRUNCATE TABLE {tbl}")
        finally:
            # Always restore FK checks before the connection returns to the pool.
            cur.execute("SET FOREIGN_KEY_CHECKS=1")
        conn.commit()
    finally:
        cur.close()
        conn.close()
    log_ops.info("Tabelas truncadas – full-sync inicial")

# ---------------------------------------------------------------------------
# Main loop
# ---------------------------------------------------------------------------
def main():
    """Run the sync daemon: one initial full sync, then an endless
    incremental loop.

    Each cycle: window = (last seen dateUp − 5 min) → now; fetch shallow
    reservations for every type/status pair; fetch details for
    reservations whose dateUp advanced; upsert both; delete rows that
    vanished from the window; persist the JSON caches; advance the
    window pointer; sleep POLL_INTERVAL.

    Fix: the defaultdict epoch default for res_last_dt is now NAIVE —
    parse_iso() returns naive UTC datetimes, and max(aware, naive)
    raised TypeError on the very first record processed.  Also switched
    the deprecated datetime.utcnow() for the equivalent
    datetime.now(timezone.utc) (identical strftime output).
    """
    path_res = DATA_DIR / "reservations.json"
    path_det = DATA_DIR / "reservation_details.json"

    if not FLAG_FILE.exists():                       # very first run
        truncate_tables()
        FLAG_FILE.touch()
        save_cache(path_res, {})
        save_cache(path_det, {})

    old_res = load_cache(path_res)
    old_det = load_cache(path_det)
    # Resume from the newest cached dateUp, else the configured start.
    last_dt = max((parse_iso(d["dateUp"]) for d in old_res.values()),
                  default=parse_iso(FULL_SYNC_START))

    while True:
        # 5-minute overlap absorbs late writes / clock skew.
        win_start = (last_dt - timedelta(minutes=5)).strftime("%Y-%m-%dT%H:%M:%SZ")
        win_end   = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
        log_ops.info("Window %s → %s", win_start, win_end)

        token = get_token()
        if not token:
            time.sleep(POLL_INTERVAL); continue

        # 1. Shallow list accumulated across all status/type pairs,
        #    keeping only the newest record per (idReservation, idEntity).
        all_api: Dict[Tuple[int,int],dict] = {}
        res_last_dt: Dict[int, datetime] = defaultdict(
            lambda: datetime(1970,1,1))  # naive, to match parse_iso()

        for t in TYPES:
            for st in STATUSES:
                for r in fetch_reservations(token, win_start, win_end, t, st):
                    k  = (r["idReservation"], r["idEntity"])
                    dt = parse_iso(r["dateUp"])
                    if dt > parse_iso(all_api.get(k, {}).get("dateUp","1970-01-01T00:00:00Z")):
                        all_api[k] = r
                    res_last_dt[r["idReservation"]] = max(res_last_dt[r["idReservation"]], dt)

        # 2. Which reservations need a detail fetch (new, or dateUp advanced)
        to_fetch_ids = {
            rid for rid, dt in res_last_dt.items()
            if rid not in {k[0] for k in old_res} or
               dt > max(parse_iso(v["dateUp"])
                        for k,v in old_res.items() if k[0]==rid)
        }

        new_details: List[dict] = []
        for idx, rid in enumerate(to_fetch_ids, 1):
            log_ops.info("[%d/%d] detail %s", idx, len(to_fetch_ids), rid)
            det = fetch_detail(token, rid)
            if not det:
                continue
            real_key = (det["idReservation"], det["idEntityCompany"])
            old_det[real_key] = det
            new_details.append(det)
            # refresh the matching shallow-cache entry
            rasa_key = (det["idReservation"], det.get("idEntity"))
            if rasa_key in all_api:
                old_res[rasa_key] = all_api[rasa_key]
            time.sleep(1.0)  # gentle rate limiting between detail calls

        # 3. Upserts
        try:
            upsert_reservations(list(all_api.values()))
            upsert_reservation_details(new_details)
        except Exception:
            log_err.exception("Falha em upserts; ciclo interrompido")
            break

        # 4. Deletions ("strict sync" with the current window).
        # NOTE(review): `missing` is "cached keys absent from THIS window",
        # and the cache is overwritten with only the window contents in
        # step 5 — with an incremental window this deletes anything not
        # recently updated.  Presumably intentional, but verify the
        # intended retention semantics.
        current_keys  = set(all_api)
        previous_keys = set(old_res)
        missing       = previous_keys - current_keys

        if missing:
            conn = db_conn(); cur = conn.cursor()
            conn.start_transaction()
            for k in missing:
                rid, ent = k
                cur.execute("DELETE FROM reservation_details WHERE idReservation=%s", (rid,))
                cur.execute("DELETE FROM reservations WHERE idReservation=%s AND idEntity=%s", (rid,ent))
                log_del.info("reservation %s", k)
                old_res.pop(k, None)
                for dk in [d for d in old_det if d[0] == rid]:
                    old_det.pop(dk, None)
            conn.commit(); cur.close(); conn.close()

        # 5. Persist caches and advance the window pointer
        save_cache(path_res, all_api)
        save_cache(path_det, old_det)
        if res_last_dt:
            last_dt = max(res_last_dt.values())

        log_ops.info("Ciclo completo — inserts/updates: %d, deletes: %d",
                     len(to_fetch_ids), len(missing))
        time.sleep(POLL_INTERVAL)

if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl+C: log and exit quietly.
        log_ops.info("Interrompido — encerrando")
