# -*- coding: utf-8 -*-
"""
Integrador HITS-PMS – Reservas & Detalhes
----------------------------------------
Versão: 20/05/2025

Sincronia estrita entre API ↔ JSON ↔ MySQL:
• INSERT de reservas novas
• UPDATE quando dateUp muda
• DELETE quando a reserva some da API

Principais recursos
• requests.Session()  |  MySQLConnectionPool  |  Transações atômicas
• Chaves de cache corretas: (idReservation,idEntity) e (idReservation,idEntityCompany)
• Logs separados: operations, inserts, updates, deletes, errors, api
"""

from __future__ import annotations
import json
import logging
import os
import sys
import time
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Dict, List, Tuple

import mysql.connector
from mysql.connector import pooling
import requests
from zoneinfo import ZoneInfo

# ---------------------------------------------------------------------------
# General configuration
# ---------------------------------------------------------------------------
TZ = ZoneInfo("America/Fortaleza")
POLL_INTERVAL = 300        # seconds between sync cycles
PAGE_SIZE = 50
# NOTE(review): five status codes but only four names listed — confirm the
# mapping for the fifth value.
STATUSES = [0, 1, 2, 3, 4]    # Confirmed, Cancelled, Processed, Blocked
TYPES = [0, 1, 2]             # 1 = insert, 2 = update (0 = check-in, if desired)
FULL_SYNC_START = "2025-03-01T00:00:00Z"   # lower bound for the initial full sync

# Resolve paths next to the executable when frozen (e.g. PyInstaller),
# otherwise next to this source file.
BASE_DIR  = Path(sys.executable if getattr(sys, "frozen", False) else __file__).resolve().parent
DATA_DIR  = BASE_DIR / "data"              # JSON cache files
LOG_ROOT  = BASE_DIR / "logs"              # per-category log directories
FLAG_FILE = DATA_DIR / "flag_fullsync.ok"  # presence = initial full sync already done

for p in (DATA_DIR, LOG_ROOT):
    p.mkdir(parents=True, exist_ok=True)

# One sub-directory per log category.
LOG_DIRS = {n: LOG_ROOT / n for n in
            ["errors", "updates", "inserts", "deletes", "operations", "api"]}
for p in LOG_DIRS.values():
    p.mkdir(exist_ok=True)

FMT = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s", "%Y-%m-%d %H:%M:%S")
def _logger(name: str, path: Path, level=logging.INFO):
    """Return a logger that writes to *path* and to stderr.

    Idempotent: if the named logger already has handlers it is returned
    untouched (propagation is still disabled), so repeated calls never
    duplicate output.
    """
    logger = logging.getLogger(name)
    logger.propagate = False
    if not logger.handlers:
        logger.setLevel(level)
        for handler in (logging.FileHandler(path, encoding="utf-8"),
                        logging.StreamHandler()):
            handler.setFormatter(FMT)
            logger.addHandler(handler)
        logger.info(f"{name} logger iniciado ({path})")
    return logger

# Category-specific loggers: each writes to its own file plus stderr.
# NOTE(review): log_del is never used in this file — confirm that DELETE
# logging is implemented elsewhere.
log_ops = _logger("operations", LOG_DIRS["operations"] / "operations.log")
log_err = _logger("errors",     LOG_DIRS["errors"]     / "errors.log", level=logging.ERROR)
log_upd = _logger("updates",    LOG_DIRS["updates"]    / "updates.log")
log_ins = _logger("inserts",    LOG_DIRS["inserts"]    / "inserts.log")
log_del = _logger("deletes",    LOG_DIRS["deletes"]    / "deletes.log")
log_api = _logger("api",        LOG_DIRS["api"]        / "api.log")

# ---------------------------------------------------------------------------
# MySQL connection pool (created at import time; credentials from env vars)
# ---------------------------------------------------------------------------
POOL = pooling.MySQLConnectionPool(
    pool_name   = "hits_pool",
    pool_size   = 6,
    host        = os.getenv("DB_HOST", "localhost"),
    user        = os.getenv("DB_USER", "root"),
    password    = os.getenv("DB_PASS", ""),
    database    = os.getenv("DB_NAME", "bihits"),
    charset     = "utf8mb4",
    autocommit  = False,   # callers commit/rollback explicitly
)
def db_conn():
    # Borrow a pooled connection; conn.close() returns it to the pool.
    return POOL.get_connection()

# ---------------------------------------------------------------------------
# Helpers de data / cache
# ---------------------------------------------------------------------------
def parse_iso(dt: str | None):
    """Convert an ISO-8601 string to a naive datetime in TZ, or None.

    A trailing 'Z' is rewritten to '+00:00' so ``fromisoformat`` accepts
    it.  Any parse failure is logged and yields None.

    NOTE(review): a naive input (no offset) is interpreted by
    ``astimezone`` as the *system* local time — confirm the API always
    sends an explicit offset.
    """
    if not dt:
        return None
    try:
        if dt.endswith("Z"):
            dt = dt[:-1] + "+00:00"
        local = datetime.fromisoformat(dt).astimezone(TZ)
        return local.replace(tzinfo=None)
    except Exception:
        log_err.exception("Erro ao converter data ISO: %s", dt)
        return None


def load_cache(path: Path):
    """Load the JSON list at *path* into a dict keyed by
    (idReservation, idEntity-or-idEntityCompany).

    Returns {} when the file does not exist.

    Fix: the previous ``i.get("idEntity") or i.get("idEntityCompany")``
    treated a legitimate falsy id (0) as missing and silently fell back
    to idEntityCompany, corrupting the cache key.  ``idEntityCompany`` is
    now used only when ``idEntity`` is absent or None.
    """
    if not path.is_file():
        return {}
    with path.open("r", encoding="utf-8") as f:
        data = json.load(f)
    cache = {}
    for item in data:
        entity = item.get("idEntity")
        if entity is None:
            entity = item.get("idEntityCompany")
        cache[(item["idReservation"], entity)] = item
    return cache

# ---------------------------------------------------------------------------
# JSON upsert helper
# ---------------------------------------------------------------------------
def upsert_json_file(path: Path, new_recs: List[dict], key_fields: Tuple[str, ...]):
    """Atomically merge *new_recs* into the JSON array stored at *path*.

    Records are matched on the tuple of *key_fields*: existing entries
    with a matching key are replaced in place (order preserved), unmatched
    new records are appended.  The merged list is written to a temp file
    and swapped in with ``os.replace`` so readers never see a half-written
    file.  A no-op when *new_recs* is empty.

    Fixes over the previous version:
    • a missing file is treated as an empty list instead of raising;
    • the merged list is fully built before the temp file is written, so a
      record missing a key field raises *before* any file is touched
      (previously a mid-write crash could leave a stale ``.tmp`` behind);
    • output is a single well-formed ``json.dump`` instead of hand-stitched
      fragments with inconsistent indentation.
    """
    if not new_recs:
        return

    # Map incoming records by their key tuple (raises early on bad records).
    new_map = {tuple(rec[k] for k in key_fields): rec for rec in new_recs}

    try:
        with path.open("r", encoding="utf-8") as fin:
            data = json.load(fin)
    except FileNotFoundError:
        data = []

    seen = set()
    merged = []
    for old in data:
        key = tuple(old[k] for k in key_fields)
        if key in new_map:
            merged.append(new_map[key])   # replace in place, keep order
            seen.add(key)
        else:
            merged.append(old)
    # Append records that did not match any existing entry.
    merged.extend(rec for key, rec in new_map.items() if key not in seen)

    tmp = path.with_suffix(path.suffix + ".tmp")
    with tmp.open("w", encoding="utf-8") as fout:
        json.dump(merged, fout, ensure_ascii=False, indent=2)
        fout.write("\n")
    os.replace(tmp, path)   # atomic swap on POSIX and Windows

    log_ops.info(
        "upsert_json_file: %s atualizado — %d updates, %d inserts",
        path.name, len(seen), len(new_recs) - len(seen)
    )

# ---------------------------------------------------------------------------
# API helpers
# ---------------------------------------------------------------------------
SESSION  = requests.Session()   # shared session: keep-alive / connection pooling
API_BASE = "https://api.hitspms.net/Datashare/WebCheckinOut"

def make_headers(token: str):
    """Build the fixed HITS-PMS request headers with *token* as Bearer auth."""
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
    }
    headers["Authorization"] = f"Bearer {token}"
    return headers

def get_token():
    """Fetch an API bearer token from the auth endpoint.

    The endpoint answers either with the plain-text prefix
    "Token obtido: <token>" or with a JSON body containing "token".
    Returns None (after logging) on any failure.
    """
    url = "https://bric-investment.com/BI-HITS/dados/Hits/token/token.php"
    credentials = {
        "username": os.getenv("API_USER"),
        "password": os.getenv("API_PASS"),
    }
    try:
        resp = SESSION.post(url, data=credentials, timeout=30)
        resp.raise_for_status()
        body = resp.text.strip()
        if body.startswith("Token obtido:"):
            return body.split(":", 1)[1].strip()
        return resp.json().get("token")
    except Exception as exc:
        log_err.error("Erro ao obter token: %s", exc)
        return None

def safe_get(url: str, headers: dict, params: dict=None, timeout: int=30):
    """GET *url* with up to 6 attempts, exponential backoff and 401/429 handling.

    Returns the requests.Response on success (``r.ok``), or None when the
    attempts are exhausted or a 401 cannot be resolved by refreshing the
    token.  NOTE(review): *headers* is mutated in place on token refresh —
    callers sharing a header dict should pass a copy.
    """
    delay = 5
    for _ in range(6):
        try:
            r = SESSION.get(url, headers=headers, params=params, timeout=timeout)
            log_api.info("GET %s → %s", r.url, r.status_code)
        except Exception as exc:
            # Network-level failure: back off (5s doubling, capped at 60s).
            log_err.error("RequestException: %s", exc)
            time.sleep(delay); delay = min(delay*2,60)
            continue
        if r.status_code == 401:
            # Expired token: refresh in place and retry immediately (no sleep).
            new_tk = get_token()
            if new_tk:
                headers["Authorization"] = f"Bearer {new_tk}"
                continue
            return None
        if r.status_code == 429:
            # Rate limited: honour Retry-After, fall back to current delay.
            wait = int(r.headers.get("Retry-After", delay))
            log_api.warning("429 recebido, aguardando %ds", wait)
            time.sleep(wait)
            continue
        if r.ok:
            return r
        # Any other non-OK status (e.g. 5xx): wait the current delay, retry.
        time.sleep(delay)
    log_err.error("safe_get: esgotou retries para %s", url)
    return None

# ---------------------------------------------------------------------------
# Fetchers
# ---------------------------------------------------------------------------
def fetch_reservations(token: str, start: str, end: str, t: int, st: int):
    """Page through /Reservations for one (Type, Status) combination.

    Fetches pages of PAGE_SIZE until an empty or short page is returned.
    A failed request stops the pagination; whatever was already collected
    is still returned (partial result).
    """
    url = f"{API_BASE}/Reservations"
    hdr = make_headers(token)
    collected: List[dict] = []
    page = 0
    while True:
        params = {"Type": t, "Status": st, "InitialDate": start,
                  "FinalDate": end, "Page": page, "Size": PAGE_SIZE}
        resp = safe_get(url, hdr, params)
        if not resp:
            log_err.error("fetch_reservations: falhou page %d (Type=%d Status=%d)", page, t, st)
            break
        batch = resp.json()
        log_api.info("fetch_reservations: Type=%d Status=%d page %d → %d regs", t, st, page, len(batch))
        if not batch:
            break
        collected.extend(batch)
        if len(batch) < PAGE_SIZE:
            break
        page += 1
    log_api.info("fetch_reservations: total único Type=%d Status=%d → %d", t, st, len(collected))
    return collected

def fetch_detail(token: str, rid: int):
    """Fetch the full detail payload of reservation *rid*; None on failure."""
    url = f"{API_BASE}/Reservation/{rid}"
    resp = safe_get(url, make_headers(token))
    if resp:
        return resp.json()
    log_err.error("fetch_detail: sem resposta para %s", rid)
    return None

# ---------------------------------------------------------------------------
# SQL statements & upserts
# ---------------------------------------------------------------------------
# Header upsert: one row per (idReservation, idEntity); the ON DUPLICATE KEY
# UPDATE path relies on the table's unique key.  raw_json keeps the complete
# API payload for auditing/reprocessing; dateAdd is never overwritten.
SQL_INS_RES = (
    "INSERT INTO reservations "
    "(idReservation,idEntity,name,phone,main,mail,zipCode,addressDetails,address,neighborhood,number,"
    "city,country,stateName,stateCode,federalRegistrationNumber,documentType,checkIn,checkOut,"
    "dateAdd,dateUp,status,integrator,channel,raw_json) "
    "VALUES (%(idReservation)s,%(idEntity)s,%(name)s,%(phone)s,%(main)s,%(mail)s,%(zipCode)s,"
    "%(addressDetails)s,%(address)s,%(neighborhood)s,%(number)s,%(city)s,%(country)s,%(stateName)s,"
    "%(stateCode)s,%(federalRegistrationNumber)s,%(documentType)s,%(checkIn)s,%(checkOut)s,"
    "%(dateAdd)s,%(dateUp)s,%(status)s,%(integrator)s,%(channel)s,%(raw_json)s) "
    "ON DUPLICATE KEY UPDATE "
    "name=VALUES(name),phone=VALUES(phone),main=VALUES(main),mail=VALUES(mail),zipCode=VALUES(zipCode),"
    "addressDetails=VALUES(addressDetails),address=VALUES(address),neighborhood=VALUES(neighborhood),"
    "number=VALUES(number),city=VALUES(city),country=VALUES(country),stateName=VALUES(stateName),"
    "stateCode=VALUES(stateCode),federalRegistrationNumber=VALUES(federalRegistrationNumber),"
    "documentType=VALUES(documentType),checkIn=VALUES(checkIn),checkOut=VALUES(checkOut),"
    "dateUp=VALUES(dateUp),status=VALUES(status),integrator=VALUES(integrator),"
    "channel=VALUES(channel),raw_json=VALUES(raw_json)"
)
# Detail upsert: main row per (idReservation, idEntityCompany); dateAdd is
# likewise preserved on update.
SQL_INS_DET_MAIN = (
    "INSERT INTO reservation_details "
    "(idReservation,idEntityCompany,companyName,idRequesterCompany,requesterCompanyName,groupName,"
    "contactName,contact1,contact2,dateAdd,dateUp,creditState) "
    "VALUES (%(idReservation)s,%(idEntityCompany)s,%(companyName)s,%(idRequesterCompany)s,"
    "%(requesterCompanyName)s,%(groupName)s,%(contactName)s,%(contact1)s,%(contact2)s,"
    "%(dateAdd)s,%(dateUp)s,%(creditState)s) "
    "ON DUPLICATE KEY UPDATE "
    "companyName=VALUES(companyName),idRequesterCompany=VALUES(idRequesterCompany),"
    "requesterCompanyName=VALUES(requesterCompanyName),groupName=VALUES(groupName),"
    "contactName=VALUES(contactName),contact1=VALUES(contact1),contact2=VALUES(contact2),"
    "dateUp=VALUES(dateUp),creditState=VALUES(creditState)"
)
# Child-table inserts: plain INSERTs with positional placeholders; rows are
# tuples built in column order by upsert_reservation_details, which first
# DELETEs all children of the detail being replaced.
SQL_NOTE  = "INSERT INTO reservation_notes (idReservation,idEntityCompany,note_index,note) VALUES (%s,%s,%s,%s)"
SQL_ROOM  = ("INSERT INTO reservation_rooms "
             "(idReservation,idEntityCompany,room_index,idRoom,code,checkIn,checkOut,idRoomType,"
             "roomTypeName,amount,status,pax,chd1,chd2,chd3,"
             "ratePlanId,ratePlanName,mealPlanDesc) "
             "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
SQL_REQ   = "INSERT INTO reservation_room_requirements (idReservation,idEntityCompany,room_index,req_index,requirement) VALUES (%s,%s,%s,%s,%s)"
SQL_GUEST = ("INSERT INTO reservation_guests "
             "(idReservation,idEntityCompany,guest_index,idEntity,name,idRoom,contactMail,contactPhone,main,"
             "federalRegistrationNumber,documentType,gender,birthDate,mainDocType,docCpfCnpjPassport,"
             "addressZipCode,addressCountry,addressStateCode,addressStateName,addressCity,addressNeighborhood,"
             "addressAddress,addressDetails,addressNumber) "
             "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")
SQL_GNOTE = "INSERT INTO reservation_guest_notes (idReservation,idEntityCompany,guest_index,note_index,note_text) VALUES (%s,%s,%s,%s,%s)"
SQL_COMM  = "INSERT INTO reservation_commissions (idReservation,idEntityCompany,comm_index,idEntity,percentage,value) VALUES (%s,%s,%s,%s,%s,%s)"
SQL_REV   = ("INSERT INTO reservation_revenue "
             "(idReservation,idEntityCompany,marketSegmentId,marketSegmentName,idReservationChannel,nameChannel,"
             "purposeStayId,purpose,managerID,managerName) "
             "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)")

def get_existing_reservations_map():
    """Return {(idReservation, idEntity): dateUp} for every stored reservation.

    Fix: cursor and connection are now released in ``finally`` blocks, so a
    failed query can no longer leak a pooled connection (the previous
    version only closed them on the success path).
    """
    conn = db_conn()
    try:
        cur = conn.cursor()
        try:
            cur.execute("SELECT idReservation,idEntity,dateUp FROM reservations")
            return {(r, e): du for r, e, du in cur}
        finally:
            cur.close()
    finally:
        conn.close()   # returns the connection to the pool

def get_existing_details_map():
    """Return {(idReservation, idEntityCompany): dateUp} for stored details.

    Fix: cursor and connection are now released in ``finally`` blocks, so a
    failed query can no longer leak a pooled connection (the previous
    version only closed them on the success path).
    """
    conn = db_conn()
    try:
        cur = conn.cursor()
        try:
            cur.execute("SELECT idReservation,idEntityCompany,dateUp FROM reservation_details")
            return {(r, e): du for r, e, du in cur}
        finally:
            cur.close()
    finally:
        conn.close()   # returns the connection to the pool

def upsert_reservations(recs: List[dict]) -> int:
    """Upsert reservation headers into the ``reservations`` table.

    Only records that are new, or whose dateUp is newer than the stored
    value, are written (INSERT … ON DUPLICATE KEY UPDATE).  Returns the
    number of rows sent to the database.

    Fix: ``parse_iso`` may return None (missing/invalid dateUp) and the
    stored dateUp may be NULL — the old
    ``parse_iso(r["dateUp"]) > existing[key]`` comparison raised
    ``TypeError`` in either case (and ``r["dateUp"]`` raised KeyError when
    absent).  A missing/unparsable timestamp on either side is now treated
    as "changed", so the row is rewritten instead of crashing the cycle.
    """
    if not recs:
        log_upd.info("upsert_reservations: nenhum registro novo/atualizado encontrado")
        return 0
    existing = get_existing_reservations_map()
    rows = []
    for r in recs:
        key = (r["idReservation"], r["idEntity"])
        new_dt = parse_iso(r.get("dateUp"))
        old_dt = existing.get(key)
        # Skip only when both timestamps are comparable and the stored one
        # is at least as recent.
        if key in existing and new_dt is not None and old_dt is not None and new_dt <= old_dt:
            continue
        rows.append({
            "idReservation": r["idReservation"],
            "idEntity":      r["idEntity"],
            "name":          r.get("name"),
            "phone":         r.get("phone"),
            "main":          r.get("main"),
            "mail":          r.get("mail"),
            "zipCode":       r.get("zipCode"),
            "addressDetails":r.get("addressDetails"),
            "address":       r.get("address"),
            "neighborhood":  r.get("neighborhood"),
            "number":        r.get("number"),
            "city":          r.get("city"),
            "country":       r.get("country"),
            "stateName":     r.get("stateName"),
            "stateCode":     r.get("stateCode"),
            "federalRegistrationNumber": r.get("federalRegistrationNumber"),
            "documentType":  r.get("documentType"),
            "checkIn":       parse_iso(r.get("checkIn")),
            "checkOut":      parse_iso(r.get("checkOut")),
            "dateAdd":       parse_iso(r.get("dateAdd")),
            "dateUp":        new_dt,
            "status":        r.get("status"),
            "integrator":    r.get("integrator"),
            "channel":       r.get("channel"),
            "raw_json":      json.dumps(r, ensure_ascii=False),  # full payload for auditing
        })
    if not rows:
        return 0
    conn = db_conn(); cur = conn.cursor()
    try:
        cur.executemany(SQL_INS_RES, rows)
        conn.commit()
        log_ins.info("upsert_reservations: inseridos/atualizados %d registros", len(rows))
        return len(rows)
    except Exception:
        conn.rollback(); log_err.exception("upsert_reservations"); raise
    finally:
        cur.close(); conn.close()

def upsert_reservation_details(details: List[dict]) -> int:
    """Upsert detail payloads and all child tables in one transaction.

    Details whose stored dateUp is already at least as recent are skipped.
    For each processed detail the main row is upserted and every child
    table (notes, rooms, room requirements, guests, guest notes,
    commissions, revenue) is deleted then re-inserted, keeping the
    ``*_index`` columns dense.  The whole batch commits or rolls back as a
    unit.  Returns the number of details processed.

    Fix: ``parse_iso`` may return None and the stored dateUp may be NULL —
    the old ``dt_a <= existing[key]`` comparison then raised ``TypeError``
    and rolled back the entire batch.  A missing/unparsable timestamp on
    either side now forces a (re)write instead of crashing.
    """
    if not details:
        log_upd.info("upsert_reservation_details: nenhum detalhe novo/atualizado a processar")
        return 0
    existing = get_existing_details_map()
    conn = db_conn(); cur = conn.cursor()
    try:
        conn.start_transaction()
        processed = 0
        for det in details:
            key  = (det["idReservation"], det["idEntityCompany"])
            dt_a = parse_iso(det.get("dateUp"))
            old_dt = existing.get(key)
            # Skip only when both sides have a comparable timestamp.
            if key in existing and dt_a is not None and old_dt is not None and dt_a <= old_dt:
                continue
            cur.execute(SQL_INS_DET_MAIN, {
                "idReservation": det["idReservation"],
                "idEntityCompany": det["idEntityCompany"],
                "companyName": det.get("companyName"),
                "idRequesterCompany": det.get("idRequesterCompany"),
                "requesterCompanyName": det.get("requesterCompanyName"),
                "groupName": det.get("groupName"),
                "contactName": det.get("contactName"),
                "contact1": det.get("contact1"),
                "contact2": det.get("contact2"),
                "dateAdd": parse_iso(det.get("dateAdd")),
                "dateUp": dt_a,
                "creditState": det.get("creditState"),
            })
            # Children are replaced wholesale: delete-then-insert keeps the
            # index columns consistent with the fresh payload.
            for tbl in ["reservation_notes","reservation_room_requirements","reservation_rooms",
                        "reservation_guest_notes","reservation_guests","reservation_commissions","reservation_revenue"]:
                cur.execute(f"DELETE FROM {tbl} WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.executemany(SQL_NOTE, [
                (det["idReservation"], det["idEntityCompany"], i, n.get("note"))
                for i, n in enumerate(det.get("notes", []))
            ])
            room_rows, req_rows = [], []
            for r_idx, room in enumerate(det.get("rooms", [])):
                room_rows.append((
                    det["idReservation"], det["idEntityCompany"], r_idx,
                    room.get("idRoom"), room.get("code"), room.get("checkIn"), room.get("checkOut"),
                    room.get("idRoomType"), room.get("roomTypeName"), room.get("amount"),
                    room.get("status"), room.get("pax"), room.get("chd1"), room.get("chd2"),
                    room.get("chd3"), room.get("ratePlanId"), room.get("ratePlanName"), room.get("mealPlanDesc"),
                ))
                # Requirements are stored as raw JSON blobs, one per row.
                for q_idx, req in enumerate(room.get("requirementReservation", [])):
                    req_rows.append((det["idReservation"], det["idEntityCompany"], r_idx, q_idx, json.dumps(req, ensure_ascii=False)))
            if room_rows: cur.executemany(SQL_ROOM, room_rows)
            if req_rows:  cur.executemany(SQL_REQ,  req_rows)
            g_rows, gn_rows = [], []
            for g_idx, g in enumerate(det.get("guests", [])):
                g_rows.append((
                    det["idReservation"], det["idEntityCompany"], g_idx,
                    g.get("idEntity"), g.get("name"), g.get("idRoom"), g.get("contactMail"),
                    g.get("contactPhone"), g.get("main"), g.get("federalRegistrationNumber"),
                    g.get("documentType"), g.get("gender"), g.get("birthDate"), g.get("mainDocType"),
                    g.get("docCpfCnpjPassport"), g.get("addressZipCode"), g.get("addressCountry"),
                    g.get("addressStateCode"), g.get("addressStateName"), g.get("addressCity"),
                    g.get("addressNeighborhood"), g.get("addressAddress"), g.get("addressDetails"),
                    g.get("addressNumber"),
                ))
                for n_idx, note in enumerate(g.get("notes", [])):
                    gn_rows.append((det["idReservation"], det["idEntityCompany"], g_idx, n_idx, note.get("note")))
            if g_rows:  cur.executemany(SQL_GUEST, g_rows)
            if gn_rows: cur.executemany(SQL_GNOTE, gn_rows)
            c_rows = [
                (det["idReservation"], det["idEntityCompany"], i, c.get("idEntity"), c.get("percentage"), c.get("value"))
                for i, c in enumerate(det.get("commissions", []))
            ]
            if c_rows: cur.executemany(SQL_COMM, c_rows)
            rm = det.get("revenueManagement", {})
            if rm:
                cur.execute(SQL_REV, (
                    det["idReservation"], det["idEntityCompany"],
                    rm.get("marketSegmentId"), rm.get("marketSegmentName"),
                    rm.get("idReservationChannel"), rm.get("nameChannel"),
                    rm.get("purposeStayId"), rm.get("purpose"),
                    rm.get("managerID"), rm.get("managerName"),
                ))
            processed += 1
        conn.commit()
        log_ins.info("upsert_reservation_details: processados %d detalhes", processed)
        return processed
    except Exception:
        conn.rollback(); log_err.exception("upsert_reservation_details"); raise
    finally:
        cur.close(); conn.close()

def truncate_tables():
    """Empty every integration table ahead of the initial full sync.

    Children are truncated before parents; FK checks are disabled around
    the TRUNCATEs because the tables reference each other.

    Fix: FOREIGN_KEY_CHECKS is re-enabled and the connection returned to
    the pool in ``finally`` blocks, so a failed TRUNCATE can no longer
    leave the pooled session with FK checks off or leak the connection.
    """
    conn = db_conn()
    cur = conn.cursor()
    try:
        cur.execute("SET FOREIGN_KEY_CHECKS=0")
        try:
            for tbl in ["reservation_room_requirements","reservation_rooms","reservation_guest_notes",
                        "reservation_guests","reservation_commissions","reservation_revenue",
                        "reservation_notes","reservation_details","reservations"]:
                cur.execute(f"TRUNCATE TABLE {tbl}")
        finally:
            # Always restore FK checks on this (pooled, reusable) session.
            cur.execute("SET FOREIGN_KEY_CHECKS=1")
        conn.commit()
    finally:
        cur.close()
        conn.close()
    log_ops.info("Tabelas truncadas – full-sync inicial")

# ---------------------------------------------------------------------------
# Loop principal
# ---------------------------------------------------------------------------
def main():
    """Main synchronization loop (runs forever).

    Each POLL_INTERVAL cycle:
      1. compute the incremental window [last_dt - 5min, now];
      2. fetch reservation headers for every (Type, Status) pair;
      3. fetch the full detail of every reservation seen in the window;
      4. upsert headers and details into MySQL and the JSON caches;
      5. advance last_dt to the newest timestamp seen.

    NOTE(review): the module docstring promises DELETE of reservations
    that vanished from the API, but this loop never deletes — confirm
    whether that step lives elsewhere.
    """
    path_res = DATA_DIR / "reservations.json"
    path_det = DATA_DIR / "reservation_details.json"

    # ─────────── Create the JSON caches as empty lists if missing ───────────
    for p in (path_res, path_det):
        if not p.exists():
            p.parent.mkdir(parents=True, exist_ok=True)
            p.write_text("[]", encoding="utf-8")
            log_ops.info(f"{p.name} não existia; criado vazio")

    # ─────────── Decide whether to run the initial full sync ───────────
    if not FLAG_FILE.exists():
        truncate_tables()
        FLAG_FILE.touch()
        last_dt = parse_iso(FULL_SYNC_START)
    else:
        # Resume from the newest dateUp present in the header cache.
        cache = load_cache(path_res)
        last_dt = max(
            (parse_iso(r["dateUp"]) for r in cache.values()),
            default=parse_iso(FULL_SYNC_START)
        )

    # Load both caches into memory (keyed by id tuples).
    old_res = load_cache(path_res)
    old_det = load_cache(path_det)

    while True:
        # NOTE(review): `changed` is set but never read — dead flag?
        changed = False
        # NOTE(review): last_dt is naive America/Fortaleza time but the
        # window strings carry a 'Z' (UTC) suffix — the ~3h mismatch widens
        # the window (over-fetch, not data loss); confirm intentional.
        # datetime.utcnow() is also deprecated in favour of
        # datetime.now(timezone.utc).
        win_start = (last_dt - timedelta(minutes=5)).strftime("%Y-%m-%dT%H:%M:%SZ")
        win_end   = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
        log_ops.info("Window %s → %s", win_start, win_end)

        token = get_token()
        if not token:
            time.sleep(POLL_INTERVAL)
            continue

        # ─────────── Incremental fetch of headers from the API ───────────
        all_api     : Dict[Tuple[int,int], dict] = {}
        res_last_dt : Dict[int, datetime]      = defaultdict(lambda: datetime(1970,1,1))

        for t in TYPES:
            for st in STATUSES:
                batch = fetch_reservations(token, win_start, win_end, t, st)
                for r in batch:
                    k = (r["idReservation"], r["idEntity"])
                    # use dateAdd for inclusions (Type=1), dateUp otherwise
                    dt = parse_iso(r["dateAdd"]) if t == 1 else parse_iso(r["dateUp"])
                    # NOTE(review): parse_iso may return None, making the
                    # comparisons below raise TypeError — confirm the API
                    # always sends both timestamps.
                    if k not in all_api or dt > parse_iso(
                            all_api[k].get("dateUp" if t != 1 else "dateAdd")
                        ):
                        all_api[k] = r
                    res_last_dt[r["idReservation"]] = max(
                        res_last_dt[r["idReservation"]],
                        dt
                    )

        log_ops.info(
            "main: total de cabeçalhos únicos da API neste ciclo → %d",
            len(all_api)
        )

        # ─────────── Keep only headers genuinely new/updated vs the cache ───────────
        new_headers = [
            rec for k, rec in all_api.items()
            if k not in old_res
               or parse_iso(rec["dateUp"]) > parse_iso(old_res[k]["dateUp"])
        ]
        log_ops.info(
            "main: headers novos/atualizados → %d",
            len(new_headers)
        )

        # ─────────── Fetch details for every reservation seen this cycle ───────────
        new_details: List[dict] = []
        total_to_fetch = len(res_last_dt)
        for idx, rid in enumerate(res_last_dt.keys(), start=1):
            log_ops.info("[%d/%d] fetching detail %s", idx, total_to_fetch, rid)
            det = fetch_detail(token, rid)
            if det:
                new_details.append(det)
                old_det[(det["idReservation"], det["idEntityCompany"])] = det
                changed = True
            else:
                log_err.error("main: fetch_detail falhou para reservation %s", rid)
            time.sleep(1.0)  # simple client-side rate limiting

        log_ops.info(
            "main: detalhes efetivamente recebidos → %d",
            len(new_details)
        )

        # ─────────── MySQL upserts ───────────
        # All headers are passed on purpose: upsert_reservations applies
        # its own dateUp filter against the database.
        n_res = upsert_reservations(list(all_api.values()))
        n_det = upsert_reservation_details(new_details)
        log_ops.info(
            "main: upsert_reservations → %d, upsert_reservation_details → %d",
            n_res, n_det
        )

        # ─────────── Update the JSON caches in place ───────────
        upsert_json_file(path_res, new_headers, ("idReservation", "idEntity"))
        upsert_json_file(path_det, new_details, ("idReservation", "idEntityCompany"))

        # ─────────── Advance last_dt (only when something was seen) ───────────
        if res_last_dt:
            last_dt = max(res_last_dt.values())
        else:
            log_ops.info("main: nenhum header novo; last_dt permanece %s", last_dt.isoformat())

        time.sleep(POLL_INTERVAL)


if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        # Last-resort guard: log the full traceback of any fatal error
        # before the process exits.
        log_err.exception("Erro fatal no main: %s", e)
