import sys
import requests
import json
import os
import time
import mysql.connector
import logging
from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo

# Configuration constants.
TZ = ZoneInfo('America/Fortaleza')  # local timezone -- NOTE(review): unused in this file, confirm it is still needed
FULL_SYNC_START = '2025-05-01T00:00:00Z'  # lower bound of the very first sync window
POLL_INTERVAL = 300  # seconds between sync cycles (5 minutes)
PAGE_SIZE = 50  # page size requested from the reservations API
STATUSES = [1, 2, 3, 4]  # reservation status codes fetched each cycle (semantics defined by the HITS API)

# Detect the base directory for both plain .py runs and frozen executables.
def get_base_dirs():
    """Return ``(base_dir, data_dir, log_root)`` for the current runtime.

    When running as a packaged executable (``sys.frozen``), everything
    lives next to the binary; as a plain script, the project root is one
    level above this file.
    """
    if getattr(sys, 'frozen', False):
        root = os.path.dirname(sys.executable)
    else:
        script_dir = os.path.dirname(os.path.abspath(__file__))
        root = os.path.dirname(script_dir)
    return root, os.path.join(root, 'data'), os.path.join(root, 'logs')

# Resolve runtime directories once at import time.
BASE_DIR, DATA_DIR, LOG_ROOT = get_base_dirs()
# Marker file: its existence means the initial full-sync has already run.
FLAG_FILE = os.path.join(DATA_DIR, 'flag_fullsync.ok')

# One sub-directory per log category.
LOG_DIRS = {
    'errors':     os.path.join(LOG_ROOT, 'errors'),
    'updates':    os.path.join(LOG_ROOT, 'updates'),
    'inserts':    os.path.join(LOG_ROOT, 'inserts'),
    'operations': os.path.join(LOG_ROOT, 'operations'),
    'api':        os.path.join(LOG_ROOT, 'api'),
}

# Ensure data/log directories exist before any logger touches them.
os.makedirs(DATA_DIR, exist_ok=True)
for path in LOG_DIRS.values():
    os.makedirs(path, exist_ok=True)

# Logger factory.
def setup_logger(name, path, level=logging.INFO):
    """Create (once) a named logger writing both to *path* and to stdout.

    Subsequent calls with the same name return the already-configured
    logger untouched; the file handler is only attached on first creation.
    """
    logger = logging.getLogger(name)
    logger.propagate = False
    if logger.hasHandlers():
        # Already configured on an earlier call.
        return logger
    logger.setLevel(level)
    formatter = logging.Formatter(
        '%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S'
    )
    for handler in (logging.FileHandler(path, encoding='utf-8'),
                    logging.StreamHandler()):
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    logger.info(f"{name} logger iniciado em {path}")
    return logger

# Category loggers, created at import time (one log file per category).
# NOTE(review): log_err is configured at level ERROR, so .warning() calls on
# it are filtered out -- confirm that is intended.
log_ops = setup_logger('operations', os.path.join(LOG_DIRS['operations'], 'operations.log'))
log_err = setup_logger('errors', os.path.join(LOG_DIRS['errors'], 'errors.log'), level=logging.ERROR)
log_upd = setup_logger('updates', os.path.join(LOG_DIRS['updates'], 'updates.log'))
log_ins = setup_logger('inserts', os.path.join(LOG_DIRS['inserts'], 'inserts.log'))
log_api = setup_logger('api', os.path.join(LOG_DIRS['api'], 'api.log'))

# Database connection.
def get_db_connection():
    """Open a new MySQL connection configured via DB_* environment variables."""
    settings = {
        'host': os.getenv('DB_HOST', 'localhost'),
        'user': os.getenv('DB_USER', 'root'),
        'password': os.getenv('DB_PASS', ''),
        'database': os.getenv('DB_NAME', 'bihits'),
    }
    return mysql.connector.connect(**settings)

# Parse ISO-8601 string → naive datetime in UTC.
def parse_iso(dt_str):
    """Parse an ISO-8601 timestamp into a naive UTC datetime.

    Accepts a trailing 'Z' (translated to '+00:00' for pre-3.11
    ``fromisoformat``), explicit offsets, and naive strings.

    Args:
        dt_str: ISO-8601 string, or None/empty.

    Returns:
        Naive ``datetime`` expressed in UTC, or None for falsy input.
    """
    if not dt_str:
        return None
    if dt_str.endswith('Z'):
        dt_str = dt_str[:-1] + '+00:00'
    dt = datetime.fromisoformat(dt_str)
    if dt.tzinfo is None:
        # Bug fix: a naive timestamp used to go through astimezone(), which
        # interprets it as *local* time and shifts it by the machine's UTC
        # offset.  The API's timestamps are UTC, so keep the value as-is.
        return dt
    return dt.astimezone(timezone.utc).replace(tzinfo=None)

# JSON cache loading.
def load_cache(path):
    """Load a JSON list cache and index it by (idReservation, entity id).

    Returns an empty dict when the file does not exist.  The second key
    component is ``idEntity`` for reservation headers and
    ``idEntityCompany`` for detail records.
    """
    if not os.path.isfile(path):
        return {}
    with open(path, 'r', encoding='utf-8') as fh:
        items = json.load(fh)
    cache = {}
    for item in items:
        entity = item.get('idEntity') or item.get('idEntityCompany')
        cache[(item['idReservation'], entity)] = item
    return cache

def save_cache(path, data_dict):
    """Persist the cache's values (a dict keyed by id tuples) as a JSON list."""
    payload = list(data_dict.values())
    with open(path, 'w', encoding='utf-8') as out:
        json.dump(payload, out, ensure_ascii=False, indent=2)

# Current DB state maps.
def get_existing_reservations_map():
    """Return ``{(idReservation, idEntity): dateUp}`` for every stored reservation."""
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("SELECT idReservation, idEntity, dateUp FROM reservations")
    result = {(row[0], row[1]): row[2] for row in cursor}
    cursor.close()
    conn.close()
    return result

def get_existing_details_map():
    """Return ``{(idReservation, idEntityCompany): dateUp}`` for every stored detail row."""
    conn = get_db_connection()
    cursor = conn.cursor()
    cursor.execute("SELECT idReservation, idEntityCompany, dateUp FROM reservation_details")
    result = {(row[0], row[1]): row[2] for row in cursor}
    cursor.close()
    conn.close()
    return result

# Initial full-sync: wipe all reservation tables.
def truncate_tables():
    """Truncate every reservation-related table before the initial full-sync.

    Foreign-key checks are disabled while truncating so tables can be
    cleared in any order; failure to truncate an individual table is
    logged but does not abort the wipe.
    """
    conn = get_db_connection()
    cur = conn.cursor()
    cur.execute("SET FOREIGN_KEY_CHECKS=0;")
    # Child tables first, parents last (cosmetic with FK checks off).
    for tbl in [
        'reservation_room_requirements',
        'reservation_rooms',
        'reservation_guest_notes',
        'reservation_guests',
        'reservation_commissions',
        'reservation_revenue',
        'reservation_notes',
        'reservation_details',
        'reservations',
    ]:
        try:
            cur.execute(f"TRUNCATE TABLE {tbl};")
        except mysql.connector.Error as e:
            # Bug fix: this was log_err.warning(), but the 'errors' logger is
            # configured at level ERROR, so the message was silently dropped.
            log_err.error(f"Não foi possível truncar {tbl}: {e}")
    cur.execute("SET FOREIGN_KEY_CHECKS=1;")
    conn.commit()
    cur.close()
    conn.close()
    log_ops.info("Tabelas truncadas para full-sync inicial.")

# Token retrieval.
def get_token():
    """Fetch an API bearer token; return None on any failure.

    The token endpoint answers either in plain text ('Token obtido: X')
    or in JSON ({'token': ...}); both shapes are handled.
    """
    url = "https://bric-investment.com/BI-HITS/dados/Hits/token/token.php"
    payload = {"username": os.getenv('API_USER'), "password": os.getenv('API_PASS')}
    try:
        response = requests.post(url, data=payload, timeout=30)
        response.raise_for_status()
        body = response.text.strip()
        if body.startswith('Token obtido:'):
            return body.split(':', 1)[1].strip()
        return response.json().get('token')
    except Exception as e:
        log_err.error(f"Erro ao obter token: {e}")
        return None

# HTTP GET with backoff on 429, token renewal on 401 and retry on errors.
def safe_get(url, headers, params=None, timeout=30):
    """GET *url* with up to 6 attempts, handling transient failures.

    - 401: renew the bearer token in-place in *headers* and retry.
    - 429: honour Retry-After (falling back to exponential backoff) and retry.
    - network errors / other HTTP errors: log, back off, retry.

    Returns the successful Response, or None after exhausting the attempts
    (or when a 401 cannot be fixed by renewing the token).
    """
    delay = 5
    for attempt in range(6):
        try:
            r = requests.get(url, headers=headers, params=params, timeout=timeout)
        except requests.RequestException as e:
            # Bug fix: connection resets / timeouts used to propagate out of
            # safe_get and crash the sync loop; now they back off and retry.
            log_err.error(f"Request error: {e}")
            time.sleep(delay)
            delay = min(delay * 2, 60)
            continue
        log_api.info(f"GET {r.url} → {r.status_code}")
        if r.status_code == 401:
            log_api.warning("401 Unauthorized — renovando token e retry")
            new_token = get_token()
            if new_token:
                headers["Authorization"] = f"Bearer {new_token}"
                time.sleep(1)
                continue
            return None
        if r.status_code == 429:
            ra = r.headers.get("Retry-After")
            wait = int(ra) if ra and ra.isdigit() else delay
            log_api.warning(f"429 rate limit, aguardando {wait}s")
            time.sleep(wait)
            delay = min(delay * 2, 60)
            continue
        try:
            r.raise_for_status()
            return r
        except Exception as e:
            log_err.error(f"Request error: {e}")
            time.sleep(delay)
            delay = min(delay * 2, 60)
    log_err.error(f"Falha após múltiplas tentativas: {url}")
    return None

# Fetch reservation headers page by page for one status/type combination.
def fetch_reservations(token, start_date, end_date, type_filter, status):
    """Return all reservations matching the filters, following pagination.

    Pages are requested until an empty or short page is returned, or until
    ``safe_get`` gives up.
    """
    url = "https://api.hitspms.net/Datashare/WebCheckinOut/Reservations"
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }
    collected = []
    page = 0
    while True:
        resp = safe_get(url, headers, {
            "Type": type_filter,
            "Status": status,
            "InitialDate": start_date,
            "FinalDate": end_date,
            "Page": page,
            "Size": PAGE_SIZE
        })
        if resp is None:
            break
        batch = resp.json()
        if not batch:
            break
        collected.extend(batch)
        # A short page means we just read the last one.
        if len(batch) < PAGE_SIZE:
            break
        page += 1
        time.sleep(1)  # be gentle with the API between pages
    return collected

# Fetch the full detail payload for a single reservation.
def fetch_reservation_by_id(token, reservation_id):
    """Return the detail dict for *reservation_id*, or None on failure."""
    url = f"https://api.hitspms.net/Datashare/WebCheckinOut/Reservation/{reservation_id}"
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }
    response = safe_get(url, headers)
    return response.json() if response else None

# Upsert into `reservations` (the raw API payload is serialized into raw_json).
def upsert_reservations(res_list):
    """Insert or update reservation header rows from API payloads.

    A row is written only when its (idReservation, idEntity) pair is new or
    when the API's dateUp is strictly newer than the value stored in MySQL.
    The full API dict is also stored as JSON in the raw_json column.

    Args:
        res_list: list of reservation dicts from the HITS API.  Each item
            must carry 'idReservation', 'idEntity' and 'dateUp'; all other
            fields are optional and default to NULL.
    """
    # Snapshot of current DB state: {(idReservation, idEntity): dateUp}.
    existing = get_existing_reservations_map()
    sql = """
    INSERT INTO reservations
      (idReservation,idEntity,name,phone,main,mail,
       zipCode,addressDetails,address,neighborhood,number,city,
       country,stateName,stateCode,federalRegistrationNumber,
       documentType,checkIn,checkOut,dateAdd,dateUp,status,integrator,channel,raw_json)
    VALUES
      (%(idReservation)s,%(idEntity)s,%(name)s,%(phone)s,%(main)s,%(mail)s,
       %(zipCode)s,%(addressDetails)s,%(address)s,%(neighborhood)s,%(number)s,%(city)s,
       %(country)s,%(stateName)s,%(stateCode)s,%(federalRegistrationNumber)s,
       %(documentType)s,%(checkIn)s,%(checkOut)s,%(dateAdd)s,%(dateUp)s,
       %(status)s,%(integrator)s,%(channel)s,%(raw_json)s)
    ON DUPLICATE KEY UPDATE
      name=VALUES(name),phone=VALUES(phone),main=VALUES(main),mail=VALUES(mail),
      zipCode=VALUES(zipCode),addressDetails=VALUES(addressDetails),address=VALUES(address),
      neighborhood=VALUES(neighborhood),number=VALUES(number),city=VALUES(city),
      country=VALUES(country),stateName=VALUES(stateName),stateCode=VALUES(stateCode),
      federalRegistrationNumber=VALUES(federalRegistrationNumber),
      documentType=VALUES(documentType),checkIn=VALUES(checkIn),checkOut=VALUES(checkOut),
      dateUp=VALUES(dateUp),status=VALUES(status),integrator=VALUES(integrator),
      channel=VALUES(channel),raw_json=VALUES(raw_json)
    """
    conn = get_db_connection()
    cur = conn.cursor()
    for r in res_list:
        key = (r['idReservation'], r['idEntity'])
        dt_api = parse_iso(r['dateUp'])
        # NOTE(review): if dateUp is None/missing, dt_api is None and the
        # comparison below raises TypeError -- assumes the API always sends
        # dateUp.  Also assumes the DB dateUp column compares cleanly against
        # the naive-UTC datetime returned by parse_iso -- confirm column TZ.
        if key not in existing or dt_api > existing[key]:
            params = {
                'idReservation': r['idReservation'],
                'idEntity': r['idEntity'],
                'name': r.get('name'),
                'phone': r.get('phone'),
                'main': r.get('main'),
                'mail': r.get('mail'),
                'zipCode': r.get('zipCode'),
                'addressDetails': r.get('addressDetails'),
                'address': r.get('address'),
                'neighborhood': r.get('neighborhood'),
                'number': r.get('number'),
                'city': r.get('city'),
                'country': r.get('country'),
                'stateName': r.get('stateName'),
                'stateCode': r.get('stateCode'),
                'federalRegistrationNumber': r.get('federalRegistrationNumber'),
                'documentType': r.get('documentType'),
                'checkIn': r.get('checkIn'),
                'checkOut': r.get('checkOut'),
                'dateAdd': r.get('dateAdd'),
                'dateUp': r.get('dateUp'),
                'status': r.get('status'),
                'integrator': r.get('integrator'),
                'channel': r.get('channel'),
                # Keep the complete original payload for auditing/debugging.
                'raw_json': json.dumps(r, ensure_ascii=False)
            }
            cur.execute(sql, params)
            if key not in existing:
                log_ins.info(f"[INSERT] reservations {key}")
            else:
                log_upd.info(f"[UPDATE] reservations {key}")
    conn.commit()
    cur.close()
    conn.close()

# Upsert into reservation_details and its child tables.
def upsert_reservation_details(details_list):
    """Insert or update reservation detail rows and their child records.

    For each detail dict that is new or has a newer dateUp than the stored
    row, the main reservation_details row is upserted and ALL child rows
    (notes, rooms, room requirements, guests, guest notes, commissions,
    revenue) are deleted and re-inserted from the payload.  Everything is
    committed in a single transaction at the end.

    Args:
        details_list: list of detail dicts from the HITS API.  Each item
            must carry 'idReservation', 'idEntityCompany' and 'dateUp'.
    """
    # Snapshot of current DB state: {(idReservation, idEntityCompany): dateUp}.
    existing = get_existing_details_map()
    conn = get_db_connection()
    cur = conn.cursor()

    sql_main = """
    INSERT INTO reservation_details
      (idReservation, idEntityCompany, companyName, idRequesterCompany,
       requesterCompanyName, groupName, contactName, contact1, contact2,
       dateAdd, dateUp, creditState)
    VALUES
      (%(idReservation)s, %(idEntityCompany)s, %(companyName)s, %(idRequesterCompany)s,
       %(requesterCompanyName)s, %(groupName)s, %(contactName)s, %(contact1)s, %(contact2)s,
       %(dateAdd)s, %(dateUp)s, %(creditState)s)
    ON DUPLICATE KEY UPDATE
      companyName = VALUES(companyName),
      idRequesterCompany = VALUES(idRequesterCompany),
      requesterCompanyName = VALUES(requesterCompanyName),
      groupName = VALUES(groupName),
      contactName = VALUES(contactName),
      contact1 = VALUES(contact1),
      contact2 = VALUES(contact2),
      dateUp = VALUES(dateUp),
      creditState = VALUES(creditState)
    """

    sql_note = """
    INSERT INTO reservation_notes
      (idReservation, idEntityCompany, note_index, note)
    VALUES (%s, %s, %s, %s)
    """

    sql_room = """
    INSERT INTO reservation_rooms
      (idReservation, idEntityCompany, room_index, idRoom, code,
       checkIn, checkOut, idRoomType, roomTypeName, amount,
       status, pax, chd1, chd2, chd3,
       ratePlanId, ratePlanName, mealPlanDesc)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
            %s, %s, %s, %s, %s, %s, %s, %s)
    """

    sql_room_req = """
    INSERT INTO reservation_room_requirements
      (idReservation, idEntityCompany, room_index, req_index, requirement)
    VALUES (%s, %s, %s, %s, %s)
    """

    sql_guest = """
        INSERT INTO reservation_guests
        (idReservation, idEntityCompany, guest_index, idEntity, name, idRoom,
        contactMail, contactPhone, main, federalRegistrationNumber,
        documentType, gender, birthDate, mainDocType, docCpfCnpjPassport,
        addressZipCode, addressCountry, addressStateCode, addressStateName,
        addressCity, addressNeighborhood, addressAddress, addressDetails,
        addressNumber)
        VALUES
        (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
        %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
        %s, %s, %s, %s)
        """


    sql_guest_note = """
    INSERT INTO reservation_guest_notes
      (idReservation, idEntityCompany, guest_index, note_index, note_text)
    VALUES (%s, %s, %s, %s, %s)
    """

    sql_comm = """
    INSERT INTO reservation_commissions
      (idReservation, idEntityCompany, comm_index, idEntity, percentage, value)
    VALUES (%s, %s, %s, %s, %s, %s)
    """

    sql_rev = """
    INSERT INTO reservation_revenue
      (idReservation, idEntityCompany, marketSegmentId, marketSegmentName,
       idReservationChannel, nameChannel, purposeStayId, purpose,
       managerID, managerName)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
    """

    for d in details_list:
        key = (d['idReservation'], d['idEntityCompany'])
        dt_api = parse_iso(d['dateUp'])
        # NOTE(review): dt_api is None when dateUp is missing, making the
        # comparison below raise TypeError -- assumes the API always sends it.
        if key not in existing or dt_api > existing[key]:
            params = {
                'idReservation':           d['idReservation'],
                'idEntityCompany':         d['idEntityCompany'],
                'companyName':             d.get('companyName'),
                'idRequesterCompany':      d.get('idRequesterCompany'),
                'requesterCompanyName':    d.get('requesterCompanyName'),
                'groupName':               d.get('groupName'),
                'contactName':             d.get('contactName'),
                'contact1':                d.get('contact1'),
                'contact2':                d.get('contact2'),
                'dateAdd':                 d.get('dateAdd'),
                'dateUp':                  d.get('dateUp'),
                'creditState':             d.get('creditState')
            }
            try:
                cur.execute(sql_main, params)
            except Exception:
                log_err.exception("Falha no INSERT de reservation_details")
                raise

            # Delete old child rows so the blocks below re-insert from scratch.
            cur.execute("DELETE FROM reservation_notes WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.execute("DELETE FROM reservation_room_requirements WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.execute("DELETE FROM reservation_rooms WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.execute("DELETE FROM reservation_guest_notes WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.execute("DELETE FROM reservation_guests WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.execute("DELETE FROM reservation_commissions WHERE idReservation=%s AND idEntityCompany=%s", key)
            cur.execute("DELETE FROM reservation_revenue WHERE idReservation=%s AND idEntityCompany=%s", key)

            # Insert reservation-level notes.
            for nidx, note in enumerate(d.get('notes', [])):
                cur.execute(sql_note, (
                    d['idReservation'], d['idEntityCompany'], nidx, note.get('note')
                ))

            # Insert rooms and their requirements.
            for ridx, room in enumerate(d.get('rooms', [])):
                cur.execute(sql_room, (
                    d['idReservation'], d['idEntityCompany'], ridx,
                    room.get('idRoom'), room.get('code'),
                    room.get('checkIn'), room.get('checkOut'),
                    room.get('idRoomType'), room.get('roomTypeName'), room.get('amount'),
                    room.get('status'), room.get('pax'),
                    room.get('chd1'), room.get('chd2'), room.get('chd3'),
                    room.get('ratePlanId'), room.get('ratePlanName'), room.get('mealPlanDesc')
                ))
                # Requirements are stored as their raw JSON representation.
                for qidx, req in enumerate(room.get('requirementReservation', [])):
                    cur.execute(sql_room_req, (
                        d['idReservation'], d['idEntityCompany'], ridx, qidx,
                        json.dumps(req, ensure_ascii=False)
                    ))

            # Insert guests and per-guest notes.
            for gidx, guest in enumerate(d.get('guests', [])):
                cur.execute(sql_guest, (
                    d['idReservation'], d['idEntityCompany'], gidx,
                    guest.get('idEntity'), guest.get('name'), guest.get('idRoom'),
                    guest.get('contactMail'), guest.get('contactPhone'),
                    guest.get('main'), guest.get('federalRegistrationNumber'),
                    guest.get('documentType'), guest.get('gender'),
                    guest.get('birthDate'), guest.get('mainDocType'), guest.get('docCpfCnpjPassport'),
                    guest.get('addressZipCode'), guest.get('addressCountry'),
                    guest.get('addressStateCode'), guest.get('addressStateName'),
                    guest.get('addressCity'), guest.get('addressNeighborhood'),
                    guest.get('addressAddress'), guest.get('addressDetails'),
                    guest.get('addressNumber')
                ))
                for gnidx, gnote in enumerate(guest.get('notes', [])):
                    cur.execute(sql_guest_note, (
                        d['idReservation'], d['idEntityCompany'], gidx, gnidx, gnote.get('note')
                    ))

            # Insert commissions.
            for cidx, comm in enumerate(d.get('commissions', [])):
                cur.execute(sql_comm, (
                    d['idReservation'], d['idEntityCompany'], cidx,
                    comm.get('idEntity'), comm.get('percentage'), comm.get('value')
                ))

            # Insert revenue-management data (a single all-NULL row is
            # written even when the payload has no revenueManagement block).
            rm = d.get('revenueManagement', {})
            cur.execute(sql_rev, (
                d['idReservation'], d['idEntityCompany'],
                rm.get('marketSegmentId'), rm.get('marketSegmentName'),
                rm.get('idReservationChannel'), rm.get('nameChannel'),
                rm.get('purposeStayId'), rm.get('purpose'),
                rm.get('managerID'), rm.get('managerName')
            ))

            if key not in existing:
                log_ins.info(f"[INSERT] reservation_details {key}")
            else:
                log_upd.info(f"[UPDATE] reservation_details {key}")

    conn.commit()
    cur.close()
    conn.close()

def main():
    """Run the reservation sync: one-time full-sync setup, then poll forever.

    Each cycle:
      1. Computes a sync window from the last seen dateUp (with a 5-minute
         overlap) up to now (UTC).
      2. Fetches reservation headers for every type/status combination.
      3. Fetches full details for reservations that are new or changed.
      4. Upserts headers and details into MySQL and saves the JSON caches.
    """
    path_res = os.path.join(DATA_DIR, 'reservations.json')
    path_det = os.path.join(DATA_DIR, 'reservation_details.json')

    # First run ever (no flag file): truncate tables and create empty caches.
    initial_run = not os.path.isfile(FLAG_FILE)
    if initial_run:
        log_ops.info("Primeira execução: full-sync inicial")
        truncate_tables()
        open(FLAG_FILE, 'w').close()
        with open(path_res, 'w', encoding='utf-8') as f:
            json.dump([], f)
        with open(path_det, 'w', encoding='utf-8') as f:
            json.dump([], f)
        log_ops.info(f"Caches criados: {path_res}, {path_det}")

    old_res_data = load_cache(path_res)
    old_det_data = load_cache(path_det)

    # Resume from the newest cached dateUp, otherwise from FULL_SYNC_START.
    # Bug fix: the fallback used to stay timezone-aware while parse_iso()
    # returns *naive* UTC datetimes, so last_dateUp mixed aware and naive
    # values depending on the branch; normalize both to naive UTC.
    if old_res_data:
        last_dateUp = max(parse_iso(r['dateUp']) for r in old_res_data.values())
    else:
        last_dateUp = datetime.fromisoformat(
            FULL_SYNC_START.replace('Z', '+00:00')
        ).astimezone(timezone.utc).replace(tzinfo=None)

    while True:
        now_utc = datetime.now(timezone.utc)
        # Overlap the previous window by 5 minutes so updates landing exactly
        # at the boundary are not missed.
        start_dt = last_dateUp - timedelta(minutes=5)
        start_date = start_dt.strftime('%Y-%m-%dT%H:%M:%SZ')
        end_date = now_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
        log_ops.info(f"Sync window: {start_date} → {end_date}")

        token = get_token()
        if not token:
            log_err.error("Falha ao obter token, pulando ciclo")
            time.sleep(POLL_INTERVAL)
            continue

        # Deduplicate across the type/status queries, keeping the newest dateUp.
        all_api_map_data, all_api_map_time = {}, {}
        for type_f in [1, 2]:
            for status in STATUSES:
                recs = fetch_reservations(token, start_date, end_date, type_f, status)
                time.sleep(1)
                for r in recs:
                    key = (r['idReservation'], r['idEntity'])
                    dt = parse_iso(r['dateUp'])
                    if key not in all_api_map_time or dt > all_api_map_time[key]:
                        all_api_map_time[key] = dt
                        all_api_map_data[key] = r

        # Fetch details only for reservations that are new, updated, or
        # missing from the detail cache.
        to_fetch = [
            key for key, dt in all_api_map_time.items()
            if key not in old_res_data
               or dt > parse_iso(old_res_data[key]['dateUp'])
               or key not in old_det_data
        ]
        new_details = []
        for idx, key in enumerate(to_fetch, start=1):
            rid, _ = key
            log_ops.info(f"[{idx}/{len(to_fetch)}] Fetch detail reservation {rid}")
            det = fetch_reservation_by_id(token, rid)
            if det is None:
                # One retry with a freshly renewed token.
                log_ops.info("→ renovando token e tentando detalhe novamente")
                token = get_token() or token
                det = fetch_reservation_by_id(token, rid)
            if det:
                # NOTE(review): this cache key carries idEntity from the
                # header, but reloaded detail items are keyed by
                # idEntityCompany (see load_cache) -- confirm these ids
                # match, otherwise details get re-fetched every run.
                old_det_data[key] = det
                new_details.append(det)
                old_res_data[key] = all_api_map_data[key]
            time.sleep(2.2)  # throttle detail requests

        try:
            upsert_reservations(list(all_api_map_data.values()))
            upsert_reservation_details(new_details)
        except Exception:
            # Caches are intentionally NOT saved on failure: the next run
            # re-fetches this window rather than skipping unsaved rows.
            log_err.exception("Erro no upsert de reservation_details")
            break

        if all_api_map_time:
            last_dateUp = max(all_api_map_time.values())

        save_cache(path_res, old_res_data)
        save_cache(path_det, old_det_data)
        log_ops.info("Ciclo completo, aguardando próximo...")
        time.sleep(POLL_INTERVAL)

if __name__ == '__main__':
    main()
