import sys
import requests
import json
import os
import time
import mysql.connector
import logging
from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo

# Configuration
TZ = ZoneInfo('America/Fortaleza')         # local timezone (currently unused below)
FULL_SYNC_START = '2024-01-01T00:00:00Z'    # start of the first full sync
DAILY_RETRO_DAYS = 30                      # retroactive window (days) for daily syncs
POLL_INTERVAL = 300                        # seconds between status-polling cycles
PAGE_SIZE = 50                             # records per page from the API
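# Note: POLL_INTERVAL doubles as the pause between per-status fetches and as
# the sleep between full sync cycles (see main()).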

# Base directories (each integrator lives in dist/<name>/bin)
def get_base_dirs():
    if getattr(sys, 'frozen', False):
        # when packaged, use the executable's directory (under dist/) as base
        base_dir = os.path.dirname(sys.executable)
    else:
        # running from .py: go from project_root/src up to project_root
        exe_dir  = os.path.dirname(os.path.abspath(__file__))
        base_dir = os.path.dirname(exe_dir)
    data_dir = os.path.join(base_dir, 'data')
    log_dir  = os.path.join(base_dir, 'logs')
    return base_dir, data_dir, log_dir

BASE_DIR, DATA_DIR, LOG_ROOT = get_base_dirs()

# (Optional) path debugging
print(f"→ BASE_DIR = {BASE_DIR}")
print(f"→ DATA_DIR = {DATA_DIR} (exists? {os.path.isdir(DATA_DIR)})")

# Log sub-folders under <BASE_DIR>/logs
LOG_DIRS = {
    'errors'    : os.path.join(LOG_ROOT, 'errors'),
    'updates'   : os.path.join(LOG_ROOT, 'updates'),
    'inserts'   : os.path.join(LOG_ROOT, 'inserts'),
    'operations': os.path.join(LOG_ROOT, 'operations'),
    'api'       : os.path.join(LOG_ROOT, 'api'),
}

FLAG_FILE = os.path.join(DATA_DIR, 'flag_fullsync.ok')

# Create the directories if they do not exist
os.makedirs(DATA_DIR, exist_ok=True)
for p in LOG_DIRS.values():
    os.makedirs(p, exist_ok=True)

# Logger setup
def setup_logger(name, path, level=logging.INFO, console=True):
    logger = logging.getLogger(name)
    if logger.hasHandlers():
        return logger
    logger.setLevel(level)
    logger.propagate = False  # keep records from duplicating through the root logger
    fmt = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S')
    fh = logging.FileHandler(path, encoding='utf-8')
    fh.setFormatter(fmt)
    logger.addHandler(fh)
    if console:
        ch = logging.StreamHandler()
        ch.setFormatter(fmt)
        logger.addHandler(ch)
    return logger

# Create the loggers
log_ops = setup_logger('operations', os.path.join(LOG_DIRS['operations'], 'operations.log'))
log_err = setup_logger('errors', os.path.join(LOG_DIRS['errors'], 'errors.log'), level=logging.ERROR)
log_upd = setup_logger('updates', os.path.join(LOG_DIRS['updates'], 'updates.log'))
log_ins = setup_logger('inserts', os.path.join(LOG_DIRS['inserts'], 'inserts.log'))
log_api = setup_logger('api', os.path.join(LOG_DIRS['api'], 'api.log'))

# Database connection
def get_db_connection():
    return mysql.connector.connect(
        host=os.getenv('DB_HOST', 'localhost'),
        user=os.getenv('DB_USER', 'root'),
        password=os.getenv('DB_PASS', ''),
        database=os.getenv('DB_NAME', 'bihits')
    )

# ISO date parser (accepts the trailing 'Z' UTC suffix)
def parse_iso(dt_str):
    if dt_str.endswith('Z'):
        dt_str = dt_str[:-1] + '+00:00'
    return datetime.fromisoformat(dt_str)
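
# Example: parse_iso('2024-01-01T00:00:00Z') == datetime(2024, 1, 1, tzinfo=timezone.utc)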

# Functions to read existing state from the database
def _as_utc(value):
    # mysql.connector returns DATETIME columns as naive datetimes; attach UTC so
    # comparisons against the aware datetimes from parse_iso() do not raise
    # TypeError (ISO strings are handled defensively as well)
    if isinstance(value, str):
        value = parse_iso(value)
    if value.tzinfo is None:
        value = value.replace(tzinfo=timezone.utc)
    return value

def get_existing_reservations_map():
    conn = get_db_connection()
    cur = conn.cursor()
    cur.execute("SELECT idReservation, idEntity, dateUp FROM reservations")
    mapping = { (rid, eid): _as_utc(du) for rid, eid, du in cur }
    cur.close()
    conn.close()
    return mapping

def get_existing_details_map():
    conn = get_db_connection()
    cur = conn.cursor()
    cur.execute("SELECT idReservation, dateUp FROM reservation_details")
    mapping = { rid: _as_utc(du) for rid, du in cur }
    cur.close()
    conn.close()
    return mapping

# JSON cache
def load_reservations_cache(path):
    if not os.path.isfile(path):
        return {}
    with open(path, 'r', encoding='utf-8') as f:
        data = json.load(f)
    return {(item['idReservation'], item['idEntity']): item for item in data}

def load_details_cache(path):
    if not os.path.isfile(path):
        return {}
    with open(path, 'r', encoding='utf-8') as f:
        data = json.load(f)
    return {item['idReservation']: item for item in data}

def save_cache(path, data_dict):
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(list(data_dict.values()), f, ensure_ascii=False, indent=2)
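
# Sketch (optional hardening, not part of the original design): write the
# cache atomically so a crash mid-write cannot leave a corrupt JSON file.
def save_cache_atomic(path, data_dict):
    tmp = path + '.tmp'
    with open(tmp, 'w', encoding='utf-8') as f:
        json.dump(list(data_dict.values()), f, ensure_ascii=False, indent=2)
    os.replace(tmp, path)  # atomic on POSIX; replaces any existing file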

# Table maintenance functions
def truncate_tables():
    conn = get_db_connection()
    cur  = conn.cursor()
    cur.execute("SET FOREIGN_KEY_CHECKS=0;")
    cur.execute("TRUNCATE TABLE reservation_details;")
    cur.execute("TRUNCATE TABLE reservations;")
    cur.execute("SET FOREIGN_KEY_CHECKS=1;")
    conn.commit()
    cur.close()
    conn.close()
    log_ops.info("Tabelas truncadas.")

def ensure_tables():
    # Tables are already created externally
    pass
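
# Note (assumption, since the schema is managed externally): the upserts below
# rely on ON DUPLICATE KEY UPDATE, which requires a PRIMARY/UNIQUE key over
# reservations(idReservation, idEntity) and reservation_details(idReservation).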

# HTTP Auth and Requests
def get_token():
    url = "https://bric-investment.com/BI-HITS/dados/Hits/token/token.php"
    payload = {"username": os.getenv('API_USER'), "password": os.getenv('API_PASS')}
    try:
        r = requests.post(url, data=payload, timeout=30)
        r.raise_for_status()
        text = r.text.strip()
        prefix = "Token obtido:"
        token = text.replace(prefix, "").strip() if text.startswith(prefix) else r.json().get('token')
        if token:
            log_ops.info("Token obtained successfully")
        else:
            log_err.error("Token response did not contain a token")
        return token
    except Exception as e:
        log_err.error(f"Error obtaining token: {e}")
        return None

def safe_get(url, headers, params=None, timeout=30):
    # up to 6 attempts with exponential backoff (5s, 10s, 20s, ... between tries)
    delay = 5
    for attempt in range(6):
        try:
            r = requests.get(url, headers=headers, params=params, timeout=timeout)
            log_api.info(f"Request {r.url} → Status {r.status_code}")
            if r.status_code == 429:
                log_api.info(f"429 recebido, retry em {delay}s (tentativa {attempt+1}/6)")
                time.sleep(delay)
                delay *= 2
                continue
            r.raise_for_status()
            return r
        except requests.RequestException as ex:
            log_api.error(f"Request error: {ex}")
            time.sleep(delay)
            delay *= 2
    log_err.error(f"Falha após múltiplas tentativas: {url}")
    return None

def _hits_headers(token):
    # shared request headers for the HITS Datashare API (used by both fetches)
    return {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }

def fetch_reservations(token, start_date, end_date, type_filter, status, page_size=PAGE_SIZE):
    reservations = []
    page = 0
    url = "https://api.hitspms.net/Datashare/WebCheckinOut/Reservations"
    headers = _hits_headers(token)
    while True:
        params = {"Type": type_filter, "Status": status,
                  "InitialDate": start_date, "FinalDate": end_date,
                  "Page": page, "Size": page_size}
        log_ops.info(f"➡️ Fetch Reservations: status={status}, page={page}")
        resp = safe_get(url, headers, params)
        if not resp:
            break
        data = resp.json()
        if not data:
            break
        reservations.extend(data)
        if len(data) < page_size:
            break
        page += 1
        time.sleep(0.5)
    return reservations
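
# Example call (hypothetical window; the numeric Type/Status codes are defined
# by the HITS API, and main() below iterates statuses 1-4 with type_filter=1):
#   recs = fetch_reservations(token, '2024-01-01T00:00:00Z',
#                             '2024-02-01T00:00:00Z', type_filter=1, status=1)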

def fetch_reservation_by_id(token, reservation_id):
    url = f"https://api.hitspms.net/Datashare/WebCheckinOut/Reservation/{reservation_id}"
    headers = _hits_headers(token)
    log_ops.info(f"🔍 Fetch detail for reservation {reservation_id}")
    resp = safe_get(url, headers)
    return resp.json() if resp else None

# =============================================
# Upsert into reservations
# =============================================
def upsert_reservations(res_list):
    existing = get_existing_reservations_map()
    sql = """
    INSERT INTO reservations
      (idReservation,idEntity,name,phone,main,mail,
       zipCode,addressDetails,address,neighborhood,number,city,
       country,stateName,stateCode,federalRegistrationNumber,
       documentType,checkIn,checkOut,dateAdd,dateUp,status,integrator,channel,raw_json)
    VALUES
      (%(idReservation)s,%(idEntity)s,%(name)s,%(phone)s,%(main)s,%(mail)s,
       %(zipCode)s,%(addressDetails)s,%(address)s,%(neighborhood)s,%(number)s,%(city)s,
       %(country)s,%(stateName)s,%(stateCode)s,%(federalRegistrationNumber)s,
       %(documentType)s,%(checkIn)s,%(checkOut)s,%(dateAdd)s,%(dateUp)s,
       %(status)s,%(integrator)s,%(channel)s,%(raw_json)s)
    ON DUPLICATE KEY UPDATE
      name=VALUES(name),phone=VALUES(phone),main=VALUES(main),mail=VALUES(mail),
      zipCode=VALUES(zipCode),addressDetails=VALUES(addressDetails),address=VALUES(address),
      neighborhood=VALUES(neighborhood),number=VALUES(number),city=VALUES(city),
      country=VALUES(country),stateName=VALUES(stateName),stateCode=VALUES(stateCode),
      federalRegistrationNumber=VALUES(federalRegistrationNumber),
      documentType=VALUES(documentType),checkIn=VALUES(checkIn),checkOut=VALUES(checkOut),
      dateUp=VALUES(dateUp),status=VALUES(status),integrator=VALUES(integrator),
      channel=VALUES(channel),raw_json=VALUES(raw_json)
    """
    conn = get_db_connection()
    cur  = conn.cursor()
    for r in res_list:
        key    = (r['idReservation'], r['idEntity'])
        dt_api = parse_iso(r['dateUp'])
        if key not in existing or dt_api > existing[key]:
            params = { **r, 'raw_json': json.dumps(r, ensure_ascii=False) }
            cur.execute(sql, params)
            if key not in existing:
                log_ins.info(f"reservations {r['idReservation']} inserted")
            else:
                log_upd.info(f"reservations {r['idReservation']} updated")
    conn.commit()
    cur.close()
    conn.close()

# =============================================
# Upsert into reservation_details
# =============================================
def upsert_reservation_details(details_list):
    existing = get_existing_details_map()
    grouped = {}  # keep only the newest detail payload per idReservation
    for d in details_list:
        rid = d['idReservation']
        dt  = parse_iso(d['dateUp'])
        if rid not in grouped or dt > parse_iso(grouped[rid]['dateUp']):
            grouped[rid] = d

    sql = """
    INSERT INTO reservation_details
      (idReservation,idEntityCompany,companyName,idRequesterCompany,
       requesterCompanyName,groupName,contactName,contact1,contact2,
       dateAdd,dateUp,creditState,notes,rooms,guests,commissions,
       revenueManagement,raw_json)
    VALUES
      (%(idReservation)s,%(idEntityCompany)s,%(companyName)s,%(idRequesterCompany)s,
       %(requesterCompanyName)s,%(groupName)s,%(contactName)s,%(contact1)s,%(contact2)s,
       %(dateAdd)s,%(dateUp)s,%(creditState)s,%(notes)s,%(rooms)s,%(guests)s,
       %(commissions)s,%(revenueManagement)s,%(raw_json)s)
    ON DUPLICATE KEY UPDATE
      idEntityCompany=VALUES(idEntityCompany),companyName=VALUES(companyName),
      idRequesterCompany=VALUES(idRequesterCompany),
      requesterCompanyName=VALUES(requesterCompanyName),groupName=VALUES(groupName),
      contactName=VALUES(contactName),contact1=VALUES(contact1),contact2=VALUES(contact2),
      dateUp=VALUES(dateUp),creditState=VALUES(creditState),notes=VALUES(notes),
      rooms=VALUES(rooms),guests=VALUES(guests),commissions=VALUES(commissions),
      revenueManagement=VALUES(revenueManagement),raw_json=VALUES(raw_json)
    """
    conn = get_db_connection()
    cur  = conn.cursor()
    for d in grouped.values():
        rid    = d['idReservation']
        dt_api = parse_iso(d['dateUp'])
        if rid not in existing or dt_api > existing[rid]:
            payload = {
                **d,
                'notes':             json.dumps(d.get('notes', []), ensure_ascii=False),
                'rooms':             json.dumps(d.get('rooms', []), ensure_ascii=False),
                'guests':            json.dumps(d.get('guests', []), ensure_ascii=False),
                'commissions':       json.dumps(d.get('commissions', []), ensure_ascii=False),
                'revenueManagement': json.dumps(d.get('revenueManagement', {}), ensure_ascii=False),
                'raw_json':          json.dumps(d, ensure_ascii=False)
            }
            cur.execute(sql, payload)
            if rid not in existing:
                log_ins.info(f"reservation_details {rid} inserted")
            else:
                log_upd.info(f"reservation_details {rid} updated")
    conn.commit()
    cur.close()
    conn.close()

# Main flow
def main():
    path_res = os.path.join(DATA_DIR, 'reservations.json')
    path_det = os.path.join(DATA_DIR, 'reservation_details.json')

    # Detect whether this is the first run
    initial_run = not os.path.isfile(FLAG_FILE)
    if initial_run:
        log_ops.info("🚀 First run – truncating tables for the initial full sync")
        truncate_tables()
        # also drop any stale on-disk caches; otherwise details of previously
        # cached reservations would never be re-fetched into the empty tables
        for stale in (path_res, path_det):
            if os.path.isfile(stale):
                os.remove(stale)
        with open(FLAG_FILE, 'w', encoding='utf-8') as f:
            f.write('ok')
        log_ops.info(f"✅ Full-sync flag created at {FLAG_FILE}")

    # Load caches (or start empty)
    old_res = load_reservations_cache(path_res)
    old_det = load_details_cache(path_det)

    while True:
        now_utc = datetime.now(timezone.utc)

        # Define the sync window
        if initial_run:
            start_date = FULL_SYNC_START
            initial_run = False
            log_ops.info(f"🔄 Full-sync inicial desde {start_date}")
        else:
            start_date = (now_utc - timedelta(days=DAILY_RETRO_DAYS)) \
                         .strftime('%Y-%m-%dT%H:%M:%SZ')
        end_date = now_utc.strftime('%Y-%m-%dT%H:%M:%SZ')
        log_ops.info(f"📅 Sync window: {start_date} → {end_date}")

        token = get_token()
        if not token:
            log_err.error("Token não obtido, pulando ciclo.")
            time.sleep(POLL_INTERVAL)
            continue

        # 1) Consolidated fetch across ALL statuses
        statuses = [1, 2, 3, 4]
        all_api_map = {}
        for idx, status in enumerate(statuses):
            recs = fetch_reservations(token, start_date, end_date,
                                      type_filter=1, status=status)
            for r in recs:
                key = (r['idReservation'], r['idEntity'])
                if (key not in all_api_map or
                    parse_iso(r['dateUp']) > parse_iso(all_api_map[key]['dateUp'])):
                    all_api_map[key] = r

            # Pause between statuses (except after the last)
            if idx < len(statuses) - 1:
                log_ops.info(f"⏱ Sleeping {POLL_INTERVAL//60} min before status {statuses[idx+1]}")
                time.sleep(POLL_INTERVAL)

        # 2) Delete from the DB and local cache whatever disappeared from the API.
        # Only compare cached entries inside the current sync window; otherwise a
        # daily 30-day window would wipe every older reservation from the DB
        # (this assumes the API's InitialDate/FinalDate filter applies to dateUp).
        window_start = parse_iso(start_date)
        to_delete = {
            key for key, rec in old_res.items()
            if key not in all_api_map and parse_iso(rec['dateUp']) >= window_start
        }
        if to_delete:
            conn = get_db_connection()
            cur = conn.cursor()
            for rid, eid in to_delete:
                cur.execute("DELETE FROM reservation_details WHERE idReservation=%s", (rid,))
                cur.execute("DELETE FROM reservations WHERE idReservation=%s AND idEntity=%s",
                            (rid, eid))
                old_res.pop((rid, eid), None)
                old_det.pop(rid, None)
            conn.commit()
            cur.close()
            conn.close()

        # 3) Identify new or updated records
        to_fetch = [
            (key, rec)
            for key, rec in all_api_map.items()
            if (key not in old_res or
                parse_iso(rec['dateUp']) > parse_iso(old_res[key]['dateUp']))
        ]

        # 4) Fetch details only for those records
        new_det_map = {}
        total = len(to_fetch)
        for num, (key, rec) in enumerate(to_fetch, start=1):
            rid, _ = key
            log_ops.info(f"📦 ({num}/{total}) Fetching details para reservation {rid}")
            det = fetch_reservation_by_id(token, rid)
            if det:
                new_det_map[rid] = det
                old_det[rid] = det
            # Update the reservations cache
            old_res[key] = rec
            time.sleep(2.2)  # spacing between detail requests to be gentle on the API

        # 5) Upsert + persist the updated caches to disk
        upsert_reservations(list(all_api_map.values()))
        upsert_reservation_details(list(new_det_map.values()))
        save_cache(path_res, old_res)
        save_cache(path_det, old_det)

        log_ops.info("✅ Ciclo completo, aguardando próximo...")
        time.sleep(POLL_INTERVAL)

if __name__ == '__main__':
    main()
