import requests
import json
import os
import time
import mysql.connector
from datetime import datetime, timedelta, timezone

def parse_iso(dt_str):
    """Parse an ISO-8601 timestamp string, tolerating a trailing 'Z' suffix.

    Returns a naive datetime so values compare cleanly with the naive
    DATETIME values read back from MySQL.

    BUGFIX: the original used ``rstrip('Z')``, which strips *every* trailing
    'Z' character rather than removing the single UTC suffix; drop exactly one.
    """
    if dt_str.endswith('Z'):
        dt_str = dt_str[:-1]
    return datetime.fromisoformat(dt_str)

# Filesystem layout — everything lives next to this script.
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
# JSON cache of the last API snapshot (reservations + details).
DATA_DIR = os.path.join(BASE_DIR, 'dados-reservation')
# Log tree, one subdirectory per log category.
LOG_BASE = os.path.join(BASE_DIR, 'logs', 'system-reservation')
LOG_ERROR_DIR = os.path.join(LOG_BASE, 'errors')
LOG_UPDATE_DIR = os.path.join(LOG_BASE, 'updates')
LOG_INSERT_DIR = os.path.join(LOG_BASE, 'inserts')
LOG_OPER_DIR = os.path.join(LOG_BASE, 'operations')
# Marker file: once present, later runs do incremental (60-day) syncs only.
FLAG_FULLSYNC = os.path.join(DATA_DIR, 'full_sync.done')

# Create every directory up front so log/cache writes never fail on a
# missing path.
for d in [LOG_BASE, LOG_ERROR_DIR, LOG_UPDATE_DIR, LOG_INSERT_DIR, LOG_OPER_DIR, DATA_DIR]:
    os.makedirs(d, exist_ok=True)

def _write_log(dir_path, filename, msg):
    path = os.path.join(dir_path, filename)
    with open(path, 'a', encoding='utf-8') as f:
        f.write(msg + '\n')

def log_system(msg):
    """Echo *msg* to stdout and append it to the operations log."""
    stamp = datetime.now(timezone.utc).astimezone().isoformat()
    entry = f"{stamp} [SYSTEM] {msg}"
    print(entry)
    _write_log(LOG_OPER_DIR, 'operations.log', entry)

def log_error(msg):
    """Echo *msg* to stdout and append it to the error log."""
    stamp = datetime.now(timezone.utc).astimezone().isoformat()
    entry = f"{stamp} [ERROR] {msg}"
    print(entry)
    _write_log(LOG_ERROR_DIR, 'errors.log', entry)

def log_update(msg):
    """Echo *msg* to stdout and append it to the updates log."""
    stamp = datetime.now(timezone.utc).astimezone().isoformat()
    entry = f"{stamp} [UPDATE] {msg}"
    print(entry)
    _write_log(LOG_UPDATE_DIR, 'updates.log', entry)

def log_insert(msg):
    """Echo *msg* to stdout and append it to the inserts log."""
    stamp = datetime.now(timezone.utc).astimezone().isoformat()
    entry = f"{stamp} [INSERT] {msg}"
    print(entry)
    _write_log(LOG_INSERT_DIR, 'inserts.log', entry)

def get_db_connection():
    """Open a MySQL connection to the ``bihits`` database.

    Credentials default to the historical hard-coded values, but each one can
    now be overridden via environment variables (DB_HOST / DB_USER /
    DB_PASSWORD / DB_NAME) so secrets do not have to live in source control.
    Backward compatible: with no env vars set, behavior is unchanged.
    """
    return mysql.connector.connect(
        host=os.environ.get("DB_HOST", "localhost"),
        user=os.environ.get("DB_USER", "root"),
        password=os.environ.get("DB_PASSWORD", ""),
        database=os.environ.get("DB_NAME", "bihits"),
    )

def get_existing_reservations_map():
    """Return ``{id: dateUp}`` for every row in the ``reservations`` table."""
    conn = get_db_connection()
    cur = conn.cursor()
    cur.execute("SELECT id, dateUp FROM reservations")
    mapping = dict(cur)  # cursor yields (id, dateUp) pairs
    cur.close()
    conn.close()
    return mapping

def get_existing_details_map():
    """Return ``{idReservation: dateUp}`` for every ``reservation_details`` row."""
    conn = get_db_connection()
    cur = conn.cursor()
    cur.execute("SELECT idReservation, dateUp FROM reservation_details")
    mapping = dict(cur)  # cursor yields (idReservation, dateUp) pairs
    cur.close()
    conn.close()
    return mapping

def load_json_file(path):
    """Load a cached JSON list from *path*, keyed by each item's ``'id'``.

    Returns an empty dict when the file does not exist yet (first run).
    """
    if not os.path.isfile(path):
        return {}
    with open(path, 'r', encoding='utf-8') as fh:
        items = json.load(fh)
    result = {}
    for item in items:
        result[item['id']] = item
    return result

def save_json_file(path, data_dict):
    """Persist the values of *data_dict* to *path* as a pretty-printed JSON list."""
    payload = list(data_dict.values())
    with open(path, 'w', encoding='utf-8') as fh:
        json.dump(payload, fh, ensure_ascii=False, indent=2)

def get_token():
    """Request an API token from the token endpoint; return None on failure.

    The endpoint may answer either with a plain-text "Token obtido: ..." line
    or with a JSON body carrying a "token" field — both shapes are handled.
    """
    url = "https://bric-investment.com/BI-HITS/dados/Hits/token/token.php"
    payload = {"username": "seu_usuario", "password": "sua_senha"}
    try:
        resp = requests.post(url, data=payload, timeout=30)
        resp.raise_for_status()
        raw = resp.text.strip()
        log_system(f"Resposta bruta do token: {raw}")
        prefix = "Token obtido:"
        if raw.startswith(prefix):
            return raw.replace(prefix, "").strip()
        return resp.json().get("token")
    except Exception as e:
        log_error(f"Erro ao obter token: {e}")
        return None

def safe_get(url, headers, params=None, timeout=30):
    """GET *url* with exponential backoff on HTTP 429 rate limiting.

    Tries up to 6 times (5s, 10s, 20s, ... waits between 429s). Non-429
    error statuses raise via ``raise_for_status``. Returns the response on
    success, or None once all attempts were rate limited.
    """
    backoff = 5
    attempt = 0
    while attempt < 6:
        resp = requests.get(url, headers=headers, params=params, timeout=timeout)
        log_system(f"Request URL: {resp.url} → Status {resp.status_code}")
        if resp.status_code != 429:
            resp.raise_for_status()
            return resp
        log_system(f"429 recebido, aguardando {backoff}s (tentativa {attempt+1}/6)")
        time.sleep(backoff)
        backoff *= 2
        attempt += 1
    log_error(f"Falha após múltiplas tentativas: {url}")
    return None

def fetch_reservations(token, start_date, end_date, type_filter, status, page_size=50):
    """Page through the Reservations listing endpoint and collect all rows.

    Stops when a request fails, a page comes back empty, or a short page
    (fewer than *page_size* rows) signals the last one. Returns the
    accumulated list of reservation records.
    """
    url = "https://api.hitspms.net/Datashare/WebCheckinOut/Reservations"
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }
    collected = []
    page = 0
    while True:
        log_system(f"➡️ Buscando reservas – Type={type_filter}, Status={status}, Página {page}")
        response = safe_get(url, headers=headers, params={
            "Type": type_filter,
            "Status": status,
            "InitialDate": start_date,
            "FinalDate": end_date,
            "Page": page,
            "Size": page_size
        })
        if not response:
            break

        data = response.json()
        log_system(f"✅ Página {page}: {len(data)} registros encontrados")

        if data:
            # Purely informational: log which year-months appear on this page.
            try:
                months = sorted({parse_iso(item['dateUp']).strftime('%Y-%m') for item in data if 'dateUp' in item})
                log_system(f"📆 Períodos encontrados na página {page}: {', '.join(months)}")
            except Exception as e:
                log_error(f"Erro ao extrair ano/mês da página {page}: {e}")
        else:
            break

        collected.extend(data)
        if len(data) < page_size:
            break

        page += 1
        time.sleep(0.5)  # be gentle with the API between pages

    return collected

def fetch_reservation_by_id(token, reservation_id):
    """Fetch the full detail payload for one reservation; None on failure."""
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }
    log_system(f"Buscando detalhes da reserva ID {reservation_id}")
    response = safe_get(
        f"https://api.hitspms.net/Datashare/WebCheckinOut/Reservation/{reservation_id}",
        headers=headers,
    )
    if response:
        return response.json()
    return None

def truncate_tables():
    """Empty both reservation tables (FK checks disabled during truncate)."""
    statements = [
        "SET FOREIGN_KEY_CHECKS=0;",
        "TRUNCATE TABLE reservation_details;",
        "TRUNCATE TABLE reservations;",
        "SET FOREIGN_KEY_CHECKS=1;",
    ]
    conn = get_db_connection()
    cur = conn.cursor()
    for stmt in statements:
        cur.execute(stmt)
    conn.commit()
    cur.close()
    conn.close()

def upsert_reservations(res_dict):
    """Insert-or-update reservation header rows from *res_dict* (key -> record).

    A row is written only when its key is absent from the current DB snapshot
    or the API 'dateUp' is strictly newer than the stored one; MySQL's
    ON DUPLICATE KEY UPDATE then resolves any key collision in-place.

    NOTE(review): get_existing_reservations_map() keys by the DB 'id' column.
    Confirm callers key res_dict the same way — with mismatched keys the
    freshness check never matches and every record is re-executed.
    """
    existing = get_existing_reservations_map()
    conn = get_db_connection()
    cur = conn.cursor()
    # Named (pyformat) placeholders pull values straight from the record dict.
    sql = """
    INSERT INTO reservations
      (id,name,phone,main,mail,zipCode,addressDetails,address,neighborhood,number,city,
       country,stateName,stateCode,federalRegistrationNumber,documentType,
       checkIn,checkOut,dateAdd,dateUp,status,integrator,channel,raw_json)
    VALUES
      (%(id)s,%(name)s,%(phone)s,%(main)s,%(mail)s,%(zipCode)s,%(addressDetails)s,%(address)s,
       %(neighborhood)s,%(number)s,%(city)s,%(country)s,%(stateName)s,%(stateCode)s,
       %(federalRegistrationNumber)s,%(documentType)s,%(checkIn)s,%(checkOut)s,
       %(dateAdd)s,%(dateUp)s,%(status)s,%(integrator)s,%(channel)s,%(raw_json)s)
    ON DUPLICATE KEY UPDATE
      name=VALUES(name), phone=VALUES(phone), main=VALUES(main), mail=VALUES(mail),
      zipCode=VALUES(zipCode), addressDetails=VALUES(addressDetails), address=VALUES(address),
      neighborhood=VALUES(neighborhood), number=VALUES(number), city=VALUES(city),
      country=VALUES(country), stateName=VALUES(stateName), stateCode=VALUES(stateCode),
      federalRegistrationNumber=VALUES(federalRegistrationNumber), documentType=VALUES(documentType),
      checkIn=VALUES(checkIn), checkOut=VALUES(checkOut), dateUp=VALUES(dateUp),
      status=VALUES(status), integrator=VALUES(integrator), channel=VALUES(channel),
      raw_json=VALUES(raw_json)
    """
    for rid, r in res_dict.items():
        dt_api = parse_iso(r['dateUp'])
        # Skip records whose stored dateUp is already current.
        if rid not in existing or dt_api > existing[rid]:
            # raw_json preserves the complete API payload alongside the columns.
            params = {**r, 'raw_json': json.dumps(r, ensure_ascii=False)}
            cur.execute(sql, params)
            if rid not in existing:
                log_insert(f"reservation {rid} inserted")
            else:
                log_update(f"reservation {rid} updated")
    # Single commit after the whole batch.
    conn.commit()
    cur.close()
    conn.close()

def upsert_reservation_details(details_dict):
    """Insert-or-update reservation detail rows from *details_dict*
    (idReservation -> detail record).

    Freshness rule mirrors upsert_reservations(): write only when the key is
    new to the DB snapshot or the API 'dateUp' is strictly newer. Nested
    collections (notes, rooms, guests, commissions, revenueManagement) are
    stored as JSON strings in their columns; raw_json keeps the full payload.
    """
    existing = get_existing_details_map()
    conn = get_db_connection()
    cur = conn.cursor()
    # Named (pyformat) placeholders pull values straight from the payload dict.
    sql = """
    INSERT INTO reservation_details
      (idReservation,idEntityCompany,companyName,idRequesterCompany,
       requesterCompanyName,groupName,contactName,contact1,contact2,
       dateAdd,dateUp,creditState,notes,rooms,guests,commissions,
       revenueManagement,raw_json)
    VALUES
      (%(idReservation)s,%(idEntityCompany)s,%(companyName)s,%(idRequesterCompany)s,
       %(requesterCompanyName)s,%(groupName)s,%(contactName)s,%(contact1)s,%(contact2)s,
       %(dateAdd)s,%(dateUp)s,%(creditState)s,%(notes)s,%(rooms)s,%(guests)s,
       %(commissions)s,%(revenueManagement)s,%(raw_json)s)
    ON DUPLICATE KEY UPDATE
      idEntityCompany=VALUES(idEntityCompany),
      companyName=VALUES(companyName),
      idRequesterCompany=VALUES(idRequesterCompany),
      requesterCompanyName=VALUES(requesterCompanyName),
      groupName=VALUES(groupName),
      contactName=VALUES(contactName),
      contact1=VALUES(contact1),
      contact2=VALUES(contact2),
      dateUp=VALUES(dateUp),
      creditState=VALUES(creditState),
      notes=VALUES(notes),
      rooms=VALUES(rooms),
      guests=VALUES(guests),
      commissions=VALUES(commissions),
      revenueManagement=VALUES(revenueManagement),
      raw_json=VALUES(raw_json)
    """
    for rid, d in details_dict.items():
        dt_api = parse_iso(d['dateUp'])
        # Skip records whose stored dateUp is already current.
        if rid not in existing or dt_api > existing[rid]:
            # Serialize nested structures to JSON text for their columns;
            # missing keys default to empty list/dict.
            payload = {
                **d,
                'notes': json.dumps(d.get('notes', []), ensure_ascii=False),
                'rooms': json.dumps(d.get('rooms', []), ensure_ascii=False),
                'guests': json.dumps(d.get('guests', []), ensure_ascii=False),
                'commissions': json.dumps(d.get('commissions', []), ensure_ascii=False),
                'revenueManagement': json.dumps(d.get('revenueManagement', {}), ensure_ascii=False),
                'raw_json': json.dumps(d, ensure_ascii=False)
            }
            cur.execute(sql, payload)
            if rid not in existing:
                log_insert(f"reservation_details {rid} inserted")
            else:
                log_update(f"reservation_details {rid} updated")
    # Single commit after the whole batch.
    conn.commit()
    cur.close()
    conn.close()

def main():
    """Synchronize reservations from the HITS API into MySQL.

    First run (no full-sync flag file): truncates both tables and pulls the
    fixed 2024-2025 range. Later runs: pull a rolling 60-day window. JSON
    files under DATA_DIR cache the last snapshot so only records whose
    'dateUp' advanced get their details re-fetched.

    Fixes applied in this revision:
    - a local FLAG_FULLSYNC ('flag_fullsync.ok') shadowed the module-level
      constant ('full_sync.done'); the module constant is now used throughout.
    - the inner detail-fetch loop reused 'idx', clobbering the status-loop
      counter and breaking the "pause between statuses" check.
    - the upsert helpers iterate .items() but were handed plain lists, which
      raised AttributeError; they now receive the dicts directly.
    - the stale-record deletion ran inside the per-status loop against that
      single status's results, so it deleted rows merely belonging to another
      status; it now runs once, after all statuses, against the union.
    - the cached map is re-keyed by (idReservation, idEntity) to match the
      keys built for API records, so comparisons and deletions line up.
    """
    path_res = os.path.join(DATA_DIR, 'reservations.json')
    path_det = os.path.join(DATA_DIR, 'reservation_details.json')

    primeira_exec = not os.path.isfile(FLAG_FULLSYNC)

    if primeira_exec:
        truncate_tables()
        start_date = '2024-01-01T00:00:00Z'
        end_date = '2025-12-31T23:59:59Z'
    else:
        now = datetime.now(timezone.utc)
        start_date = (now - timedelta(days=60)).strftime('%Y-%m-%dT00:00:00Z')
        end_date = now.strftime('%Y-%m-%dT23:59:59Z')

    log_system(f"Range de busca: {start_date} até {end_date}")

    token = get_token()
    if not token:
        log_error("Token não obtido, abortando.")
        return

    # Normalize the cached map to the same composite key used for API
    # records so membership tests below compare like with like.
    # NOTE(review): load_json_file keys items by 'id' — confirm the saved
    # records actually carry 'id'; API list records use idReservation/idEntity.
    old_res_map = {
        (v['idReservation'], v['idEntity']): v
        for v in load_json_file(path_res).values()
    }
    old_det_map = load_json_file(path_det)
    all_res = {}
    all_det = {}

    status_list = [1, 2, 3, 4]
    for idx, status in enumerate(status_list):
        log_system(f"Iniciando processamento para o Status: {status}")
        api_res = fetch_reservations(token, start_date, end_date, type_filter=1, status=status)

        # Deduplicate by (idReservation, idEntity), keeping the newest dateUp.
        api_res_map = {}
        for rec in api_res:
            key = (rec['idReservation'], rec['idEntity'])
            if key not in api_res_map or parse_iso(rec['dateUp']) > parse_iso(api_res_map[key]['dateUp']):
                api_res_map[key] = rec

        # Decide which reservations need a fresh detail fetch.
        to_fetch = []
        for key, rec in api_res_map.items():
            dt_api = parse_iso(rec['dateUp'])
            if key not in old_res_map or dt_api > parse_iso(old_res_map[key]['dateUp']):
                to_fetch.append(rec['idReservation'])
            old_res_map[key] = rec

        for i, rid in enumerate(to_fetch, 1):
            log_system(f"📦 ({i}/{len(to_fetch)}) Buscando detalhes da reserva ID {rid}")
            d = fetch_reservation_by_id(token, rid)
            if d:
                try:
                    dt = parse_iso(d['dateUp']).strftime('%Y-%m')
                    log_system(f"📅 Reserva ID {rid} tem dateUp em {dt}")
                except Exception as e:
                    log_error(f"Erro ao extrair dateUp da reserva ID {rid}: {e}")
                all_det[rid] = d
                old_det_map[rid] = d
            time.sleep(2.2)  # throttle detail calls

        all_res.update(api_res_map)
        upsert_reservations(api_res_map)
        upsert_reservation_details(all_det)

        if idx < len(status_list) - 1:
            log_system("Pausa de 5 minutos antes do próximo Status.")
            time.sleep(300)

    # Remove records that disappeared from the API entirely (no status
    # returned them). NOTE(review): on incremental runs this also drops
    # cached records outside the 60-day window — confirm that is intended.
    to_delete = set(old_res_map.keys()) - set(all_res.keys())
    if to_delete:
        conn = get_db_connection()
        cur = conn.cursor()
        for rid, eid in to_delete:
            cur.execute("DELETE FROM reservation_details WHERE idReservation=%s", (rid,))
            cur.execute("DELETE FROM reservations WHERE idReservation=%s AND idEntity=%s", (rid, eid))
            old_res_map.pop((rid, eid), None)
            old_det_map.pop(rid, None)
        conn.commit()
        cur.close()
        conn.close()

    save_json_file(path_res, old_res_map)
    save_json_file(path_det, old_det_map)

    # Mark the full sync as completed so later runs go incremental.
    if primeira_exec:
        with open(FLAG_FULLSYNC, 'w') as f:
            f.write('done')

if __name__ == '__main__':
    main()
