import sys
import requests
import json
import os
import time
import mysql.connector
import logging
from datetime import datetime, timedelta, timezone

# Base directories
def get_base_dirs():
    """Resolve the application's base, data, and log directories.

    Returns:
        tuple: (base, data_dir, log_root). When running as a frozen
        executable the base is the executable's directory; otherwise it
        is the directory containing this module.
    """
    if getattr(sys, 'frozen', False):
        base = os.path.dirname(sys.argv[0])
    else:
        base = os.path.abspath(os.path.dirname(__file__))
    return base, os.path.join(base, 'dados-reservation'), os.path.join(base, 'log')

# Resolved once at import time; every other path in the script derives from these.
BASE_DIR, DATA_DIR, LOG_ROOT = get_base_dirs()
LOG_BASE = os.path.join(LOG_ROOT, 'system-reservation')
# One sub-directory per log category, consumed by the setup_logger() calls below.
LOG_DIRS = {
    'errors': os.path.join(LOG_BASE, 'errors'),
    'updates': os.path.join(LOG_BASE, 'updates'),
    'inserts': os.path.join(LOG_BASE, 'inserts'),
    'operations': os.path.join(LOG_BASE, 'operations'),
    'api': os.path.join(LOG_BASE, 'api'),
}
# Marker file: its presence means the initial full sync (table truncate) was already done.
FLAG_FILE = os.path.join(DATA_DIR, 'flag_fullsync.ok')

# Create directories (idempotent thanks to exist_ok)
os.makedirs(DATA_DIR, exist_ok=True)
for d in LOG_DIRS.values():
    os.makedirs(d, exist_ok=True)


def setup_logger(name, path, level=logging.INFO, console=True):
    """Create (or return an already-configured) named logger writing to *path*.

    Args:
        name: logging channel name (also the idempotency key).
        path: log file path (UTF-8 FileHandler).
        level: minimum level for this logger.
        console: when True, also echo records to stderr.

    Returns:
        The configured ``logging.Logger``.
    """
    logger = logging.getLogger(name)
    # Check this logger's OWN handlers. The previous hasHandlers() call also
    # looks at ancestors, so a configured root logger would wrongly skip
    # setup and return a logger with no file handler at all.
    if logger.handlers:
        return logger
    logger.setLevel(level)
    # Don't bubble records up to the root logger — avoids duplicate lines
    # when the root is configured elsewhere.
    logger.propagate = False
    fmt = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s', '%Y-%m-%d %H:%M:%S')
    fh = logging.FileHandler(path, encoding='utf-8')
    fh.setFormatter(fmt)
    logger.addHandler(fh)
    if console:
        ch = logging.StreamHandler()
        ch.setFormatter(fmt)
        logger.addHandler(ch)
    return logger

# Loggers — one channel per concern; each file lives under the matching LOG_DIRS entry.
g_log = setup_logger('operations', os.path.join(LOG_DIRS['operations'], 'operations.log'))
g_err = setup_logger('errors', os.path.join(LOG_DIRS['errors'], 'errors.log'), level=logging.ERROR)
g_upd = setup_logger('updates', os.path.join(LOG_DIRS['updates'], 'updates.log'))
g_ins = setup_logger('inserts', os.path.join(LOG_DIRS['inserts'], 'inserts.log'))
g_api = setup_logger('api', os.path.join(LOG_DIRS['api'], 'api.log'))

# DB connection factory (the returned connection is used with `with` by callers)
def get_db_connection():
    """Open a fresh MySQL connection configured from DB_* environment variables."""
    cfg = {
        'host': os.getenv('DB_HOST', 'localhost'),
        'user': os.getenv('DB_USER', 'root'),
        'password': os.getenv('DB_PASS', ''),
        'database': os.getenv('DB_NAME', 'bihits'),
    }
    return mysql.connector.connect(**cfg)

# Parsers
def parse_iso(dt_str):
    """Parse an ISO-8601 timestamp, mapping a trailing 'Z' to the +00:00 UTC offset."""
    normalized = dt_str[:-1] + '+00:00' if dt_str.endswith('Z') else dt_str
    return datetime.fromisoformat(normalized)

# JSON cache, re-keyed on load (JSON cannot store tuple keys)
def load_reservations_cache(path):
    """Load the reservations cache into a dict keyed by (idReservation, idEntity).

    Returns an empty dict when the cache file does not exist.
    """
    if not os.path.isfile(path):
        return {}
    with open(path, 'r', encoding='utf-8') as fh:
        records = json.load(fh)
    cache = {}
    for rec in records:
        cache[(rec['idReservation'], rec['idEntity'])] = rec
    return cache

def load_details_cache(path):
    """Load the reservation-details cache into a dict keyed by idReservation.

    Returns an empty dict when the cache file does not exist.
    """
    if not os.path.isfile(path):
        return {}
    with open(path, 'r', encoding='utf-8') as fh:
        records = json.load(fh)
    return {rec['idReservation']: rec for rec in records}

def save_cache(path, data_dict):
    """Persist the cache dict's values as a JSON array (keys are rebuilt on load)."""
    payload = list(data_dict.values())
    with open(path, 'w', encoding='utf-8') as fh:
        json.dump(payload, fh, ensure_ascii=False, indent=2)

# Upsert SQL — executed with dict parameters built in the upsert_* functions.
# With ON DUPLICATE KEY UPDATE, MySQL reports rowcount 1 for a fresh insert
# and 2 for an update (0 when nothing changed); the callers rely on this to
# route log lines to the inserts/updates loggers.
# NOTE(review): the VALUES() function inside ON DUPLICATE KEY UPDATE is
# deprecated as of MySQL 8.0.20 (replacement: row alias syntax) — confirm the
# target server version before migrating.
SQL_UPSERT_RES = '''
INSERT INTO reservations
  (idReservation, idEntity, dateUp, raw_json)
VALUES
  (%(idReservation)s, %(idEntity)s, %(dateUp)s, %(raw_json)s)
ON DUPLICATE KEY UPDATE
  dateUp   = VALUES(dateUp),
  raw_json = VALUES(raw_json)
'''

# Detail rows are keyed by idReservation only (no idEntity column).
SQL_UPSERT_DET = '''
INSERT INTO reservation_details
  (idReservation, dateUp, raw_json)
VALUES
  (%(idReservation)s, %(dateUp)s, %(raw_json)s)
ON DUPLICATE KEY UPDATE
  dateUp   = VALUES(dateUp),
  raw_json = VALUES(raw_json)
'''

# HTTP helpers
def get_token():
    """Fetch an API bearer token from the token endpoint.

    The endpoint may answer either with a plain-text "Token obtido:" prefix
    or with a JSON body containing a "token" field. Returns None on any
    failure (logged to the errors channel).
    """
    url = "https://bric-investment.com/BI-HITS/dados/Hits/token/token.php"
    payload = {"username": os.getenv('API_USER'), "password": os.getenv('API_PASS')}
    try:
        resp = requests.post(url, data=payload, timeout=30)
        resp.raise_for_status()
        body = resp.text.strip()
        prefix = "Token obtido:"
        if body.startswith(prefix):
            return body.replace(prefix, "").strip()
        return resp.json().get("token")
    except Exception as e:
        g_err.error(f"Erro ao obter token: {e}")
        return None


def safe_get(url, headers, params=None, timeout=30):
    """GET with up to 6 attempts and exponential backoff.

    Backs off (5s, doubling) on HTTP 429 and on any request exception.
    Returns the successful Response, or None when every attempt failed.
    """
    delay = 5
    attempt = 0
    while attempt < 6:
        try:
            r = requests.get(url, headers=headers, params=params, timeout=timeout)
            g_api.info(f"Request {r.url} → {r.status_code}")
            if r.status_code != 429:
                # Any 4xx/5xx other than 429 raises and is retried below.
                r.raise_for_status()
                return r
            g_api.info(f"429: retry em {delay}s (#{attempt+1})")
            time.sleep(delay)
        except requests.RequestException as e:
            g_api.error(f"Request error: {e}")
            time.sleep(delay)
        delay *= 2
        attempt += 1
    g_err.error(f"Falha após tentativas em {url}")
    return None


def fetch_reservations(token, start_date, end_date, type_filter, status, page_size=50):
    """Page through the Reservations endpoint for one status.

    Stops on a failed request, an empty page, or a short (final) page.
    Returns the concatenated list of raw reservation dicts.
    """
    url = "https://api.hitspms.net/Datashare/WebCheckinOut/Reservations"
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }
    collected = []
    page = 0
    while True:
        g_log.info(f"➡️ Status={status}, Página={page}")
        response = safe_get(url, headers, {
            "Type": type_filter,
            "Status": status,
            "InitialDate": start_date,
            "FinalDate": end_date,
            "Page": page,
            "Size": page_size,
        })
        if not response:
            break
        batch = response.json()
        if not batch:
            break
        collected.extend(batch)
        if len(batch) < page_size:
            # Short page → last page.
            break
        page += 1
        time.sleep(0.5)
    return collected


def fetch_reservation_by_id(token, reservation_id):
    """Fetch the full detail record for a single reservation (None on failure)."""
    url = f"https://api.hitspms.net/Datashare/WebCheckinOut/Reservation/{reservation_id}"
    headers = {
        "Accept": "application/json",
        "X-API-VERSION": "1",
        "X-API-TENANT-NAME": "thecoralbeachresort",
        "X-API-PROPERTY-CODE": "1",
        "X-API-PARTNER-USERID": "0",
        "X-API-LANGUAGE-CODE": "pt-br",
        "X-Client-Id": "THECORALBR",
        "Authorization": f"Bearer {token}"
    }
    g_log.info(f"🔍 Detalhes reserva {reservation_id}")
    response = safe_get(url, headers)
    return response.json() if response else None

# Upsert functions
def upsert_reservations(res_map):
    """Upsert reservation headers into the `reservations` table.

    Args:
        res_map: dict keyed by (idReservation, idEntity) → raw API record.

    Builds a separate parameter dict per record instead of mutating the
    record itself: these same dict objects live in the JSON caches, so
    stuffing a datetime into them would make the later json.dump() in
    save_cache() raise TypeError, and the injected raw_json would be
    re-embedded on every subsequent cycle.
    """
    with get_db_connection() as conn:
        cur = conn.cursor()
        for key, rec in res_map.items():
            params = {
                'idReservation': rec['idReservation'],
                'idEntity': rec['idEntity'],
                'dateUp': parse_iso(rec['dateUp']),
                'raw_json': json.dumps(rec, ensure_ascii=False),
            }
            cur.execute(SQL_UPSERT_RES, params)
            # MySQL rowcount: 1 = fresh insert, 2 = updated via duplicate key.
            if cur.rowcount == 1:
                g_ins.info(f"Inserted reservation {key}")
            else:
                g_upd.info(f"Updated reservation {key}")
        conn.commit()
        cur.close()

def upsert_reservation_details(det_map):
    """Upsert detail records into the `reservation_details` table.

    Args:
        det_map: dict keyed by idReservation → raw API detail record.

    Like upsert_reservations(), this no longer mutates the cached record:
    converting 'dateUp' to a datetime in place previously broke the
    json.dump() in save_cache(), and 'raw_json' kept nesting into itself.
    """
    with get_db_connection() as conn:
        cur = conn.cursor()
        for rid, det in det_map.items():
            params = {
                'idReservation': det['idReservation'],
                'dateUp': parse_iso(det['dateUp']),
                'raw_json': json.dumps(det, ensure_ascii=False),
            }
            cur.execute(SQL_UPSERT_DET, params)
            # MySQL rowcount: 1 = fresh insert, 2 = updated via duplicate key.
            if cur.rowcount == 1:
                g_ins.info(f"Inserted detail {rid}")
            else:
                g_upd.info(f"Updated detail {rid}")
        conn.commit()
        cur.close()

# Truncate (optional, first run only)
def truncate_tables():
    """Empty both reservation tables, temporarily disabling FK checks."""
    statements = (
        "SET FOREIGN_KEY_CHECKS=0;",
        "TRUNCATE TABLE reservation_details;",
        "TRUNCATE TABLE reservations;",
        "SET FOREIGN_KEY_CHECKS=1;",
    )
    with get_db_connection() as conn:
        cur = conn.cursor()
        for stmt in statements:
            cur.execute(stmt)
        conn.commit()
        cur.close()

# Continuous loop
def main():
    """Continuously sync reservations from the HITS API into MySQL.

    Each cycle: fetch a 60-day window for every status, delete DB rows that
    disappeared from the API snapshot, fetch details for new/changed
    reservations, upsert everything, and persist the JSON caches.
    """
    path_res = os.path.join(DATA_DIR, 'reservations.json')
    path_det = os.path.join(DATA_DIR, 'reservation_details.json')

    first_run = not os.path.isfile(FLAG_FILE)
    statuses = [1, 2, 3, 4]

    old_res = load_reservations_cache(path_res)
    old_det = load_details_cache(path_det)

    if first_run:
        # First run ever: start from clean tables, then drop a flag file so
        # subsequent runs skip the truncate.
        truncate_tables()
        with open(FLAG_FILE, 'w') as f:
            f.write('done')

    while True:
        now_utc = datetime.now(timezone.utc)
        start_date = (now_utc - timedelta(days=60)).strftime('%Y-%m-%dT00:00:00Z')
        end_date = now_utc.strftime('%Y-%m-%dT23:59:59Z')

        g_log.info(f"🔄 Sync de {start_date} até {end_date}")
        token = get_token()
        if not token:
            time.sleep(300)
            continue

        # 1) Collect the FULL snapshot across every status BEFORE deciding
        #    what to delete. The previous per-status diff deleted every
        #    cached record that merely belonged to a different status.
        new_res_map = {}
        for idx, status in enumerate(statuses):
            recs = fetch_reservations(token, start_date, end_date, type_filter=1, status=status)
            for r in recs:
                key = (r['idReservation'], r['idEntity'])
                # Keep only the most recently updated duplicate.
                if key not in new_res_map or parse_iso(r['dateUp']) > parse_iso(new_res_map[key]['dateUp']):
                    new_res_map[key] = r
            # Pause before the next status (rate limiting).
            if idx < len(statuses) - 1:
                g_log.info(f"⏱ Pausa 5min antes do próximo status {statuses[idx+1]}")
                time.sleep(300)

        # 2) Delete records that vanished from the API snapshot.
        to_delete = set(old_res) - set(new_res_map)
        if to_delete:
            with get_db_connection() as conn:
                cur = conn.cursor()
                for (rid, eid) in to_delete:
                    cur.execute("DELETE FROM reservation_details WHERE idReservation=%s", (rid,))
                    cur.execute("DELETE FROM reservations WHERE idReservation=%s AND idEntity=%s", (rid, eid))
                    old_res.pop((rid, eid), None)
                    old_det.pop(rid, None)
                conn.commit()
                cur.close()

        # 3) Fetch details only for new or modified reservations.
        new_det_map = {}
        for key, rec in new_res_map.items():
            old = old_res.get(key)
            if not old or parse_iso(rec['dateUp']) > parse_iso(old['dateUp']):
                det = fetch_reservation_by_id(token, rec['idReservation'])
                if det:
                    new_det_map[rec['idReservation']] = det
                    old_det[rec['idReservation']] = det
            old_res[key] = rec

        # 4) Upsert into the DB and persist the caches.
        upsert_reservations(new_res_map)
        upsert_reservation_details(new_det_map)
        save_cache(path_res, old_res)
        save_cache(path_det, old_det)

        g_log.info("✅ Ciclo completo, recomeçando...")

if __name__ == '__main__':
    main()
