import os
import json
import time
import subprocess
import shutil
from datetime import datetime
from zoneinfo import ZoneInfo
import concurrent.futures
from collections import defaultdict

import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', help='Master file suffix to process (reads array/master_<file>.json)')
parser.add_argument('-o', '--output', help='Number of parallel fetch workers (default 10)')
parser.add_argument('-s', '--specific', help='Specific value filter (currently unused)')
parser.add_argument('-e', '--sortby', help='Sort key: l=inventario, m=promedioLinea, e=capacidadEmpaque')
parser.add_argument('-a', '--allv', help='If > 0, dump every entry instead of the filtered reports')
parser.add_argument('-r', '--remv', help='If > 0, remove the results directory before starting')
parser.add_argument('-m', '--merge', help='If > 0, merge existing results and exit')
args = parser.parse_args()
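# Example invocation (hypothetical file name; array/master_frutas.json must exist):
#   python script.py -f frutas -o 10 -r 1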
arg_val = args.file
sort_by = args.sortby or "l"  # l=inventario, m=promedioLinea, e=capacidadEmpaque
out_val = args.output or 10
allv_val = args.allv or 0
specific_val = args.specific or ""
remv_val = args.remv or 0
merge_val = args.merge or 0
STOP_WHILE = False
ADD_DATA1 = 1
error_retries_times = 0
do_cookie_retry = 0
error_retries = defaultdict(int)
# Define color codes
RESET = "\033[0m"

BOLD = "\033[1m"
GREEN = "\033[92m"
RED = "\033[91m"
CYAN = "\033[96m"
MASTER_FILE = "everything_adb"
if arg_val:
  MASTER_FILE = arg_val
# Set the timezone to "America/Monterrey"
os.environ['TZ'] = 'America/Monterrey'
os.system("")  # enables ansi escape characters in terminal
tz = ZoneInfo("America/Monterrey")

master_data_list = []
with open(f"array/master_{MASTER_FILE}.json", "r") as f:
  master_data_list = json.load(f)
len_master_data_list = len(master_data_list)
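# Each master row appears to be [seccion_numero, codigoBarras, categoria_real,
# proveedor_num, pasillo, ...], judging by the indexing in get_results and the
# `fields` list in make_master.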
def count_files_in_folder(folder_path):
    try:
        # List all items in the folder
        all_items = os.listdir(folder_path)
        
        # Count only the files
        file_count = sum(1 for item in all_items if os.path.isfile(os.path.join(folder_path, item)))
        
        return file_count
    except FileNotFoundError:
        # Return 0 (not a string) so the numeric comparison in the main loop works
        print(f"The folder {folder_path} does not exist.")
        return 0
    except Exception as e:
        print(f"An error occurred while counting files: {e}")
        return 0


def remove_dir_and_contents(dir_path):
    if os.path.isdir(dir_path):
        shutil.rmtree(dir_path)
        print(f"Directory {dir_path} and all its contents removed successfully.")
    else:
        print(f"Directory {dir_path} does not exist or is not a directory.")
    
def copy_directory(src, dst):
    # Check if the source directory exists
    if os.path.exists(src):
        # If the destination directory exists, delete it first
        if os.path.exists(dst):
            shutil.rmtree(dst)  # Remove the destination directory and its contents
        shutil.copytree(src, dst)  # Copy the entire directory tree from src to dst
        print(f"Copied {src} to {dst}")
    else:
        print(f"Source directory {src} does not exist")

if int(remv_val) > 0:
  remove_dir_and_contents(f"results_{MASTER_FILE}")
  # remove_dir_and_contents(f"everything")
os.makedirs(f"results_{MASTER_FILE}", exist_ok=True)
os.makedirs(f"{MASTER_FILE}", exist_ok=True)
def start_files(file_type='', index_step=1, start_index=0):
  globals()["error_retries_times"] = 0
  # Load the JSON list from the file
  def update_json_file(file_path, prov, pas):
    # Step 1: Read the JSON data
    with open(file_path, "r") as f:
        file_data = json.load(f)

    # Step 2: Update the data
    if "inventario" in file_data:
        # file_data["imagen"] = f'=IMAGEN("{image}")'
        file_data["proveedor"] = prov
        file_data["pasillo"] = pas
        file_data["pedir"] = 0
        with open(f"results_{file_type}/file_{i}.json", "w") as f:
            json.dump(file_data, f, indent=2)
            print("Updated")
  print(f"{GREEN}Starting  {file_type} at {index_step} in {start_index} {RESET}")
  with open(f"array/master_{file_type}.json", "r") as f:
    data_list = json.load(f)


  
  # Function to get results for a given index
  def get_results(i=0):
    
    if globals()["error_retries_times"] > 0:
      tosleep = 5
      # tosleep = 0
      print(f"Sleeping due to many retries {i}, sleeping {tosleep} seconds")
      time.sleep(tosleep)
      
      globals()["error_retries_times"] = int(globals()["error_retries_times"]) - 1
      print(f"Retries {globals()["error_retries_times"]}")
      return
    # Check if the file exists and contains the 'inventario' field
    file_path = f"results_{file_type}/file_{i}.json"
    
    
    if os.path.exists(file_path):
      try:
        with open(file_path, "r") as f:
          file_data = json.load(f)
          if "inventario" in file_data:
            codigo = file_data.get("codigoBarras")
            file_path2 = f"codebar_data/file_{codigo}.json"
            if int(remv_val) > 0:
              # The original opened this file for writing but dumped nothing,
              # truncating it; writing the data back appears to be the intent.
              with open(file_path2, "w") as f2:
                json.dump(file_data, f2)
            print(f"{GREEN}File {file_path} already contains 'inventario'. Skipping...{RESET}")
            # update_json_file(file_path, data_list[i][3], data_list[i][4])
            return
      except json.JSONDecodeError as e:
          print(f"JSON error: {e}")
      except UnicodeDecodeError as e:
          print(f"Encoding error: {e}")
    # Get the "codigo" from the list
    codigo = data_list[i][1]

    # Prepare the curl equivalent request using the subprocess
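    # -k skips TLS verification, -s silences progress output, and -b sends the
    # cookies from cookien.txt (presumably written by cookie.sh); the index in
    # the User-Agent just varies the device string between workers.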
    curl_command = [
        'curl', '-k', '-s',
        '-A', f'Mozilla/5.0 (Linux; Android 11; SAMSUNG {i}) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/14.2 Chrome/87.0.4280.141 Mobile Safari/537.36',
        '-b', 'cookien.txt',
        f'https://www.intracomer.com.mx/simaEspecialWeb/AuditarServlet?_param=22&tipoReporte=2&codigo={codigo}'
    ]
    if globals()["error_retries"][int(codigo)] > 5:
      # Give up on this barcode: lower the merge threshold once (the original
      # decremented on every pass) and skip further fetch attempts.
      if globals()["error_retries"][int(codigo)] == 6:
        globals()["len_master_data_list"] -= 1
        globals()["error_retries"][int(codigo)] += 1
      return
    # Check if the result is valid JSON
    try:
        result = subprocess.run(curl_command, capture_output=True, text=True, encoding="utf-8", timeout=120)
        test = result.stdout
        # print(test)
        data = json.loads(test)
        data["seccion_numero"] = data_list[i][0]
        secc_num = data["seccion_numero"]
        data["categoria_real"] = data_list[i][2]
        # data["proveedor"] = data_list[i][4]
        data["proveedor_num"] = data_list[i][3]
        data["pasillo"] = data_list[i][4]
        data["pedir"] = 1
          
        file_path2 = f"codebar_data/file_{codigo}.json"
        #data["image"] = f'=IMAGEN("{data_list[i][3]}")'
        # Write the data to the file
        with open(file_path, "w") as f:
            end_time = datetime.now(tz)
            print(f"{GREEN}Done index {i},  Barcode: {data_list[i][1]}{RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET} in {MASTER_FILE}")
            json.dump(data, f)
        with open(file_path2, "w") as f:
            end_time = datetime.now(tz)
            # print(f"{GREEN}Done index {i},  Barcode: {data_list[i][1]}{RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET} in {MASTER_FILE}")
            json.dump(data, f)
    except json.JSONDecodeError as e:
        globals()["error_retries_times"] = globals()["error_retries_times"] + 1
        
        if int(globals()["error_retries_times"]) > 5:
          globals()["do_cookie_retry"] = 1
        if int(globals()["error_retries_times"]) > 6 and int(globals()["do_cookie_retry"]) > 0:
          subprocess.run(["bash", "/var/www/html/f9/cookie.sh"])
          globals()["do_cookie_retry"] = 0
          globals()["error_retries_times"] = 0
        if not globals()["error_retries"][int(codigo)]:
          globals()["error_retries"][int(codigo)] = globals()["error_retries"].get(int(codigo),0)+1
        else:
          globals()["error_retries"][int(codigo)] = int(globals()["error_retries"][int(codigo)]) + 1
        
        print(f"ERROR {globals()["error_retries"][codigo]}")
        print(f"Failed to parse JSON for index {i} Barcode: {data_list[i][1]}, errors: {globals()["error_retries_times"] }, resultado :: {result.stdout}")
        # if(result.stdout == ""):
          # with open(file_path, "w") as f:
            # end_time = datetime.now(tz)
            # data = []
            # data["seccion_numero"] = data_list[i][0]
            # data["categoria_real"] = data_list[i][2]
            # data["proveedor_num"] = data_list[i][3]
            # data["pasillo"] = data_list[i][4]
            # data["pedir"] = 1
            # print(f"{GREEN}Done index {i},  Barcode: {data_list[i][1]} (FAILED NOT CATALOG) {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET} in {MASTER_FILE}")
            # json.dump(data, f)
            
    except UnicodeDecodeError as e:
        globals()["error_retries_times"] =  globals()["error_retries_times"]+1
        print(f"Encoding error: {e}, index: {i}")
    
      
  # Loop through the list, fetching results for every index with the given step
  for i in range(start_index, len(data_list), index_step):
    get_results(i)
  return True

def merge_files(file_type = ''):
  print("Merging Files...")
  stop = False
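  # Buckets for the category reports written at the end of this function
  # (result, rol_empty, negatives, faltantes, venta_zero, pendiente, ...)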
  data = []
  rol_zero = []
  rol_negative = []
  no_rol_negative = []
  real_list_rol = []
  list_zauto = []
  worry_some = []
  real_list = []
  new_pedido = []
  pedido_list = []
  venta_zero = []
  venta_zero2 = []
  nped = []
  nped2 = []
  nped3 = []
  invcap = []
  eq_inv_cap = []
  sin_venta = []
  # Directory containing the JSON files
  directory = f"results_{file_type}"

  # Collect every per-index result file that exists (indices can be sparse
  # when some fetches failed)
  for i in range(99999):
    file_path = os.path.join(directory, f"file_{i}.json")
    if os.path.exists(file_path):
      with open(file_path, "r") as f:
        data.append(json.load(f))

  pasillo_data = {}
  # Process the collected data
  is_full = int(allv_val)
  if is_full > 0:
    for curr in data:
      real_list.append(curr)
      if int(curr["inventario"]) == 0 and curr["resurtido"] == "Automatico":
        rol_zero.append(curr)
    with open(f"{file_type}/result.json", "w") as f:
      json.dump(real_list, f, indent=2)
    with open(f"{file_type}/rol_empty.json", "w") as f:
      json.dump(rol_zero, f, indent=2)
  else:
    for curr in data:
      # Skip entries that never downloaded correctly; the original computed
      # these flags inside an `if`, which left them stale from the previous
      # iteration whenever the check failed.
      if curr is None or "resurtido" not in curr or "inventario" not in curr:
        continue
      rol = curr["resurtido"] != "SIN RESURTIDO"
      norol = curr["resurtido"] == "SIN RESURTIDO"
      rol_zero_auto = curr["resurtido"] == "Automatico" and curr["inventario"] == 0
      # Guard capacidadEmpaque > 0 to avoid a ZeroDivisionError on the modulo
      equal_inv_cap = (curr["resurtido"] == "Automatico" and curr["inventario"] > 0
                       and int(curr["capacidadEmpaque"]) > 0
                       and curr["inventario"] % curr["capacidadEmpaque"] == 0)
      para_pedido = (curr["resurtido"] == "Automatico"
                     and curr["inventario"] < (int(curr["capacidadEmpaque"]) * 4)
                     and int(curr["pendienteDeEntregaCabecera"]) == 0
                     and curr["promedioLinea"] > 0.5 and curr["capacidadEmpaque"] < 90)
      no_venta = (curr["resurtido"] == "Automatico" and curr["inventario"] != 0
                  and curr["ventas"]["acumuladoAnualAnioCorriente"] == 0)
      if curr["resurtido"] == "Automatico" and curr["diasDeInventario"] < 80:
        next_val = 0
        if len(curr["pendienteDeEntrega"]) > 0:
          next_val = int(curr["pendienteDeEntrega"][0]["unidadesPendientes"])
        new_inv = int(curr["inventario"]) + next_val
        new_days = 0
        prom = 1
        if curr["promedioLinea"] > 0:
          prom = curr["promedioLinea"]
      
        new_days = new_inv / prom
        if new_days < 60:
          new_pedido.append(curr)
      if equal_inv_cap:
          eq_inv_cap.append(curr)
      if no_venta:
          sin_venta.append(curr)
      if para_pedido:
          pedido_list.append(curr)
      daysInv = int(curr["diasDeInventario"])
      currInv = int(curr["inventario"])
      currProm = float(curr["promedioLinea"])  # float, not int: compared against 0.3/0.5 below
      capEmp = int(curr["capacidadEmpaque"])
      # Projected days of inventory once pending deliveries arrive
      new_days_inv = 0
      new_inv_p = curr["inventario"]
      if len(curr["pendienteDeEntrega"]) > 0:
        if curr["promedioLinea"] == 0:
          new_days_inv = int(curr["inventario"])
        else:
          new_days_inv = int(curr["pendienteDeEntrega"][0]["unidadesPendientes"]) / float(curr["promedioLinea"])
          new_inv_p = int(curr["inventario"]) + int(curr["pendienteDeEntrega"][0]["unidadesPendientes"])
      curr["new_inv"] = new_inv_p
      curr["new_pend"] = 0
      curr["future_days"] = float(new_days_inv) + float(curr["diasDeInventario"])
      fut_d = curr["future_days"]
      if currInv < capEmp * 5 and currProm > 0.3 and daysInv < 15 and curr["resurtido"] == "Automatico":
        nped.append(curr)
      if currInv < capEmp * 2 and currProm > 1 and daysInv < 10 and curr["resurtido"] == "Automatico":
        nped3.append(curr)
      if currInv < capEmp * 5 and currProm > 0.5 and fut_d < 60 and curr["resurtido"] == "Automatico":
        nped2.append(curr)
      if new_inv_p == 0 and currProm <= 0.5 and curr["resurtido"] == "Automatico":
        nped2.append(curr)
      if currInv == capEmp:
        invcap.append(curr)
      #####
      if curr["inventario"] > 48 and curr["diasDeInventario"] > 360:
          worry_some.append(curr)

      if int(curr["inventario"]) == 0 and curr["resurtido"] == "Automatico":
          rol_zero.append(curr)
      if int(curr["inventario"]) < 0 and norol:
          no_rol_negative.append(curr)
      if rol:
        real_list_rol.append(curr)
      if rol_zero_auto:
        list_zauto.append(curr)
      if int(curr["inventario"]) < 0:
        rol_negative.append(curr)

      if int(curr["ventas"]["ventasUltimos30diasAnioCorriente"]) == 0 and curr["inventario"] != 0:
        venta_zero.append(curr)
      if int(curr["ventas"]["ventasUltimos30diasAnioCorriente"]) == 0 and curr["inventario"] <= 48 and curr["promedioLinea"] <= 0.5:
        venta_zero2.append(curr)
      pedir = curr.get("pedir", 0)
      if curr['resurtido'] == "Automatico" and curr["inventario"] < 10 and int(curr["pendienteDeEntregaCabecera"]) == 0 and curr["promedioLinea"] > 1 and curr["capacidadEmpaque"] == 1:
        pedido_list.append(curr)
      real_list.append(curr)
      code = curr["codigoBarras"]
      descripcion= curr["descripcion"]
      inv = curr["inventario"]
      cap = curr["capacidadEmpaque"]
      pend = curr["pendienteDeEntregaCabecera"]
      fecha = ""
      marc = curr["marca"]
      if pend > 0 and len(curr["pendienteDeEntrega"]) > 0:
        fecha = curr["pendienteDeEntrega"][0]["fhFinVigencia"]
      # time.sleep(0.1);
      # if globals()["ADD_DATA1"] == 0:
        
        # time.sleep(0.1)
        # add_data(code, descripcion, inv, cap, pend, fecha, marc, 'everything')
        
      # Group data by pasillo
      pasillo = curr.get("pasillo")
      if pasillo:
          pasillo_data.setdefault(pasillo, []).append(curr)
      with open(f"codebar_data/file_{code}.json", "w") as f:
        json.dump(curr, f, indent=2)

    globals()["ADD_DATA1"] = 1
    # Save the results into the respective pasillo directories
    for pasillo, pasillo_items in pasillo_data.items():
      pasillo_dir = f"{file_type}/pasillo_{pasillo}"

      # Create the directory if it doesn't exist
      os.makedirs(pasillo_dir, exist_ok=True)

      # Save the merged data for each pasillo, highest promedioLinea first
      pasillo_items.sort(key=lambda x: float(x.get("promedioLinea", 0)), reverse=True)
      with open(os.path.join(f"{pasillo_dir}", "result.json"), "w") as f:
          json.dump(pasillo_items, f, indent=2)
    if globals()["sort__by"] == "l":
      real_list.sort(key=lambda x: float(x.get("inventario", 0)), reverse=False)
      real_list_rol.sort(key=lambda x: float(x.get("inventario", 0)), reverse=False)
    elif globals()["sort__by"] == "m":
      real_list_rol.sort(key=lambda x: float(x.get("promedioLinea", 0)), reverse=True)
      real_list.sort(key=lambda x: float(x.get("promedioLinea", 0)), reverse=True)
    elif globals()["sort__by"] == "e":
      real_list.sort(key=lambda x: float(x.get("capacidadEmpaque", 0)), reverse=True)
      real_list_rol.sort(key=lambda x: float(x.get("capacidadEmpaque", 0)), reverse=True)
    nped2.sort(key=lambda x: float(x.get("inventario", 0)), reverse=False)
    rol_negative.sort(key=lambda x: float(x.get("inventario", 0)), reverse=True)
    # Write the results to a JSON file
    with open(f"{file_type}/result.json", "w") as f:
        json.dump(real_list, f, indent=2)
    with open(f"{file_type}/rol_empty.json", "w") as f:
        json.dump(rol_zero, f, indent=2)

    with open(f"{file_type}/negatives.json", "w") as f:
        json.dump(rol_negative, f, indent=2)
    with open(f"{file_type}/rol.json", "w") as f:
        json.dump(real_list_rol, f, indent=2)
    with open(f"{file_type}/faltantes.json", "w") as f:
        json.dump(pedido_list, f, indent=2)
    with open(f"{file_type}/venta_zero.json", "w") as f:
        json.dump(venta_zero, f, indent=2)
    with open(f"{file_type}/venta_low_zero.json", "w") as f:
        json.dump(venta_zero2, f, indent=2)
    with open(f"{file_type}/new_pedido.json", "w") as f:
        json.dump(new_pedido, f, indent=2)
    with open(f"{file_type}/peligro.json", "w") as f:
        json.dump(worry_some, f, indent=2)
    with open(f"{file_type}/pendiente.json", "w") as f:
        json.dump(nped, f, indent=2)
    with open(f"{file_type}/pd.json", "w") as f:
        json.dump(nped2, f, indent=2)
    with open(f"{file_type}/pd2.json", "w") as f:
        json.dump(nped3, f, indent=2)
    # with open(f"{file_type}/inv_cap.json", "w") as f:
        # json.dump(invcap, f, indent=2)
    with open(f"{file_type}/inv_cap.json", "w") as f:
        json.dump(eq_inv_cap, f, indent=2)
    with open(f"{file_type}/no_venta.json", "w") as f:
        json.dump(sin_venta, f, indent=2)
  end_time = datetime.now(tz)
  print(f"{GREEN}Pedido Length: {RESET}{CYAN}{len(nped3)}{RESET}")
  print(f"{CYAN}MERGED {file_type} at {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")
  return True


# Main loop
def run_tasks(custom_handler = 'everything', futures_created=1):
    
  start_time = datetime.now(tz)
  print(f"{CYAN}Started processing at {RESET}{RED}{start_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")

    

  with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor:
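    # Each future runs start_files with step == futures_created and a distinct
    # start offset, so the workers cover the master list in disjoint strides
    # (worker i handles indices i, i+N, i+2N, ...).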
    futures_list = []
    for i in range(futures_created):
      # `steps` was always 0 in the original, so step == i; submit i directly
      futures_list.append(executor.submit(start_files, custom_handler, futures_created, i))
    
    done, not_done = concurrent.futures.wait(futures_list, return_when=concurrent.futures.ALL_COMPLETED)

  # If 'not_done' is empty, all tasks are done
  if not not_done:
    print("All futures are completed.")
  # Surface any exception a worker raised
  for future in done:
    try:
      future.result()
    except Exception as exc:
      print(f"Task generated an exception: {exc}")
  end_time = datetime.now(tz)
  print(f"{CYAN}Script finished at {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")

Main_Loop = 0


def make_master(file_type = 'everything_adb'):
  # Directory containing the JSON files
  directory = f"results_{file_type}"
  # Process JSON files
  data = []
  # for i in range(99999):
    # file_path = os.path.join(directory, f"file_{i}.json")
    # if os.path.exists(file_path):
      # with open(file_path, "r") as f:
        # arr = json.load(f)
        # data.append(arr)
    # else:
      # continue
  with open(f"{file_type}/result.json", "r") as f:
    data = json.load(f)
  secciones = [335,412,114,473,472,108,341,163,164,474,13]
  # NOTE: "proveedor_num" appears twice; kept as-is in case downstream
  # consumers of array/master_*.json expect that column order.
  fields = ["seccion_numero", "codigoBarras", "categoria_real", "proveedor_num", "pasillo", "pedir", "proveedor_num"]
  merged_data = {}
  unique_key = "codigoBarras"
  for item in data:
      key = item.get(unique_key)
      if key:  # Ensure the key exists
          merged_data[key] = item

  master_json = []
  for item in merged_data.values():
      secc = item.get("seccion_numero")
      if secc in secciones:
        pass  # placeholder: the secciones filter is currently a no-op
      master_json_entry = [item.get(field, None) for field in fields]
      master_json.append(master_json_entry)
  with open(f"array/master_{file_type}.json", "w") as f:
      json.dump(master_json, f, indent=2)
      print(f"Modified Master with new length: {len(master_json)}, old length: {len(master_data_list)}.")


subprocess.run(["bash", "/var/www/html/f9/cookie.sh"])
print("Running cookie")
while True:
  if int(merge_val) > 0:
    merge_files(MASTER_FILE)
    break
  if count_files_in_folder(f"results_{MASTER_FILE}") >= len_master_data_list:
    print("Merging..")
    merge_files(MASTER_FILE)
    # make_master(MASTER_FILE)
    break
  else:
    print("Starting..")
    if int(globals()["allv_val"]) > 0:
      print("Duping ALL from the array")
    run_tasks(MASTER_FILE, int(out_val))

