import os
import json
import subprocess
import shutil
import argparse
import concurrent.futures
from collections import defaultdict
from datetime import datetime
from zoneinfo import ZoneInfo
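
# This script fetches per-item inventory data from the intracomer AuditarServlet
# endpoint for every barcode in array/master_<name>.json, caches each response as
# results_<name>/file_<i>.json, then merges the cached files into summary reports.
# Example invocation (the script filename here is illustrative):
#   python fetch_and_merge.py -f everything_pedido -o 10 -s p
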
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', help='Name suffix of the master list to process (array/master_<file>.json)')
parser.add_argument('-o', '--output', type=int, default=10, help='Number of concurrent fetch workers')
parser.add_argument('-s', '--specific', default='',
                    help="Merge filter: 'p' keeps Automatico, 'd' keeps Diario/Abierto, 'n' keeps SIN RESURTIDO")
args = parser.parse_args()
arg_val = args.file
out_val = args.output
specific_val = args.specific

os.system("")  # Enables ANSI escape sequences in the Windows terminal (no-op elsewhere)

# Set the timezone to "America/Monterrey"
os.environ['TZ'] = 'America/Monterrey'
tz = ZoneInfo("America/Monterrey")

STOP_WHILE = False  # Reserved stop flag (currently unused)

# Name suffix of the list to process; overridden by --file
CUSTOM_NAME_CODE = "everything_pedido"

if arg_val:
  CUSTOM_NAME_CODE = arg_val

# Define color codes
RESET = "\033[0m"
BOLD = "\033[1m"
GREEN = "\033[92m"
RED = "\033[91m"
CYAN = "\033[96m"


def remove_dir_and_contents(dir_path):
    if os.path.isdir(dir_path):
        shutil.rmtree(dir_path)
        print(f"Directory {dir_path} and all its contents removed successfully.")
    else:
        print(f"Directory {dir_path} does not exist or is not a directory.")
    
def copy_directory(src, dst):
    # Check if the source directory exists
    if os.path.exists(src):
        # If the destination directory exists, delete it first
        if os.path.exists(dst):
            shutil.rmtree(dst)  # Remove the destination directory and its contents
        shutil.copytree(src, dst)  # Copy the entire directory tree from src to dst
        print(f"Copied {src} to {dst}")
    else:
        print(f"Source directory {src} does not exist")


# Start from a clean slate: wipe previous results, then recreate the working directories
remove_dir_and_contents(f"results_{CUSTOM_NAME_CODE}")
#remove_dir_and_contents(f"{CUSTOM_NAME_CODE}")
os.makedirs(f"results_{CUSTOM_NAME_CODE}", exist_ok=True)
os.makedirs(f"{CUSTOM_NAME_CODE}", exist_ok=True)
def start_files(file_type='', index_step=1, start_index=0):
  print(f"{GREEN}Starting {file_type}: step {index_step}, start index {start_index}{RESET}")

  # Load the master item list for this run
  with open(f"array/master_{file_type}.json", "r") as f:
    data_list = json.load(f)

  # Function to parse the single-line cookie from the cookie.txt file
  def load_cookie_from_file(cookie_file):
    with open(cookie_file, "r") as f:
        line = f.readline().strip()
        if line.startswith("#"):
            parts = line.split("\t")
            if len(parts) >= 7:
                cookie_name = parts[5]
                cookie_value = parts[6]
                return {cookie_name: cookie_value}
    return {}

  # Netscape-format cookie file; curl reads it directly via -b below
  cookie_file = r'./cookie.txt'
  cookies = load_cookie_from_file(cookie_file)  # Parsed copy (currently unused)

  # Fetch one item and cache it, skipping files that already contain 'inventario'
  def get_results(i=0):
    # Check if the file exists and contains the 'inventario' field
    file_path = f"results_{file_type}/file_{i}.json"
    if os.path.exists(file_path):
      with open(file_path, "r") as f:
        file_data = json.load(f)
        if "inventario" in file_data:
          print(f"{GREEN}File {file_path} already contains 'inventario'. Skipping... at {index_step} in {start_index} {RESET}")
          return

    # Get the "codigo" from the list
    codigo = data_list[i][1]
    
    # Issue the request through curl: -k skips TLS verification, -s silences
    # progress output, -b supplies the Netscape cookie file loaded above
    curl_command = [
        'curl', '-k', '-s',
        '-A', 'Mozilla/5.0 (Linux; Android 11; SAMSUNG SM-G973U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/14.2 Chrome/87.0.4280.141 Mobile Safari/537.36',
        '-b', cookie_file,
        f'https://www.intracomer.com.mx/simaEspecialWeb/AuditarServlet?_param=22&tipoReporte=2&codigo={codigo}'
    ]
    result = subprocess.run(curl_command, capture_output=True, text=True, encoding="utf-8")

    # Parse the response; non-JSON output (e.g. an expired session page) is skipped
    try:
        data = json.loads(result.stdout)
        pasillo = data_list[i][4] or 1
        pedir = data_list[i][5] or 1
        data["seccion_numero"] = data_list[i][0]
        data["categoria_real"] = data_list[i][2]
        data["proveedor"] = data_list[i][3]
        data["pasillo"] = pasillo
        data["pedir"] = pedir
        # Cache the enriched record
        with open(file_path, "w") as f:
            end_time = datetime.now(tz)
            print(f"{GREEN}Done index {i}, Barcode: {data_list[i][1]}{RESET} {RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET} (step {index_step}, start {start_index})")
            json.dump(data, f)
    except json.JSONDecodeError:
        print(f"Failed to parse JSON for index {i}")

  # Loop through the list, fetching results for every index with the given step
  for i in range(start_index, len(data_list), index_step):
    get_results(i)

  return True
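
# Merge the cached per-item files into summary reports: result.json (every item,
# sorted by days of inventory), rol_empty.json, pedido.json (inventory below 7)
# and pd.json (reorder suggestions sorted by projected days of coverage).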

def merge_files(file_type='', specific=None):
  # Default to the --specific command-line filter when no override is given
  if specific is None:
    specific = specific_val
  data = []
  rol_zero = []          # On-rol items with zero inventory
  rol_negative = []      # On-rol items with negative inventory
  no_rol_negative = []   # Off-rol items with negative inventory
  real_list_rol = []     # Remaining on-rol items
  list_zauto = []        # Automatico items with zero inventory
  real_list = []         # Every merged item
  ped = []               # Items with inventory below 7
  nped = []              # Suggested reorders
  # Directory containing the per-item JSON files
  directory = f"results_{file_type}"
  
  # Collect every cached result file (indices are sparse; 13000 is an upper
  # bound on the scan range, not an exact count)
  for i in range(13000):
    file_path = os.path.join(directory, f"file_{i}.json")
    if os.path.exists(file_path):
      with open(file_path, "r") as f:
        data.append(json.load(f))

  # Bucket the collected items
  for curr in data:
    if curr is not None and "resurtido" in curr and "inventario" in curr:
      # Optional resurtido filter: 'p' keeps Automatico, 'd' keeps Diario/Abierto,
      # 'n' keeps SIN RESURTIDO
      if specific:
        if specific == 'p':
          if curr["resurtido"] in ("Diario", "Abierto", "SIN RESURTIDO"):
            continue
        elif specific == 'd':
          if curr["resurtido"] in ("Automatico", "SIN RESURTIDO"):
            continue
        elif specific == 'n':
          if curr["resurtido"] in ("Automatico", "Diario"):
            continue
      if curr["pedir"] is None:
        curr["pedir"] = 0
      rol = curr["resurtido"] != "SIN RESURTIDO"
      norol = curr["resurtido"] == "SIN RESURTIDO"
      rol_zero_auto = curr["resurtido"] == "Automatico" and curr["inventario"] == 0
      pedir = curr.get("pedir", 0)
      daysInv = float(curr["diasDeInventario"])
      currInv = int(curr["inventario"])
      currProm = float(curr["promedioLinea"])
      capEmp = int(curr["capacidadEmpaque"])
      # Project stock forward: units pending delivery add days of coverage on
      # top of the current days of inventory
      new_days_inv = 0
      new_inv_p = curr["inventario"]
      if len(curr["pendienteDeEntrega"]) > 0:
        if currProm == 0:
          new_days_inv = int(curr["inventario"])
        else:
          new_days_inv = int(curr["pendienteDeEntrega"][0]["unidadesPendientes"]) / currProm
          new_inv_p = int(curr["inventario"]) + int(curr["pendienteDeEntrega"][0]["unidadesPendientes"])
      curr["new_inv"] = new_inv_p
      curr["new_pend"] = 0
      curr["future_days"] = float(new_days_inv) + daysInv
      fut_d = curr["future_days"]

      # Reorder candidates: Automatico items with low stock relative to pack
      # size, real movement and under 28 days of projected coverage, or slow
      # movers that are fully out of stock even counting pending deliveries
      if currInv < capEmp * 5 and currProm > 0.5 and fut_d < 28 and curr["resurtido"] == "Automatico":
        nped.append(curr)
      if new_inv_p == 0 and currProm <= 0.5 and curr["resurtido"] == "Automatico":
        nped.append(curr)
      # Derive a suggested order quantity from the average line movement
      if pedir == 0:
        if currProm < 0.5:
          curr["pedir"] = 2
        elif currProm < 1:
          curr["pedir"] = 3
        elif currProm < 5:
          curr["pedir"] = 5
        else:
          curr["pedir"] = 10
      if curr["pedir"] == 0:
        curr["pedir"] = 1
      
      if int(curr["inventario"]) == 0 and rol:
          rol_zero.append(curr)
      elif int(curr["inventario"]) < 0 and rol:
          rol_negative.append(curr)
      elif int(curr["inventario"]) < 0 and norol:
          no_rol_negative.append(curr)
      elif rol:
          real_list_rol.append(curr)
      elif rol_zero_auto:
          list_zauto.append(curr)
      # if int(curr["pendienteDeEntregaCabecera"]) == 0:
      real_list.append(curr)
      if int(curr["inventario"]) < 7:
          ped.append(curr)
  # Sort ascending so the items needing attention soonest come first
  nped.sort(key=lambda x: float(x.get("future_days", 0)))
  real_list.sort(key=lambda x: float(x.get("diasDeInventario", 0)))
  # Create a nested grouping by "categoria_real" and "capacidadEmpaque"
  # grouped_data = defaultdict(lambda: defaultdict(list))
  # group_pedir = defaultdict(list)
  # for item in data:
      # group_pedir[item["pedir"]].append(item)
  # for capa, group in group_pedir.items():
    # with open(f"{file_type}/{capa}.json", "w", encoding="utf-8") as f:
      # json.dump(group, f, indent=2)
  # for item in data:
      # categoria = item.get("seccion", "Unknown")
      # capacidad = item.get("pedir", "Unknown")
      # grouped_data[categoria][capacidad].append(item)

  # # Save the data into files
  # for categoria, capacidades in grouped_data.items():
      # # Create a directory for each category
      # os.makedirs(f"{file_type}/{categoria}", exist_ok=True)
      # for capacidad, items in capacidades.items():
          # # Create a JSON file for each capacity in the category
          # filename = os.path.join(f"{file_type}/{categoria}", f"{capacidad}.json")
          # with open(filename, "w", encoding="utf-8") as f:
              # json.dump(items, f, indent=4, ensure_ascii=False)

  # print("Data grouped and saved into files successfully.")
  with open(f"{file_type}/result.json", "w") as f:
      json.dump(real_list, f, indent=2)
  with open(f"{file_type}/rol_empty.json", "w") as f:
      json.dump(list_zauto, f, indent=2)
  with open(f"{file_type}/pedido.json", "w") as f:
      json.dump(ped, f, indent=2)
  with open(f"{file_type}/pd.json", "w") as f:
      json.dump(nped, f, indent=2)
  end_time = datetime.now(tz)
  print(f"{GREEN}Pedido Length: {RESET}{CYAN}{len(nped)}{RESET}")
  print(f"{CYAN}MERGED {file_type} at {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")
  return True


# Main driver: fan out fetch workers across the list and wait for all of them
def run_tasks(custom_handler="custom", futures_created=1):
  start_time = datetime.now(tz)
  print(f"{CYAN}Started processing at {RESET}{RED}{start_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")

  # Worker i fetches indices i, i + N, i + 2N, ... where N = futures_created
  with concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor:
    futures = [
        executor.submit(start_files, custom_handler, futures_created, i)
        for i in range(futures_created)
    ]
    done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.ALL_COMPLETED)
    
  if not not_done:  # 'not_done' empty means every worker finished
    print("All futures are completed.")

  # Surface any exceptions raised inside the workers
  for future in done:
    try:
      future.result()
    except Exception as exc:
      print(f"Task generated an exception: {exc}")

  end_time = datetime.now(tz)
  print(f"{CYAN}Script finished at {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")

def is_time_to_stop():
    # Currently unused helper: True between 03:01 and 14:58 local time
    now = datetime.now(tz)
    return (3, 0) < (now.hour, now.minute) < (14, 59)

  

def count_files_in_folder(folder_path):
    # Return the number of plain files in folder_path; 0 if it is missing or
    # unreadable, so callers can safely compare the result numerically
    try:
        all_items = os.listdir(folder_path)
        return sum(1 for item in all_items if os.path.isfile(os.path.join(folder_path, item)))
    except FileNotFoundError:
        print("The folder does not exist.")
        return 0
    except Exception as e:
        print(f"An error occurred: {e}")
        return 0


# Load the master list so we know how many result files to expect
with open(f"array/master_{CUSTOM_NAME_CODE}.json", "r") as f:
  master_data_list = json.load(f)

results_dir = f"results_{CUSTOM_NAME_CODE}"
print(f"{count_files_in_folder(results_dir)}, List length: {len(master_data_list)}")
# Keep launching fetch workers until every item has a cached result, then merge
while True:
  if count_files_in_folder(results_dir) >= len(master_data_list):
    merge_files(CUSTOM_NAME_CODE)
    break
  run_tasks(CUSTOM_NAME_CODE, out_val)