import os
import json
import time
import subprocess
import shutil
from datetime import datetime
import concurrent.futures
# import pymysql  # only needed if add_data() is re-enabled
import argparse
# Set the timezone to "America/Monterrey"
os.environ['TZ'] = 'America/Monterrey'
if hasattr(time, 'tzset'):
  time.tzset()  # apply the TZ change (Unix only); setting the env var alone may not affect datetime.now()
os.system("")  # enables ANSI escape codes in the Windows terminal (no-op elsewhere)

parser = argparse.ArgumentParser()
parser.add_argument('-r', '--allv', help='If set, wipe results_zero_every_section before fetching everything again')
parser.add_argument('-m', '--remv', help='If set, skip fetching and only merge the existing result files')
args = parser.parse_args()
allv_val = args.allv or 0
remv_by = args.remv or 0
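
# Typical invocations (the script filename is assumed here):
#   python3 script.py          # fetch any missing items, then merge and rebuild the masters
#   python3 script.py -r 1     # also wipe results_zero_every_section before fetching
#   python3 script.py -m 1     # skip fetching entirely and only merge the existing result files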
STOP_WHILE = False  # currently unused
ADD_DATA1 = 0  # guards the (disabled) one-time DB insert in merge_files
error_retries_times = 0  # consecutive fetch/parse failures, used for backoff in get_results
# Define color codes
RESET = "\033[0m"
BOLD = "\033[1m"
GREEN = "\033[92m"
RED = "\033[91m"
CYAN = "\033[96m"

CUSTOM_NAME_CODE = "zero_every_section"
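# This name is shared by array/master_<name>.json, the per-item scratch folder results_<name>/,
# and the merged output folder <name>/.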

def add_data(code, descripcion, inv, plinea, cap, pend, fecha, marc, tp):
  # NOTE: requires the pymysql import at the top of the file; the 'tp' argument is currently unused.
  connection = None
  cursor = None
  try:
      # Establish a connection
      connection = pymysql.connect(
          host='localhost',
          user='root',
          password='',
          database='f9'
      )

      print("Connected to MySQL database")

      # Create a cursor object
      cursor = connection.cursor()
      cursor.execute(
          """
          INSERT INTO empties (codigoBarras, descripcion, inventario, promedioLinea, capacidadEmpaque, pendienteEntregaCabecera, pedidoPendiente, marca)
          VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
          """,
          (code, descripcion, inv, plinea, cap, pend, fecha, marc)
      )
      connection.commit()
      print("Done:", cursor)

  except pymysql.MySQLError as e:
      print("Error while connecting to MySQL", e)
  finally:
      # Close the cursor and connection only if they were actually created
      if cursor is not None:
          cursor.close()
      if connection is not None:
          connection.close()
          print("MySQL connection is closed")

def remove_dir_and_contents(dir_path):
    if os.path.isdir(dir_path):
        shutil.rmtree(dir_path)
        print(f"Directory {dir_path} and all its contents removed successfully.")
    else:
        print(f"Directory {dir_path} does not exist or is not a directory.")
    
def copy_directory(src, dst):
    # Check if the source directory exists
    if os.path.exists(src):
        # If the destination directory exists, delete it first
        if os.path.exists(dst):
            shutil.rmtree(dst)  # Remove the destination directory and its contents
        shutil.copytree(src, dst)  # Copy the entire directory tree from src to dst
        print(f"Copied {src} to {dst}")
    else:
        print(f"Source directory {src} does not exist")

if allv_val:
  remove_dir_and_contents(f"results_{CUSTOM_NAME_CODE}")
  #remove_dir_and_contents(CUSTOM_NAME_CODE)
os.makedirs(f"results_{CUSTOM_NAME_CODE}", exist_ok=True)
os.makedirs(CUSTOM_NAME_CODE, exist_ok=True)
def start_files(file_type='', index_step=1, start_index=0):
  # Load the JSON list from the file

  print(f"{GREEN}Starting  {file_type} at {index_step} in {start_index} {RESET}")
  with open(f"array/master_{file_type}.json", "r") as f:
    data_list = json.load(f)

  # Function to get results for a given index
  def get_results(i=0):
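    # Back off when several consecutive fetches have failed: sleep longer the further into
    # the list we are and the higher the failure count, then decay the counter by one.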
    if globals()["error_retries_times"] > 5:
      tosleep = (i/60)+(int(globals()["error_retries_times"])*2)
      print(f"Sleeping due to many retries {i}, sleeping {tosleep} seconds")
      time.sleep(tosleep)
      globals()["error_retries_times"] = int(globals()["error_retries_times"]) - 1
    # Check if the file exists and contains the 'inventario' field
    file_path = f"results_{file_type}/file_{i}.json"
    if os.path.exists(file_path):
      with open(file_path, "r") as f:
        try:
          file_data = json.load(f)
          if "inventario" in file_data:
            print(f"{GREEN}File {file_path} already contains 'inventario'. Skipping...{RESET}")
          return
        except json.JSONDecodeError:
          print(f"Failed to parse JSON for index {i}")

    # Get the "codigo" from the list
    codigo = data_list[i][1]

    # Build the curl request: mobile user agent plus the session cookie stored in cookien.txt
    curl_command = [
        'curl', '-k', '-s',
        '-A', 'Mozilla/5.0 (Linux; Android 11; SAMSUNG SM-G973U) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/14.2 Chrome/87.0.4280.141 Mobile Safari/537.36',
        '-b', 'cookien.txt',
        f'https://www.intracomer.com.mx/simaEspecialWeb/AuditarServlet?_param=22&tipoReporte=2&codigo={codigo}'
    ]
    result = subprocess.run(curl_command, capture_output=True, text=True, encoding="utf-8")

    # Check if the result is valid JSON
    try:
        data = json.loads(result.stdout)
        data["seccion_numero"] = data_list[i][0]
        data["categoria_real"] = data_list[i][2]
        data["proveedor_num"] = data_list[i][3]
        # Write the data to the file
        with open(file_path, "w") as f:
            print(f"{GREEN}Done index {i},  Barcode: {data_list[i][1]}{RESET}")
            json.dump(data, f)
    except json.JSONDecodeError:
        globals()["error_retries_times"] = globals()["error_retries_times"] + 1
        # once the failure counter reaches 2, refresh the session cookie
        if int(globals()["error_retries_times"]) == 2:
          subprocess.run(["bash", "/var/www/html/f9/cookie.sh"])
        print(f"Failed to parse JSON for index {i}")

  # Loop through the list, fetching results for every index with the given step
  for i in range(start_index, len(data_list), index_step):
    get_results(i)
  return True

def merge_files(file_type=''):
  # Collect every results_<file_type>/file_<i>.json, classify the records, and write
  # <file_type>/rol_empty.json (zero stock on automatic resurtido) and <file_type>/result.json (all valid records).
  remove_dir_and_contents(file_type)
  os.makedirs(file_type, exist_ok=True)
  stop = False
  data = []
  rol_zero = []
  rol_negative = []
  no_rol_negative = []
  real_list_rol = []
  list_zauto = []
  real_list = []

  # Directory containing the JSON files
  directory = f"results_{file_type}"

  # Process JSON files
  for i in range(99999):
    file_path = os.path.join(directory, f"file_{i}.json")
    if not os.path.exists(file_path):
      continue
    with open(file_path, "r") as f:
      data.append(json.load(f))

  # Process the collected data

  for curr in data:
    # Skip records that failed to download or are missing the key fields
    if curr is None or "resurtido" not in curr or "inventario" not in curr:
      continue

    rol = curr["resurtido"] != "SIN RESURTIDO"
    norol = curr["resurtido"] == "SIN RESURTIDO"
    rol_zero_auto = curr["resurtido"] == "Automatico" and curr["inventario"] == 0

    if int(curr["inventario"]) == 0 and curr["resurtido"] == "Automatico":
      rol_zero.append(curr)
    if int(curr["inventario"]) < 0 and rol:
      rol_negative.append(curr)
    if int(curr["inventario"]) < 0 and norol:
      no_rol_negative.append(curr)
    if rol:
      real_list_rol.append(curr)
    if rol_zero_auto:
      list_zauto.append(curr)

    real_list.append(curr)
    code = curr["codigoBarras"]
    descripcion = curr["descripcion"]
    inv = curr["inventario"]
    cap = curr["capacidadEmpaque"]
    pend = curr["pendienteDeEntregaCabecera"]
    fecha = ""
    marc = curr["marca"]
    plinea = curr["promedioLinea"]
    if pend > 0 and len(curr["pendienteDeEntrega"]) > 0:
      fecha = curr["pendienteDeEntrega"][0]["fhFinVigencia"]
    # time.sleep(0.1);
    #if globals()["ADD_DATA1"] == 0:
      # time.sleep(0.1)
      #add_data(code, descripcion, inv, plinea, cap, pend, fecha, marc, 'everything')

  globals()["ADD_DATA1"] = 1

  # Write the results to a JSON file
  with open(f"{file_type}/rol_empty.json", "w") as f:
      json.dump(rol_zero, f, indent=2)
  with open(f"{file_type}/result.json", "w") as f:
      json.dump(real_list, f, indent=2)

  end_time = datetime.now()
  print(f"{CYAN}MERGED {file_type} at {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")
  #remove_dir_and_contents(f"results_{file_type}")
  os.makedirs(f"results_{file_type}", exist_ok=True)
  return True


# Main loop
def run_tasks():
    
  start_time = datetime.now()
  print(f"{CYAN}Started processing at {RESET}{RED}{start_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")
    


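  # Partition the master list across 15 workers: each worker fetches indices
  # offset, offset+15, offset+30, ... so no two workers request the same item.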
  with concurrent.futures.ThreadPoolExecutor(max_workers=18) as executor:
    futures = [
      executor.submit(start_files, 'zero_every_section', 15, offset)
      for offset in range(15)
    ]
    done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.ALL_COMPLETED)
    
    # Check when all futures are done
  if not not_done:  # If 'not_done' is empty, all tasks are done
      merge_files('zero_every_section')
      
      # remove_dir_and_contents("zero_every_section_old")
      # os.makedirs(f"zero_every_section_old", exist_ok=True)
      # Copy "perfu/" to "perfu_old/"
      # copy_directory('zero_every_section/', 'zero_every_section_old/')

      print("All futures are completed.")
  # Surface any exception raised inside the worker threads
  for future in done:
      try:
          future.result()
      except Exception as exc:
          print(f"Task generated an exception: {exc}")
    # Wait for all tasks to complete
    
    # for future in concurrent.futures.as_completed(futures):
      # future.result()
  end_time = datetime.now()
  print(f"{CYAN}Script finished at {RESET}{RED}{end_time.strftime('%Y-%m-%d %H:%M:%S')}{RESET}")


def count_files_in_folder(folder_path):
    try:
        # List all items in the folder
        all_items = os.listdir(folder_path)

        # Count only the files
        file_count = sum(1 for item in all_items if os.path.isfile(os.path.join(folder_path, item)))

        return file_count
    except FileNotFoundError:
        print("The folder does not exist.")
        return 0
    except Exception as e:
        print(f"An error occurred: {e}")
        return 0


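# Each master entry is a row like [seccion_numero, codigoBarras, categoria_real, proveedor_num, ...]
# (see the fields list in make_master); start_files reads the barcode from index 1.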
master_data_list = []
with open(f"array/master_{CUSTOM_NAME_CODE}.json", "r") as f:
  master_data_list = json.load(f)

def make_master(file_type='zero_every_section'):
  # Rebuild the master arrays from the merged result.json:
  #   array/master_new_items.json          -> items (Automatico or SIN RESURTIDO) with non-zero inventory
  #   array/master_zero_every_section.json -> items (Automatico or SIN RESURTIDO) still at zero inventory
  # Directory containing the JSON files
  directory = f"results_{file_type}"
  # Process JSON files
  data = []
  # for i in range(99999):
    # file_path = os.path.join(directory, f"file_{i}.json")
    # if os.path.exists(file_path):
      # with open(file_path, "r") as f:
        # arr = json.load(f)
        # data.append(arr)
    # else:
      # continue
  with open(f"{CUSTOM_NAME_CODE}/result.json", "r") as f:
    data = json.load(f)

  fields = ["seccion_numero", "codigoBarras", "categoria_real", "proveedor_num", "pasillo", "pedir", "proveedor_num"]  # "proveedor_num" is listed twice; kept as-is to preserve the current master row layout
  merged_data = {}
  unique_key = "codigoBarras"  
  for item in data:
      key = item.get(unique_key)
      if key:  # Ensure the key exists
          merged_data[key] = item
  has_json = []
  
  for item in merged_data.values():
       if int(item.get("inventario")) != 0 and (item.get("resurtido") == "Automatico" or item.get("resurtido") == "SIN RESURTIDO" ):
         master_json_entry = [item.get(field, None) for field in fields]
         has_json.append(master_json_entry)
  with open(f"array/master_new_items.json", "w") as f:
      json.dump(has_json, f, indent=2)
      print(f"Modified Master new_items with new length: {len(has_json)}, old length: {len(has_json)}.")
  master_json = []
  for item in merged_data.values():
       if int(item.get("inventario")) == 0 and (item.get("resurtido") == "Automatico" or item.get("resurtido") == "SIN RESURTIDO" ):
         master_json_entry = [item.get(field, None) for field in fields]
         master_json.append(master_json_entry)
  with open(f"array/master_zero_every_section.json", "w") as f:
      json.dump(master_json, f, indent=2)
      print(f"Modified Master zero_every_section with new length: {len(master_json)}, old length: {len(master_data_list)}.")

# master_everything = []
# with open(f"array/master_everything.json", "r") as f:
  # master_everything = json.load(f)
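
# Main flow: with --remv (-m) only merge the results already on disk; otherwise keep
# launching fetch passes until every master entry has a result file, then merge and
# rebuild the master arrays.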
if remv_by:
  merge_files(CUSTOM_NAME_CODE)
else:
  while True:
    if count_files_in_folder(f"results_{CUSTOM_NAME_CODE}") >= len(master_data_list):
      merge_files(CUSTOM_NAME_CODE)
      make_master(CUSTOM_NAME_CODE)
      break
    else:
      # break
      run_tasks()
    #else:
     # print(f"File at: {count_files_in_folder("results_everything")} of {len(master_everything)}")
     # print("Retrying in 5")
     # time.sleep(5)
