# /// script
# requires-python = "==3.12"
# dependencies = [
#     "PyMuPDF>=1.26.0",
#     "datasets>=3.5.0",
# ]
# ///
import os
import gc
import re
import string
import json
import logging
import shutil
from datetime import datetime

from tqdm import tqdm
import fitz
from datasets import Dataset, load_dataset

# Root directory that holds the per-year dataset folders.
output_path = os.path.join(".", "data")


def remove(path):
    """Delete *path*, whether it is a file, symlink, or directory tree.

    Parameters
    ----------
    path : str
        Relative or absolute path to delete.

    Raises
    ------
    ValueError
        If *path* is neither a file/symlink nor a directory
        (e.g. it does not exist).
    """
    if os.path.isfile(path) or os.path.islink(path):
        os.remove(path)  # plain file or symlink
    elif os.path.isdir(path):
        shutil.rmtree(path)  # directory and everything inside it
    else:
        raise ValueError("file {} is not a file or dir.".format(path))


def reorganize_data(output_path: str, shard_size: str = "5"):
    """Consolidate temporary ``<year>_t`` folders into sharded datasets.

    Each folder in *output_path* whose name ends in ``_t`` is loaded as a
    single dataset, re-saved under the same name minus the ``_t`` suffix in
    shards of at most *shard_size* GB (retrying with smaller shard sizes on
    failure), and the temporary folder is then deleted.

    Parameters
    ----------
    output_path : str
        Directory containing the temporary ``<year>_t`` folders.
    shard_size : str, optional
        Initial maximum shard size in gigabytes (default ``"5"``).  On a
        save failure the function falls back to 3 GB and then 1 GB shards.
    """
    folders = os.listdir(output_path)
    # Temp folders carry a trailing "_t"; match the suffix only so that a
    # name merely containing "_t" elsewhere is not picked up by mistake.
    temp_folders = [name for name in folders if name.endswith("_t")]
    if not temp_folders:
        print("DATA ORGANIZED")
        return

    print("REORGANIZING DATA...")
    # Shard sizes to attempt, largest first.  Honors a caller-supplied
    # starting size while keeping the original 5GB -> 3GB -> 1GB fallback.
    fallback_sizes = [shard_size] + [s for s in ("3", "1") if s != shard_size]

    for t_fold in tqdm(temp_folders):
        # Load every separate parquet in the temp folder as one dataset.
        data_path = os.path.join(output_path, t_fold)
        data_set = load_dataset(data_path, split="train")

        year_str = t_fold[:-2]  # strip the trailing "_t"
        new_data_path = os.path.join(output_path, year_str)

        # Save in appropriately sized chunks, shrinking the shard size on
        # failure.  Exception (not BaseException) so Ctrl-C still aborts.
        saved = False
        for size in fallback_sizes:
            try:
                data_set.save_to_disk(new_data_path, max_shard_size=f"{size}GB")
                saved = True
                break
            except Exception:
                print(f"temporary folder: {t_fold} could not be processed "
                      f"at {size}GB shards")
                # Remove any partial output before retrying; the failed save
                # may not have created the path at all, so check first.
                if os.path.exists(new_data_path):
                    remove(new_data_path)
        if not saved:
            # Every shard size failed for this year; leave the temp folder
            # in place so the data is not lost, and move on.
            continue

        # Delete the temp folder; a PermissionError (e.g. open handles on
        # Windows) is reported but does not abort the run.
        try:
            remove(data_path)
        except PermissionError as e:
            print(f"{e}")
        data_set.cleanup_cache_files()


def main():
    reorganize_data(output_path)


if __name__ == "__main__":
    main()