# hydata/create_metadata_sft_shuffle_v2.py
import os
import re
import glob
import json
import argparse
import random
import uuid
from tqdm import tqdm
from pathlib import Path
from collections import defaultdict
def parse_ground_truth(name):
"""Extract ground truth rotation axis and angle from filename or folder name"""
# Remove file extension if present
basename = name.split(".")[0] if "." in name else name
parts = basename.split("_")
if len(parts) >= 4: # figXXXX_XXX_axis_angle
rotation_axis = parts[-2] # Second to last element is axis
rotation_angle = int(parts[-1]) # Last element is angle
# Convert negative angles to 0-360 range
if rotation_angle < 0:
rotation_angle += 360
return rotation_axis, rotation_angle
print(f"Warning: Could not parse name: {basename}")
return None, None
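# Illustrative usage of the assumed "figXXXX_XXX_axis_angle" naming convention:
#   parse_ground_truth("fig0001_003_y_-45.png")  # -> ("y", 315)
#   parse_ground_truth("fig0002_001_x_90")       # -> ("x", 90)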
def load_examples(example_dir, generation_mode):
"""Load example images from the example directory"""
if generation_mode == "combined":
# Load all single PNG files from the example directory
files = glob.glob(os.path.join(example_dir, "*.png"))
print(f"Found {len(files)} combined example images in {example_dir}")
return files
else: # separate mode
# Find all folders in the example directory
folders = [f for f in glob.glob(os.path.join(example_dir, "*")) if os.path.isdir(f)]
# Filter folders that contain both _ini.png and _rot.png files
valid_folders = []
for folder in folders:
folder_name = os.path.basename(folder)
ini_file = os.path.join(folder, f"{folder_name}_ini.png")
rot_file = os.path.join(folder, f"{folder_name}_rot.png")
if os.path.exists(ini_file) and os.path.exists(rot_file):
valid_folders.append(folder)
print(f"Found {len(valid_folders)} example folder pairs in {example_dir}")
return valid_folders
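# Expected example_dir layout, inferred from the loaders above (names are illustrative):
#   combined mode:  example_dir/fig0001_000_x_45.png  (single side-by-side image)
#   separate mode:  example_dir/fig0001_000_x_45/fig0001_000_x_45_ini.png
#                   example_dir/fig0001_000_x_45/fig0001_000_x_45_rot.png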
def organize_examples(examples, generation_mode):
"""Organize examples by rotation axis and angle"""
organized = defaultdict(list)
for example in examples:
basename = os.path.basename(example)
if generation_mode == "combined":
basename = basename.split(".")[0]
axis, angle = parse_ground_truth(basename)
if axis is None or angle is None:
continue
key = (axis, angle)
organized[key].append(example)
# Print statistics
print("\nDistribution of examples by axis-angle:")
for key, examples_list in organized.items():
print(f" {key[0]}-axis, {key[1]} degrees: {len(examples_list)} examples")
return dict(organized)
def select_examples(organized_examples, test_axis, possible_angles, max_examples=2):
"""Select examples for the test case, with appropriate randomization"""
examples = []
# Organize examples by angle for this axis
examples_by_angle = {}
for (axis, angle), example_list in organized_examples.items():
if axis == test_axis and angle in possible_angles:
if angle not in examples_by_angle:
examples_by_angle[angle] = []
examples_by_angle[angle].extend([(example, angle) for example in example_list])
# If no examples found, return empty list
if not examples_by_angle:
print(f"Warning: No examples found for rotation around {test_axis}-axis")
return []
# If max_examples is less than the number of possible angles,
# randomly select max_examples angles and pick one example from each
if max_examples < len(possible_angles):
# Get available angles that have examples
available_angles = [angle for angle in possible_angles if angle in examples_by_angle and examples_by_angle[angle]]
# Randomly select angles to use
if available_angles:
selected_angles = random.sample(available_angles, min(max_examples, len(available_angles)))
# Select one example from each selected angle
for angle in selected_angles:
selected_example = random.choice(examples_by_angle[angle])
examples.append(selected_example)
else:
# Try to select one example from each angle
for angle in possible_angles:
if angle in examples_by_angle and examples_by_angle[angle]:
selected_example = random.choice(examples_by_angle[angle])
examples.append(selected_example)
# If we have enough examples, stop
if len(examples) >= max_examples:
break
# If we don't have enough examples, fill with random examples from any angle
if len(examples) < max_examples and examples_by_angle:
all_examples = []
for angle_examples in examples_by_angle.values():
all_examples.extend(angle_examples)
while len(examples) < max_examples and all_examples:
selected_example = random.choice(all_examples)
# Avoid duplicates
all_examples.remove(selected_example)
if selected_example not in examples:
examples.append(selected_example)
return examples
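# select_examples returns a list of (example, angle) tuples, where example is a PNG path
# (combined mode) or an example folder path (separate mode); at most max_examples entries,
# drawn from distinct angles whenever possible.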
def construct_prompt_with_examples(axis, possible_angles, examples=None, difficulty="easy", generation_mode="combined"):
"""Create prompt for the VLM with an in-context example"""
# possible_angles is passed in directly; it is no longer derived from an angle increment.
# Common instructions for both modes
coordinate_system_templates = [
# Version 1 (Slightly more concise)
(
"The 3D Cartesian coordinate setup is:\n"
"- x-axis: Runs horizontally, positive to the right.\n"
"- y-axis: Runs vertically, positive going up.\n"
"- z-axis: Runs perpendicular to the screen, positive towards the viewer.\n"
"- The origin (0,0,0) is positioned at the geometric center of the 3D object mesh.\n\n"
"To visualize rotations, imagine looking along the positive direction of the rotation axis."
),
# Version 2 (Focus on orientation and direction)
(
"We are using a 3D Cartesian system oriented as follows:\n"
"- The positive x-direction points right.\n"
"- The positive y-direction points up.\n"
"- The positive z-direction points out of the screen towards you.\n"
"- The coordinate system's origin (0,0,0) coincides with the geometric center of the 3D object.\n\n"
"When discussing axis rotations, the viewpoint is assumed to be looking along the axis's positive direction."
),
# Version 3 (More explicit about axes)
(
"This work employs a 3D Cartesian coordinate frame where:\n"
"- The horizontal axis (X) increases positively to the right.\n"
"- The vertical axis (Y) increases positively upwards.\n"
"- The depth axis (Z) increases positively coming out towards the observer.\n"
"- The reference point (0,0,0) is located at the object's geometric center.\n\n"
"For rotations, the perspective is looking down the positive axis towards the origin."
),
# Version 4 (Technical description)
(
"The coordinate system for this task is defined as:\n"
"- X-axis: Horizontal, with positive values increasing rightward\n"
"- Y-axis: Vertical, with positive values increasing upward\n"
"- Z-axis: Depth, with positive values increasing toward the viewer\n"
"- The origin point (0,0,0) is established at the centroid of the 3D mesh geometry.\n\n"
"When analyzing rotations, consider the viewpoint from the positive end of the rotation axis."
),
# Version 5 (More casual explanation)
(
"Let's define our 3D space like this:\n"
"- The x-axis goes left to right (right is positive)\n"
"- The y-axis goes bottom to top (up is positive)\n"
"- The z-axis comes from the screen toward you (out is positive)\n"
"- The zero point (0,0,0) sits exactly at the center of mass of the 3D object.\n\n"
"When thinking about rotations, imagine you're looking along the axis from its positive end."
),
# Version 6 (Formal academic style)
(
"The spatial reference frame utilized herein consists of:\n"
"- An x-axis oriented horizontally (positive rightward)\n"
"- A y-axis oriented vertically (positive upward)\n"
"- A z-axis oriented perpendicular to the viewing plane (positive outward)\n"
"- An origin (0,0,0) that is coincident with the geometric centroid of the object mesh.\n\n"
"Rotational transformations are visualized from the perspective of an observer positioned along the positive direction of the axis in question."
)
]
coordinate_system = random.choice(coordinate_system_templates)
angle_constraints_templates = [
# Version 1
(
f"The angle of rotation must be one of these specific values: {possible_angles} degrees. "
f"Positive angles denote clockwise rotation when viewed along the axis's positive direction."
),
# Version 2
(
f"Permitted rotation angles are limited to these values: {possible_angles}. "
f"The convention used is: a positive angle signifies rotation in the clockwise direction, assuming a viewpoint looking along the positive axis."
),
# Version 3
(
f"Rotation is constrained to these angles: {possible_angles} degrees. "
f"A positive angle value corresponds to a clockwise turn relative to an observer looking along the positive direction of the rotation axis."
),
]
angle_constraints = random.choice(angle_constraints_templates)
# Add examples text if examples are provided
example_text = ""
if examples and len(examples) > 0:
example_text = "\n### EXAMPLES OF ROTATION ###\n"
for idx, (_, example_angle) in enumerate(examples):
if generation_mode == "combined":
img_num = idx + 1
example_text += f"\nExample {idx+1}: Image {img_num} shows a 3D object with its left half showing the initial view and right half showing a {example_angle} degree rotation around the {axis}-axis.\n"
else: # separate mode
img_start = idx * 2 + 1
img_end = idx * 2 + 2
example_text += f"\nExample {idx+1}: Image {img_start} shows the initial view and Image {img_end} shows the object after a {example_angle} degree rotation around the {axis}-axis.\n"
# Different instructions based on difficulty
if difficulty == "easy":
# For easy mode - axis is provided, internal reasoning but only output number
thinking_instructions_templates = [
# Version 1: More concise, action-oriented
(
f"CRITICAL STEPS for finding the rotation angle:"
f"\n\n1. Analyze Object Structure: Examine both views thoroughly to understand the object's form."
f"\n\n2. Evaluate ALL {axis}-Axis Options: You must consider every angle in this list: {possible_angles}."
f"\n - Visualize Rotation: For each angle, mentally rotate the object around the {axis}-axis."
f"\n - Compare to Target View: Check how each visualized rotation matches the second view."
f"\n - Complete Evaluation First: Do not choose an angle until all in {possible_angles} are tested."
f"\n\n3. Select Best Match: After reviewing all possibilities, pick the angle that correctly transforms the first view to the second."
f"\n\n4. Final Verification: Mentally apply your chosen rotation one last time to confirm it perfectly matches the second view."
),
# Version 2: Slightly more explanatory, guiding tone
(
f"Follow this methodical process to identify the correct rotation:"
f"\n\n1. Understand the Object: Start by carefully studying the object's features in both views to grasp its 3D shape."
f"\n\n2. Systematic Angle Check ({axis}-axis): It's essential to evaluate the full set of potential rotation angles: {possible_angles}."
f"\n - For every angle listed: Imagine rotating the object around the {axis}-axis by that specific amount."
f"\n - Match Visualization to Reality: Compare your mental image after rotation with the provided second view."
f"\n - Avoid Premature Decisions: Ensure you have mentally tested *all* angles before making a selection."
f"\n\n3. Determine the Correct Angle: Once all angles ({possible_angles}) have been considered, choose the one rotation that best explains the change between views."
f"\n\n4. Confirm Your Answer: As a final check, mentally perform the chosen rotation again to ensure it accurately produces the second view."
),
# Version 3: Focus on comparison and elimination
(
f"Use this procedure to pinpoint the rotation angle accurately:"
f"\n\n1. Initial Analysis: Compare the first and second views to understand the object's spatial configuration."
f"\n\n2. Exhaustive {axis}-Axis Evaluation: You are required to assess each of these candidate rotation angles: {possible_angles}."
f"\n - Test Each Angle: Mentally simulate rotating the object around the {axis}-axis by each angle in the list."
f"\n - Cross-Reference Views: Evaluate how closely each simulated rotation aligns with the actual second view."
f"\n - Full Assessment Required: Withhold judgment until every single angle from {possible_angles} has been assessed."
f"\n\n3. Identify the Matching Rotation: After assessing all options, select the angle that precisely transforms the first view into the second."
f"\n\n4. Validate Your Choice: Double-check by mentally applying the selected rotation to confirm it yields the exact second view."
)
]
thinking_instructions = random.choice(thinking_instructions_templates)
# Updated response format to match rot_pred_sft.py
response_format = (
f"IMPORTANT: You must ONLY output the rotation angle as a number from this list: {possible_angles}. "
f"Your output should contain ONLY the number. "
f"Do NOT include any reasoning, explanation, or additional text - ONLY the number."
f"\n\nExample of correct output format: 30"
f"\n\nIncorrect output formats:"
f"\n\"I think it's 30 degrees\""
f"\n\"The rotation angle is 30\""
f"\n\"30 degrees\""
)
task_description = (
f"Your task is to determine the angle of rotation around the {axis}-axis in degrees."
)
else: # hard mode - axis is not provided
thinking_instructions = (
f"IMPORTANT: Please follow this systematic approach to determine the rotation:"
f"\n\n1. First, analyze the object's features in both views to understand its structure."
f"\n\n2. Consider what would happen if rotation occurred around each of the three axes (x, y, and z):"
f"\n - For x-axis rotation: What specific features would change and how?"
f"\n - For y-axis rotation: What specific features would change and how?"
f"\n - For z-axis rotation: What specific features would change and how?"
f"\n - Based on the observed changes, explain which axis makes the most sense and why."
f"\n\n3. Once you've determined the most likely axis, evaluate ALL of these possible rotation angles: {possible_angles}"
f"\n - For each angle in the list, describe what the object would look like after rotating around your chosen axis by that amount"
f"\n - Compare these descriptions with the actual second view"
f"\n - DO NOT make a decision until you have evaluated all angles in the list"
f"\n\n4. After evaluating all angles, choose the one that best matches the observed changes"
)
response_format = (
f"Place your detailed reasoning process in <thinking></thinking> tags. Your reasoning should include:"
f"\n- Analysis of how rotation around each axis would affect the object"
f"\n- Systematic evaluation of possible rotation angles from the provided list"
f"\n- Specific visual features you used to determine your answer"
f"\n\nThen provide your final answer in <rotation_axis></rotation_axis> and <rotation_angle></rotation_angle> tags respectively (use only x, y, or z for axis and only a number from the list for angle)."
f"\ni.e., <thinking> your reasoning process here </thinking><rotation_axis> your predicted axis here </rotation_axis><rotation_angle> your predicted degrees here </rotation_angle>"
)
# task_description = (
# f"Your task is to determine which axis the object was rotated around and by what angle."
# )
task_description_templates = [
"Identify the object's axis of rotation and the corresponding angle.",
"You need to figure out around which axis the object turned, and by how much.",
"Ascertain the rotational axis and the magnitude of the angle applied to the object.",
"Find out both the specific axis used for the object's rotation and the degree of that rotation.",
"The objective is to specify the rotation parameters for the object: its axis and angle."
]
task_description = random.choice(task_description_templates)
# Generate the prompt based on generation mode
if generation_mode == "combined":
test_img_num = len(examples) + 1 if examples else 1
prompt = (
f"IMPORTANT: I'm showing you {len(examples) + 1 if examples else 1} image{'s' if examples else ''} of 3D objects. "
f"{'Each' if examples else 'The'} image contains TWO separate 3D renderings side-by-side. " # Changed example to examples
f"\n\nThe LEFT HALF shows a 3D object in its initial orientation. "
f"The RIGHT HALF shows the SAME 3D object after being rotated."
f"\n\n{task_description}"
f"\n\n{coordinate_system}"
f"\n\n{angle_constraints}"
f"\n\n{example_text}"
f"\n\n### YOUR TASK ###"
f"\nNow, for Image {test_img_num}, determine the angle of rotation around the {axis}-axis."
f"\n{'' if not examples else 'Based on the example provided, '}analyze Image {test_img_num} carefully." # Changed example to examples
f"\n\n{thinking_instructions}"
f"\n\n{response_format}"
)
elif generation_mode == "separate_shuffle":
test_img_start = len(examples) * 2 + 1 if examples else 1
test_img_end = len(examples) * 2 + 2 if examples else 2
begin_description = (
f"I'm showing you {len(examples) * 2 + 2 if examples else 2} images of 3D objects. "
f"{'For each example or test case, ' if examples else ''}two images represent the same object before and after rotation." # Changed example to examples
)
end_description = (
f"\n\n### YOUR TASK ###"
f"\nNow, determine the angle of rotation around the {axis}-axis from Image {test_img_start} to Image {test_img_end}."
f"\n{'' if not examples else 'Based on the example provided, '}analyze the rotation carefully." # Changed example to examples
f"\n\n{thinking_instructions}"
f"\n\n{response_format}"
)
prompt_list = [task_description, coordinate_system, angle_constraints, example_text]
random.shuffle(prompt_list)
prompt = begin_description + "\n\n" + "\n\n".join(prompt_list) + end_description
elif generation_mode == "separate":
# Calculate image numbers based on examples
test_img_start = len(examples) * 2 + 1 if examples else 1
test_img_end = len(examples) * 2 + 2 if examples else 2
prompt = (
f"I'm showing you {len(examples) * 2 + 2 if examples else 2} images of 3D objects. "
f"{'For each example or test case, ' if examples else ''}two images represent the same object before and after rotation." # Changed example to examples
f"\n\n{task_description}"
f"\n\n{coordinate_system}"
f"\n\n{angle_constraints}"
f"\n\n{example_text}"
f"\n\n### YOUR TASK ###"
f"\nNow, determine the angle of rotation around the {axis}-axis from Image {test_img_start} to Image {test_img_end}."
f"\n{'' if not examples else 'Based on the example provided, '}analyze the rotation carefully." # Changed example to examples
f"\n\n{thinking_instructions}"
f"\n\n{response_format}"
)
else:
raise ValueError(f"Invalid generation mode: {generation_mode}")
return prompt
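# The three generation modes differ only in prompt assembly: "combined" references single
# side-by-side images, "separate" references before/after image pairs in a fixed section
# order, and "separate_shuffle" additionally shuffles the task description, coordinate
# system, angle constraint, and example sections.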
def create_metadata_jsonl_combined(input_dir, output_file, example_dir=None, possible_angles=[45, 315], difficulty="easy", max_examples=2):
"""Create metadata JSONL file for all images in input_dir (combined mode)"""
# Get all PNG files in the input directory
png_files = glob.glob(os.path.join(input_dir, "*.png"))
# Sort files to ensure consistent order
png_files = sorted(png_files)
if not png_files:
print(f"No PNG files found in {input_dir}")
return
print(f"Found {len(png_files)} PNG files in {input_dir}")
# Load and organize examples if example_dir is provided
organized_examples = None
if example_dir:
examples = load_examples(example_dir, "combined")
organized_examples = organize_examples(examples, "combined")
# Create output directory if it doesn't exist
output_dir = os.path.dirname(output_file)
os.makedirs(output_dir, exist_ok=True)
# Process each file and create metadata entries
entries = []
for png_file in tqdm(png_files, desc="Creating metadata for combined mode"):
# Parse ground truth from filename
axis, angle = parse_ground_truth(os.path.basename(png_file))
if axis is None or angle is None:
print(f"Skipping {png_file} - could not parse ground truth")
continue
# Get the relative path to the image
rel_path = os.path.relpath(png_file, os.path.dirname(output_file))
# Generate a unique ID based on the filename
image_base_id = os.path.splitext(os.path.basename(png_file))[0]
# Select an example if examples are available
examples = None
if organized_examples:
examples = select_examples(organized_examples, axis, possible_angles, max_examples)
# Construct prompt with examples
prompt = construct_prompt_with_examples(axis, possible_angles, examples, difficulty, generation_mode="combined")
# Create assistant response based on difficulty
if difficulty == "easy":
# For easy mode, just output the number
assistant_content = f"{angle}"
else:
# For hard mode, include both axis and angle in XML tags
assistant_content = f"<thinking>Detailed reasoning about rotation axis and angle...</thinking><rotation_axis>{axis}</rotation_axis><rotation_angle>{angle}</rotation_angle>"
# Create the conversations array
conversations = []
# Build the human message, handling any number of example images
human_value = ""
# Add example images if available
if examples:
for example_path, _ in examples:
example_rel_path = os.path.relpath(example_path, os.path.dirname(output_file))
human_value += f"<image>{example_rel_path}</image>\n"
# Add test image
human_value += f"<image>{rel_path}</image>\n{prompt}"
conversations.append({
"from": "human",
"value": human_value
})
# Add assistant response
conversations.append({
"from": "gpt",
"value": assistant_content
})
# Create entry with the correct format
entry = {
"id": image_base_id,
"image": rel_path,
"conversations": conversations
}
entries.append(entry)
# Write entries to JSONL file
with open(output_file, 'w') as f:
for entry in entries:
f.write(json.dumps(entry) + '\n')
print(f"\nSummary for combined mode:")
print(f" Found {len(png_files)} PNG files")
print(f" Created metadata for {len(entries)} entries")
print(f" Output file: {output_file}")
def create_metadata_jsonl_separate(input_dir, output_file, example_dir=None, possible_angles=[45, 315], difficulty="easy", max_examples=2, generation_mode="separate"):
"""Create metadata JSONL file for folders in input_dir (separate mode)"""
# Get all directories in the input directory
folders = [f for f in glob.glob(os.path.join(input_dir, "*"))
if os.path.isdir(f) and os.path.basename(f) != "examples"]
# Sort folders to ensure consistent order
folders = sorted(folders)
if not folders:
print(f"No folders found in {input_dir}")
return
print(f"Found {len(folders)} folders in {input_dir}")
# Load and organize examples if example_dir is provided
organized_examples = None
if example_dir:
examples = load_examples(example_dir, "separate")
organized_examples = organize_examples(examples, "separate")
# Create output directory if it doesn't exist
output_dir = os.path.dirname(output_file)
os.makedirs(output_dir, exist_ok=True)
# Process each folder and create metadata entries
entries = []
valid_folders = 0
for folder in tqdm(folders, desc="Creating metadata for separate mode"):
folder_name = os.path.basename(folder)
# Parse ground truth from folder name
axis, angle = parse_ground_truth(folder_name)
if axis is None or angle is None:
print(f"Skipping {folder} - could not parse ground truth")
continue
# Check for the two required images in the folder
ini_path = os.path.join(folder, f"{folder_name}_ini.png")
rot_path = os.path.join(folder, f"{folder_name}_rot.png")
if not os.path.exists(ini_path):
print(f"Skipping {folder} - missing initial view image")
continue
if not os.path.exists(rot_path):
print(f"Skipping {folder} - missing rotated view image")
continue
# Get the relative paths to the images
rel_ini_path = os.path.relpath(ini_path, os.path.dirname(output_file))
rel_rot_path = os.path.relpath(rot_path, os.path.dirname(output_file))
# Select an example if examples are available
examples = None
if organized_examples:
examples = select_examples(organized_examples, axis, possible_angles, max_examples)
# Construct the prompt, including any selected in-context examples
prompt = construct_prompt_with_examples(axis, possible_angles, examples, difficulty, generation_mode=generation_mode)
# Create assistant response based on difficulty
if difficulty == "easy":
# For easy mode, just output the number
assistant_content = f"{angle}"
else:
# For hard mode, include both axis and angle in XML tags
assistant_content = f"<thinking>Detailed reasoning about rotation axis and angle...</thinking><rotation_axis>{axis}</rotation_axis><rotation_angle>{angle}</rotation_angle>"
# Create the conversations array
conversations = []
# Prepare images array for the entry
all_image_paths = []
# Add example images if available
if examples:
for example_folder, _ in examples:
example_folder_name = os.path.basename(example_folder)
example_ini_path = os.path.join(example_folder, f"{example_folder_name}_ini.png")
example_rot_path = os.path.join(example_folder, f"{example_folder_name}_rot.png")
example_rel_ini_path = os.path.relpath(example_ini_path, os.path.dirname(output_file))
example_rel_rot_path = os.path.relpath(example_rot_path, os.path.dirname(output_file))
all_image_paths.append(example_rel_ini_path)
all_image_paths.append(example_rel_rot_path)
# Add test images
all_image_paths.append(rel_ini_path)
all_image_paths.append(rel_rot_path)
# Update the human message tags to match number of images
# For 2 examples (4 images) + 2 test images = 6 total images
image_tags = "<image>\n" * len(all_image_paths)
human_value = image_tags + prompt
conversations.append({
"from": "human",
"value": human_value
})
# Add assistant response
conversations.append({
"from": "gpt",
"value": assistant_content
})
# Create entry with the correct format
entry = {
"id": folder_name,
"image": all_image_paths,
"conversations": conversations
}
entries.append(entry)
valid_folders += 1
# Write entries to JSONL file
with open(output_file, 'w') as f:
for entry in entries:
f.write(json.dumps(entry) + '\n')
print(f"\nSummary for separate mode:")
print(f" Found {len(folders)} folders")
print(f" Created metadata for {valid_folders} valid folders")
print(f" Output file: {output_file}")
def main():
parser = argparse.ArgumentParser(description="Create metadata JSONL for rotation dataset")
parser.add_argument('--input-dir', type=str, required=True,
help="Directory containing rotation dataset images or folders")
parser.add_argument('--output-file', type=str, default="metadata.jsonl",
help="Output JSONL file path")
parser.add_argument('--example-dir', type=str, default=None,
help="Directory containing example images for in-context learning")
parser.add_argument('--possible-angles', type=int, nargs='+', default=[45, 315],
help="List of possible rotation angles in degrees (e.g., 45 315)")
parser.add_argument('--difficulty', type=str, choices=["easy", "hard"], default="easy",
help="Difficulty mode: easy (axis provided) or hard (axis not provided)")
parser.add_argument('--generation-mode', type=str, choices=["combined", "separate", "separate_shuffle"], default="combined",
help="Mode for dataset generation (combined = one image with both views, separate = folder with two images, separate_shuffle = separate with shuffled prompt sections)")
parser.add_argument('--random-seed', type=int, default=None,
help="Random seed for example selection (None for true randomness)")
parser.add_argument('--max-examples', type=int, default=1,
help="Maximum number of examples to include for each test case (default: 1)")
args = parser.parse_args()
# Set random seed for reproducibility if provided
if args.random_seed is not None:
print(f"Using fixed random seed: {args.random_seed}")
random.seed(args.random_seed)
else:
print("Using true randomness (different examples each run)")
print(f"Creating metadata JSONL for rotation dataset:")
print(f"Input directory: {args.input_dir}")
print(f"Output file: {args.output_file}")
if args.example_dir:
print(f"Example directory: {args.example_dir}")
print(f"Possible angles: {args.possible_angles}")
print(f"Difficulty mode: {args.difficulty}")
print(f"Generation mode: {args.generation_mode}")
# Check if example_dir is None but there's an 'examples' subdirectory in input_dir
if args.example_dir is None and os.path.exists(os.path.join(args.input_dir, "examples")):
args.example_dir = os.path.join(args.input_dir, "examples")
print(f"Using examples directory: {args.example_dir}")
if args.generation_mode == "combined":
create_metadata_jsonl_combined(
input_dir=args.input_dir,
output_file=args.output_file,
example_dir=args.example_dir,
possible_angles=args.possible_angles,
difficulty=args.difficulty,
max_examples=args.max_examples # Make sure this is passed properly
)
else: # separate or separate_shuffle mode
create_metadata_jsonl_separate(
input_dir=args.input_dir,
output_file=args.output_file,
example_dir=args.example_dir,
possible_angles=args.possible_angles,
difficulty=args.difficulty,
max_examples=args.max_examples,
generation_mode=args.generation_mode # Pass the actual mode
)
if __name__ == "__main__":
main()
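# Example invocation (paths are illustrative):
#   python create_metadata_sft_shuffle_v2.py \
#       --input-dir data/rotations \
#       --output-file data/rotations/metadata.jsonl \
#       --possible-angles 45 315 \
#       --difficulty easy \
#       --generation-mode separate_shuffle \
#       --max-examples 1 \
#       --random-seed 42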