second_finetune_2560_3000multi / training_meta.json
{
"num_samples_train": 3240,
"world_size": 1,
"effective_batch_size": 8,
"steps_per_epoch": 405,
"save_steps": 135,
"saves_per_epoch": 3,
"total_steps_est": 405,
"approx_ckpts": 3,
"target_modules": [
"q_proj",
"k_proj",
"v_proj"
],
"lora_r": 8,
"lora_alpha": 16,
"lora_dropout": 0.05,
"response_template": "<|start_header_id|>assistant<|end_header_id|>",
"use_max_len": 2560,
"label_all_assistant": true,
"skip_tool_only_assistant": false,
"assistant_tag": "<|start_header_id|>assistant<|end_header_id|>",
"tool_use_token": "<|use_tool|>",
"merged_old_lora": true,
"old_lora_path": "./xlam_lora_round1/checkpoint-3408"
}
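
For reference, a minimal sketch of how the LoRA hyperparameters above could be expressed as a PEFT `LoraConfig`, together with the arithmetic behind the derived step counts. The training script itself is not part of this folder, so the `task_type` value and the use of PEFT here are assumptions for illustration.

```python
from peft import LoraConfig

# Assumed reconstruction of the adapter settings recorded in training_meta.json.
lora_config = LoraConfig(
    r=8,                                            # "lora_r"
    lora_alpha=16,                                  # "lora_alpha"
    lora_dropout=0.05,                              # "lora_dropout"
    target_modules=["q_proj", "k_proj", "v_proj"],  # "target_modules"
    task_type="CAUSAL_LM",                          # assumed: causal LM fine-tuning
)

# Bookkeeping values in the JSON follow from the dataset and batch sizes.
num_samples_train = 3240
effective_batch_size = 8
save_steps = 135

steps_per_epoch = num_samples_train // effective_batch_size  # 405
saves_per_epoch = steps_per_epoch // save_steps              # 3
print(steps_per_epoch, saves_per_epoch)
```

With `total_steps_est` equal to `steps_per_epoch`, the metadata corresponds to a single training epoch with roughly three saved checkpoints.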