{ "fine_tune_type": "lora", "lora_parameters": { "dropout": 0.05, "keys": [ "self_attn.q_proj", "self_attn.v_proj" ], "rank": 8, "scale": 4.0 }, "model": "./checkpoints//meta-llama/Meta-Llama-3.1-8B-Instruct/", "num_layers": 32 }