gemma-text-to-js-peft-adapter / adapter_config.json
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "google/gemma-3n-E2B-it",
  "bias": "none",
  "corda_config": null,
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 16,
  "lora_bias": false,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": [
    "lm_head",
    "embed_tokens"
  ],
  "peft_type": "LORA",
  "qalora_group_size": 16,
  "r": 16,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "pos_proj",
    "v_proj",
    "ffw_layer_2",
    "per_layer_model_projection",
    "ffw_layer_1",
    "gate_proj",
    "correction_coefs",
    "linear_left",
    "modality_router",
    "per_layer_input_gate",
    "linear_end",
    "altup_projections.1",
    "altup_unembed_projections.1",
    "embedding_projection",
    "input_proj_linear",
    "q_proj",
    "k_proj",
    "o_proj",
    "prediction_coefs",
    "altup_unembed_projections.0",
    "post",
    "altup_projections.0",
    "linear_right",
    "per_layer_projection",
    "up_proj",
    "linear_start",
    "altup_projections.2",
    "down_proj",
    "altup_unembed_projections.2"
  ],
  "target_parameters": null,
  "task_type": "CAUSAL_LM",
  "trainable_token_indices": null,
  "use_dora": false,
  "use_qalora": false,
  "use_rslora": false
}
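
This adapter_config.json is the configuration PEFT serializes for a LoRA adapter of google/gemma-3n-E2B-it: rank r=16, lora_alpha=16, dropout 0.05, with the listed attention, MLP, and AltUp projections as LoRA targets, and lm_head plus embed_tokens saved as fully trained modules. Below is a minimal sketch of attaching the adapter for inference with the peft library; the adapter repo id sam-larx/gemma-text-to-js-peft-adapter is an assumption taken from the file path above, and AutoModelForCausalLM is assumed to resolve to the text-only Gemma 3n class for this checkpoint.

# Minimal sketch: attach this LoRA adapter to its base model for inference.
# Assumptions: the adapter is published at "sam-larx/gemma-text-to-js-peft-adapter"
# (repo path taken from the page header above), AutoModelForCausalLM resolves to the
# text-only Gemma 3n class for this checkpoint, and the prompt is illustrative only.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "google/gemma-3n-E2B-it"            # base_model_name_or_path from the config
adapter_id = "sam-larx/gemma-text-to-js-peft-adapter"

tokenizer = AutoTokenizer.from_pretrained(base_id)
base_model = AutoModelForCausalLM.from_pretrained(base_id, torch_dtype="auto")

# inference_mode is true in the config, so the LoRA weights load frozen;
# the modules_to_save entries (lm_head, embed_tokens) are restored from the adapter.
model = PeftModel.from_pretrained(base_model, adapter_id)

prompt = "Write a JavaScript function that reverses a string."
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=128)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))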