{
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "adapters": {
    "adapters": {
      "encoder_4_layer_lora": "b184c9285811e48c"
    },
    "config_map": {
      "b184c9285811e48c": {
        "alpha": 64,
        "architecture": "lora",
        "attn_matrices": [
          "q",
          "k",
          "v",
          "o"
        ],
        "composition_mode": "add",
        "dropout": 0.05,
        "dtype": null,
        "init_weights": "lora",
        "init_weights_seed": null,
        "intermediate_lora": true,
        "leave_out": [],
        "output_lora": true,
        "r": 32,
        "selfattn_lora": true,
        "use_gating": false,
        "vera_b": null,
        "vera_d": null
      }
    },
    "fusion_config_map": {},
    "fusion_name_map": {},
    "fusions": {}
  },
  "apply_spec_augment": false,
  "architectures": [
    "WhisperAdapterModel"
  ],
  "attention_dropout": 0.0,
  "begin_suppress_tokens": null,
  "bos_token_id": 50257,
  "classifier_proj_size": 256,
  "d_model": 768,
  "decoder_attention_heads": 12,
  "decoder_ffn_dim": 3072,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 12,
  "decoder_start_token_id": 50258,
  "dropout": 0.0,
  "encoder_attention_heads": 12,
  "encoder_ffn_dim": 3072,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 12,
  "eos_token_id": 50257,
  "forced_decoder_ids": [
    [
      1,
      50259
    ],
    [
      2,
      50359
    ],
    [
      3,
      50363
    ]
  ],
  "id2label": null,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "label2id": null,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.05,
  "max_length": null,
  "max_source_positions": 1500,
  "max_target_positions": 448,
  "median_filter_width": 7,
  "model_type": "whisper",
  "num_hidden_layers": 12,
  "num_mel_bins": 80,
  "pad_token_id": 50257,
  "prediction_heads": {
    "default": {
      "activation_function": null,
      "bias": false,
      "head_type": "seq2seq_lm",
      "label2id": null,
      "layer_norm": false,
      "layers": 1,
      "shift_labels": false,
      "vocab_size": 51865
    }
  },
  "scale_embedding": false,
  "torch_dtype": "float16",
  "transformers_version": "4.51.3",
  "use_cache": true,
  "use_weighted_layer_sum": false,
  "vocab_size": 51865
}
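
For reference, this configuration describes a Whisper backbone (d_model=768, 12 encoder and 12 decoder layers, 51865-token vocabulary) managed with the AdapterHub `adapters` library, carrying a single LoRA adapter named "encoder_4_layer_lora". The sketch below shows how such a setup could be recreated; the base checkpoint "openai/whisper-small" is an assumption inferred from the dimensions, not something stated in this file.

# Sketch: recreate the adapter setup described by the config above using the
# AdapterHub `adapters` library. Assumption: the backbone is
# "openai/whisper-small" (inferred from d_model=768 and 12+12 layers).
from adapters import LoRAConfig, WhisperAdapterModel

model = WhisperAdapterModel.from_pretrained("openai/whisper-small")

# Mirrors the "config_map" entry: rank-32 LoRA scaled by alpha=64, applied to
# all four attention projections (q, k, v, o) as well as the FFN intermediate
# and output layers, with 5% dropout on the LoRA path.
lora_config = LoRAConfig(
    r=32,
    alpha=64,
    dropout=0.05,
    attn_matrices=["q", "k", "v", "o"],
    selfattn_lora=True,
    intermediate_lora=True,
    output_lora=True,
    composition_mode="add",
    init_weights="lora",
    use_gating=False,
    leave_out=[],
)

model.add_adapter("encoder_4_layer_lora", config=lora_config)
# The "prediction_heads" entry above corresponds to a seq2seq LM head.
model.add_seq2seq_lm_head("default")
# Freeze the backbone so only the LoRA parameters receive gradients.
model.train_adapter("encoder_4_layer_lora")

After training, the adapter weights can be saved and shipped separately from the backbone, e.g. with model.save_adapter("./encoder_4_layer_lora", "encoder_4_layer_lora"), and later reattached to a fresh backbone via model.load_adapter(...).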