# Training configuration for a ChunkFormer CTC/attention ASR model (exp/v11).

accum_grad: 4                      # gradient accumulation steps
cmvn: global_cmvn
cmvn_conf:
  cmvn_file: data/train_libri/global_cmvn
  is_json_cmvn: true
ctc: ctc
ctc_conf:
  ctc_blank_id: 0

dataset: asr
dataset_conf:
  batch_conf:
    batch_size: 8
    batch_type: dynamic            # batch by total frames, capped by max_frames_in_batch
    max_frames_in_batch: 120000
    pad_feat: true
  fbank_conf:                      # 80-dim log-Mel fbank, 25 ms window / 10 ms shift
    dither: 1.0
    frame_length: 25
    frame_shift: 10
    num_mel_bins: 80
  filter_conf:
    max_length: 40960              # max utterance length in frames
    min_length: 0
    token_max_length: 400
    token_min_length: 1
  resample_conf:
    resample_rate: 16000
  shuffle: true
  shuffle_conf:
    shuffle_size: 1000
  sort: false
  sort_conf:
    sort_size: 2000
  spec_aug: true                   # SpecAugment: 2 freq masks (<= 10 bins), 2 time masks (<= 50 frames)
  spec_aug_conf:
    max_f: 10
    max_t: 50
    num_f_mask: 2
    num_t_mask: 2
  spec_sub: false
  spec_sub_conf:
    max_t: 30
    num_t_sub: 3
  speed_perturb: true

decoder: bitransformer             # 3 left-to-right + 3 right-to-left decoder blocks
decoder_conf:
  attention_heads: 4
  dropout_rate: 0.1
  linear_units: 2048
  num_blocks: 3
  positional_dropout_rate: 0.1
  r_num_blocks: 3
  self_attention_dropout_rate: 0.1
  src_attention_dropout_rate: 0.1

dtype: fp16

encoder: chunkformer
encoder_conf:
  activation_type: swish
  attention_dropout_rate: 0.1
  attention_heads: 4
  cnn_module_kernel: 15
  cnn_module_norm: layer_norm
  dropout_rate: 0.1
  dynamic_chunk_sizes:             # chunk size sampled per batch; -1 = full context
  - -1
  - -1
  - 64
  - 128
  - 256
  dynamic_conv: true
  dynamic_left_context_sizes:
  - 64
  - 128
  - 256
  dynamic_right_context_sizes:
  - 64
  - 128
  - 256
  input_layer: dw_striding         # depthwise-striding conv subsampling front-end
  linear_units: 2048
  normalize_before: true
  num_blocks: 12
  output_size: 256
  pos_enc_layer_type: chunk_rel_pos
  positional_dropout_rate: 0.1
  selfattention_layer_type: chunk_rel_seflattn
  use_cnn_module: true

grad_clip: 5
input_dim: 80
log_interval: 100
max_epoch: 200

model: asr_model
model_conf:                        # hybrid CTC/attention loss with label smoothing
  ctc_weight: 0.3
  length_normalized_loss: false
  lsm_weight: 0.1
  reverse_weight: 0.3              # weight of the right-to-left decoder branch

model_dir: exp/v11
optim: adamw
optim_conf:
  lr: 0.001
output_dim: 4992
save_states: model_only
scheduler: warmuplr
scheduler_conf:
  warmup_steps: 25000

tokenizer: bpe
tokenizer_conf:
  bpe_path: data/lang_char/train_libri_bpe5000.model
  is_multilingual: false
  non_lang_syms_path: null
  num_languages: 1
  special_tokens:
    <blank>: 0
    <eos>: 2
    <sos>: 2
    <unk>: 1
  split_with_space: false
  symbol_table_path: data/lang_char/train_libri_bpe5000_units.txt

train_engine: torch_ddp
use_amp: true
vocab_size: 4992
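
# Usage sketch (an assumption, not part of this config): in a WeNet-style recipe,
# wenet/bin/train.py consumes a file like this one. The command below is
# illustrative only; the data-list paths are hypothetical and the exact flags
# may differ in this repo:
#
#   torchrun --nproc_per_node=4 wenet/bin/train.py \
#       --config train.yaml --model_dir exp/v11 \
#       --train_data data/train_libri/data.list --cv_data data/dev/data.list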