---
# ASR training configuration (WeNet-style) for a Dolphin E-Branchformer model
# with prompt conditioning. Reconstructed into block YAML from a paste that had
# collapsed the whole mapping onto one line; key order (alphabetical) and all
# values preserved from the original.

accum_grad: 4
cmvn: global_cmvn
cmvn_conf:
  cmvn_file: data/train/global_cmvn
  is_json_cmvn: true
ctc: ctc
ctc_conf:
  ctc_blank_id: 0
dataset: asr
dataset_conf:
  batch_conf:
    batch_size: 20
    batch_type: static
  ctc_label: true
  cycle: 100
  fbank_conf:
    dither: 0.1
    frame_length: 25
    frame_shift: 10
    num_mel_bins: 80
  filter_conf:
    max_length: 3000
    min_length: 0
    token_max_length: 200
    token_min_length: 1
  no_time_idx: 3
  prompt_conf:
    context_conf:
      batch_num_context: 60
      len_max: 5
      len_min: 2
      utt_num_context: 3
    prompt_conf:
      max_number_out_prompts: 4
      noprompt_prob: 0.4
    prompt_tokens:
      # NOTE(review): both values were empty in the source — presumably the
      # start/end prompt token strings were stripped along with other
      # angle-bracketed text; confirm against the tokenizer's symbol table.
      end: null
      start: null
    remove_punctuation: true
    remove_timestamp: true
  resample_conf:
    resample_rate: 16000
  shuffle: true
  shuffle_conf:
    shuffle_size: 2048
  sort: true
  sort_conf:
    sort_size: 2048
  spec_aug: true
  spec_aug_conf:
    max_f: 10
    max_t: 50
    num_f_mask: 2
    num_t_mask: 2
  speed_perturb: true
  time_apply_prob: 0.0
decoder: transformer
decoder_conf:
  attention_heads: 12
  dropout_rate: 0.1
  linear_units: 3072
  num_blocks: 12
  positional_dropout_rate: 0.1
  self_attention_dropout_rate: 0.1
  src_attention_dropout_rate: 0.1
  use_sdpa: true
dtype: fp32
encoder: e_branchformer
encoder_conf:
  activation_type: swish
  attention_dropout_rate: 0.1
  attention_heads: 12
  causal: false
  cgmlp_conv_kernel: 31
  cgmlp_linear_units: 3072
  dropout_rate: 0.1
  gate_activation: identity
  input_layer: conv2d
  linear_units: 3072
  merge_conv_kernel: 31
  num_blocks: 12
  output_size: 768
  pos_enc_layer_type: rel_pos
  positional_dropout_rate: 0.1
  selfattention_layer_type: rel_selfattn
  use_linear_after_conv: false
  use_sdpa: true
grad_clip: 5
input_dim: 80
log_interval: 10
max_epoch: 100
model: asr_model
model_conf:
  ctc_weight: 0.2
  length_normalized_loss: false
  lsm_weight: 0.1
model_dir: exp/dolphin_ebf_small_nostreaming_prompt
optim: adam
optim_conf:
  lr: 0.0005
output_dim: 18173
save_interval: 200
save_states: model_only
scheduler: warmuplr
scheduler_conf:
  warmup_steps: 2048
stats_dialect: true
tokenizer: char
tokenizer_conf:
  # NOTE(review): the five special-token NAMES were lost in the source paste
  # (it read ": 4 : 0 : 3 : 2 : 1" — most likely angle-bracketed tokens such
  # as "<sos>"/"<eos>" stripped by markup removal). The ids, in original file
  # order, were: 4, 0, 3, 2, 1. TODO: restore the real names from
  # data/dict/units.txt before using this config; quote them, e.g.:
  #   "<blank>": 0
  special_tokens: {}
  split_with_space: false
  symbol_table_path: data/dict/units.txt
train_engine: torch_ddp
use_amp: false
vocab_size: 18173