{
  "_name_": "AMPLIFY",
  "architectures": [
    "AMPLIFYForMaskedLM"
  ],
  "att_bias": false,
  "auto_map": {
    "AutoConfig": "amplify_te.AMPLIFYConfig",
    "AutoModel": "amplify_te.AMPLIFY",
    "AutoModelForMaskedLM": "amplify_te.AMPLIFYForMaskedLM"
  },
  "bos_token_id": 3,
  "decoder_init_range": 0.02,
  "dropout_prob": 0,
  "embedding_init_range": 0.02,
  "eos_token_id": 4,
  "ffn_bias": false,
  "hidden_act": "SwiGLU",
  "hidden_size": 960,
  "intermediate_size": 3840,
  "layer_norm_after_embedding": false,
  "layer_norm_before_last_layer": true,
  "mask_token_id": 2,
  "max_length": 2048,
  "model_type": "AMPLIFY",
  "norm_eps": 1e-05,
  "num_attention_heads": 15,
  "num_hidden_layers": 32,
  "other_special_token_ids": null,
  "pad_token_id": 0,
  "padded_vocab_size": 32,
  "pre_activation_layer_norm": true,
  "rms_norm": true,
  "torch_dtype": "float32",
  "transformers_version": "4.53.2",
  "unk_token_id": 1,
  "vocab_path": "conf/tokenizer/amplify_vocab.txt",
  "vocab_size": 27
}