{
  "architectures": [
    "InternVLChatModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_internvl_chat.InternVLChatConfig",
    "AutoModel": "modeling_internvl_chat.InternVLChatModel",
    "AutoModelForCausalLM": "modeling_internvl_chat.InternVLChatModel"
  },
  "downsample_ratio": 0.5,
  "dynamic_image_size": true,
  "force_image_size": 448,
  "hidden_size": 5120,
  "image_fold": null,
  "llm_config": {
    "_attn_implementation_autoset": true,
    "_name_or_path": "./pretrained/Qwen2.5-32B-Instruct",
    "architectures": [
      "Qwen2ForCausalLM"
    ],
    "attention_dropout": 0.0,
    "bos_token_id": 151643,
    "eos_token_id": 151643,
    "hidden_act": "silu",
    "hidden_size": 5120,
    "initializer_range": 0.02,
    "intermediate_size": 13824,
    "layer_types": [
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention",
      "full_attention"
    ],
    "max_position_embeddings": 32768,
    "max_window_layers": 70,
    "model_type": "qwen2",
    "moe_config": null,
    "num_attention_heads": 40,
    "num_hidden_layers": 48,
    "num_key_value_heads": 8,
    "rms_norm_eps": 1e-06,
    "rope_scaling": {
      "factor": 2.0,
      "rope_type": "dynamic",
      "type": "dynamic"
    },
    "rope_theta": 1000000.0,
    "sliding_window": null,
    "torch_dtype": "bfloat16",
    "use_bfloat16": true,
    "use_cache": false,
    "use_sliding_window": false,
    "vocab_size": 151674
  },
  "max_dynamic_patch": 12,
  "min_dynamic_patch": 1,
  "model_type": "internvl_chat",
  "output_attentions": false,
  "pad2square": false,
  "ps_version": "v2",
  "quantization_config": {
    "_load_in_4bit": true,
    "_load_in_8bit": false,
    "bnb_4bit_compute_dtype": "bfloat16",
    "bnb_4bit_quant_storage": "uint8",
    "bnb_4bit_quant_type": "nf4",
    "bnb_4bit_use_double_quant": true,
    "llm_int8_enable_fp32_cpu_offload": false,
    "llm_int8_has_fp16_weight": false,
    "llm_int8_skip_modules": null,
    "llm_int8_threshold": 6.0,
    "load_in_4bit": true,
    "load_in_8bit": false,
    "quant_method": "bitsandbytes"
  },
  "select_layer": -1,
  "system_message": null,
  "template": "internvl2_5",
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": null,
  "use_backbone_lora": 0,
  "use_llm_lora": 0,
  "use_thumbnail": true,
  "vision_config": {
    "_attn_implementation_autoset": true,
    "_name_or_path": "OpenGVLab/InternViT-6B-448px-V1-5",
    "architectures": [
      "InternVisionModel"
    ],
    "attention_dropout": 0.0,
    "auto_map": {
      "AutoConfig": "configuration_intern_vit.InternVisionConfig",
      "AutoModel": "modeling_intern_vit.InternVisionModel"
    },
    "capacity_factor": 1.2,
    "drop_path_rate": 0.1,
    "dropout": 0.0,
    "eval_capacity_factor": 1.4,
    "hidden_act": "gelu",
    "hidden_size": 1024,
    "image_size": 448,
    "initializer_factor": 0.1,
    "initializer_range": 1e-10,
    "intermediate_size": 4096,
    "laux_allreduce": "all_nodes",
    "layer_norm_eps": 1e-06,
    "model_type": "intern_vit_6b",
    "moe_coeff_ratio": 0.5,
    "moe_intermediate_size": 768,
    "moe_output_scale": 4.0,
    "noisy_gate_policy": "RSample_before",
    "norm_type": "layer_norm",
    "num_attention_heads": 16,
    "num_channels": 3,
    "num_experts": 8,
    "num_hidden_layers": 24,
    "num_routed_experts": 4,
    "num_shared_experts": 4,
    "patch_size": 14,
    "qk_normalization": false,
    "qkv_bias": true,
    "shared_expert_intermediate_size": 3072,
    "torch_dtype": "bfloat16",
    "use_bfloat16": true,
    "use_flash_attn": true,
    "use_moe": false,
    "use_residual": true,
    "use_rts": false,
    "use_weighted_residual": false
  }
}
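
Because "auto_map" points at custom modeling files and "quantization_config" specifies bitsandbytes NF4, this checkpoint is intended to be loaded with trust_remote_code enabled. A minimal loading sketch follows; the repo id is a hypothetical placeholder, and the explicit BitsAndBytesConfig simply mirrors the quantization block above (transformers will also pick it up automatically when it is baked into config.json, as here):

import torch
from transformers import AutoModel, AutoTokenizer, BitsAndBytesConfig

MODEL_ID = "path/to/this-checkpoint"  # hypothetical; substitute the real repo id

# Mirrors the "quantization_config" block in the config above.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # "load_in_4bit": true
    bnb_4bit_quant_type="nf4",              # "bnb_4bit_quant_type": "nf4"
    bnb_4bit_use_double_quant=True,         # "bnb_4bit_use_double_quant": true
    bnb_4bit_compute_dtype=torch.bfloat16,  # "bnb_4bit_compute_dtype": "bfloat16"
)

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
model = AutoModel.from_pretrained(
    MODEL_ID,
    quantization_config=bnb_config,
    trust_remote_code=True,  # required: "auto_map" references custom modeling code
    device_map="auto",
)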
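
For reference, the vision settings imply a fixed visual-token budget per image tile. A minimal sketch of the arithmetic, assuming the standard InternVL pixel-shuffle behavior in which "downsample_ratio" shrinks each spatial side of the ViT feature grid (this assumption is not read from the checkpoint itself):

# Per-tile visual token count implied by this config.
force_image_size = 448   # "force_image_size"
patch_size = 14          # vision_config "patch_size"
downsample_ratio = 0.5   # "downsample_ratio"

patches_per_side = force_image_size // patch_size     # 448 // 14 = 32
vit_tokens = patches_per_side ** 2                    # 32 * 32 = 1024
llm_tokens = int(vit_tokens * downsample_ratio ** 2)  # 1024 * 0.25 = 256

# With dynamic tiling ("dynamic_image_size": true), a worst-case image
# contributes (max_dynamic_patch + thumbnail) tiles to the LLM context.
max_dynamic_patch = 12   # "max_dynamic_patch"
use_thumbnail = True     # "use_thumbnail"
max_tiles = max_dynamic_patch + (1 if use_thumbnail else 0)

print(llm_tokens, max_tiles * llm_tokens)  # 256 3328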