Add files using upload-large-folder tool
- adapter2_config.json +29 -0
- adapter_config.json +29 -0
- adapter_model.safetensors +3 -0
- added_tokens.json +42 -0
- chat_template.json +3 -0
- config.json +265 -0
- dump/tmp_adapter_config.json +26 -0
- dump/tmp_adapter_model.safetensors +3 -0
- git_hash.txt +1 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- preprocessor_config.json +28 -0
- processor_config.json +4 -0
- results.json +1 -0
- special_tokens_map.json +53 -0
- tokenizer.json +0 -0
- tokenizer_config.json +479 -0
- training_config.yml +72 -0
- vocab.json +0 -0
adapter2_config.json
ADDED
@@ -0,0 +1,29 @@
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "./models/ColSmolVLM-256M-Base",
  "bias": "none",
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": "gaussian",
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 32,
  "lora_bias": false,
  "lora_dropout": 0.1,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 32,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "(.*(model.text_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)",
  "task_type": "FEATURE_EXTRACTION",
  "use_dora": false,
  "use_rslora": false
}
adapter_config.json
ADDED
@@ -0,0 +1,29 @@
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "./models/ColSmolVLM-256M-Base",
  "bias": "none",
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": "gaussian",
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 32,
  "lora_bias": false,
  "lora_dropout": 0.1,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 32,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "(.*(model.text_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)",
  "task_type": "FEATURE_EXTRACTION",
  "use_dora": false,
  "use_rslora": false
}
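Note: a minimal usage sketch, not part of this commit. It assumes colpali_engine and peft are installed, that the repository has been cloned locally, and that the base checkpoint path in "base_model_name_or_path" is available; the ColIdefics3 class is the one referenced in training_config.yml.

# Sketch: attach the LoRA adapter described by adapter_config.json to the base model.
import torch
from peft import PeftModel
from colpali_engine.models import ColIdefics3  # class named in training_config.yml

base = ColIdefics3.from_pretrained(
    "./models/ColSmolVLM-256M-Base",   # matches "base_model_name_or_path" above
    torch_dtype=torch.bfloat16,
)
# adapter_config.json and adapter_model.safetensors sit in the repository root
model = PeftModel.from_pretrained(base, ".").eval()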
adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c7e32974ae0949c6ce82561c2a883cb41fef7e1dd67349dcbd23ae294e111821
size 39135840
added_tokens.json
ADDED
@@ -0,0 +1,42 @@
{
  "<end_of_utterance>": 49191,
  "<fake_token_around_image>": 49189,
  "<global-img>": 49152,
  "<image>": 49190,
  "<row_1_col_1>": 49153,
  "<row_1_col_2>": 49154,
  "<row_1_col_3>": 49155,
  "<row_1_col_4>": 49156,
  "<row_1_col_5>": 49157,
  "<row_1_col_6>": 49158,
  "<row_2_col_1>": 49159,
  "<row_2_col_2>": 49160,
  "<row_2_col_3>": 49161,
  "<row_2_col_4>": 49162,
  "<row_2_col_5>": 49163,
  "<row_2_col_6>": 49164,
  "<row_3_col_1>": 49165,
  "<row_3_col_2>": 49166,
  "<row_3_col_3>": 49167,
  "<row_3_col_4>": 49168,
  "<row_3_col_5>": 49169,
  "<row_3_col_6>": 49170,
  "<row_4_col_1>": 49171,
  "<row_4_col_2>": 49172,
  "<row_4_col_3>": 49173,
  "<row_4_col_4>": 49174,
  "<row_4_col_5>": 49175,
  "<row_4_col_6>": 49176,
  "<row_5_col_1>": 49177,
  "<row_5_col_2>": 49178,
  "<row_5_col_3>": 49179,
  "<row_5_col_4>": 49180,
  "<row_5_col_5>": 49181,
  "<row_5_col_6>": 49182,
  "<row_6_col_1>": 49183,
  "<row_6_col_2>": 49184,
  "<row_6_col_3>": 49185,
  "<row_6_col_4>": 49186,
  "<row_6_col_5>": 49187,
  "<row_6_col_6>": 49188
}
chat_template.json
ADDED
@@ -0,0 +1,3 @@
{
  "chat_template": "<|im_start|>{% for message in messages %}{{message['role'].capitalize()}}{% if message['content'][0]['type'] == 'image' %}{{':'}}{% else %}{{': '}}{% endif %}{% for line in message['content'] %}{% if line['type'] == 'text' %}{{line['text']}}{% elif line['type'] == 'image' %}{{ '<image>' }}{% endif %}{% endfor %}<|endoftext|>\n{% endfor %}{% if add_generation_prompt %}{{ 'Assistant:' }}{% endif %}"
}
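Note: a sketch of how this chat template is rendered, not part of the commit. It assumes the ColIdefics3Processor named in processor_config.json is importable from colpali_engine and exposes the standard transformers apply_chat_template method; the message content is an illustrative placeholder.

# Sketch: render the chat template above for one image + text turn.
from colpali_engine.models import ColIdefics3Processor

processor = ColIdefics3Processor.from_pretrained(".")  # picks up chat_template.json
messages = [
    {"role": "user", "content": [
        {"type": "image"},
        {"type": "text", "text": "Describe the document."},
    ]},
]
prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
# Expected rendering per the template:
# "<|im_start|>User:<image>Describe the document.<|endoftext|>\nAssistant:"
print(prompt)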
config.json
ADDED
@@ -0,0 +1,265 @@
{
  "_name_or_path": "models/SmolVLM-256M-Base",
  "architectures": [
    "ColIdefics3"
  ],
  "image_token_id": 49190,
  "model_type": "idefics3",
  "scale_factor": 4,
  "text_config": {
    "_attn_implementation_autoset": false,
    "_flash_attn_2_enabled": true,
    "_name_or_path": "/fsx/m4/experiments/local_experiment_dir/tr_341_vsmollm2_05b/opt_step-27750/unwrapped_model",
    "add_cross_attention": false,
    "architectures": [
      "VLlama3ForCausalLM"
    ],
    "attention_bias": false,
    "attention_dropout": 0.0,
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": 1,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": 2,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "head_dim": 64,
    "hidden_act": "silu",
    "hidden_size": 576,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.041666666666666664,
    "intermediate_size": 1536,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "is_llama_config": true,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "length_penalty": 1.0,
    "max_length": 20,
    "max_position_embeddings": 8192,
    "min_length": 0,
    "mlp_bias": false,
    "model_type": "llama",
    "neftune_noise_alpha": 0.0,
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 9,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 30,
    "num_key_value_heads": 3,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 2,
    "perceiver_config": {
      "_attn_implementation_autoset": false,
      "_name_or_path": "",
      "add_cross_attention": false,
      "architectures": null,
      "attention_dropout": 0.0,
      "bad_words_ids": null,
      "begin_suppress_tokens": null,
      "bos_token_id": null,
      "chunk_size_feed_forward": 0,
      "cross_attention_hidden_size": null,
      "decoder_start_token_id": null,
      "diversity_penalty": 0.0,
      "do_sample": false,
      "early_stopping": false,
      "encoder_no_repeat_ngram_size": 0,
      "eos_token_id": null,
      "exponential_decay_length_penalty": null,
      "finetuning_task": null,
      "forced_bos_token_id": null,
      "forced_eos_token_id": null,
      "hidden_act": "silu",
      "id2label": {
        "0": "LABEL_0",
        "1": "LABEL_1"
      },
      "is_decoder": false,
      "is_encoder_decoder": false,
      "label2id": {
        "LABEL_0": 0,
        "LABEL_1": 1
      },
      "length_penalty": 1.0,
      "max_length": 20,
      "min_length": 0,
      "model_type": "vllama3",
      "no_repeat_ngram_size": 0,
      "num_beam_groups": 1,
      "num_beams": 1,
      "num_key_value_heads": 1,
      "num_return_sequences": 1,
      "output_attentions": false,
      "output_hidden_states": false,
      "output_scores": false,
      "pad_token_id": null,
      "prefix": null,
      "problem_type": null,
      "pruned_heads": {},
      "qk_layer_norms_perceiver": false,
      "remove_invalid_values": false,
      "repetition_penalty": 1.0,
      "resampler_depth": 6,
      "resampler_head_dim": 96,
      "resampler_n_heads": 16,
      "resampler_n_latents": 64,
      "return_dict": true,
      "return_dict_in_generate": false,
      "sep_token_id": null,
      "suppress_tokens": null,
      "task_specific_params": null,
      "temperature": 1.0,
      "tf_legacy_loss": false,
      "tie_encoder_decoder": false,
      "tie_word_embeddings": true,
      "tokenizer_class": null,
      "top_k": 50,
      "top_p": 1.0,
      "torch_dtype": null,
      "torchscript": false,
      "transformers_version": "4.46.0",
      "typical_p": 1.0,
      "use_bfloat16": false
    },
    "pixel_shuffle_factor": 4,
    "prefix": null,
    "pretraining_tp": 1,
    "problem_type": null,
    "pruned_heads": {},
    "qk_layer_norms": false,
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "rms_norm_eps": 1e-05,
    "rope_interleaved": false,
    "rope_scaling": null,
    "rope_theta": 100000,
    "sep_token_id": null,
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": false,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": "bfloat16",
    "torchscript": false,
    "transformers.js_config": {
      "kv_cache_dtype": {
        "fp16": "float16",
        "q4f16": "float16"
      }
    },
    "typical_p": 1.0,
    "use_bfloat16": false,
    "use_cache": true,
    "use_resampler": false,
    "vocab_size": 49192
  },
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.3",
  "use_cache": true,
  "vision_config": {
    "_attn_implementation_autoset": false,
    "_name_or_path": "",
    "add_cross_attention": false,
    "architectures": null,
    "attention_dropout": 0.0,
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "image_size": 512,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-06,
    "length_penalty": 1.0,
    "max_image_size": {
      "longest_edge": 512
    },
    "max_length": 20,
    "min_length": 0,
    "model_type": "idefics3",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 12,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_channels": 3,
    "num_hidden_layers": 12,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": null,
    "patch_size": 16,
    "prefix": null,
    "problem_type": null,
    "pruned_heads": {},
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "size": {
      "longest_edge": 2048
    },
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": false,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": null,
    "torchscript": false,
    "typical_p": 1.0,
    "use_bfloat16": false
  },
  "vocab_size": 49192
}
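Note: a short sketch, not part of the commit, for inspecting the nested config above with transformers (idefics3 is a registered model type in transformers 4.46); it assumes the repository is available in the current directory.

# Sketch: read config.json through AutoConfig and check a few nested fields.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained(".")
print(cfg.model_type)                       # "idefics3"
print(cfg.text_config.hidden_size)          # 576
print(cfg.vision_config.num_hidden_layers)  # 12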
dump/tmp_adapter_config.json
ADDED
@@ -0,0 +1,26 @@
{
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "./models/ColSmolVLM-256M-Base",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": "gaussian",
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 32,
  "lora_dropout": 0.1,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 32,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "(.*(model.text_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)",
  "task_type": "FEATURE_EXTRACTION",
  "use_dora": false,
  "use_rslora": false
}
dump/tmp_adapter_model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d307b9b7fafeea5a9e5f42a92b6d952353c74972f9b0d09e837f4fde3b4f9064
size 19598104
git_hash.txt
ADDED
@@ -0,0 +1 @@
8b4f75e476bddc6c34a02722761bd3ada6ac0d3d
merges.txt
ADDED
The diff for this file is too large to render.
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00cef852358625358be817768b2b45ba85f80e76279a01b75e23c2b6c1cd9450
size 456304568
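Note: the pointer above only records the sha256 and byte size of the LFS blob. A sketch, not part of the commit, for inspecting the tensors once the blob has been pulled; it assumes the safetensors and torch packages are installed.

# Sketch: list a few tensor names and shapes stored in model.safetensors.
from safetensors import safe_open

with safe_open("model.safetensors", framework="pt") as f:
    for name in list(f.keys())[:5]:
        print(name, f.get_slice(name).get_shape())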
preprocessor_config.json
ADDED
@@ -0,0 +1,28 @@
{
  "do_convert_rgb": true,
  "do_image_splitting": true,
  "do_normalize": true,
  "do_pad": true,
  "do_rescale": true,
  "do_resize": true,
  "image_mean": [
    0.5,
    0.5,
    0.5
  ],
  "image_processor_type": "Idefics3ImageProcessor",
  "image_std": [
    0.5,
    0.5,
    0.5
  ],
  "max_image_size": {
    "longest_edge": 512
  },
  "processor_class": "ColIdefics3Processor",
  "resample": 1,
  "rescale_factor": 0.00392156862745098,
  "size": {
    "longest_edge": 2048
  }
}
processor_config.json
ADDED
@@ -0,0 +1,4 @@
{
  "image_seq_len": 64,
  "processor_class": "Idefics3Processor"
}
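Note: a sketch, not part of the commit, of how the preprocessor and processor configs above are typically exercised for retrieval. It assumes colpali_engine and Pillow are installed, that the ColIdefics3Processor class exposes the usual process_images / process_queries helpers, and that "page.png" and the query text are placeholder inputs.

# Sketch: preprocess a document page and a query with the repository's processor.
from PIL import Image
from colpali_engine.models import ColIdefics3Processor

processor = ColIdefics3Processor.from_pretrained(".")
image_batch = processor.process_images([Image.open("page.png")])      # tiles resized per max_image_size (longest edge 512)
query_batch = processor.process_queries(["total revenue in 2023"])    # hypothetical query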
results.json
ADDED
@@ -0,0 +1 @@
{"./data_dir/eval_vidore/arxivqa_test_subsampled": {"ndcg_at_1": 0.622, "ndcg_at_3": 0.67486, "ndcg_at_5": 0.69017, "ndcg_at_10": 0.70946, "ndcg_at_20": 0.72898, "ndcg_at_50": 0.73804, "ndcg_at_100": 0.74397, "map_at_1": 0.622, "map_at_3": 0.662, "map_at_5": 0.6703, "map_at_10": 0.67857, "map_at_20": 0.6841, "map_at_50": 0.68552, "map_at_100": 0.68607, "recall_at_1": 0.622, "recall_at_3": 0.712, "recall_at_5": 0.75, "recall_at_10": 0.808, "recall_at_20": 0.884, "recall_at_50": 0.93, "recall_at_100": 0.966, "precision_at_1": 0.622, "precision_at_3": 0.23733, "precision_at_5": 0.15, "precision_at_10": 0.0808, "precision_at_20": 0.0442, "precision_at_50": 0.0186, "precision_at_100": 0.00966, "mrr_at_1": 0.618, "mrr_at_3": 0.66, "mrr_at_5": 0.6692, "mrr_at_10": 0.6775777777777778, "mrr_at_20": 0.6824860893148819, "mrr_at_50": 0.6840505621420908, "mrr_at_100": 0.6845993176544162, "naucs_at_1_max": 0.46491355378995425, "naucs_at_1_std": -0.06263837723835318, "naucs_at_1_diff1": 0.897580000185275, "naucs_at_3_max": 0.5074962351081752, "naucs_at_3_std": -0.006120744926715115, "naucs_at_3_diff1": 0.8380872858484801, "naucs_at_5_max": 0.5076588173731037, "naucs_at_5_std": 0.00014233385661883033, "naucs_at_5_diff1": 0.818938775510204, "naucs_at_10_max": 0.5770523781854408, "naucs_at_10_std": 0.09178633684734683, "naucs_at_10_diff1": 0.7791151323981074, "naucs_at_20_max": 0.6430128856683821, "naucs_at_20_std": 0.16875963146332645, "naucs_at_20_diff1": 0.7221138397980255, "naucs_at_50_max": 0.6545151393890893, "naucs_at_50_std": 0.26363878884887104, "naucs_at_50_diff1": 0.7213151927437641, "naucs_at_100_max": 0.5877409787444364, "naucs_at_100_std": 0.19736914373592712, "naucs_at_100_diff1": 0.7009666611742652}, "./data_dir/eval_vidore/docvqa_test_subsampled": {"ndcg_at_1": 0.45676, "ndcg_at_3": 0.52515, "ndcg_at_5": 0.54233, "ndcg_at_10": 0.56915, "ndcg_at_20": 0.5816, "ndcg_at_50": 0.59505, "ndcg_at_100": 0.60258, "map_at_1": 0.45676, "map_at_3": 0.50887, "map_at_5": 0.51829, "map_at_10": 0.52954, "map_at_20": 0.53302, "map_at_50": 0.53529, "map_at_100": 0.53594, "recall_at_1": 0.45676, "recall_at_3": 0.57206, "recall_at_5": 0.61419, "recall_at_10": 0.69623, "recall_at_20": 0.74501, "recall_at_50": 0.81153, "recall_at_100": 0.85809, "precision_at_1": 0.45676, "precision_at_3": 0.19069, "precision_at_5": 0.12284, "precision_at_10": 0.06962, "precision_at_20": 0.03725, "precision_at_50": 0.01623, "precision_at_100": 0.00858, "mrr_at_1": 0.45454545454545453, "mrr_at_3": 0.5066518847006651, "mrr_at_5": 0.5170731707317073, "mrr_at_10": 0.5275815647766867, "mrr_at_20": 0.5311224770899007, "mrr_at_50": 0.5332465170438858, "mrr_at_100": 0.5339250823073732, "naucs_at_1_max": 0.3882948611231304, "naucs_at_1_std": 0.6051243703689616, "naucs_at_1_diff1": 0.8406058479800685, "naucs_at_3_max": 0.3234157964181825, "naucs_at_3_std": 0.6966233741677433, "naucs_at_3_diff1": 0.7729637232110695, "naucs_at_5_max": 0.27276717369737363, "naucs_at_5_std": 0.7006240836207518, "naucs_at_5_diff1": 0.747950791051886, "naucs_at_10_max": 0.16091474340315445, "naucs_at_10_std": 0.7256404433556597, "naucs_at_10_diff1": 0.6981512112733994, "naucs_at_20_max": 0.08633158860631607, "naucs_at_20_std": 0.7747045325493711, "naucs_at_20_diff1": 0.7160198478434746, "naucs_at_50_max": -0.04978574435251967, "naucs_at_50_std": 0.7706903740216889, "naucs_at_50_diff1": 0.6946330790179335, "naucs_at_100_max": -0.1803641306976603, "naucs_at_100_std": 0.8287710376301388, "naucs_at_100_diff1": 0.709766038885269}, 
"./data_dir/eval_vidore/syntheticDocQA_energy_test": {"ndcg_at_1": 0.87, "ndcg_at_3": 0.91786, "ndcg_at_5": 0.92603, "ndcg_at_10": 0.93226, "ndcg_at_20": 0.93226, "ndcg_at_50": 0.93422, "ndcg_at_100": 0.93422, "map_at_1": 0.87, "map_at_3": 0.90667, "map_at_5": 0.91117, "map_at_10": 0.9136, "map_at_20": 0.9136, "map_at_50": 0.9139, "map_at_100": 0.9139, "recall_at_1": 0.87, "recall_at_3": 0.95, "recall_at_5": 0.97, "recall_at_10": 0.99, "recall_at_20": 0.99, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.87, "precision_at_3": 0.31667, "precision_at_5": 0.194, "precision_at_10": 0.099, "precision_at_20": 0.0495, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.88, "mrr_at_3": 0.9116666666666667, "mrr_at_5": 0.9181666666666666, "mrr_at_10": 0.9195952380952381, "mrr_at_20": 0.9195952380952381, "mrr_at_50": 0.9199077380952382, "mrr_at_100": 0.9199077380952382, "naucs_at_1_max": 0.3063763608087097, "naucs_at_1_std": -0.2671628526994001, "naucs_at_1_diff1": 0.9131674442716432, "naucs_at_3_max": 0.7082166199813253, "naucs_at_3_std": 0.1034547152194234, "naucs_at_3_diff1": 0.8627450980392143, "naucs_at_5_max": 0.8202614379084955, "naucs_at_5_std": 0.0704948646125114, "naucs_at_5_diff1": 0.8638344226579531, "naucs_at_10_max": 0.8692810457516413, "naucs_at_10_std": 0.5541549953314738, "naucs_at_10_diff1": 1.0, "naucs_at_20_max": 0.8692810457516413, "naucs_at_20_std": 0.5541549953314738, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": NaN, "naucs_at_50_std": NaN, "naucs_at_50_diff1": NaN, "naucs_at_100_max": NaN, "naucs_at_100_std": NaN, "naucs_at_100_diff1": NaN}, "./data_dir/eval_vidore/tatdqa_test": {"ndcg_at_1": 0.58809, "ndcg_at_3": 0.70323, "ndcg_at_5": 0.73068, "ndcg_at_10": 0.75075, "ndcg_at_20": 0.75855, "ndcg_at_50": 0.76442, "ndcg_at_100": 0.7668, "map_at_1": 0.58809, "map_at_3": 0.67517, "map_at_5": 0.69036, "map_at_10": 0.69878, "map_at_20": 0.70099, "map_at_50": 0.70196, "map_at_100": 0.70218, "recall_at_1": 0.58809, "recall_at_3": 0.78433, "recall_at_5": 0.85115, "recall_at_10": 0.91252, "recall_at_20": 0.94289, "recall_at_50": 0.97205, "recall_at_100": 0.98663, "precision_at_1": 0.58809, "precision_at_3": 0.26144, "precision_at_5": 0.17023, "precision_at_10": 0.09125, "precision_at_20": 0.04714, "precision_at_50": 0.01944, "precision_at_100": 0.00987, "mrr_at_1": 0.5911300121506683, "mrr_at_3": 0.6764884568651276, "mrr_at_5": 0.6917071688942892, "mrr_at_10": 0.7006767247198595, "mrr_at_20": 0.702628923116812, "mrr_at_50": 0.7035729931078816, "mrr_at_100": 0.7038013299596423, "naucs_at_1_max": 0.16282263440251535, "naucs_at_1_std": -0.2675726048319812, "naucs_at_1_diff1": 0.7855612887331109, "naucs_at_3_max": 0.15869758594775502, "naucs_at_3_std": -0.25252459447099584, "naucs_at_3_diff1": 0.6394956014993703, "naucs_at_5_max": 0.19188935642904395, "naucs_at_5_std": -0.19455295056436547, "naucs_at_5_diff1": 0.6071937017845587, "naucs_at_10_max": 0.2647534611934194, "naucs_at_10_std": -0.054514027748884926, "naucs_at_10_diff1": 0.5405997520135849, "naucs_at_20_max": 0.3763911903387556, "naucs_at_20_std": 0.14592272943721332, "naucs_at_20_diff1": 0.4732245856774079, "naucs_at_50_max": 0.35454897123535783, "naucs_at_50_std": 0.275162782423997, "naucs_at_50_diff1": 0.4532394718684134, "naucs_at_100_max": 0.2978782618422668, "naucs_at_100_std": 0.2997014737148475, "naucs_at_100_diff1": 0.45555904105628975}, "./data_dir/eval_vidore/infovqa_test_subsampled": {"ndcg_at_1": 0.74494, "ndcg_at_3": 0.79976, "ndcg_at_5": 0.81213, "ndcg_at_10": 0.82846, "ndcg_at_20": 
0.83297, "ndcg_at_50": 0.83835, "ndcg_at_100": 0.83933, "map_at_1": 0.74494, "map_at_3": 0.78711, "map_at_5": 0.79389, "map_at_10": 0.8006, "map_at_20": 0.80179, "map_at_50": 0.80272, "map_at_100": 0.8028, "recall_at_1": 0.74494, "recall_at_3": 0.83603, "recall_at_5": 0.8664, "recall_at_10": 0.917, "recall_at_20": 0.93522, "recall_at_50": 0.96154, "recall_at_100": 0.96761, "precision_at_1": 0.74494, "precision_at_3": 0.27868, "precision_at_5": 0.17328, "precision_at_10": 0.0917, "precision_at_20": 0.04676, "precision_at_50": 0.01923, "precision_at_100": 0.00968, "mrr_at_1": 0.742914979757085, "mrr_at_3": 0.7850877192982456, "mrr_at_5": 0.7931848852901484, "mrr_at_10": 0.7990416746995694, "mrr_at_20": 0.8004113500481683, "mrr_at_50": 0.801331531125384, "mrr_at_100": 0.8014399188597515, "naucs_at_1_max": 0.4965959718308746, "naucs_at_1_std": -0.0466933622092674, "naucs_at_1_diff1": 0.8685373039401096, "naucs_at_3_max": 0.5196845357871381, "naucs_at_3_std": 0.014152907602651955, "naucs_at_3_diff1": 0.8084743558786613, "naucs_at_5_max": 0.5925268631205242, "naucs_at_5_std": 0.11651891324846737, "naucs_at_5_diff1": 0.7815885765686577, "naucs_at_10_max": 0.581202610435799, "naucs_at_10_std": 0.23681516182108397, "naucs_at_10_diff1": 0.7349454780601268, "naucs_at_20_max": 0.7922518260743858, "naucs_at_20_std": 0.5495609276681086, "naucs_at_20_diff1": 0.7372706091805555, "naucs_at_50_max": 0.8510476089522382, "naucs_at_50_std": 0.7768090684036366, "naucs_at_50_diff1": 0.6938503074292259, "naucs_at_100_max": 0.8476068082679248, "naucs_at_100_std": 0.8111079242428052, "naucs_at_100_diff1": 0.7003229329465875}, "./data_dir/eval_vidore/syntheticDocQA_healthcare_industry_test": {"ndcg_at_1": 0.92, "ndcg_at_3": 0.95393, "ndcg_at_5": 0.9578, "ndcg_at_10": 0.96113, "ndcg_at_20": 0.96113, "ndcg_at_50": 0.96113, "ndcg_at_100": 0.96113, "map_at_1": 0.92, "map_at_3": 0.945, "map_at_5": 0.947, "map_at_10": 0.94843, "map_at_20": 0.94843, "map_at_50": 0.94843, "map_at_100": 0.94843, "recall_at_1": 0.92, "recall_at_3": 0.98, "recall_at_5": 0.99, "recall_at_10": 1.0, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.92, "precision_at_3": 0.32667, "precision_at_5": 0.198, "precision_at_10": 0.1, "precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.93, "mrr_at_3": 0.9516666666666667, "mrr_at_5": 0.9536666666666666, "mrr_at_10": 0.9550952380952381, "mrr_at_20": 0.9550952380952381, "mrr_at_50": 0.9550952380952381, "mrr_at_100": 0.9550952380952381, "naucs_at_1_max": 0.5140056022408969, "naucs_at_1_std": -0.16888422035480954, "naucs_at_1_diff1": 0.8211367880485536, "naucs_at_3_max": 1.0, "naucs_at_3_std": 0.3384687208216551, "naucs_at_3_diff1": 0.9346405228758099, "naucs_at_5_max": 1.0, "naucs_at_5_std": 0.12278244631185926, "naucs_at_5_diff1": 1.0, "naucs_at_10_max": 1.0, "naucs_at_10_std": 1.0, "naucs_at_10_diff1": 1.0, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": NaN, "naucs_at_50_std": NaN, "naucs_at_50_diff1": NaN, "naucs_at_100_max": NaN, "naucs_at_100_std": NaN, "naucs_at_100_diff1": NaN}, "./data_dir/eval_vidore/tabfquad_test_subsampled": {"ndcg_at_1": 0.56429, "ndcg_at_3": 0.63444, "ndcg_at_5": 0.65026, "ndcg_at_10": 0.66294, "ndcg_at_20": 0.67546, "ndcg_at_50": 0.70544, "ndcg_at_100": 0.71155, "map_at_1": 0.56429, "map_at_3": 0.61786, "map_at_5": 0.62643, "map_at_10": 0.63165, "map_at_20": 0.63503, "map_at_50": 0.63965, "map_at_100": 0.64028, "recall_at_1": 0.56429, "recall_at_3": 0.68214, 
"recall_at_5": 0.72143, "recall_at_10": 0.76071, "recall_at_20": 0.81071, "recall_at_50": 0.96429, "recall_at_100": 1.0, "precision_at_1": 0.56429, "precision_at_3": 0.22738, "precision_at_5": 0.14429, "precision_at_10": 0.07607, "precision_at_20": 0.04054, "precision_at_50": 0.01929, "precision_at_100": 0.01, "mrr_at_1": 0.5642857142857143, "mrr_at_3": 0.6172619047619048, "mrr_at_5": 0.6267261904761904, "mrr_at_10": 0.6319331065759637, "mrr_at_20": 0.6352391827022797, "mrr_at_50": 0.6399410284324812, "mrr_at_100": 0.6405056533320372, "naucs_at_1_max": 0.250909164994346, "naucs_at_1_std": 0.10353784876791601, "naucs_at_1_diff1": 0.7091792557580451, "naucs_at_3_max": 0.19901092567688966, "naucs_at_3_std": 0.06257940172158412, "naucs_at_3_diff1": 0.6023933615512848, "naucs_at_5_max": 0.16905697042683365, "naucs_at_5_std": 0.025700327070190307, "naucs_at_5_diff1": 0.5975052344915358, "naucs_at_10_max": 0.11986463149253845, "naucs_at_10_std": 0.04068799417636617, "naucs_at_10_diff1": 0.5762841809353437, "naucs_at_20_max": 0.16545256557084664, "naucs_at_20_std": 0.0609616431669272, "naucs_at_20_diff1": 0.511002008899236, "naucs_at_50_max": 0.4613912231559318, "naucs_at_50_std": 0.2222222222222222, "naucs_at_50_diff1": 0.5568160597572381, "naucs_at_100_max": 1.0, "naucs_at_100_std": 1.0, "naucs_at_100_diff1": 1.0}, "./data_dir/eval_vidore/syntheticDocQA_government_reports_test": {"ndcg_at_1": 0.79, "ndcg_at_3": 0.89464, "ndcg_at_5": 0.90238, "ndcg_at_10": 0.90883, "ndcg_at_20": 0.90883, "ndcg_at_50": 0.90883, "ndcg_at_100": 0.90883, "map_at_1": 0.79, "map_at_3": 0.87167, "map_at_5": 0.87567, "map_at_10": 0.87833, "map_at_20": 0.87833, "map_at_50": 0.87833, "map_at_100": 0.87833, "recall_at_1": 0.79, "recall_at_3": 0.96, "recall_at_5": 0.98, "recall_at_10": 1.0, "recall_at_20": 1.0, "recall_at_50": 1.0, "recall_at_100": 1.0, "precision_at_1": 0.79, "precision_at_3": 0.32, "precision_at_5": 0.196, "precision_at_10": 0.1, "precision_at_20": 0.05, "precision_at_50": 0.02, "precision_at_100": 0.01, "mrr_at_1": 0.85, "mrr_at_3": 0.9016666666666667, "mrr_at_5": 0.9056666666666666, "mrr_at_10": 0.9085833333333333, "mrr_at_20": 0.9085833333333333, "mrr_at_50": 0.9085833333333333, "mrr_at_100": 0.9085833333333333, "naucs_at_1_max": 0.2326688628657996, "naucs_at_1_std": 0.03888234303989203, "naucs_at_1_diff1": 0.9258181643301988, "naucs_at_3_max": 0.7480158730158707, "naucs_at_3_std": 0.6259337068160535, "naucs_at_3_diff1": 0.8231792717086835, "naucs_at_5_max": 0.9346405228758136, "naucs_at_5_std": 0.540149393090577, "naucs_at_5_diff1": 0.9346405228758136, "naucs_at_10_max": 1.0, "naucs_at_10_std": 1.0, "naucs_at_10_diff1": 1.0, "naucs_at_20_max": 1.0, "naucs_at_20_std": 1.0, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": NaN, "naucs_at_50_std": NaN, "naucs_at_50_diff1": NaN, "naucs_at_100_max": NaN, "naucs_at_100_std": NaN, "naucs_at_100_diff1": NaN}, "./data_dir/eval_vidore/shiftproject_test": {"ndcg_at_1": 0.48, "ndcg_at_3": 0.59202, "ndcg_at_5": 0.64626, "ndcg_at_10": 0.67623, "ndcg_at_20": 0.68119, "ndcg_at_50": 0.69074, "ndcg_at_100": 0.69411, "map_at_1": 0.48, "map_at_3": 0.565, "map_at_5": 0.5955, "map_at_10": 0.60836, "map_at_20": 0.60966, "map_at_50": 0.61104, "map_at_100": 0.61137, "recall_at_1": 0.48, "recall_at_3": 0.67, "recall_at_5": 0.8, "recall_at_10": 0.89, "recall_at_20": 0.91, "recall_at_50": 0.96, "recall_at_100": 0.98, "precision_at_1": 0.48, "precision_at_3": 0.22333, "precision_at_5": 0.16, "precision_at_10": 0.089, "precision_at_20": 0.0455, "precision_at_50": 0.0192, 
"precision_at_100": 0.0098, "mrr_at_1": 0.49, "mrr_at_3": 0.5883333333333334, "mrr_at_5": 0.6138333333333333, "mrr_at_10": 0.622702380952381, "mrr_at_20": 0.6240598470157294, "mrr_at_50": 0.6257612271725352, "mrr_at_100": 0.6259278938392019, "naucs_at_1_max": 0.10312797713559874, "naucs_at_1_std": -0.02315549910024338, "naucs_at_1_diff1": 0.6334947602413465, "naucs_at_3_max": -0.0512264901559134, "naucs_at_3_std": -0.15811481099016378, "naucs_at_3_diff1": 0.42873829798039664, "naucs_at_5_max": 0.20339660339660318, "naucs_at_5_std": 0.09702797202797177, "naucs_at_5_diff1": 0.4798701298701295, "naucs_at_10_max": 0.15213469633193094, "naucs_at_10_std": -0.08190877072416533, "naucs_at_10_diff1": 0.5727171205222922, "naucs_at_20_max": 0.265380226164541, "naucs_at_20_std": 0.09165888577653315, "naucs_at_20_diff1": 0.4984956945741262, "naucs_at_50_max": 0.2783613445378158, "naucs_at_50_std": 0.06430905695611724, "naucs_at_50_diff1": 0.1669000933706846, "naucs_at_100_max": -0.22035480859009582, "naucs_at_100_std": -0.22035480859009582, "naucs_at_100_diff1": -0.22035480859009582}, "./data_dir/eval_vidore/syntheticDocQA_artificial_intelligence_test": {"ndcg_at_1": 0.92, "ndcg_at_3": 0.95655, "ndcg_at_5": 0.96085, "ndcg_at_10": 0.96085, "ndcg_at_20": 0.96085, "ndcg_at_50": 0.96085, "ndcg_at_100": 0.9625, "map_at_1": 0.92, "map_at_3": 0.94833, "map_at_5": 0.95083, "map_at_10": 0.95083, "map_at_20": 0.95083, "map_at_50": 0.95083, "map_at_100": 0.95098, "recall_at_1": 0.92, "recall_at_3": 0.98, "recall_at_5": 0.99, "recall_at_10": 0.99, "recall_at_20": 0.99, "recall_at_50": 0.99, "recall_at_100": 1.0, "precision_at_1": 0.92, "precision_at_3": 0.32667, "precision_at_5": 0.198, "precision_at_10": 0.099, "precision_at_20": 0.0495, "precision_at_50": 0.0198, "precision_at_100": 0.01, "mrr_at_1": 0.92, "mrr_at_3": 0.9516666666666667, "mrr_at_5": 0.9516666666666667, "mrr_at_10": 0.9516666666666667, "mrr_at_20": 0.9516666666666667, "mrr_at_50": 0.9516666666666667, "mrr_at_100": 0.9518205128205127, "naucs_at_1_max": 0.5828081232492984, "naucs_at_1_std": 0.2532096171802052, "naucs_at_1_diff1": 0.8768674136321195, "naucs_at_3_max": 0.8611111111111119, "naucs_at_3_std": 0.0793650793650649, "naucs_at_3_diff1": 1.0, "naucs_at_5_max": 0.7222222222222276, "naucs_at_5_std": -0.5634920634920767, "naucs_at_5_diff1": 1.0, "naucs_at_10_max": 0.7222222222222276, "naucs_at_10_std": -0.5634920634920767, "naucs_at_10_diff1": 1.0, "naucs_at_20_max": 0.7222222222222276, "naucs_at_20_std": -0.5634920634920767, "naucs_at_20_diff1": 1.0, "naucs_at_50_max": 0.7222222222222041, "naucs_at_50_std": -0.5634920634920583, "naucs_at_50_diff1": 1.0, "naucs_at_100_max": NaN, "naucs_at_100_std": NaN, "naucs_at_100_diff1": NaN}}
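Note: a small sketch, not part of the commit, for summarising the ViDoRe retrieval metrics stored in results.json; it assumes the file sits in the current directory.

# Sketch: print nDCG@5 and recall@5 per evaluation dataset.
import json

with open("results.json") as f:
    results = json.load(f)

for dataset, metrics in results.items():
    name = dataset.split("/")[-1]
    print(f"{name:45s} ndcg@5={metrics['ndcg_at_5']:.3f} recall@5={metrics['recall_at_5']:.3f}")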
special_tokens_map.json
ADDED
@@ -0,0 +1,53 @@
{
  "additional_special_tokens": [
    {
      "content": "<fake_token_around_image>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false
    },
    {
      "content": "<image>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false
    },
    {
      "content": "<end_of_utterance>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false
    }
  ],
  "bos_token": {
    "content": "<|im_start|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|im_end|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,479 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<|im_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "<|im_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<repo_name>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "4": {
      "content": "<reponame>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "5": {
      "content": "<file_sep>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "6": {
      "content": "<filename>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "7": {
      "content": "<gh_stars>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "8": {
      "content": "<issue_start>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "9": {
      "content": "<issue_comment>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "10": {
      "content": "<issue_closed>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "11": {
      "content": "<jupyter_start>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "12": {
      "content": "<jupyter_text>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "13": {
      "content": "<jupyter_code>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "14": {
      "content": "<jupyter_output>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "15": {
      "content": "<jupyter_script>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "16": {
      "content": "<empty_output>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49152": {
      "content": "<global-img>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49153": {
      "content": "<row_1_col_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49154": {
      "content": "<row_1_col_2>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49155": {
      "content": "<row_1_col_3>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49156": {
      "content": "<row_1_col_4>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49157": {
      "content": "<row_1_col_5>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49158": {
      "content": "<row_1_col_6>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49159": {
      "content": "<row_2_col_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49160": {
      "content": "<row_2_col_2>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49161": {
      "content": "<row_2_col_3>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49162": {
      "content": "<row_2_col_4>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49163": {
      "content": "<row_2_col_5>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49164": {
      "content": "<row_2_col_6>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49165": {
      "content": "<row_3_col_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49166": {
      "content": "<row_3_col_2>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49167": {
      "content": "<row_3_col_3>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49168": {
      "content": "<row_3_col_4>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49169": {
      "content": "<row_3_col_5>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49170": {
      "content": "<row_3_col_6>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49171": {
      "content": "<row_4_col_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49172": {
      "content": "<row_4_col_2>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49173": {
      "content": "<row_4_col_3>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49174": {
      "content": "<row_4_col_4>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49175": {
      "content": "<row_4_col_5>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49176": {
      "content": "<row_4_col_6>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49177": {
      "content": "<row_5_col_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49178": {
      "content": "<row_5_col_2>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49179": {
      "content": "<row_5_col_3>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49180": {
      "content": "<row_5_col_4>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49181": {
      "content": "<row_5_col_5>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49182": {
      "content": "<row_5_col_6>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49183": {
      "content": "<row_6_col_1>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49184": {
      "content": "<row_6_col_2>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49185": {
      "content": "<row_6_col_3>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49186": {
      "content": "<row_6_col_4>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49187": {
      "content": "<row_6_col_5>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49188": {
      "content": "<row_6_col_6>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49189": {
      "content": "<fake_token_around_image>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49190": {
      "content": "<image>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "49191": {
      "content": "<end_of_utterance>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [
    "<fake_token_around_image>",
    "<image>",
    "<end_of_utterance>"
  ],
  "bos_token": "<|im_start|>",
  "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful AI assistant named SmolLM, trained by Hugging Face<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|im_end|>",
  "extra_special_tokens": {},
  "legacy": false,
  "model_max_length": 8192,
  "pad_token": "<|im_end|>",
  "processor_class": "ColIdefics3Processor",
  "tokenizer_class": "GPT2Tokenizer",
  "truncation_side": "left",
  "unk_token": "<|endoftext|>",
  "vocab_size": 49152
}
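Note: a short sketch, not part of the commit, for loading the tokenizer files above and confirming the special-token layout; it assumes the repository is available locally in the current directory.

# Sketch: verify the special tokens declared in tokenizer_config.json and added_tokens.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(".")
print(tok.bos_token, tok.eos_token, tok.pad_token)   # <|im_start|> <|im_end|> <|im_end|>
print(tok.convert_tokens_to_ids("<image>"))          # 49190, matching added_tokens.json
print(tok.model_max_length)                          # 8192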
training_config.yml
ADDED
@@ -0,0 +1,72 @@
config:
  (): colpali_engine.trainer.colmodel_training.ColModelTrainingConfig
  output_dir: !path ../../../models/ColSmolVLM-256M-Base
  processor:
    (): colpali_engine.utils.transformers_wrappers.AllPurposeWrapper
    class_to_instanciate: !ext colpali_engine.models.ColIdefics3Processor
    pretrained_model_name_or_path: "./models/ColSmolVLM-256M-Base"
    # num_image_tokens: 2048
    # max_length: 50

  model:
    (): colpali_engine.utils.transformers_wrappers.AllPurposeWrapper
    class_to_instanciate: !ext colpali_engine.models.ColIdefics3
    pretrained_model_name_or_path: "./models/ColSmolVLM-256M-Base"
    torch_dtype: !ext torch.bfloat16
    # use_cache: false
    attn_implementation: "flash_attention_2"
    # device_map: "auto"
    # quantization_config:
    #   (): transformers.BitsAndBytesConfig
    #   load_in_4bit: true
    #   bnb_4bit_quant_type: "nf4"
    #   bnb_4bit_compute_dtype: "bfloat16"
    #   bnb_4bit_use_double_quant: true

  dataset_loading_func: !ext colpali_engine.utils.dataset_transformation.load_train_set
  eval_dataset_loader: !import ../data/test_data.yaml

  # max_length: 50
  run_eval: true
  loss_func:
    (): colpali_engine.loss.late_interaction_losses.ColbertPairwiseCELoss
  tr_args:
    (): transformers.training_args.TrainingArguments
    output_dir: null
    overwrite_output_dir: true
    num_train_epochs: 3
    per_device_train_batch_size: 8
    gradient_checkpointing: true
    gradient_checkpointing_kwargs: { "use_reentrant": false }
    # gradient_checkpointing: true
    # 6 x 8 gpus = 48 batch size
    # gradient_accumulation_steps: 4
    per_device_eval_batch_size: 8
    eval_strategy: "steps"
    dataloader_num_workers: 4
    # bf16: true
    save_steps: 500
    logging_steps: 10
    eval_steps: 100
    warmup_steps: 100
    learning_rate: 5e-4
    save_total_limit: 1
    resume_from_checkpoint: true
    # optim: "paged_adamw_8bit"
    # wandb logging
    # wandb_project: "colqwen2"
    # run_name: "colqwen2-ba32-nolora"
    report_to: "wandb"


  peft_config:
    (): peft.LoraConfig
    r: 32
    lora_alpha: 32
    lora_dropout: 0.1
    init_lora_weights: "gaussian"
    bias: "none"
    task_type: "FEATURE_EXTRACTION"
    target_modules: '(.*(model.text_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)'
    # target_modules: '(.*(language_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)'
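Note: a sketch, not part of the commit, showing the peft.LoraConfig that corresponds to the peft_config block above (the training script itself builds it through colpali_engine's YAML loader); it only mirrors the values already listed.

# Sketch: the LoRA configuration used for this run, expressed directly in Python.
from peft import LoraConfig

lora_cfg = LoraConfig(
    r=32,
    lora_alpha=32,
    lora_dropout=0.1,
    init_lora_weights="gaussian",
    bias="none",
    task_type="FEATURE_EXTRACTION",
    target_modules=r"(.*(model.text_model).*(down_proj|gate_proj|up_proj|k_proj|q_proj|v_proj|o_proj).*$|.*(custom_text_proj).*$)",
)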
vocab.json
ADDED
The diff for this file is too large to render.