Update generation_config.json (#4)
- Update generation_config.json (a679c1ef6ca95c544fa370ab9a777ee4e0690e14)
- Update tokenizer_config.json (6382a9dceb2c60b849df298ed659e6bf031d3500)
- Update config.json (1aa0c9f2786123d54b11ad5dd355b9232c37c225)
Co-authored-by: Quentin Gallouédec <[email protected]>
- config.json +1 -1
- generation_config.json +1 -1
- tokenizer_config.json +1 -1
config.json
CHANGED
@@ -5,7 +5,7 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
-  "eos_token_id":
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 5120,
   "initializer_range": 0.02,
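To sanity-check the new value in config.json, a quick load with transformers is enough. This is only an illustrative sketch, not part of the change, and the repo id "org/model" below is a placeholder for this repository.

from transformers import AutoConfig

# Read the model config from the Hub (or a local checkout of this repo).
config = AutoConfig.from_pretrained("org/model")
print(config.bos_token_id)  # 151643 (unchanged)
print(config.eos_token_id)  # 151645 after this update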
generation_config.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "_from_model_config": true,
   "bos_token_id": 151643,
-  "eos_token_id":
+  "eos_token_id": 151645,
   "transformers_version": "4.47.1"
 }
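The same id lands in generation_config.json, which is what model.generate() consults for its stop token. A minimal sketch, again with a placeholder repo id:

from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("org/model")
# With eos_token_id set to 151645, generation stops at <|im_end|>,
# the same token the chat template uses to close each turn.
print(gen_config.eos_token_id)  # 151645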
tokenizer_config.json
CHANGED
@@ -197,7 +197,7 @@
   "bos_token": null,
   "chat_template": "{%- if messages[0]['role'] == 'system' %}\n    {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}\n{%- else %}\n    {{- '<|im_start|>system\n<|im_end|>\n' }}\n{%- endif %}\n{%- for message in messages %}\n    {%- if (message.role == 'user') or (message.role == 'system' and not loop.first) or (message.role == 'assistant') %}\n        {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}\n    {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n    {{- '<|im_start|>assistant\n' }}\n{%- endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "<|
+  "eos_token": "<|im_end|>",
   "errors": "replace",
   "extra_special_tokens": {},
   "model_max_length": 131072,
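The chat template wraps every turn in <|im_start|> ... <|im_end|>, so pointing eos_token at <|im_end|> keeps generation from running past the end of an assistant turn. A short illustrative example (placeholder repo id) of what the template and the updated EOS token produce:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("org/model")
print(tokenizer.eos_token)  # <|im_end|>

messages = [{"role": "user", "content": "Hello"}]
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# <|im_start|>system
# <|im_end|>
# <|im_start|>user
# Hello<|im_end|>
# <|im_start|>assistant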