Training done
- tokenizer.json +16 -2
- tokenizer_config.json +7 -0
tokenizer.json
CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 768,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 768
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 1,
+    "pad_type_id": 0,
+    "pad_token": "<pad>"
+  },
   "added_tokens": [
     {
       "id": 0,
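These "truncation" and "padding" blocks are what the Rust-backed tokenizers library serializes once truncation and padding are enabled on a tokenizer. A minimal sketch of producing the settings above (the file path is illustrative, and this is one way to arrive at this serialization, not necessarily how the commit was generated):

    from tokenizers import Tokenizer

    # Load the serialized tokenizer; the path is illustrative.
    tokenizer = Tokenizer.from_file("tokenizer.json")

    # Right-side truncation to 768 tokens, "LongestFirst" strategy, no stride.
    tokenizer.enable_truncation(max_length=768, stride=0, strategy="longest_first")

    # Fixed-length padding to 768 with <pad> (id 1) appended on the right.
    tokenizer.enable_padding(
        direction="right",
        pad_id=1,
        pad_type_id=0,
        pad_token="<pad>",
        length=768,
    )

    # Saving writes back the "truncation" and "padding" blocks shown in the diff.
    tokenizer.save("tokenizer.json")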
tokenizer_config.json
CHANGED
@@ -498,11 +498,18 @@
   "cls_token": "<s>",
   "eos_token": "</s>",
   "mask_token": "<mask>",
+  "max_length": 768,
   "model_max_length": 1000000000000000019884624838656,
+  "pad_to_multiple_of": null,
   "pad_token": "<pad>",
+  "pad_token_type_id": 0,
+  "padding_side": "right",
   "processor_class": "DonutProcessor",
   "sep_token": "</s>",
   "sp_model_kwargs": {},
+  "stride": 0,
   "tokenizer_class": "XLMRobertaTokenizer",
+  "truncation_side": "right",
+  "truncation_strategy": "longest_first",
   "unk_token": "<unk>"
 }
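The added keys record the same 768-token, right-side truncation and padding setup on the slow-tokenizer side. A minimal usage sketch of encoding with the settings this config records (the repo id is a placeholder; the config itself names DonutProcessor and XLMRobertaTokenizer):

    from transformers import DonutProcessor

    # Placeholder repo id; substitute the actual model repository.
    processor = DonutProcessor.from_pretrained("user/donut-model")

    enc = processor.tokenizer(
        "example target sequence",
        padding="max_length",  # fixed-length padding, "padding_side": "right"
        truncation=True,       # "truncation_strategy": "longest_first"
        max_length=768,        # "max_length": 768
        return_tensors="pt",
    )
    print(enc["input_ids"].shape)  # torch.Size([1, 768])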