Upload tokenizer

Files changed:
- merges.txt (+0 -0)
- special_tokens_map.json (+7 -1)
merges.txt (ADDED)
The diff for this file is too large to render.
special_tokens_map.json (CHANGED)

@@ -20,5 +20,11 @@
     "rstrip": true,
     "single_word": false
   },
-  "unk_token":
+  "unk_token": {
+    "content": "�",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
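For reference, a minimal sketch of how the updated unk_token definition surfaces once the tokenizer is loaded with the Hugging Face transformers library; the repository id below is a placeholder, not the actual repo this commit was pushed to:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the repository this "Upload tokenizer" commit targets.
tokenizer = AutoTokenizer.from_pretrained("your-username/your-model")

# After this commit, special_tokens_map.json defines unk_token as a full token
# object (content "�", with lstrip/normalized/rstrip/single_word all false)
# rather than a bare value, so both lookups below should resolve to the
# replacement character.
print(tokenizer.unk_token)
print(tokenizer.special_tokens_map["unk_token"])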