{
  "add_bos_token": true,
  "add_eos_token": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": true,
      "normalized": false,
      "rstrip": true,
      "single_word": false,
      "special": true
    },
    "32000": {
      "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
| "32001": { | |
| "content": "β<PRE>", | |
| "lstrip": true, | |
| "normalized": false, | |
| "rstrip": true, | |
| "single_word": false, | |
| "special": true | |
| }, | |
| "32002": { | |
| "content": "β<MID>", | |
| "lstrip": true, | |
| "normalized": false, | |
| "rstrip": true, | |
| "single_word": false, | |
| "special": true | |
| }, | |
| "32003": { | |
| "content": "β<SUF>", | |
| "lstrip": true, | |
| "normalized": false, | |
| "rstrip": true, | |
| "single_word": false, | |
| "special": true | |
| }, | |
| "32004": { | |
| "content": "β<EOT>", | |
| "lstrip": true, | |
| "normalized": false, | |
| "rstrip": true, | |
| "single_word": false, | |
| "special": true | |
| } | |
  },
  "additional_special_tokens": [
    "▁<PRE>",
    "▁<MID>",
    "▁<SUF>",
    "▁<EOT>"
  ],
| "bos_token": "</s>", | |
| "clean_up_tokenization_spaces": false, | |
| "eos_token": "</s>", | |
| "eot_token": "β<EOT>", | |
| "fill_token": "<FILL_ME>", | |
| "legacy": false, | |
| "middle_token": "β<MID>", | |
| "model_max_length": 1000000000000000019884624838656, | |
| "pad_token": "[PAD]", | |
| "padding_side": "right", | |
| "prefix_token": "β<PRE>", | |
| "sp_model_kwargs": {}, | |
| "suffix_first": false, | |
| "suffix_token": "β<SUF>", | |
| "tokenizer_class": "CodeLlamaTokenizer", | |
| "tokenizer_file": "/root/.cache/huggingface/hub/models--WizardLM--WizardCoder-Python-13B-V1.0/snapshots/d920d26e2108377de0f676a3c4be666f5212f4a1/tokenizer.json", | |
| "trust_remote_code": false, | |
| "unk_token": "</s>", | |
| "use_default_system_prompt": false, | |
| "use_fast": true | |
| } | |