{
  "add_bos_token": false,
  "add_eos_token": false,
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "0": {
      "content": "<unk>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "</s>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "additional_special_tokens": [],
  "bos_token": "<s>",
  "chat_template": "{% for message in messages %}{% if loop.index0 != 0 and message['role'] == 'system' %}{{ raise_exception('Conversation roles must alternate system(optional)/user/assistant/user/assistant/...') }}{% elif messages[0]['role'] == 'system' and ((message['role'] == 'user' and (loop.index0 % 2 == 0)) or (message['role'] == 'assistant' and (loop.index0 % 2 == 1))) %}{{ raise_exception('Conversation roles must alternate system(optional)/user/assistant/user/assistant/...') }}{% elif messages[0]['role'] != 'system' and ((message['role'] == 'user' and (loop.index0 % 2 != 0)) or (message['role'] == 'assistant' and (loop.index0 % 2 != 1))) %}{{ raise_exception('Conversation roles must alternate system(optional)/user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '<|prompt|>' + message['content'].strip() + eos_token }}{% elif message['role'] == 'system' %}{{ '<|system|>' + message['content'].strip() + eos_token }}{% elif message['role'] == 'assistant' %}{{ '<|answer|>' + message['content'].strip() + eos_token }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|answer|>' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "cls_token": "</s>",
  "eos_token": "</s>",
  "legacy": false,
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<unk>",
  "sep_token": "</s>",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}
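A minimal sketch (not part of the config itself) of how the `chat_template` above renders a conversation via `apply_chat_template` in Hugging Face `transformers`. The tokenizer path and the example messages are placeholders; adjust them to the actual model directory or repo.

```python
from transformers import AutoTokenizer

# Placeholder path: point this at the directory containing this tokenizer_config.json.
tokenizer = AutoTokenizer.from_pretrained("path/to/tokenizer")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is the capital of France?"},
]

# tokenize=False returns the rendered prompt string so the template output can be
# inspected; add_generation_prompt=True appends the trailing '<|answer|>' marker.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
# Expected shape, given eos_token = "</s>":
# <|system|>You are a helpful assistant.</s><|prompt|>What is the capital of France?</s><|answer|>
```

Note that the template enforces strictly alternating roles (an optional leading system message, then user/assistant pairs) and raises an exception otherwise.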