{ "add_bos_token": false, "add_eos_token": false, "add_prefix_space": false, "bos_token": "<|endoftext|>", "clean_up_tokenization_spaces": true, "eos_token": "<|endoftext|>", "errors": "replace", "model_max_length": 4096, "pad_token": "<|endoftext|>", "tokenizer_class": "AutoTokenizer", "unk_token": "<|endoftext|>", "auto_map": { "AutoTokenizer": [ "transformers", "AutoTokenizer" ] }, "_tokenizer_fallbacks": [ "GPT2Tokenizer", "LlamaTokenizer", "PreTrainedTokenizerFast" ] }