{ "tokenizer_class": "GPT2Tokenizer", "model_max_length": 1024, "pad_token": "<|endoftext|>", "eos_token": "<|endoftext|>", "bos_token": "<|endoftext|>", "unk_token": "<|endoftext|>", "additional_special_tokens": [ "<|im_start|>", "<|im_end|>", "", "" ] }