Update tokenizer_config.json
tokenizer_config.json  (+1 -1)
@@ -38,7 +38,7 @@
   "bos_token": "<|endoftext|>",
   "chat_template": "{% for message in messages %}\n{% if message['role'] == 'user' %}\n{{ '<|user|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'system' %}\n{{ '<|system|>\n' + message['content'] + eos_token }}\n{% elif message['role'] == 'assistant' %}\n{{ '<|assistant|>\n'  + message['content'] + eos_token }}\n{% endif %}\n{% if loop.last and add_generation_prompt %}\n{{ '<|assistant|>' }}\n{% endif %}\n{% endfor %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|
+  "eos_token": "<|endoftext|>",
   "tokenizer_class": "GPT2TokenizerFast",
   "model_max_length": 4096,
   "pad_token": "<|endoftext|>",
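The chat template above appends eos_token after every message, so "eos_token" must resolve to a defined special token; the corrected value "<|endoftext|>" matches the bos and pad tokens already in this config. A quick way to check the rendered prompt after this fix is tokenizer.apply_chat_template. The sketch below is illustrative only; "your-org/your-model" is a placeholder repo id, not the actual model this commit belongs to.

# Sketch: inspect how the chat template renders with the fixed eos_token.
# "your-org/your-model" is a hypothetical repo id used for illustration.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("your-org/your-model")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is the capital of France?"},
]

# Each message is followed by eos_token ("<|endoftext|>" after this fix),
# and add_generation_prompt=True closes the prompt with "<|assistant|>".
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)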

