Upload tokenizer
- special_tokens_map.json +1 -1
- tokenizer_config.json +3 -3
special_tokens_map.json CHANGED

@@ -1,6 +1,6 @@
 {
   "bos_token": {
-    "content": "<
+    "content": "<LM>",
     "lstrip": false,
     "normalized": true,
     "rstrip": false,
tokenizer_config.json CHANGED

@@ -1,5 +1,5 @@
 {
-  "add_bos_token":
+  "add_bos_token": true,
   "add_prefix_space": false,
   "added_tokens_decoder": {
     "0": {
@@ -848,7 +848,7 @@
       "normalized": true,
       "rstrip": false,
       "single_word": false,
-      "special":
+      "special": true
     },
     "50358": {
       "content": "<SC1>",
@@ -899,7 +899,7 @@
       "special": false
     }
   },
-  "bos_token": "<
+  "bos_token": "<LM>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "errors": "replace",
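
The net effect of this commit is that the tokenizer's BOS token becomes <LM>, it is flagged as a special token, and "add_bos_token": true asks the tokenizer to prepend it during encoding. A minimal sketch to verify the change after pulling the updated files; the repo id is a placeholder, and automatic BOS prepending is assumed to work as it does for GPT-2-style slow tokenizers (for fast tokenizers it depends on the post-processor):

from transformers import AutoTokenizer

# Placeholder repo id; substitute the repository this commit belongs to.
tok = AutoTokenizer.from_pretrained("your-org/your-model")

print(tok.bos_token)                      # expected: <LM>
print(tok.convert_tokens_to_ids("<LM>"))  # id assigned to the new BOS token

# With "add_bos_token": true, encoding should prepend the BOS id.
ids = tok("hello")["input_ids"]
print(ids[0] == tok.bos_token_id)         # expected: True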