mazegpt / tokenizer.json
{
  "version": "1.0",
  "truncation": null,
  "padding": null,
  "added_tokens": [],
  "normalizer": null,
  "pre_tokenizer": null,
  "post_processor": null,
  "decoder": null,
  "model": {
    "type": "BPE",
    "dropout": null,
    "unk_token": null,
    "continuing_subword_prefix": null,
    "end_of_word_suffix": null,
    "fuse_unk": false,
    "byte_fallback": false,
    "ignore_merges": false,
    "vocab": {
      " ": 0,
      "#": 1,
      "s": 2,
      "e": 3,
      ".": 4,
      ";": 5,
      "S": 6,
      "N": 7,
      "E": 8,
      "W": 9,
      "\n": 10,
      "c": 11,
      "m": 12
    },
    "merges": []
  }
}
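
This is a BPE config whose vocab holds 13 single-character tokens and whose merges list is empty, so in practice it tokenizes character by character. A minimal sketch of loading it with the Hugging Face `tokenizers` library, assuming the file is saved locally as tokenizer.json; the input string "se" is a hypothetical example chosen because both characters appear in the vocab.

# Minimal sketch, assuming the `tokenizers` package is installed and this
# file is saved locally as tokenizer.json.
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")

# With a 13-entry character vocab and an empty merges list, BPE reduces to
# character-level encoding: each known character maps straight to its id.
enc = tok.encode("se")  # hypothetical input; 's' and 'e' are in the vocab
print(enc.tokens)  # ['s', 'e']
print(enc.ids)     # [2, 3]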