OpenSourceRonin committed
Commit 8838286 · verified · 1 Parent(s): cdea967

Upload folder using huggingface_hub

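The commit message indicates the files were pushed with the huggingface_hub upload API. A minimal sketch of how such a folder upload is typically made, assuming a placeholder repo id and local path (neither is given in this commit):

# Minimal sketch of a folder upload with huggingface_hub; repo_id and
# folder_path are placeholders, not values taken from this commit.
from huggingface_hub import upload_folder

upload_folder(
    repo_id="user/repo",           # placeholder target repo
    folder_path="./model_dir",     # placeholder local folder with the updated files
    commit_message="Upload folder using huggingface_hub",
)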
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
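The new .gitattributes entry makes Git LFS store tokenizer.json as a pointer plus blob rather than a plain text blob. As a hedged note for consumers, LFS-tracked files are fetched transparently through the hub client, so downloads are unchanged; the repo id below is a placeholder, not taken from this commit.

# Sketch: downloading the now LFS-tracked tokenizer.json works as before;
# repo_id is a placeholder.
from huggingface_hub import hf_hub_download

local_path = hf_hub_download(repo_id="user/repo", filename="tokenizer.json")
print(local_path)  # path to the cached local copy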
config.json CHANGED
The diff for this file is too large to render. See raw diff
 
generation_config.json CHANGED
@@ -1,12 +1,6 @@
  {
- "attn_implementation": "flash_attention_2",
+ "_from_model_config": true,
  "bos_token_id": 128000,
- "cache_config": null,
- "do_sample": true,
  "eos_token_id": 128001,
- "max_length": 4096,
- "temperature": 0.6,
- "top_p": 0.9,
- "transformers_version": "4.37.2",
- "watermarking_config": null
+ "transformers_version": "4.49.0"
  }
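The generation config is trimmed to its minimal form: the explicit sampling defaults (do_sample, temperature, top_p, max_length) and the flash_attention_2 hint are removed, "_from_model_config": true marks the file as derived from the model config, and the transformers version is bumped to 4.49.0. A hedged sketch of what this means for callers, assuming a placeholder repo id: sampling settings are no longer baked into the repo and can be passed at generate() time instead.

# Sketch: loading the trimmed generation config; repo_id is a placeholder.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("user/repo")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 128000, 128001

# The former repo defaults would now be supplied explicitly if sampling is wanted:
# model.generate(**inputs, do_sample=True, temperature=0.6, top_p=0.9, max_length=4096)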
model.safetensors.index.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -2052,11 +2052,12 @@
  "bos_token": "<|begin_of_text|>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|end_of_text|>",
+ "extra_special_tokens": {},
  "legacy": false,
  "model_input_names": [
    "input_ids",
    "attention_mask"
  ],
  "model_max_length": 1000000000000000019884624838656,
- "tokenizer_class": "PreTrainedTokenizerFast"
+ "tokenizer_class": "PreTrainedTokenizer"
  }
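Two changes here: an empty "extra_special_tokens" mapping is added, and "tokenizer_class" switches from PreTrainedTokenizerFast to PreTrainedTokenizer. A hedged sketch of how to check which class AutoTokenizer actually resolves to for this repo, assuming a placeholder repo id:

# Sketch: inspect the loaded tokenizer class; repo_id is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/repo")
print(type(tok).__name__, tok.is_fast)
print(tok.bos_token, tok.eos_token)  # <|begin_of_text|> <|end_of_text|>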