silent666 committed
Commit b27ed7c · verified · 1 Parent(s): ab63984

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -10,20 +10,20 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 256,
+  "lora_alpha": 240,
   "lora_dropout": 0.05,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
   "peft_type": "LORA",
-  "r": 128,
+  "r": 120,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "o_proj",
     "qkv_proj",
-    "down_proj",
     "gate_up_proj",
-    "o_proj"
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:96e869aaa502d616f406b3f768d0d6c2915c514f7020a58dea5c3c5fdc01dd11
-size 1782623560
+oid sha256:4c89f12182f2d870b44abfafea4d2dd7e953f9a97042bed334a70545731d754b
+size 1671212336
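
The adapter weights pointer changes accordingly: the file shrinks from 1,782,623,560 to 1,671,212,336 bytes, consistent with the lower rank. A quick, hedged way to check the new shapes after downloading the file (the path is just the file name used in this repo):

from safetensors.torch import load_file

# Load the adapter tensors and print their shapes;
# the LoRA A/B matrices should reflect the new rank of 120.
state = load_file("adapter_model.safetensors")
for name, tensor in state.items():
    print(name, tuple(tensor.shape))
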
optimizer.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:027191d0457b5d2c326422fb664c713277a291d1f12dd0568e9e19cbacf8b4a2
+size 849024404
rng_state.pth ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5fa770fc6958cea00eccdaa3cda5d37687d23c795db34540b4a20675363350ad
+size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a388ae19ccd5c0b70dc94b6ae8d20b7852d575cdeb28a7a1e26b5c3f8afc75c4
+size 1064
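
optimizer.pt, rng_state.pth, and scheduler.pt are the extra state a transformers Trainer checkpoint writes so a run can be resumed rather than restarted. A rough sketch for inspecting the uploaded optimizer state (the local path is an assumption, and the exact contents depend on the optimizer used):

import torch

# The file is a torch optimizer state_dict saved by Trainer;
# it usually exposes "state" and "param_groups".
opt_state = torch.load("optimizer.pt", map_location="cpu")
print(list(opt_state.keys()))

With these files alongside trainer_state.json, Trainer.train(resume_from_checkpoint=...) can continue the run from where it stopped.
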
trainer_state.json ADDED
@@ -0,0 +1,40 @@
+{
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 1.9733333333333334,
+  "eval_steps": 500,
+  "global_step": 37,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 1.0666666666666667,
+      "grad_norm": 1.152064323425293,
+      "learning_rate": 0.0001756226478225652,
+      "loss": 1.0404,
+      "step": 20
+    }
+  ],
+  "logging_steps": 20,
+  "max_steps": 54,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 3,
+  "save_steps": 500,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": false
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 2.67429272620032e+16,
+  "train_batch_size": 1,
+  "trial_name": null,
+  "trial_params": null
+}
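
trainer_state.json records where the run stands: step 37 of 54 (about 1.97 of 3 epochs), with a single logged entry at step 20 (loss 1.0404, learning rate ~1.76e-4). A minimal sketch for reading it back, assuming the file has been downloaded locally:

import json

# Print progress and the logged loss values from the uploaded trainer state.
with open("trainer_state.json") as f:
    state = json.load(f)

print(state["global_step"], "of", state["max_steps"], "steps")
for entry in state["log_history"]:
    print(entry.get("step"), entry.get("loss"))
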
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:53e0a794f41a35d21c9055c661d36d4d62a2251a4663119a94e098e2edd8e938
-size 5432
+oid sha256:dd74584b43df3cbbf5bb5814c57d617bdf1e64f4da0334244bd625527a27acd7
+size 5496