Liu-Xiang committed (verified)
Commit: 588770a
Parent(s): d6c5257

Training in progress, epoch 1

adapter_config.json CHANGED
@@ -19,13 +19,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
-    "gate_proj",
-    "q_proj",
-    "v_proj",
     "k_proj",
     "down_proj",
-    "up_proj"
+    "o_proj",
+    "up_proj",
+    "q_proj",
+    "gate_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:25dd308cf6fdf6a26a2ab182e80a61013c0903905301f27212f4b9a7c48e1386
+oid sha256:a30cf08ae254153cc8ff9d3a82dc59616ebd53aa09792056ac13f238631b1fa0
 size 1803907984
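
The Git LFS pointer swap means the adapter weights themselves were replaced (same 1,803,907,984-byte file, new content hash). A minimal loading sketch, assuming the usual peft workflow; the base-model and adapter repository identifiers are placeholders, since the commit page does not name them.

from transformers import AutoModelForCausalLM
from peft import PeftModel

BASE_MODEL_ID = "base-model-id"        # placeholder: the underlying base model is not named here
ADAPTER_ID = "Liu-Xiang/adapter-repo"  # placeholder: the repository that holds this commit

base = AutoModelForCausalLM.from_pretrained(BASE_MODEL_ID)
# revision pins the download to this commit's version of adapter_model.safetensors
model = PeftModel.from_pretrained(base, ADAPTER_ID, revision="588770a")
model.eval()
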
runs/Jul18_01-50-54_llm-dpo-finetuning-workbench-0/events.out.tfevents.1721267462.llm-dpo-finetuning-workbench-0.245.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4759990b7ba1db30c9987aebcad5df9b940f2afdce861fddd03197536a26b195
+size 5706
runs/Jul18_02-23-21_llm-dpo-finetuning-workbench-0/events.out.tfevents.1721269409.llm-dpo-finetuning-workbench-0.222.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc5146fea71dbf5e8452a4d3c18ea5f2cc6575e56609faafb6ac469ff66d5489
+size 5706
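
The two added files are TensorBoard event logs from the two runs started on Jul 18. A short sketch of reading them back with TensorBoard's Python API; the "train/loss" tag is an assumption, since the actual tags depend on what the Trainer logged.

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Point at one of the run directories added in this commit.
ea = EventAccumulator("runs/Jul18_02-23-21_llm-dpo-finetuning-workbench-0")
ea.Reload()

print(ea.Tags()["scalars"])             # list the scalar tags that were actually logged
for event in ea.Scalars("train/loss"):  # assumed tag name
    print(event.step, event.value)
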
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ad621eaf9b810eb479cc7aba4658e65cf5b66393e285dabb2d2a559b76469937
+oid sha256:fe5dcd2155d81c37b552dcad51615bd4eafc6e118c1a78b652496e17c839c2ae
 size 4728
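
training_args.bin is the pickled transformers TrainingArguments saved with the run; its hash changed while its size stayed at 4728 bytes. A sketch for inspecting it, assuming transformers is installed and the repository is trusted (it is a pickle, so recent PyTorch requires weights_only=False to load it).

import torch

# Not a tensor file: this unpickles a transformers.TrainingArguments object,
# so only do this for repositories you trust.
args = torch.load("training_args.bin", weights_only=False)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
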