andrewatef committed (verified)
Commit 332db56 · 1 Parent(s): 3b6de10

Training in progress, step 1380

adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "o_proj",
     "v_proj",
-    "up_proj",
     "k_proj",
-    "q_proj",
     "gate_proj",
-    "down_proj"
+    "q_proj",
+    "up_proj",
+    "down_proj",
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:263ac165d76d977836a6599d78fbf3bd9d8f083c49b745e03a170c0e1d23cf06
+oid sha256:59921b49ec6ae8ba8b759e1d19e7cc0d1e6bd96fad224184d7b8868c546cefaa
 size 200068512
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4d8da22cc851342a46aa7769d3e5de4a8cf03f26f294499666b8260369391530
+oid sha256:ef254c376356ad2c4eff9d767b09c39e50e0cb43532f9f5a8a32019935f2b29b
 size 5624
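
Note on the adapter_config.json change above: the old and new target_modules lists contain the same seven projection modules, only in a different order, so the set of layers receiving LoRA updates is unchanged. For context, this is the list PEFT reads back into a LoraConfig when the adapter is loaded. Below is a minimal sketch of how such a config is typically constructed with the peft library; the rank, alpha, and base model name are illustrative assumptions, not values taken from this commit — only target_modules, task_type, and use_dora mirror the committed adapter_config.json.

# Minimal sketch of a LoRA config matching the committed target_modules.
# r, lora_alpha, and the base model name are assumptions for illustration only.
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForCausalLM

lora_config = LoraConfig(
    r=16,                      # assumed rank, not read from this commit
    lora_alpha=32,             # assumed scaling factor
    target_modules=[           # same module set as the new config; order does not matter
        "v_proj",
        "k_proj",
        "gate_proj",
        "q_proj",
        "up_proj",
        "down_proj",
        "o_proj",
    ],
    task_type="CAUSAL_LM",
    use_dora=False,
)

base_model = AutoModelForCausalLM.from_pretrained("base-model-name")  # placeholder model id
model = get_peft_model(base_model, lora_config)
model.print_trainable_parameters()  # shows how few parameters the adapter trains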