robertou2 committed on
Commit
742d82a
·
verified ·
1 Parent(s): d02ab8c

Upload folder using huggingface_hub

Browse files
README.md CHANGED
@@ -199,4 +199,4 @@ Carbon emissions can be estimated using the [Machine Learning Impact calculator]
199
  [More Information Needed]
200
  ### Framework versions
201
 
202
- - PEFT 0.13.2
 
199
  [More Information Needed]
200
  ### Framework versions
201
 
202
+ - PEFT 0.15.0
adapter_config.json CHANGED
@@ -10,13 +10,13 @@
10
  "layers_pattern": null,
11
  "layers_to_transform": null,
12
  "loftq_config": {},
13
- "lora_alpha": 128,
14
  "lora_dropout": 0.05,
15
  "megatron_config": null,
16
  "megatron_core": "megatron.core",
17
  "modules_to_save": null,
18
  "peft_type": "LORA",
19
- "r": 8,
20
  "rank_pattern": {},
21
  "revision": null,
22
  "target_modules": [
@@ -27,5 +27,5 @@
27
  ],
28
  "task_type": "CAUSAL_LM",
29
  "use_dora": false,
30
- "use_rslora": true
31
  }
 
10
  "layers_pattern": null,
11
  "layers_to_transform": null,
12
  "loftq_config": {},
13
+ "lora_alpha": 32,
14
  "lora_dropout": 0.05,
15
  "megatron_config": null,
16
  "megatron_core": "megatron.core",
17
  "modules_to_save": null,
18
  "peft_type": "LORA",
19
+ "r": 16,
20
  "rank_pattern": {},
21
  "revision": null,
22
  "target_modules": [
 
27
  ],
28
  "task_type": "CAUSAL_LM",
29
  "use_dora": false,
30
+ "use_rslora": false
31
  }
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fcabf4d1dde1ac9c5d179ff769c4a1e20c8f126a7a6f368c5db9005ae877ca82
3
- size 50365768
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:21b3178d6c747660b0ba51c47c81ae02b087763530517cc2d30d0062a0f9c98d
3
+ size 100697728
tokenizer_config.json CHANGED
@@ -125,7 +125,7 @@
125
  "max_seq_length": 131072,
126
  "model_max_length": 131072,
127
  "pad_token": "<|endoftext|>",
128
- "padding_side": "right",
129
  "sp_model_kwargs": {},
130
  "tokenizer_class": "LlamaTokenizer",
131
  "unk_token": "<unk>",
 
125
  "max_seq_length": 131072,
126
  "model_max_length": 131072,
127
  "pad_token": "<|endoftext|>",
128
+ "padding_side": "left",
129
  "sp_model_kwargs": {},
130
  "tokenizer_class": "LlamaTokenizer",
131
  "unk_token": "<unk>",