Commit 2183335 · Update README.md
Parent(s): 6a3a844
README.md CHANGED
@@ -18,7 +18,7 @@ This is a generative model converted to fp16 format based on [ai-forever/ruGPT-3
 from transformers import AutoTokenizer
 from auto_gptq import AutoGPTQForCausalLM
 
-model = AutoGPTQForCausalLM.
+model = AutoGPTQForCausalLM.from_quantized('Gaivoronsky/ruGPT-3.5-13B-8bit', device="cuda:0", use_triton=False)
 tokenizer = AutoTokenizer.from_pretrained('Gaivoronsky/ruGPT-3.5-13B-8bit')
 
 request = "Человек: Сколько весит жираф? Помощник: "
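For context, the loading line fixed by this commit can be assembled into a complete, runnable example. The sketch below is an assumption built from the README snippet above: the generation call, max_new_tokens, and torch.no_grad() are illustrative additions, not part of the original README. The prompt string translates to "Human: How much does a giraffe weigh? Assistant: ".

# Minimal usage sketch (assumption): load the quantized checkpoint as in the
# commit above and run a single generation.
import torch
from transformers import AutoTokenizer
from auto_gptq import AutoGPTQForCausalLM

model_id = 'Gaivoronsky/ruGPT-3.5-13B-8bit'

# Same loading code as in the updated README
model = AutoGPTQForCausalLM.from_quantized(model_id, device="cuda:0", use_triton=False)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# "Человек: Сколько весит жираф? Помощник: " = "Human: How much does a giraffe weigh? Assistant: "
request = "Человек: Сколько весит жираф? Помощник: "

# Illustrative generation step; max_new_tokens is an assumed value
inputs = tokenizer(request, return_tensors="pt").to(model.device)
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))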