fix(README): remove `trust_remote_code` requirement from tokenizer snippet
README.md CHANGED
@@ -30,7 +30,7 @@ Get started generating text with `Stable LM 2 1.6B` by using the following code
 
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
-tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b", trust_remote_code=True)
+tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b")
 model = AutoModelForCausalLM.from_pretrained(
   "stabilityai/stablelm-2-1_6b",
   torch_dtype="auto",
@@ -54,7 +54,7 @@ print(tokenizer.decode(tokens[0], skip_special_tokens=True))
 
 ```python
 from transformers import AutoModelForCausalLM, AutoTokenizer
-tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b", trust_remote_code=True)
+tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b")
 model = AutoModelForCausalLM.from_pretrained(
   "stabilityai/stablelm-2-1_6b",
   torch_dtype="auto",
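`trust_remote_code=True` tells `from_pretrained` to execute custom modeling/tokenizer code shipped in the model repository; once an architecture is supported natively by `transformers`, the flag is unnecessary, which is what this change reflects. For reference, a minimal sketch of the resulting snippet end to end, assuming a generation step consistent with the line-54 context (`print(tokenizer.decode(tokens[0], skip_special_tokens=True))`); the prompt and sampling parameters below are illustrative placeholders, not taken from the README:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# After this commit, no trust_remote_code is needed: the stock
# transformers classes load the model and tokenizer directly.
tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-2-1_6b")
model = AutoModelForCausalLM.from_pretrained(
    "stabilityai/stablelm-2-1_6b",
    torch_dtype="auto",
)

# Illustrative prompt and sampling settings (assumptions, not from the diff).
inputs = tokenizer("The weather is always wonderful", return_tensors="pt")
tokens = model.generate(
    **inputs,
    max_new_tokens=64,
    temperature=0.7,
    top_p=0.95,
    do_sample=True,
)
print(tokenizer.decode(tokens[0], skip_special_tokens=True))
```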