Spaces:
Sleeping
Updated the controller
Browse files
app.py
CHANGED
@@ -98,7 +98,7 @@ Analyst_Instructions: <...>
|
|
98 |
inputs = tokenizer.encode(prompt, return_tensors="pt")
|
99 |
outputs = model.generate(
|
100 |
inputs,
|
101 |
-
max_length=
|
102 |
temperature=0.7,
|
103 |
do_sample=True,
|
104 |
top_p=0.9,
|
@@ -131,7 +131,7 @@ If out of scope/unethical, politely refuse.
|
|
131 |
inputs = tokenizer.encode(prompt, return_tensors="pt")
|
132 |
outputs = model.generate(
|
133 |
inputs,
|
134 |
-
max_length=
|
135 |
temperature=0.7,
|
136 |
do_sample=True,
|
137 |
top_p=0.9,
|
@@ -163,7 +163,7 @@ If out of scope/unethical, politely refuse.
|
|
163 |
inputs = tokenizer.encode(prompt, return_tensors="pt")
|
164 |
outputs = model.generate(
|
165 |
inputs,
|
166 |
-
max_length=
|
167 |
temperature=0.7,
|
168 |
do_sample=True,
|
169 |
top_p=0.9,
|
|
|
98 |
inputs = tokenizer.encode(prompt, return_tensors="pt")
|
99 |
outputs = model.generate(
|
100 |
inputs,
|
101 |
+
max_length=256, # Extend length for better outputs
|
102 |
temperature=0.7,
|
103 |
do_sample=True,
|
104 |
top_p=0.9,
|
|
|
131 |
inputs = tokenizer.encode(prompt, return_tensors="pt")
|
132 |
outputs = model.generate(
|
133 |
inputs,
|
134 |
+
max_length=256, # Extend length for detailed outputs
|
135 |
temperature=0.7,
|
136 |
do_sample=True,
|
137 |
top_p=0.9,
|
|
|
163 |
inputs = tokenizer.encode(prompt, return_tensors="pt")
|
164 |
outputs = model.generate(
|
165 |
inputs,
|
166 |
+
max_length=256, # Extend length for detailed outputs
|
167 |
temperature=0.7,
|
168 |
do_sample=True,
|
169 |
top_p=0.9,
|