Spaces:
Sleeping
Sleeping
Switched controller model
Browse files
app.py
CHANGED
@@ -3,7 +3,7 @@ import streamlit as st
|
|
3 |
from transformers import AutoTokenizer, AutoModelForCausalLM, AutoConfig, pipeline
|
4 |
|
5 |
try:
|
6 |
-
config = AutoConfig.from_pretrained("microsoft/Phi-3
|
7 |
print("Model configuration loaded successfully:")
|
8 |
print(config)
|
9 |
except KeyError as e:
|
@@ -39,10 +39,10 @@ Keep your responses concise. If the request is unethical or out of scope, polite
|
|
39 |
|
40 |
@st.cache_resource
|
41 |
def load_model_controller():
|
42 |
-
# Controller: microsoft/Phi-3
|
43 |
-
pipe = pipeline("text-generation", model="microsoft/Phi-3
|
44 |
-
tokenizerC = AutoTokenizer.from_pretrained("microsoft/Phi-3
|
45 |
-
modelC = AutoModelForCausalLM.from_pretrained("microsoft/Phi-3
|
46 |
return tokenizerC, modelC, pipe
|
47 |
|
48 |
@st.cache_resource
|
|
|
3 |
from transformers import AutoTokenizer, AutoModelForCausalLM, AutoConfig, pipeline
|
4 |
|
5 |
try:
|
6 |
+
config = AutoConfig.from_pretrained("microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
|
7 |
print("Model configuration loaded successfully:")
|
8 |
print(config)
|
9 |
except KeyError as e:
|
|
|
39 |
|
40 |
@st.cache_resource
def load_model_controller():
    """Load and cache the controller model (microsoft/Phi-3-mini-4k-instruct).

    Returns:
        tuple: (tokenizerC, modelC, pipe) — the tokenizer, the causal-LM
        model, and a ready-to-use text-generation pipeline, all backed by
        the same loaded checkpoint.

    Cached with st.cache_resource so the weights are loaded once per
    Streamlit server process, not on every rerun.
    """
    # Controller: microsoft/Phi-3-mini-4k-instruct
    pipe = pipeline("text-generation", model="microsoft/Phi-3-mini-4k-instruct", trust_remote_code=True)
    # The pipeline has already instantiated a tokenizer and model for this
    # checkpoint; reuse them instead of calling from_pretrained() twice
    # more, which would download/instantiate the same multi-GB weights a
    # second and third time.
    tokenizerC = pipe.tokenizer
    modelC = pipe.model
    return tokenizerC, modelC, pipe
|
47 |
|
48 |
@st.cache_resource
|