Spaces:
Sleeping
Sleeping
Switched controller model from phi-4 to phi-3.5-mini-instruct to improve performance
Browse files
app.py
CHANGED
@@ -3,7 +3,7 @@ import streamlit as st
|
|
3 |
from transformers import AutoTokenizer, AutoModelForCausalLM, AutoConfig, pipeline
|
4 |
|
5 |
try:
|
6 |
-
config = AutoConfig.from_pretrained("microsoft/phi-4", trust_remote_code=True)
|
7 |
print("Model configuration loaded successfully:")
|
8 |
print(config)
|
9 |
except KeyError as e:
|
@@ -40,11 +40,11 @@ Keep your responses concise. If the request is unethical or out of scope, polite
|
|
40 |
|
41 |
@st.cache_resource
|
42 |
def load_model_controller():
|
43 |
-
# Controller: microsoft/phi-4
|
44 |
pipe = pipeline
|
45 |
-
pipe = pipeline("text-generation", model="microsoft/phi-4", trust_remote_code=True)
|
46 |
-
tokenizerC = AutoTokenizer.from_pretrained("microsoft/phi-4", trust_remote_code=True)
|
47 |
-
modelC = AutoModelForCausalLM.from_pretrained("microsoft/phi-4", trust_remote_code=True)
|
48 |
return tokenizerC, modelC, pipe
|
49 |
|
50 |
@st.cache_resource
|
|
|
3 |
from transformers import AutoTokenizer, AutoModelForCausalLM, AutoConfig, pipeline
|
4 |
|
5 |
try:
|
6 |
+
config = AutoConfig.from_pretrained("microsoft/phi-3.5-mini-instruct", trust_remote_code=True)
|
7 |
print("Model configuration loaded successfully:")
|
8 |
print(config)
|
9 |
except KeyError as e:
|
|
|
40 |
|
41 |
@st.cache_resource
|
42 |
def load_model_controller():
|
43 |
+
# Controller: microsoft/phi-3.5-mini-instruct
|
44 |
pipe = pipeline
|
45 |
+
pipe = pipeline("text-generation", model="microsoft/phi-3.5-mini-instruct", trust_remote_code=True)
|
46 |
+
tokenizerC = AutoTokenizer.from_pretrained("microsoft/phi-3.5-mini-instruct", trust_remote_code=True)
|
47 |
+
modelC = AutoModelForCausalLM.from_pretrained("microsoft/phi-3.5-mini-instruct", trust_remote_code=True)
|
48 |
return tokenizerC, modelC, pipe
|
49 |
|
50 |
@st.cache_resource
|