HaveAI committed
Commit 2655d77 · verified · 1 Parent(s): cb7d534

Update app.py

Files changed (1)
  1. app.py +21 -57
app.py CHANGED
@@ -1,71 +1,35 @@
- import os
  import gradio as gr
+ import os
  import requests

- # Load the token and model name from environment variables
  HF_TOKEN = os.getenv("HF_API_TOKEN")
  MODEL_NAME = os.getenv("MODEL_NAME", "google/flan-t5-base")

- # Request URL for the model via the Hugging Face Inference API
  API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
  HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}

-
- # Model query function
- def query(payload):
-     print("⏳ Sending request to the model...")
-     response = requests.post(API_URL, headers=HEADERS, json=payload)
-     print("📡 Status:", response.status_code)
-     print("📩 Response:", response.text)
-
-     if response.status_code != 200:
-         return f"API error: {response.status_code} {response.text}"
-     return response.json()
-
-
- # Chat function
- def chat(message, history):
-     if history is None:
-         history = []
-
-     print("👤 User message:", message)
-     history.append((message, ""))
-
-     # Query the model
-     response = query({"inputs": message})
-
-     # Process the response
-     output = ""
-     if isinstance(response, dict):
-         if "error" in response:
-             output = "❌ Error: " + response["error"]
-         elif "generated_text" in response:
-             output = response["generated_text"]
-         elif isinstance(response.get("choices"), list) and response["choices"]:
-             output = response["choices"][0].get("message", {}).get("content", "")
+ def chat_fn(message, history):
+     try:
+         payload = {"inputs": message}
+         response = requests.post(API_URL, headers=HEADERS, json=payload)
+         if response.status_code != 200:
+             return f"API error: {response.status_code}\n{response.text}"
+         result = response.json()
+
+         # Handle the different response formats
+         if isinstance(result, list) and "generated_text" in result[0]:
+             return result[0]["generated_text"]
+         elif isinstance(result, dict) and "generated_text" in result:
+             return result["generated_text"]
          else:
-             output = str(response)
-     elif isinstance(response, list) and "generated_text" in response[0]:
-         output = response[0]["generated_text"]
-     else:
-         output = str(response)
-
-     if not isinstance(output, str):
-         output = str(output)
+             return "❌ Unrecognized response"
+     except Exception as e:
+         return f"❌ Error: {str(e)}"

-     history[-1] = (message, output)
-     return history, history
-
-
- # Gradio interface
- chat_ui = gr.ChatInterface(
-     fn=chat,
+ gr.ChatInterface(
+     fn=chat_fn,
      title="FlareGPT",
      retry_btn="🔄 Retry",
      undo_btn="↩️ Undo",
-     clear_btn="🗑️ Clear"
- )
-
- # Launch
- if __name__ == "__main__":
-     chat_ui.launch()
+     clear_btn="🗑️ Clear",
+ ).launch()
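
For reference, a minimal standalone sketch of the request the new chat_fn sends, handy for checking the token and model outside Gradio. It is not part of the commit: it assumes HF_API_TOKEN is set in the environment, uses the same default model as app.py, and the prompt text is arbitrary; the response handling mirrors the two shapes chat_fn checks.

    import os
    import requests

    # Same endpoint, auth header, and payload shape as app.py (sketch only)
    model = os.getenv("MODEL_NAME", "google/flan-t5-base")
    url = f"https://api-inference.huggingface.co/models/{model}"
    headers = {"Authorization": f"Bearer {os.getenv('HF_API_TOKEN')}"}

    resp = requests.post(url, headers=headers, json={"inputs": "Translate to French: Hello"})
    print(resp.status_code)

    data = resp.json()
    # The Inference API usually returns [{"generated_text": "..."}] for models like
    # flan-t5-base; chat_fn also accepts a bare {"generated_text": "..."} dict.
    if isinstance(data, list) and data and "generated_text" in data[0]:
        print(data[0]["generated_text"])
    elif isinstance(data, dict) and "generated_text" in data:
        print(data["generated_text"])
    else:
        print(data)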