# FlareGPT — minimal Gradio chat UI backed by the Hugging Face Inference API.
import os
import gradio as gr
import requests
# API token for the Hugging Face Inference API; read from the environment
# so the secret never lives in source control.
HF_TOKEN = os.getenv("HF_API_TOKEN")
# Model to query; overridable via env var, defaults to flan-t5-base.
MODEL_NAME = os.getenv("MODEL_NAME", "google/flan-t5-base")
# Hosted inference endpoint for the selected model.
API_URL = f"https://api-inference.huggingface.co/models/{MODEL_NAME}"
# NOTE(review): if HF_API_TOKEN is unset this sends "Bearer None" — requests
# will then fail with 401; confirm the Space always defines the secret.
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
def query(payload):
    """POST *payload* to the HF Inference API and return the parsed JSON.

    Args:
        payload: JSON-serializable request body, e.g. ``{"inputs": "..."}``.

    Returns:
        The decoded JSON response on success, or a human-readable error
        string (same format as the HTTP-error branch) on failure — callers
        already handle both shapes.
    """
    try:
        # Timeout keeps the UI from hanging forever on a stalled connection.
        response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=60)
    except requests.RequestException as exc:
        # Network-level failures (DNS, refused connection, timeout) previously
        # raised and crashed the Gradio callback; surface them like HTTP errors.
        return f"❌ Ошибка API: {exc}"
    if response.status_code != 200:
        return f"❌ Ошибка API: {response.status_code} {response.text}"
    return response.json()
def _extract_text(response):
    """Best-effort extraction of generated text from an API response.

    Handles the response shapes seen from the Inference API: an error dict,
    a dict with ``generated_text``, an OpenAI-style ``choices`` dict, or a
    list of ``{"generated_text": ...}`` items. Always returns a str,
    falling back to ``str(response)`` for unknown shapes.
    """
    if isinstance(response, dict):
        if "error" in response:
            return str(response["error"])
        if "generated_text" in response:
            return str(response["generated_text"])
        choices = response.get("choices")
        if isinstance(choices, list) and choices:
            return str(choices[0].get("message", {}).get("content", ""))
        return str(response)
    # Guard with isinstance: the original did `"generated_text" in response[0]`,
    # which does a substring search (or raises) when response[0] is not a dict.
    if (
        isinstance(response, list)
        and response
        and isinstance(response[0], dict)
        and "generated_text" in response[0]
    ):
        return str(response[0]["generated_text"])
    return str(response)


def chat(message, history):
    """Gradio ChatInterface callback: return the model's reply for *message*.

    Bug fix: ``gr.ChatInterface`` expects its fn to return ONLY the reply
    text — it maintains the conversation history itself. The original
    appended to ``history`` and returned ``(history, history)``, which
    breaks ChatInterface rendering. ``history`` is accepted (the interface
    passes it) but intentionally unused.
    """
    response = query({"inputs": message})
    return _extract_text(response)
# Wire `chat` as the message callback of a stock Gradio chat UI.
chat_ui = gr.ChatInterface(fn=chat, title="FlareGPT")
# Launch only when run as a script (HF Spaces also imports this module).
if __name__ == "__main__":
    chat_ui.launch()