import gradio as gr
import pandas as pd
import os
import requests

# Get the Hugging Face API token from the HF_TOKEN secret
HF_TOKEN = os.getenv("HF_TOKEN")
MODEL_ID = "deepset/tinyroberta-squad2"

# Holds the text of the most recently uploaded file
file_data = {"text": None}
headers = {"Authorization": f"Bearer {HF_TOKEN}"}

# Handle file upload
def upload_file(file):
    if file is None:
        return [], "❌ No file uploaded."
    # Gradio may pass either a filepath string or a tempfile-like object with a .name attribute
    path = file if isinstance(file, str) else getattr(file, "name", None)
    if not path:
        return [], "❌ No file uploaded."
    try:
        if path.endswith(".csv"):
            df = pd.read_csv(path)
            file_data["text"] = df.to_string(index=False)
        elif path.endswith(".txt"):
            with open(path, "r", encoding="utf-8") as f:
                file_data["text"] = f.read()
        else:
            return [], "❌ Please upload a .txt or .csv file."
        return [], "✅ File uploaded. Now ask your question."
    except Exception as e:
        return [], f"❌ Error reading file: {e}"

# Handle user question
def ask_question(message, history):
    if not message or not file_data["text"]:
        return "", history + [[message, "📄 Please upload a file and type a question."]]

    context = file_data["text"][:1500]  # truncate long content to keep the request small
    payload = {
        "inputs": {
            "question": message,
            "context": context,
        }
    }
    try:
        response = requests.post(
            f"https://api-inference.huggingface.co/models/{MODEL_ID}",
            headers=headers,
            json=payload,
            timeout=30,
        )
        result = response.json()
        # The QA endpoint returns a dict with an "answer" key on success
        answer = result.get("answer", "🤷 No answer found.")
    except Exception as e:
        answer = f"❌ API error: {e}"

    return "", history + [[message, answer]]

# Build the Gradio UI
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Chat with Your File")
    file_input = gr.File(label="Upload .txt or .csv")
    msg = gr.Textbox(label="Your Question")
    send = gr.Button("Send")
    status = gr.Textbox(label="Status", interactive=False)

    # Wire the components to the handlers
    file_input.change(upload_file, inputs=file_input, outputs=[chatbot, status])
    send.click(ask_question, inputs=[msg, chatbot], outputs=[msg, chatbot])
    gr.ClearButton([msg, file_input, chatbot])

demo.launch()