import torch
from transformers import BertTokenizer, BertForSequenceClassification
import gradio as gr

# Load the fine-tuned model and tokenizer from the local folder
model = BertForSequenceClassification.from_pretrained(
    "bert-expense-classifier/bert-expense-classifier", trust_remote_code=True
)
tokenizer = BertTokenizer.from_pretrained("bert-expense-classifier/bert-expense-classifier")
model.eval()

# Map class indices produced by the model to human-readable labels
label_map = {0: "statement", 1: "query"}


def classify_sentence(text):
    # Tokenize the input sentence and run a forward pass without tracking gradients
    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True, max_length=128)
    with torch.no_grad():
        outputs = model(**inputs)
    # Pick the class with the highest logit and return its label
    predicted_class = torch.argmax(outputs.logits, dim=1).item()
    return label_map[predicted_class]


# Simple Gradio UI: a text box in, the predicted label out
interface = gr.Interface(
    fn=classify_sentence,
    inputs=gr.Textbox(lines=2, placeholder="Enter a sentence..."),
    outputs=gr.Textbox(label="Prediction"),
    title="Expense Sentence Classifier",
    description="Classifies whether a sentence is a user question or a statement for an expense tracker.",
)

if __name__ == "__main__":
    interface.launch()