File size: 4,458 Bytes
36e5180
ef92b60
 
 
 
ca12785
36e5180
ef92b60
34531ec
 
b9b3c97
ef92b60
 
34531ec
 
ef92b60
f0ef3c4
 
ef92b60
f0ef3c4
 
ef92b60
f0ef3c4
ef92b60
 
 
 
 
 
 
f0ef3c4
 
 
 
 
 
 
ef92b60
f0ef3c4
 
bb5b784
34531ec
ef92b60
0a036bb
f0ef3c4
ef92b60
 
f0ef3c4
ef92b60
f0ef3c4
ef92b60
f0ef3c4
 
ef92b60
f0ef3c4
 
 
 
 
 
 
 
ef92b60
f0ef3c4
 
 
ef92b60
f0ef3c4
 
ef92b60
f0ef3c4
 
 
 
 
 
 
398fb47
 
 
 
34531ec
f0ef3c4
 
 
 
 
 
 
 
 
 
34531ec
f0ef3c4
 
34531ec
f0ef3c4
01cff1f
f0ef3c4
ef92b60
 
f0ef3c4
398fb47
ca12785
 
 
ef92b60
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
import gradio as gr
from transformers import (
    RobertaTokenizer, RobertaForSequenceClassification,
    AutoTokenizer, AutoModelForSequenceClassification
)
import torch

# Checkpoints selectable from the UI dropdown. Display key and the repo id
# passed to `from_pretrained` are identical by design, so the mapping is
# built from a single list of ids (includes the DeBERTa-based ada-2534).
model_options = {
    repo_id: repo_id
    for repo_id in (
        "GoalZero/aidetection-ada-v0.2",
        "GoalZero/aidetection-ada-v0.1",
        "GoalZero/babbage-mini-v0.1",
        "GoalZero/ada-2534",
    )
}

# Module-level cache for the active checkpoint; populated lazily by
# load_model() / classify_text(). All three start out unset.
model = tokenizer = current_model_name = None

def load_model(model_name):
    """Load and return ``(model, tokenizer)`` for *model_name*.

    DeBERTa-style checkpoints (any name containing "deberta" or the
    "ada-2534" repo) go through the Auto* classes; everything else uses the
    RoBERTa classes.

    Raises:
        RuntimeError: if either the model or tokenizer fails to load; the
            original exception is chained as ``__cause__``. (RuntimeError is
            an ``Exception`` subclass, so existing ``except Exception``
            callers keep working.)
    """
    try:
        if "deberta" in model_name.lower() or "ada-2534" in model_name.lower():
            model = AutoModelForSequenceClassification.from_pretrained(model_name)
            tokenizer = AutoTokenizer.from_pretrained(model_name)
        else:
            model = RobertaForSequenceClassification.from_pretrained(model_name)
            tokenizer = RobertaTokenizer.from_pretrained(model_name)
        return model, tokenizer
    except Exception as e:
        # Chain the cause instead of raising a bare Exception: callers see
        # both the summary message and the underlying traceback.
        raise RuntimeError(f"Failed to load model {model_name}: {str(e)}") from e

# Eagerly load the default checkpoint at import time so the first request
# doesn't pay the download/initialization cost. A failure is logged rather
# than fatal; classify_text() will retry loading on demand.
default_model = "GoalZero/aidetection-ada-v0.2"
try:
    model, tokenizer = load_model(default_model)
    current_model_name = default_model
except Exception as e:
    print(f"Error loading default model: {str(e)}")

def classify_text(text, model_choice):
    """Score *text* with *model_choice* and return a result dict.

    On success the dict holds "AI Probability" (percentage) and
    "Model used"; on any failure it holds "error" and "Model used"
    instead. Swaps the cached global model/tokenizer whenever the
    requested checkpoint differs from the loaded one.
    """
    global model, tokenizer, current_model_name

    try:
        # Swap checkpoints on demand (also covers the cold-start case).
        needs_reload = model is None or model_choice != current_model_name
        if needs_reload:
            model, tokenizer = load_model(model_choice)
            current_model_name = model_choice

        # Normalize the input: drop periods, flatten newlines to spaces.
        normalized = text.replace('.', '').replace('\n', ' ')

        encoded = tokenizer(
            normalized,
            return_tensors='pt',
            padding=True,
            truncation=True,
            max_length=128,
        )

        # Inference only — no gradients needed.
        with torch.no_grad():
            logits = model(**encoded).logits

        # Index 1 is treated as the "AI-written" class here.
        scores = torch.nn.functional.softmax(logits, dim=-1)
        prob_ai = scores[0][1].item()

        return {
            "AI Probability": round(prob_ai * 100, 10),
            "Model used": model_choice,
        }
    except Exception as e:
        return {
            "error": f"An error occurred: {str(e)}",
            "Model used": model_choice,
        }

# Assemble the Gradio interface: a free-text box plus a model picker,
# wired to classify_text, rendering the result dict as JSON.
_text_box = gr.Textbox(
    lines=2,
    placeholder="Enter text here...",
    label="Input Text",
)
_model_picker = gr.Dropdown(
    choices=list(model_options.keys()),
    value="GoalZero/aidetection-ada-v0.2",
    label="Select Model Version",
)
_examples = [
    ["Waymo is an American autonomous driving technology company that originated as the Google Self-Driving Car Project in 2009. It is now a subsidiary of Alphabet Inc., headquartered in Mountain View, California. The name \"Waymo\" was adopted in December 2016 when the project was rebranded and spun out of Google to focus on developing fully autonomous vehicles aimed at improving transportation safety and convenience", "GoalZero/babbage-mini-v0.1"],
    ["WWII demonstrated the importance of alliances in global conflicts. The Axis and Allied powers were formed as countries sought to protect their interests and expand their influence. This lesson underscores the potential for future global conflicts to involve complex alliances, similar to the Cold War era’s NATO and Warsaw Pact alignments.", "GoalZero/aidetection-ada-v0.2"],
    ["Eustace was a thorough gentleman. There was candor in his quack, and affability in his waddle; and underneath his snowy down beat a pure and sympathetic heart. In short, he was a most exemplary duck.", "GoalZero/aidetection-ada-v0.1"],
    ["This is an example of AI-written text using the DeBERTa model for testing purposes.", "GoalZero/ada-2534"],
]
iface = gr.Interface(
    fn=classify_text,
    inputs=[_text_box, _model_picker],
    outputs=gr.JSON(label="Results"),
    title="GoalZero Ada AI Detection",
    description="Enter text to get the probability of it being AI-written. Select a model version to use.",
    examples=_examples,
)

# Launch the app only when run as a script (not when imported).
# share=True asks Gradio for a publicly shareable link in addition
# to the local server.
if __name__ == "__main__":
    iface.launch(share=True)