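# app.py for a Hugging Face Space (Gradio).
# Assumed requirements.txt (not shown in the source): gradio, transformers, peft,
# accelerate, and bitsandbytes (needed for 8-bit loading on a GPU runtime).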
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from peft import PeftModel

# --- Load base model + your LoRA adapter ---
BASE_MODEL = "EleutherAI/gpt-neo-125M"
ADAPTER_MODEL = "khaliqabdull/humanizer3.0-lora"
# Load tokenizer (assumes the adapter repo also ships tokenizer files;
# if it does not, load from BASE_MODEL instead)
tokenizer = AutoTokenizer.from_pretrained(ADAPTER_MODEL)
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
# Load base model in 8-bit (requires bitsandbytes and a GPU runtime; recent
# transformers versions prefer quantization_config=BitsAndBytesConfig(load_in_8bit=True))
model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    device_map="auto",
    load_in_8bit=True
)
# Attach LoRA adapter
model = PeftModel.from_pretrained(model, ADAPTER_MODEL)

# Create pipeline
pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer
)
# --- Humanizer function ---
def humanize_text(text):
    prompt = f"Input:\n{text}\n\nHuman-like rewrite:\n"
    result = pipe(
        prompt,
        max_new_tokens=120,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
        return_full_text=False  # return only the rewrite, not the echoed prompt
    )
    return result[0]["generated_text"]
# --- Gradio UI ---
iface = gr.Interface(
    fn=humanize_text,
    inputs=gr.Textbox(lines=6, placeholder="Paste AI-like text here..."),
    outputs="text",
    title="🤖 Humanizer 3.0",
    description="Enter AI-like text and get a human-like rewrite."
)
if __name__ == "__main__":
    iface.launch()
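# Quick local smoke test, bypassing the UI (input text is a hypothetical example):
#   >>> humanize_text("This sentence was generated by an AI model.")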