from transformers import AutoTokenizer, AutoModelForCausalLM
tokenizer = AutoTokenizer.from_pretrained('quidangz/LLamaRE-8B-Instruct-ZeroShot')
model = AutoModelForCausalLM.from_pretrained(
    'quidangz/LLamaRE-8B-Instruct-ZeroShot',
    torch_dtype="auto",
    device_map="cuda",
)
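Loading with device_map="cuda" assumes a GPU is available (and typically requires the accelerate package). A minimal fallback sketch, assuming torch is importable, that picks the device at runtime:

import torch

# Fall back to CPU when no GPU is present (sketch; the model card itself assumes CUDA).
device_map = "cuda" if torch.cuda.is_available() else "cpu"
model = AutoModelForCausalLM.from_pretrained(
    'quidangz/LLamaRE-8B-Instruct-ZeroShot',
    torch_dtype="auto",
    device_map=device_map,
)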

# Llama tokenizers typically ship without a pad token; reuse the EOS token so padded inputs and generation work.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
    model.config.pad_token_id = model.config.eos_token_id

user_prompt = """
  Extract relationships between entities in text **strictly using ONLY the provided Relationship List** below and **MUST** strictly adhere to the output format.
  Format each relationship as '<relation_type>: <head_entity>, <tail_entity>' and separated multiple relationship by '|'. Return 'None' if no relationships are identified.
  Relationship List: {re_labels}
  Text: {text}
"""

query = 'An art exhibit at the Hakawati Theatre in Arab east Jerusalem was a series of portraits of Palestinians killed in the rebellion.'
re_labels = ["Organization based in", "Located in", "Live in", "Work for", "Kill"]

user_prompt = user_prompt.format(re_labels=re_labels, text=query)
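Because re_labels is a Python list, str.format inserts it into the prompt as a list literal (e.g. ['Organization based in', 'Located in', ...]). Printing the filled prompt is a quick sanity check before generation:

# Optional: inspect the exact prompt the model will see.
print(user_prompt)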

messages = [
  {
      "role": "system",
      "content": "You are an expert in Relation Extraction (RE) task."
  },
  {
      "role": "user",
      "content": user_prompt
  }
]

# Render the chat messages into the model's prompt format, then tokenize.
text = tokenizer.apply_chat_template(
    messages,
    tokenize=False,
    add_generation_prompt=True,
)

model_inputs = tokenizer(text, return_tensors="pt").to(model.device)

generated_ids = model.generate(
    **model_inputs,
    max_new_tokens=512,
)
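Generation above uses the model's default decoding settings; for more deterministic extraction output one could request greedy decoding instead (an assumption, not something the model card specifies):

# Hypothetical alternative: greedy decoding for reproducible outputs.
generated_ids = model.generate(**model_inputs, max_new_tokens=512, do_sample=False)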

# Keep only the newly generated tokens, dropping the echoed prompt.
generated_ids = [
    output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
]

response = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]

print(response) # Organization based in: Hakawati Theatre, Jerusalem
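Because the response follows the '<relation_type>: <head_entity>, <tail_entity>' format with '|' between relationships, it can be parsed into structured triples. A minimal sketch (the parse_relations helper below is not part of the model card):

def parse_relations(response: str):
    # Turn "Rel: head, tail | Rel: head, tail" into a list of (relation, head, tail) triples.
    if response.strip() == "None":
        return []
    triples = []
    for item in response.split("|"):
        relation, _, args = item.partition(":")
        head, _, tail = args.partition(",")
        triples.append((relation.strip(), head.strip(), tail.strip()))
    return triples

print(parse_relations(response))  # [('Organization based in', 'Hakawati Theatre', 'Jerusalem')]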

Contact

Email: [email protected]

LinkedIn: Qui Dang

Facebook: Đặng Bá Qúi

Citation

Please cite as

@misc{LlamaRE-8B-Instruct-ZeroShot,
  title={LlamaRE: A Large Language Model for Relation Extraction},
  author={Qui Dang Ba},
  year={2025},
  publisher={Hugging Face},
}