---
license: apache-2.0
---

# NeuroBLAST 1.9B Instruct Early Preview

Work in progress.

## Inference code
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Single source of truth for the checkpoint id (was duplicated inline).
MODEL_ID = "meditsolutions/NeuroBLAST-1.9B-Instruct-Early-Preview"


def main() -> None:
    """Run one deterministic (greedy) generation with the NeuroBLAST preview model.

    Loads the model and tokenizer from the Hub, asks a single medical question,
    and prints the generated answer.
    """
    # Seed for reproducibility (only matters if sampling is ever re-enabled).
    torch.random.manual_seed(0)

    # NOTE(review): trust_remote_code=True executes Python shipped with the
    # checkpoint — acceptable only because this model source is trusted.
    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        # Fall back to CPU instead of crashing on hosts without CUDA.
        device_map="cuda" if torch.cuda.is_available() else "cpu",
        torch_dtype=torch.bfloat16,
        trust_remote_code=True,
    )
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)

    pipe = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
    )

    messages = [
        {"role": "user", "content": "What is enalapril?"},
    ]

    generation_args = {
        "max_new_tokens": 500,
        "return_full_text": False,
        # Greedy decoding. Do NOT also pass temperature=0.0: with
        # do_sample=False transformers ignores temperature and emits a
        # warning (temperature must be strictly positive when sampling).
        "do_sample": False,
    }

    output = pipe(messages, **generation_args)
    print(output[0]["generated_text"])


if __name__ == "__main__":
    main()