ariG23498 (HF Staff) committed
Commit fce10d5 · verified · 1 Parent(s): 438653f

Upload Menlo_Lucy-128k_1.py with huggingface_hub
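The commit message refers to pushing the script with the huggingface_hub client. As an illustration only (the target repository for this commit is not shown on this page, so the repo_id below is a placeholder), such an upload comes down to a single upload_file call:

from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="Menlo_Lucy-128k_1.py",  # local script to push
    path_in_repo="Menlo_Lucy-128k_1.py",     # destination path inside the repo
    repo_id="<namespace>/<repo>",            # placeholder: the actual target repo is not part of this page
    repo_type="dataset",                     # assumption: drop or change if the target is a model or space repo
)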

Files changed (1)
  1. Menlo_Lucy-128k_1.py +41 -0
Menlo_Lucy-128k_1.py ADDED
@@ -0,0 +1,41 @@
+# /// script
+# requires-python = ">=3.12"
+# dependencies = [
+#     "transformers",
+#     "torch",
+# ]
+# ///
+
+try:
+    # Load model directly
+    from transformers import AutoTokenizer, AutoModelForCausalLM
+
+    tokenizer = AutoTokenizer.from_pretrained("Menlo/Lucy-128k")
+    model = AutoModelForCausalLM.from_pretrained("Menlo/Lucy-128k")
+    messages = [
+        {"role": "user", "content": "Who are you?"},
+    ]
+    inputs = tokenizer.apply_chat_template(
+        messages,
+        add_generation_prompt=True,
+        tokenize=True,
+        return_dict=True,
+        return_tensors="pt",
+    ).to(model.device)
+
+    outputs = model.generate(**inputs, max_new_tokens=40)
+    print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+    with open('Menlo_Lucy-128k_1.txt', 'w') as f:
+        f.write('Everything was good in Menlo_Lucy-128k_1.txt')
+except Exception as e:
+    with open('Menlo_Lucy-128k_1.txt', 'w') as f:
+        import traceback
+        traceback.print_exc(file=f)
+finally:
+    from huggingface_hub import upload_file
+    upload_file(
+        path_or_fileobj='Menlo_Lucy-128k_1.txt',
+        repo_id='model-metadata/custom_code_execution_files',
+        path_in_repo='Menlo_Lucy-128k_1.txt',
+        repo_type='dataset',
+    )
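The try/except/finally structure in the uploaded script guarantees that Menlo_Lucy-128k_1.txt is pushed in every case, carrying either the success message or the captured traceback. A minimal sketch, not part of the commit and assuming the upload completed and the dataset repo is readable, of how that status file could be fetched back for inspection:

from huggingface_hub import hf_hub_download

# Download the status file written by the script's finally block.
local_path = hf_hub_download(
    repo_id="model-metadata/custom_code_execution_files",
    filename="Menlo_Lucy-128k_1.txt",
    repo_type="dataset",
)

# Prints either the "Everything was good ..." line or the recorded traceback.
with open(local_path) as f:
    print(f.read())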