Dataset Viewer (auto-converted to Parquet)

Schema (each row below lists its cells in this order):

  model_id          string, 10 distinct values
  vram              float64, range 0 to 1.66k
  scripts           list of strings, 0 to 2 items
  code_urls         list of strings, 0 to 2 items
  execution_urls    list of strings, 0 to 2 items
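Because the dataset is auto-converted to Parquet, the rows below can be loaded programmatically. A minimal sketch using the `datasets` library; the split name "train" is an assumption (the usual default for auto-converted datasets):

```python
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "datasets",
# ]
# ///

from datasets import load_dataset

# Load the table straight from the Hub via its Parquet conversion.
ds = load_dataset("model-metadata/model_vram_code", split="train")

for row in ds:
    # Each row follows the schema above: model_id, vram, and three list columns.
    print(row["model_id"], row["vram"], len(row["scripts"]))
```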
moonshotai/Kimi-K2-Instruct
0
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"moonshotai/Kimi-K2-Instruct\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('moonshotai_Kimi-K2-Instruct_0.txt', 'w') as f:\n f.write('Everything was good in moonshotai_Kimi-K2-Instruct_0.txt')\nexcept Exception as e:\n with open('moonshotai_Kimi-K2-Instruct_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='moonshotai_Kimi-K2-Instruct_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='moonshotai_Kimi-K2-Instruct_0.txt',\n repo_type='dataset',\n )", "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"moonshotai/Kimi-K2-Instruct\", trust_remote_code=True, torch_dtype=\"auto\"),\n with open('moonshotai_Kimi-K2-Instruct_1.txt', 'w') as f:\n f.write('Everything was good in moonshotai_Kimi-K2-Instruct_1.txt')\nexcept Exception as e:\n with open('moonshotai_Kimi-K2-Instruct_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='moonshotai_Kimi-K2-Instruct_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='moonshotai_Kimi-K2-Instruct_1.txt',\n repo_type='dataset',\n )" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/moonshotai_Kimi-K2-Instruct_0.py", "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/moonshotai_Kimi-K2-Instruct_1.py" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moonshotai_Kimi-K2-Instruct_0.txt", "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moonshotai_Kimi-K2-Instruct_1.txt" ]
internlm/Intern-S1
582.86
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('internlm_Intern-S1_0.txt', 'w') as f:\n f.write('Everything was good in internlm_Intern-S1_0.txt')\nexcept Exception as e:\n with open('internlm_Intern-S1_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='internlm_Intern-S1_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='internlm_Intern-S1_0.txt',\n repo_type='dataset',\n )" ]
[ "DO NOT EXECUTE" ]
[ "WAS NOT EXECUTED" ]
nvidia/Llama-3_3-Nemotron-Super-49B-v1_5
120.75
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"nvidia/Llama-3_3-Nemotron-Super-49B-v1_5\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.txt', 'w') as f:\n f.write('Everything was good in nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.txt')\nexcept Exception as e:\n with open('nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.txt',\n repo_type='dataset',\n )", "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"nvidia/Llama-3_3-Nemotron-Super-49B-v1_5\", trust_remote_code=True, torch_dtype=\"auto\"),\n with open('nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.txt', 'w') as f:\n f.write('Everything was good in nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.txt')\nexcept Exception as e:\n with open('nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.txt',\n repo_type='dataset',\n )" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.py", "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.py" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_0.txt", "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/nvidia_Llama-3_3-Nemotron-Super-49B-v1_5_1.txt" ]
naver-hyperclovax/HyperCLOVAX-SEED-Think-14B
71.42
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"naver-hyperclovax/HyperCLOVAX-SEED-Think-14B\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.txt', 'w') as f:\n f.write('Everything was good in naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.txt')\nexcept Exception as e:\n with open('naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.txt',\n repo_type='dataset',\n )", "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"naver-hyperclovax/HyperCLOVAX-SEED-Think-14B\", trust_remote_code=True, torch_dtype=\"auto\"),\n with open('naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.txt', 'w') as f:\n f.write('Everything was good in naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.txt')\nexcept Exception as e:\n with open('naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.txt',\n repo_type='dataset',\n )" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.py", "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.py" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_0.txt", "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/naver-hyperclovax_HyperCLOVAX-SEED-Think-14B_1.txt" ]
inclusionAI/Ming-Lite-Omni-1.5
45.74
[]
[]
[]
kakaocorp/kanana-1.5-v-3b-instruct
8.88
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('kakaocorp_kanana-1.5-v-3b-instruct_0.txt', 'w') as f:\n f.write('Everything was good in kakaocorp_kanana-1.5-v-3b-instruct_0.txt')\nexcept Exception as e:\n with open('kakaocorp_kanana-1.5-v-3b-instruct_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='kakaocorp_kanana-1.5-v-3b-instruct_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='kakaocorp_kanana-1.5-v-3b-instruct_0.txt',\n repo_type='dataset',\n )" ]
[ "DO NOT EXECUTE" ]
[ "WAS NOT EXECUTED" ]
internlm/Intern-S1-FP8
1,165.73
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('internlm_Intern-S1-FP8_0.txt', 'w') as f:\n f.write('Everything was good in internlm_Intern-S1-FP8_0.txt')\nexcept Exception as e:\n with open('internlm_Intern-S1-FP8_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='internlm_Intern-S1-FP8_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='internlm_Intern-S1-FP8_0.txt',\n repo_type='dataset',\n )" ]
[ "DO NOT EXECUTE" ]
[ "WAS NOT EXECUTED" ]
deepseek-ai/DeepSeek-R1
1,657.55
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('deepseek-ai_DeepSeek-R1_0.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-R1_0.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-R1_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-R1_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-R1_0.txt',\n repo_type='dataset',\n )", "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n inputs = tokenizer.apply_chat_template(\n \tmessages,\n \tadd_generation_prompt=True,\n \ttokenize=True,\n \treturn_dict=True,\n \treturn_tensors=\"pt\",\n ).to(model.device)\n \n outputs = model.generate(**inputs, max_new_tokens=40)\n print(tokenizer.decode(outputs[0][inputs[\"input_ids\"].shape[-1]:]))\n with open('deepseek-ai_DeepSeek-R1_1.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-R1_1.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-R1_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-R1_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-R1_1.txt',\n repo_type='dataset',\n )" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-R1_0.py", "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-R1_1.py" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-R1_0.txt", "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-R1_1.txt" ]
PowerInfer/SmallThinker-4BA0.6B-Instruct
10.34
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('PowerInfer_SmallThinker-4BA0.6B-Instruct_0.txt', 'w') as f:\n f.write('Everything was good in PowerInfer_SmallThinker-4BA0.6B-Instruct_0.txt')\nexcept Exception as e:\n with open('PowerInfer_SmallThinker-4BA0.6B-Instruct_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='PowerInfer_SmallThinker-4BA0.6B-Instruct_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='PowerInfer_SmallThinker-4BA0.6B-Instruct_0.txt',\n repo_type='dataset',\n )" ]
[ "DO NOT EXECUTE" ]
[ "WAS NOT EXECUTED" ]
deepseek-ai/DeepSeek-R1-0528
1,657.55
[ "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-R1-0528\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('deepseek-ai_DeepSeek-R1-0528_0.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-R1-0528_0.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-R1-0528_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-R1-0528_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-R1-0528_0.txt',\n repo_type='dataset',\n )", "# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-R1-0528\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-R1-0528\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n inputs = tokenizer.apply_chat_template(\n \tmessages,\n \tadd_generation_prompt=True,\n \ttokenize=True,\n \treturn_dict=True,\n \treturn_tensors=\"pt\",\n ).to(model.device)\n \n outputs = model.generate(**inputs, max_new_tokens=40)\n print(tokenizer.decode(outputs[0][inputs[\"input_ids\"].shape[-1]:]))\n with open('deepseek-ai_DeepSeek-R1-0528_1.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-R1-0528_1.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-R1-0528_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-R1-0528_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-R1-0528_1.txt',\n repo_type='dataset',\n )" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-R1-0528_0.py", "https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-R1-0528_1.py" ]
[ "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-R1-0528_0.txt", "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-R1-0528_1.txt" ]