| model_id (string) | vram (float64, GB) | scripts (list) | code_urls (list) | execution_urls (list) |
|---|---|---|---|---|
| moondream/moondream3-preview | 22.44 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"moondream/moondream3-preview\", trust_remote_code=True)\n with open('moondream_moondream3-preview_0.txt', 'w') as f:\n f.write('Everything was good in moondream_moondream3-preview_0.txt')\nexcept Exception as e:\n with open('moondream_moondream3-preview_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='moondream_moondream3-preview_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='moondream_moondream3-preview_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"moondream/moondream3-preview\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('moondream_moondream3-preview_1.txt', 'w') as f:\n f.write('Everything was good in moondream_moondream3-preview_1.txt')\nexcept Exception as e:\n with open('moondream_moondream3-preview_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='moondream_moondream3-preview_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='moondream_moondream3-preview_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/moondream_moondream3-preview_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/moondream_moondream3-preview_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moondream_moondream3-preview_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moondream_moondream3-preview_1.txt"
] |
| deepseek-ai/DeepSeek-V3.1-Terminus | 1,657.55 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-V3.1-Terminus\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('deepseek-ai_DeepSeek-V3.1-Terminus_0.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-V3.1-Terminus_0.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-V3.1-Terminus_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-V3.1-Terminus_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-V3.1-Terminus_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-V3.1-Terminus\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-V3.1-Terminus\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n inputs = tokenizer.apply_chat_template(\n \tmessages,\n \tadd_generation_prompt=True,\n \ttokenize=True,\n \treturn_dict=True,\n \treturn_tensors=\"pt\",\n ).to(model.device)\n \n outputs = model.generate(**inputs, max_new_tokens=40)\n print(tokenizer.decode(outputs[0][inputs[\"input_ids\"].shape[-1]:]))\n with open('deepseek-ai_DeepSeek-V3.1-Terminus_1.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-V3.1-Terminus_1.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-V3.1-Terminus_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-V3.1-Terminus_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-V3.1-Terminus_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-V3.1-Terminus_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-V3.1-Terminus_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-V3.1-Terminus_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-V3.1-Terminus_1.txt"
] |
| meituan-longcat/LongCat-Flash-Thinking | 1,360.52 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # integration status unknown.\n \n # Please clone model and use locally.\n \n # Also feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('meituan-longcat_LongCat-Flash-Thinking_0.txt', 'w') as f:\n f.write('Everything was good in meituan-longcat_LongCat-Flash-Thinking_0.txt')\nexcept Exception as e:\n with open('meituan-longcat_LongCat-Flash-Thinking_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='meituan-longcat_LongCat-Flash-Thinking_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='meituan-longcat_LongCat-Flash-Thinking_0.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/meituan-longcat_LongCat-Flash-Thinking_0.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/meituan-longcat_LongCat-Flash-Thinking_0.txt"
] |
| inclusionAI/Ling-flash-2.0 | 249.14 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"inclusionAI/Ling-flash-2.0\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('inclusionAI_Ling-flash-2.0_0.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ling-flash-2.0_0.txt')\nexcept Exception as e:\n with open('inclusionAI_Ling-flash-2.0_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ling-flash-2.0_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ling-flash-2.0_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"inclusionAI/Ling-flash-2.0\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('inclusionAI_Ling-flash-2.0_1.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ling-flash-2.0_1.txt')\nexcept Exception as e:\n with open('inclusionAI_Ling-flash-2.0_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ling-flash-2.0_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ling-flash-2.0_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ling-flash-2.0_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ling-flash-2.0_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ling-flash-2.0_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ling-flash-2.0_1.txt"
] |
| PerceptronAI/Isaac-0.1 | 12.43 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"PerceptronAI/Isaac-0.1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('PerceptronAI_Isaac-0.1_0.txt', 'w') as f:\n f.write('Everything was good in PerceptronAI_Isaac-0.1_0.txt')\nexcept Exception as e:\n with open('PerceptronAI_Isaac-0.1_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='PerceptronAI_Isaac-0.1_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='PerceptronAI_Isaac-0.1_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"PerceptronAI/Isaac-0.1\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('PerceptronAI_Isaac-0.1_1.txt', 'w') as f:\n f.write('Everything was good in PerceptronAI_Isaac-0.1_1.txt')\nexcept Exception as e:\n with open('PerceptronAI_Isaac-0.1_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='PerceptronAI_Isaac-0.1_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='PerceptronAI_Isaac-0.1_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/PerceptronAI_Isaac-0.1_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/PerceptronAI_Isaac-0.1_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/PerceptronAI_Isaac-0.1_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/PerceptronAI_Isaac-0.1_1.txt"
] |
| inclusionAI/Ring-flash-2.0 | 249.14 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"inclusionAI/Ring-flash-2.0\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('inclusionAI_Ring-flash-2.0_0.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ring-flash-2.0_0.txt')\nexcept Exception as e:\n with open('inclusionAI_Ring-flash-2.0_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ring-flash-2.0_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ring-flash-2.0_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"inclusionAI/Ring-flash-2.0\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('inclusionAI_Ring-flash-2.0_1.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ring-flash-2.0_1.txt')\nexcept Exception as e:\n with open('inclusionAI_Ring-flash-2.0_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ring-flash-2.0_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ring-flash-2.0_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ring-flash-2.0_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ring-flash-2.0_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ring-flash-2.0_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ring-flash-2.0_1.txt"
] |
| rednote-hilab/dots.ocr | 7.36 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # integration status unknown.\n \n # Please clone model and use locally.\n \n # Also feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('rednote-hilab_dots.ocr_0.txt', 'w') as f:\n f.write('Everything was good in rednote-hilab_dots.ocr_0.txt')\nexcept Exception as e:\n with open('rednote-hilab_dots.ocr_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='rednote-hilab_dots.ocr_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='rednote-hilab_dots.ocr_0.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/rednote-hilab_dots.ocr_0.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/rednote-hilab_dots.ocr_0.txt"
] |
| InternRobotics/VLAC | 5.34 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"InternRobotics/VLAC\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('InternRobotics_VLAC_0.txt', 'w') as f:\n f.write('Everything was good in InternRobotics_VLAC_0.txt')\nexcept Exception as e:\n with open('InternRobotics_VLAC_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='InternRobotics_VLAC_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='InternRobotics_VLAC_0.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/InternRobotics_VLAC_0.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/InternRobotics_VLAC_0.txt"
] |
| inclusionAI/Ring-mini-2.0 | 39.36 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"inclusionAI/Ring-mini-2.0\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('inclusionAI_Ring-mini-2.0_0.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ring-mini-2.0_0.txt')\nexcept Exception as e:\n with open('inclusionAI_Ring-mini-2.0_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ring-mini-2.0_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ring-mini-2.0_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"inclusionAI/Ring-mini-2.0\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('inclusionAI_Ring-mini-2.0_1.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ring-mini-2.0_1.txt')\nexcept Exception as e:\n with open('inclusionAI_Ring-mini-2.0_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ring-mini-2.0_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ring-mini-2.0_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ring-mini-2.0_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ring-mini-2.0_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ring-mini-2.0_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ring-mini-2.0_1.txt"
] |
| baidu/Qianfan-VL-70B | 173.95 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"baidu/Qianfan-VL-70B\", trust_remote_code=True)\n messages = [\n {\n \"role\": \"user\",\n \"content\": [\n {\"type\": \"image\", \"url\": \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG\"},\n {\"type\": \"text\", \"text\": \"What animal is on the candy?\"}\n ]\n },\n ]\n pipe(text=messages)\n with open('baidu_Qianfan-VL-70B_0.txt', 'w') as f:\n f.write('Everything was good in baidu_Qianfan-VL-70B_0.txt')\nexcept Exception as e:\n with open('baidu_Qianfan-VL-70B_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='baidu_Qianfan-VL-70B_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='baidu_Qianfan-VL-70B_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"baidu/Qianfan-VL-70B\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('baidu_Qianfan-VL-70B_1.txt', 'w') as f:\n f.write('Everything was good in baidu_Qianfan-VL-70B_1.txt')\nexcept Exception as e:\n with open('baidu_Qianfan-VL-70B_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='baidu_Qianfan-VL-70B_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='baidu_Qianfan-VL-70B_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/baidu_Qianfan-VL-70B_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/baidu_Qianfan-VL-70B_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/baidu_Qianfan-VL-70B_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/baidu_Qianfan-VL-70B_1.txt"
] |
| openbmb/MiniCPM-V-4_5 | 21.06 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"openbmb/MiniCPM-V-4_5\", trust_remote_code=True)\n messages = [\n {\n \"role\": \"user\",\n \"content\": [\n {\"type\": \"image\", \"url\": \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG\"},\n {\"type\": \"text\", \"text\": \"What animal is on the candy?\"}\n ]\n },\n ]\n pipe(text=messages)\n with open('openbmb_MiniCPM-V-4_5_0.txt', 'w') as f:\n f.write('Everything was good in openbmb_MiniCPM-V-4_5_0.txt')\nexcept Exception as e:\n with open('openbmb_MiniCPM-V-4_5_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='openbmb_MiniCPM-V-4_5_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='openbmb_MiniCPM-V-4_5_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"openbmb/MiniCPM-V-4_5\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('openbmb_MiniCPM-V-4_5_1.txt', 'w') as f:\n f.write('Everything was good in openbmb_MiniCPM-V-4_5_1.txt')\nexcept Exception as e:\n with open('openbmb_MiniCPM-V-4_5_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='openbmb_MiniCPM-V-4_5_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='openbmb_MiniCPM-V-4_5_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/openbmb_MiniCPM-V-4_5_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/openbmb_MiniCPM-V-4_5_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/openbmb_MiniCPM-V-4_5_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/openbmb_MiniCPM-V-4_5_1.txt"
] |
| baidu/Qianfan-VL-8B | 21.33 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"baidu/Qianfan-VL-8B\", trust_remote_code=True)\n messages = [\n {\n \"role\": \"user\",\n \"content\": [\n {\"type\": \"image\", \"url\": \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG\"},\n {\"type\": \"text\", \"text\": \"What animal is on the candy?\"}\n ]\n },\n ]\n pipe(text=messages)\n with open('baidu_Qianfan-VL-8B_0.txt', 'w') as f:\n f.write('Everything was good in baidu_Qianfan-VL-8B_0.txt')\nexcept Exception as e:\n with open('baidu_Qianfan-VL-8B_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='baidu_Qianfan-VL-8B_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='baidu_Qianfan-VL-8B_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"baidu/Qianfan-VL-8B\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('baidu_Qianfan-VL-8B_1.txt', 'w') as f:\n f.write('Everything was good in baidu_Qianfan-VL-8B_1.txt')\nexcept Exception as e:\n with open('baidu_Qianfan-VL-8B_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='baidu_Qianfan-VL-8B_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='baidu_Qianfan-VL-8B_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/baidu_Qianfan-VL-8B_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/baidu_Qianfan-VL-8B_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/baidu_Qianfan-VL-8B_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/baidu_Qianfan-VL-8B_1.txt"
] |
| inclusionAI/Ling-mini-2.0 | 39.36 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"inclusionAI/Ling-mini-2.0\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('inclusionAI_Ling-mini-2.0_0.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ling-mini-2.0_0.txt')\nexcept Exception as e:\n with open('inclusionAI_Ling-mini-2.0_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ling-mini-2.0_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ling-mini-2.0_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"inclusionAI/Ling-mini-2.0\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('inclusionAI_Ling-mini-2.0_1.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ling-mini-2.0_1.txt')\nexcept Exception as e:\n with open('inclusionAI_Ling-mini-2.0_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ling-mini-2.0_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ling-mini-2.0_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ling-mini-2.0_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ling-mini-2.0_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ling-mini-2.0_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ling-mini-2.0_1.txt"
] |
| baidu/Qianfan-VL-3B | 8.99 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"baidu/Qianfan-VL-3B\", trust_remote_code=True)\n messages = [\n {\n \"role\": \"user\",\n \"content\": [\n {\"type\": \"image\", \"url\": \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG\"},\n {\"type\": \"text\", \"text\": \"What animal is on the candy?\"}\n ]\n },\n ]\n pipe(text=messages)\n with open('baidu_Qianfan-VL-3B_0.txt', 'w') as f:\n f.write('Everything was good in baidu_Qianfan-VL-3B_0.txt')\nexcept Exception as e:\n with open('baidu_Qianfan-VL-3B_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='baidu_Qianfan-VL-3B_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='baidu_Qianfan-VL-3B_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"baidu/Qianfan-VL-3B\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('baidu_Qianfan-VL-3B_1.txt', 'w') as f:\n f.write('Everything was good in baidu_Qianfan-VL-3B_1.txt')\nexcept Exception as e:\n with open('baidu_Qianfan-VL-3B_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='baidu_Qianfan-VL-3B_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='baidu_Qianfan-VL-3B_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/baidu_Qianfan-VL-3B_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/baidu_Qianfan-VL-3B_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/baidu_Qianfan-VL-3B_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/baidu_Qianfan-VL-3B_1.txt"
] |
| tencent/POINTS-Reader | 9.95 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"tencent/POINTS-Reader\", trust_remote_code=True)\n messages = [\n {\n \"role\": \"user\",\n \"content\": [\n {\"type\": \"image\", \"url\": \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG\"},\n {\"type\": \"text\", \"text\": \"What animal is on the candy?\"}\n ]\n },\n ]\n pipe(text=messages)\n with open('tencent_POINTS-Reader_0.txt', 'w') as f:\n f.write('Everything was good in tencent_POINTS-Reader_0.txt')\nexcept Exception as e:\n with open('tencent_POINTS-Reader_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='tencent_POINTS-Reader_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='tencent_POINTS-Reader_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"tencent/POINTS-Reader\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('tencent_POINTS-Reader_1.txt', 'w') as f:\n f.write('Everything was good in tencent_POINTS-Reader_1.txt')\nexcept Exception as e:\n with open('tencent_POINTS-Reader_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='tencent_POINTS-Reader_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='tencent_POINTS-Reader_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/tencent_POINTS-Reader_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/tencent_POINTS-Reader_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/tencent_POINTS-Reader_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/tencent_POINTS-Reader_1.txt"
] |
| deepseek-ai/DeepSeek-V3.1 | 1,657.55 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-V3.1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('deepseek-ai_DeepSeek-V3.1_0.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-V3.1_0.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-V3.1_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-V3.1_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-V3.1_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-V3.1\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-V3.1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n inputs = tokenizer.apply_chat_template(\n \tmessages,\n \tadd_generation_prompt=True,\n \ttokenize=True,\n \treturn_dict=True,\n \treturn_tensors=\"pt\",\n ).to(model.device)\n \n outputs = model.generate(**inputs, max_new_tokens=40)\n print(tokenizer.decode(outputs[0][inputs[\"input_ids\"].shape[-1]:]))\n with open('deepseek-ai_DeepSeek-V3.1_1.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-V3.1_1.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-V3.1_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-V3.1_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-V3.1_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-V3.1_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-V3.1_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-V3.1_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-V3.1_1.txt"
] |
| moonshotai/Kimi-K2-Instruct-0905 | 0 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"moonshotai/Kimi-K2-Instruct-0905\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('moonshotai_Kimi-K2-Instruct-0905_0.txt', 'w') as f:\n f.write('Everything was good in moonshotai_Kimi-K2-Instruct-0905_0.txt')\nexcept Exception as e:\n with open('moonshotai_Kimi-K2-Instruct-0905_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='moonshotai_Kimi-K2-Instruct-0905_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='moonshotai_Kimi-K2-Instruct-0905_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"moonshotai/Kimi-K2-Instruct-0905\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('moonshotai_Kimi-K2-Instruct-0905_1.txt', 'w') as f:\n f.write('Everything was good in moonshotai_Kimi-K2-Instruct-0905_1.txt')\nexcept Exception as e:\n with open('moonshotai_Kimi-K2-Instruct-0905_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='moonshotai_Kimi-K2-Instruct-0905_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='moonshotai_Kimi-K2-Instruct-0905_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/moonshotai_Kimi-K2-Instruct-0905_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/moonshotai_Kimi-K2-Instruct-0905_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moonshotai_Kimi-K2-Instruct-0905_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moonshotai_Kimi-K2-Instruct-0905_1.txt"
] |
| lmms-lab/LLaVA-OneVision-1.5-8B-Instruct | 20.65 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('lmms-lab_LLaVA-OneVision-1.5-8B-Instruct_0.txt', 'w') as f:\n f.write('Everything was good in lmms-lab_LLaVA-OneVision-1.5-8B-Instruct_0.txt')\nexcept Exception as e:\n with open('lmms-lab_LLaVA-OneVision-1.5-8B-Instruct_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='lmms-lab_LLaVA-OneVision-1.5-8B-Instruct_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='lmms-lab_LLaVA-OneVision-1.5-8B-Instruct_0.txt',\n repo_type='dataset',\n )"
] |
[
"DO NOT EXECUTE"
] |
[
"WAS NOT EXECUTED"
] |
| inclusionAI/Ling-flash-base-2.0 | 257.15 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"inclusionAI/Ling-flash-base-2.0\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('inclusionAI_Ling-flash-base-2.0_0.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ling-flash-base-2.0_0.txt')\nexcept Exception as e:\n with open('inclusionAI_Ling-flash-base-2.0_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ling-flash-base-2.0_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ling-flash-base-2.0_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"inclusionAI/Ling-flash-base-2.0\", trust_remote_code=True, torch_dtype=\"auto\")\n with open('inclusionAI_Ling-flash-base-2.0_1.txt', 'w') as f:\n f.write('Everything was good in inclusionAI_Ling-flash-base-2.0_1.txt')\nexcept Exception as e:\n with open('inclusionAI_Ling-flash-base-2.0_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='inclusionAI_Ling-flash-base-2.0_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='inclusionAI_Ling-flash-base-2.0_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ling-flash-base-2.0_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/inclusionAI_Ling-flash-base-2.0_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ling-flash-base-2.0_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/inclusionAI_Ling-flash-base-2.0_1.txt"
] |
| deepseek-ai/DeepSeek-R1 | 1,657.55 |
[
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('deepseek-ai_DeepSeek-R1_0.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-R1_0.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-R1_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-R1_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-R1_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n inputs = tokenizer.apply_chat_template(\n \tmessages,\n \tadd_generation_prompt=True,\n \ttokenize=True,\n \treturn_dict=True,\n \treturn_tensors=\"pt\",\n ).to(model.device)\n \n outputs = model.generate(**inputs, max_new_tokens=40)\n print(tokenizer.decode(outputs[0][inputs[\"input_ids\"].shape[-1]:]))\n with open('deepseek-ai_DeepSeek-R1_1.txt', 'w') as f:\n f.write('Everything was good in deepseek-ai_DeepSeek-R1_1.txt')\nexcept Exception as e:\n with open('deepseek-ai_DeepSeek-R1_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='deepseek-ai_DeepSeek-R1_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='deepseek-ai_DeepSeek-R1_1.txt',\n repo_type='dataset',\n )"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-R1_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/deepseek-ai_DeepSeek-R1_1.py"
] |
[
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-R1_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-R1_1.txt"
] |
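
Every entry in the scripts column follows the same pattern: a self-contained script with a PEP 723 inline-metadata header (`# /// script`, Python >= 3.12, transformers and torch), so it can be executed directly by a PEP 723-aware runner such as `uv run`. Each script tries the model's suggested transformers snippet inside a try/except, writes either the marker line "Everything was good in ..." or the full traceback to a local .txt file, and in the finally block uploads that file to the model-metadata/custom_code_execution_files dataset; the execution_urls column links to those logs. The sketch below is not part of the dataset itself; it is a minimal stdlib-only checker that fetches a few execution logs and reports which smoke tests passed, assuming only that a passing log begins with the success marker quoted above. The two URLs are samples copied from the table; extend the list with any other execution_urls.

```python
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
# Minimal sketch: classify execution logs as PASS or FAIL.
# A log that starts with the "Everything was good" marker means the
# script's try-block completed; anything else is a captured traceback.
from urllib.request import urlopen

# Sample entries from the execution_urls column above.
execution_urls = [
    "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/moondream_moondream3-preview_0.txt",
    "https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/deepseek-ai_DeepSeek-V3.1-Terminus_1.txt",
]

for url in execution_urls:
    with urlopen(url) as resp:
        log = resp.read().decode("utf-8", errors="replace")
    status = "PASS" if log.startswith("Everything was good") else "FAIL"
    print(f"{status}  {url.rsplit('/', 1)[-1]}")
```

Saved as, say, check_logs.py (a hypothetical file name), it runs with plain `python check_logs.py` or `uv run check_logs.py`; it needs no third-party packages, only network access to huggingface.co.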