# Dataset Viewer
| model_id (string) | vram (float64) | scripts (list) | code_urls (list) | execution_urls (list) |
|---|---|---|---|---|
apple/DiffuCoder-7B-cpGRPO | 18.44 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('DiffuCoder-7B-cpGRPO_0.txt', 'w') as f:\n f.write('Everything was good in DiffuCoder-7B-cpGRPO_0')\nexcept Exception as e:\n with open('DiffuCoder-7B-cpGRPO_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DiffuCoder-7B-cpGRPO_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DiffuCoder-7B-cpGRPO_0.txt',\n repo_type='dataset',\n )"
] | [
"DO NOT EXECUTE"
] | [
"WAS NOT EXECUTED"
] |
tngtech/DeepSeek-TNG-R1T2-Chimera | 3,315.1 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"tngtech/DeepSeek-TNG-R1T2-Chimera\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('DeepSeek-TNG-R1T2-Chimera_0.txt', 'w') as f:\n f.write('Everything was good in DeepSeek-TNG-R1T2-Chimera_0')\nexcept Exception as e:\n with open('DeepSeek-TNG-R1T2-Chimera_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DeepSeek-TNG-R1T2-Chimera_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DeepSeek-TNG-R1T2-Chimera_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"tngtech/DeepSeek-TNG-R1T2-Chimera\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"tngtech/DeepSeek-TNG-R1T2-Chimera\", trust_remote_code=True)\n with open('DeepSeek-TNG-R1T2-Chimera_1.txt', 'w') as f:\n f.write('Everything was good in DeepSeek-TNG-R1T2-Chimera_1')\nexcept Exception as e:\n with open('DeepSeek-TNG-R1T2-Chimera_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DeepSeek-TNG-R1T2-Chimera_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DeepSeek-TNG-R1T2-Chimera_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/DeepSeek-TNG-R1T2-Chimera_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/DeepSeek-TNG-R1T2-Chimera_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/DeepSeek-TNG-R1T2-Chimera_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/DeepSeek-TNG-R1T2-Chimera_1.txt"
] |
tencent/Hunyuan-A13B-Instruct | 389.33 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"tencent/Hunyuan-A13B-Instruct\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('Hunyuan-A13B-Instruct_0.txt', 'w') as f:\n f.write('Everything was good in Hunyuan-A13B-Instruct_0')\nexcept Exception as e:\n with open('Hunyuan-A13B-Instruct_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='Hunyuan-A13B-Instruct_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='Hunyuan-A13B-Instruct_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"tencent/Hunyuan-A13B-Instruct\", trust_remote_code=True)\n with open('Hunyuan-A13B-Instruct_1.txt', 'w') as f:\n f.write('Everything was good in Hunyuan-A13B-Instruct_1')\nexcept Exception as e:\n with open('Hunyuan-A13B-Instruct_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='Hunyuan-A13B-Instruct_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='Hunyuan-A13B-Instruct_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/Hunyuan-A13B-Instruct_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/Hunyuan-A13B-Instruct_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/Hunyuan-A13B-Instruct_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/Hunyuan-A13B-Instruct_1.txt"
] |
AIDC-AI/Ovis-U1-3B | 8.82 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"AIDC-AI/Ovis-U1-3B\", trust_remote_code=True)\n with open('Ovis-U1-3B_0.txt', 'w') as f:\n f.write('Everything was good in Ovis-U1-3B_0')\nexcept Exception as e:\n with open('Ovis-U1-3B_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='Ovis-U1-3B_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='Ovis-U1-3B_0.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/Ovis-U1-3B_0.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/Ovis-U1-3B_0.txt"
] |
baidu/ERNIE-4.5-21B-A3B-PT | 53.15 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"baidu/ERNIE-4.5-21B-A3B-PT\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('ERNIE-4.5-21B-A3B-PT_0.txt', 'w') as f:\n f.write('Everything was good in ERNIE-4.5-21B-A3B-PT_0')\nexcept Exception as e:\n with open('ERNIE-4.5-21B-A3B-PT_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='ERNIE-4.5-21B-A3B-PT_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='ERNIE-4.5-21B-A3B-PT_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"baidu/ERNIE-4.5-21B-A3B-PT\", trust_remote_code=True)\n with open('ERNIE-4.5-21B-A3B-PT_1.txt', 'w') as f:\n f.write('Everything was good in ERNIE-4.5-21B-A3B-PT_1')\nexcept Exception as e:\n with open('ERNIE-4.5-21B-A3B-PT_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='ERNIE-4.5-21B-A3B-PT_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='ERNIE-4.5-21B-A3B-PT_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/ERNIE-4.5-21B-A3B-PT_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/ERNIE-4.5-21B-A3B-PT_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/ERNIE-4.5-21B-A3B-PT_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/ERNIE-4.5-21B-A3B-PT_1.txt"
] |
jinaai/jina-embeddings-v4 | 9.09 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"jinaai/jina-embeddings-v4\", trust_remote_code=True)\n with open('jina-embeddings-v4_0.txt', 'w') as f:\n f.write('Everything was good in jina-embeddings-v4_0')\nexcept Exception as e:\n with open('jina-embeddings-v4_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='jina-embeddings-v4_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='jina-embeddings-v4_0.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/jina-embeddings-v4_0.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/jina-embeddings-v4_0.txt"
] |
moelanoby/phi-3-M3-coder | 9.26 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"moelanoby/phi-3-M3-coder\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('phi-3-M3-coder_0.txt', 'w') as f:\n f.write('Everything was good in phi-3-M3-coder_0')\nexcept Exception as e:\n with open('phi-3-M3-coder_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='phi-3-M3-coder_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='phi-3-M3-coder_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"moelanoby/phi-3-M3-coder\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"moelanoby/phi-3-M3-coder\", trust_remote_code=True)\n with open('phi-3-M3-coder_1.txt', 'w') as f:\n f.write('Everything was good in phi-3-M3-coder_1')\nexcept Exception as e:\n with open('phi-3-M3-coder_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='phi-3-M3-coder_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='phi-3-M3-coder_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/phi-3-M3-coder_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/phi-3-M3-coder_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/phi-3-M3-coder_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/phi-3-M3-coder_1.txt"
] |
apple/DiffuCoder-7B-Instruct | 18.44 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # ⚠️ Type of model/library unknown.\n \n # Feel free to open a Pull request \n # for integration of the huggingface model hub\n # into the corresponding library =)\n with open('DiffuCoder-7B-Instruct_0.txt', 'w') as f:\n f.write('Everything was good in DiffuCoder-7B-Instruct_0')\nexcept Exception as e:\n with open('DiffuCoder-7B-Instruct_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DiffuCoder-7B-Instruct_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DiffuCoder-7B-Instruct_0.txt',\n repo_type='dataset',\n )"
] | [
"DO NOT EXECUTE"
] | [
"WAS NOT EXECUTED"
] |
deepseek-ai/DeepSeek-R1-0528 | 1,657.55 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-R1-0528\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('DeepSeek-R1-0528_0.txt', 'w') as f:\n f.write('Everything was good in DeepSeek-R1-0528_0')\nexcept Exception as e:\n with open('DeepSeek-R1-0528_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DeepSeek-R1-0528_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DeepSeek-R1-0528_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-R1-0528\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-R1-0528\", trust_remote_code=True)\n with open('DeepSeek-R1-0528_1.txt', 'w') as f:\n f.write('Everything was good in DeepSeek-R1-0528_1')\nexcept Exception as e:\n with open('DeepSeek-R1-0528_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DeepSeek-R1-0528_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DeepSeek-R1-0528_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/DeepSeek-R1-0528_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/DeepSeek-R1-0528_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/DeepSeek-R1-0528_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/DeepSeek-R1-0528_1.txt"
] |
MiniMaxAI/MiniMax-M1-80k | 1,104.39 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"MiniMaxAI/MiniMax-M1-80k\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('MiniMax-M1-80k_0.txt', 'w') as f:\n f.write('Everything was good in MiniMax-M1-80k_0')\nexcept Exception as e:\n with open('MiniMax-M1-80k_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='MiniMax-M1-80k_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='MiniMax-M1-80k_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"MiniMaxAI/MiniMax-M1-80k\", trust_remote_code=True)\n with open('MiniMax-M1-80k_1.txt', 'w') as f:\n f.write('Everything was good in MiniMax-M1-80k_1')\nexcept Exception as e:\n with open('MiniMax-M1-80k_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='MiniMax-M1-80k_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='MiniMax-M1-80k_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/MiniMax-M1-80k_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/MiniMax-M1-80k_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/MiniMax-M1-80k_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/MiniMax-M1-80k_1.txt"
] |
baidu/ERNIE-4.5-0.3B-PT | 0 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"baidu/ERNIE-4.5-0.3B-PT\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('ERNIE-4.5-0.3B-PT_0.txt', 'w') as f:\n f.write('Everything was good in ERNIE-4.5-0.3B-PT_0')\nexcept Exception as e:\n with open('ERNIE-4.5-0.3B-PT_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='ERNIE-4.5-0.3B-PT_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='ERNIE-4.5-0.3B-PT_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"baidu/ERNIE-4.5-0.3B-PT\", trust_remote_code=True)\n with open('ERNIE-4.5-0.3B-PT_1.txt', 'w') as f:\n f.write('Everything was good in ERNIE-4.5-0.3B-PT_1')\nexcept Exception as e:\n with open('ERNIE-4.5-0.3B-PT_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='ERNIE-4.5-0.3B-PT_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='ERNIE-4.5-0.3B-PT_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/ERNIE-4.5-0.3B-PT_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/ERNIE-4.5-0.3B-PT_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/ERNIE-4.5-0.3B-PT_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/ERNIE-4.5-0.3B-PT_1.txt"
] |
Kwai-Keye/Keye-VL-8B-Preview | 21.06 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"Kwai-Keye/Keye-VL-8B-Preview\", trust_remote_code=True)\n with open('Keye-VL-8B-Preview_0.txt', 'w') as f:\n f.write('Everything was good in Keye-VL-8B-Preview_0')\nexcept Exception as e:\n with open('Keye-VL-8B-Preview_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='Keye-VL-8B-Preview_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='Keye-VL-8B-Preview_0.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/Keye-VL-8B-Preview_0.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/Keye-VL-8B-Preview_0.txt"
] |
IntervitensInc/pangu-pro-moe-model | 174.32 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"IntervitensInc/pangu-pro-moe-model\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('pangu-pro-moe-model_0.txt', 'w') as f:\n f.write('Everything was good in pangu-pro-moe-model_0')\nexcept Exception as e:\n with open('pangu-pro-moe-model_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='pangu-pro-moe-model_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='pangu-pro-moe-model_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModelForCausalLM\n model = AutoModelForCausalLM.from_pretrained(\"IntervitensInc/pangu-pro-moe-model\", trust_remote_code=True)\n with open('pangu-pro-moe-model_1.txt', 'w') as f:\n f.write('Everything was good in pangu-pro-moe-model_1')\nexcept Exception as e:\n with open('pangu-pro-moe-model_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='pangu-pro-moe-model_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='pangu-pro-moe-model_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/pangu-pro-moe-model_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/pangu-pro-moe-model_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/pangu-pro-moe-model_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/pangu-pro-moe-model_1.txt"
] |
Skywork/Skywork-R1V3-38B | 92.95 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"visual-question-answering\", model=\"Skywork/Skywork-R1V3-38B\", trust_remote_code=True)\n with open('Skywork-R1V3-38B_0.txt', 'w') as f:\n f.write('Everything was good in Skywork-R1V3-38B_0')\nexcept Exception as e:\n with open('Skywork-R1V3-38B_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='Skywork-R1V3-38B_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='Skywork-R1V3-38B_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"Skywork/Skywork-R1V3-38B\", trust_remote_code=True)\n with open('Skywork-R1V3-38B_1.txt', 'w') as f:\n f.write('Everything was good in Skywork-R1V3-38B_1')\nexcept Exception as e:\n with open('Skywork-R1V3-38B_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='Skywork-R1V3-38B_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='Skywork-R1V3-38B_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/Skywork-R1V3-38B_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/Skywork-R1V3-38B_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/Skywork-R1V3-38B_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/Skywork-R1V3-38B_1.txt"
] |
deepseek-ai/DeepSeek-R1 | 1,657.55 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"text-generation\", model=\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n messages = [\n {\"role\": \"user\", \"content\": \"Who are you?\"},\n ]\n pipe(messages)\n with open('DeepSeek-R1_0.txt', 'w') as f:\n f.write('Everything was good in DeepSeek-R1_0')\nexcept Exception as e:\n with open('DeepSeek-R1_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DeepSeek-R1_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DeepSeek-R1_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoTokenizer, AutoModelForCausalLM\n \n tokenizer = AutoTokenizer.from_pretrained(\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n model = AutoModelForCausalLM.from_pretrained(\"deepseek-ai/DeepSeek-R1\", trust_remote_code=True)\n with open('DeepSeek-R1_1.txt', 'w') as f:\n f.write('Everything was good in DeepSeek-R1_1')\nexcept Exception as e:\n with open('DeepSeek-R1_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='DeepSeek-R1_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='DeepSeek-R1_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/DeepSeek-R1_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/DeepSeek-R1_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/DeepSeek-R1_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/DeepSeek-R1_1.txt"
] |
baidu/ERNIE-4.5-VL-424B-A47B-Base-PT | 1,025.54 | [
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Use a pipeline as a high-level helper\n from transformers import pipeline\n \n pipe = pipeline(\"image-text-to-text\", model=\"baidu/ERNIE-4.5-VL-424B-A47B-Base-PT\", trust_remote_code=True)\n messages = [\n {\n \"role\": \"user\",\n \"content\": [\n {\"type\": \"image\", \"url\": \"https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG\"},\n {\"type\": \"text\", \"text\": \"What animal is on the candy?\"}\n ]\n },\n ]\n pipe(text=messages)\n with open('ERNIE-4.5-VL-424B-A47B-Base-PT_0.txt', 'w') as f:\n f.write('Everything was good in ERNIE-4.5-VL-424B-A47B-Base-PT_0')\nexcept Exception as e:\n with open('ERNIE-4.5-VL-424B-A47B-Base-PT_0.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='ERNIE-4.5-VL-424B-A47B-Base-PT_0.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='ERNIE-4.5-VL-424B-A47B-Base-PT_0.txt',\n repo_type='dataset',\n )",
"# /// script\n# requires-python = \">=3.12\"\n# dependencies = [\n# \"transformers\",\n# \"torch\",\n# ]\n# ///\n\ntry:\n # Load model directly\n from transformers import AutoModel\n model = AutoModel.from_pretrained(\"baidu/ERNIE-4.5-VL-424B-A47B-Base-PT\", trust_remote_code=True)\n with open('ERNIE-4.5-VL-424B-A47B-Base-PT_1.txt', 'w') as f:\n f.write('Everything was good in ERNIE-4.5-VL-424B-A47B-Base-PT_1')\nexcept Exception as e:\n with open('ERNIE-4.5-VL-424B-A47B-Base-PT_1.txt', 'w') as f:\n import traceback\n traceback.print_exc(file=f)\nfinally:\n from huggingface_hub import upload_file\n upload_file(\n path_or_fileobj='ERNIE-4.5-VL-424B-A47B-Base-PT_1.txt',\n repo_id='model-metadata/custom_code_execution_files',\n path_in_repo='ERNIE-4.5-VL-424B-A47B-Base-PT_1.txt',\n repo_type='dataset',\n )"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/ERNIE-4.5-VL-424B-A47B-Base-PT_0.py",
"https://huggingface.co/datasets/model-metadata/custom_code_py_files/raw/main/ERNIE-4.5-VL-424B-A47B-Base-PT_1.py"
] | [
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/ERNIE-4.5-VL-424B-A47B-Base-PT_0.txt",
"https://huggingface.co/datasets/model-metadata/custom_code_execution_files/raw/main/ERNIE-4.5-VL-424B-A47B-Base-PT_1.txt"
] |
README.md exists but content is empty.
Downloads last month: 0