Dataset Viewer

| suite_config (dict) | split (stringclasses: 1 value) | results (listlengths: 1–11) | submission (dict) |
|---|---|---|---|
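The rows below are easiest to inspect programmatically rather than by reading the raw JSON cells. A minimal sketch using the `datasets` library is shown here; the repository id is an assumption inferred from the `logs_url_public` field in the submission metadata (`allenai/asta-bench-submissions`), not something this page confirms, and the column names (`suite_config`, `split`, `results`, `submission`) are taken from the header above.

```python
# Hedged sketch: load and inspect rows shaped like the ones shown below.
# Assumption: the repo id below is hypothetical, inferred from the submission's
# logs_url_public; adjust the id (and any config name) to the actual dataset.
from datasets import load_dataset

ds = load_dataset("allenai/asta-bench-submissions", split="validation")  # hypothetical repo id

row = ds[0]
print(row["suite_config"]["name"], row["suite_config"]["version"])  # e.g. asta-bench 1.0.0
print(row["split"])                                                 # e.g. "validation"
print(len(row["results"]), "task results in this submission")
print(row["submission"]["agent_name"])                              # e.g. "Asta Paper Finder"
```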
{
"name": "asta-bench",
"version": "1.0.0",
"splits": [
{
"name": "validation",
"tasks": [
{
"name": "ArxivDIGESTables_Clean_validation",
"path": "astabench/arxivdigestables_validation",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_validation",
"path": "astabench/sqa_dev",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_validation",
"path": "astabench/litqa2_validation",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "PaperFindingBench_validation",
"path": "astabench/paper_finder_validation",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_validation",
"path": "astabench/paper_finder_litqa2_validation",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_validation",
"path": "astabench/discoverybench_validation",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_validation",
"path": "astabench/core_bench_validation",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_validation",
"path": "astabench/ds1000_validation",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_validation",
"path": "astabench/e2e_discovery_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_validation",
"path": "astabench/e2e_discovery_hard_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_validation",
"path": "astabench/super_validation",
"primary_metric": "entrypoint/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_validation",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_validation",
"weight": 0.5
}
]
},
{
"name": "test",
"tasks": [
{
"name": "PaperFindingBench_test",
"path": "astabench/paper_finder_test",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_test",
"path": "astabench/paper_finder_litqa2_test",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_test",
"path": "astabench/sqa_test",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "ArxivDIGESTables_Clean_test",
"path": "astabench/arxivdigestables_test",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_test",
"path": "astabench/litqa2_test",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_test",
"path": "astabench/discoverybench_test",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_test",
"path": "astabench/core_bench_test",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_test",
"path": "astabench/ds1000_test",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_test",
"path": "astabench/e2e_discovery_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_test",
"path": "astabench/e2e_discovery_hard_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_test",
"path": "astabench/super_test",
"primary_metric": "output_match/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_test",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_test",
"weight": 0.5
}
]
}
]
}
|
validation
|
[
{
"task_name": "PaperFindingBench_validation",
"eval_spec": {
"solver": "astabench/solvers/search/paper_finder.py@ai2i_paper_finder",
"solver_args": "{\"base_url\": \"http://35.247.123.160:8000\"}",
"model": "openai/gpt-4o-mini",
"model_args": "{}",
"task_args": "{\"with_search_tools\": false}",
"revision": {
"type": "git",
"origin": "https://github.com/allenai/asta-bench.git",
"commit": "ca5b0ad"
},
"packages": "{\"inspect_ai\": \"0.3.114\"}"
},
"metrics": [
{
"name": "score_paper_finder/semantic_f1",
"value": 0.3991389077954741
},
{
"name": "score_paper_finder/specific_f1",
"value": 0.2
},
{
"name": "score_paper_finder/metadata_f1",
"value": 0.4869762288477035
},
{
"name": "score_paper_finder/adjusted_f1_micro_avg",
"value": 0.3796132940146119
},
{
"name": "score_paper_finder/stderr",
"value": 0.029428016906952455
}
],
"model_usages": [
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3490,
"output_tokens": 28,
"total_tokens": 3518,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 168134,
"output_tokens": 34180,
"total_tokens": 202314,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3495,
"output_tokens": 36,
"total_tokens": 3531,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 339782,
"output_tokens": 76846,
"total_tokens": 416628,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3486,
"output_tokens": 29,
"total_tokens": 3515,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 560531,
"output_tokens": 94799,
"total_tokens": 655330,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3489,
"output_tokens": 32,
"total_tokens": 3521,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 421496,
"output_tokens": 74287,
"total_tokens": 495783,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3499,
"output_tokens": 48,
"total_tokens": 3547,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 608301,
"output_tokens": 100811,
"total_tokens": 709112,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3499,
"output_tokens": 37,
"total_tokens": 3536,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 715599,
"output_tokens": 118139,
"total_tokens": 833738,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3498,
"output_tokens": 38,
"total_tokens": 3536,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 481360,
"output_tokens": 101699,
"total_tokens": 583059,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3494,
"output_tokens": 34,
"total_tokens": 3528,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 674838,
"output_tokens": 102341,
"total_tokens": 777179,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3504,
"output_tokens": 44,
"total_tokens": 3548,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 443033,
"output_tokens": 87187,
"total_tokens": 530220,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3499,
"output_tokens": 49,
"total_tokens": 3548,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 136628,
"output_tokens": 28219,
"total_tokens": 164847,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3503,
"output_tokens": 35,
"total_tokens": 3538,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 277043,
"output_tokens": 39367,
"total_tokens": 316410,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3501,
"output_tokens": 42,
"total_tokens": 3543,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 823767,
"output_tokens": 122200,
"total_tokens": 945967,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3540,
"output_tokens": 68,
"total_tokens": 3608,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 767663,
"output_tokens": 140144,
"total_tokens": 907807,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3503,
"output_tokens": 33,
"total_tokens": 3536,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 168158,
"output_tokens": 36183,
"total_tokens": 204341,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 29,
"total_tokens": 3517,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 263367,
"output_tokens": 46650,
"total_tokens": 310017,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3490,
"output_tokens": 24,
"total_tokens": 3514,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 253486,
"output_tokens": 55352,
"total_tokens": 308838,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3511,
"output_tokens": 36,
"total_tokens": 3547,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 223203,
"output_tokens": 41457,
"total_tokens": 264660,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 23,
"total_tokens": 3511,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 156487,
"output_tokens": 31058,
"total_tokens": 187545,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3525,
"output_tokens": 69,
"total_tokens": 3594,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 575980,
"output_tokens": 96635,
"total_tokens": 672615,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3497,
"output_tokens": 38,
"total_tokens": 3535,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 751168,
"output_tokens": 133345,
"total_tokens": 884513,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3491,
"output_tokens": 32,
"total_tokens": 3523,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 564152,
"output_tokens": 95719,
"total_tokens": 659871,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3504,
"output_tokens": 48,
"total_tokens": 3552,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 460566,
"output_tokens": 96436,
"total_tokens": 557002,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3500,
"output_tokens": 38,
"total_tokens": 3538,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 608547,
"output_tokens": 50993,
"total_tokens": 659540,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 33,
"total_tokens": 3521,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 552088,
"output_tokens": 75059,
"total_tokens": 627147,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3487,
"output_tokens": 45,
"total_tokens": 3532,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 317688,
"output_tokens": 58623,
"total_tokens": 376311,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3493,
"output_tokens": 36,
"total_tokens": 3529,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 723708,
"output_tokens": 83932,
"total_tokens": 807640,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3505,
"output_tokens": 49,
"total_tokens": 3554,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 417704,
"output_tokens": 88948,
"total_tokens": 506652,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 29,
"total_tokens": 3513,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 731872,
"output_tokens": 116456,
"total_tokens": 848328,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3504,
"output_tokens": 45,
"total_tokens": 3549,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 348601,
"output_tokens": 69961,
"total_tokens": 418562,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3487,
"output_tokens": 28,
"total_tokens": 3515,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 345232,
"output_tokens": 71406,
"total_tokens": 416638,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3481,
"output_tokens": 24,
"total_tokens": 3505,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 122966,
"output_tokens": 20184,
"total_tokens": 143150,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3494,
"output_tokens": 36,
"total_tokens": 3530,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 776706,
"output_tokens": 84228,
"total_tokens": 860934,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 45,
"total_tokens": 3533,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 639421,
"output_tokens": 110616,
"total_tokens": 750037,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3502,
"output_tokens": 45,
"total_tokens": 3547,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 659376,
"output_tokens": 141704,
"total_tokens": 801080,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 25,
"total_tokens": 3509,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 317831,
"output_tokens": 54628,
"total_tokens": 372459,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3492,
"output_tokens": 41,
"total_tokens": 3533,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 315342,
"output_tokens": 43774,
"total_tokens": 359116,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 33,
"total_tokens": 3521,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 644873,
"output_tokens": 112266,
"total_tokens": 757139,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 28,
"total_tokens": 3512,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 521722,
"output_tokens": 74452,
"total_tokens": 596174,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3498,
"output_tokens": 35,
"total_tokens": 3533,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 606261,
"output_tokens": 118499,
"total_tokens": 724760,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3501,
"output_tokens": 40,
"total_tokens": 3541,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 388984,
"output_tokens": 90648,
"total_tokens": 479632,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 25,
"total_tokens": 3509,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 539753,
"output_tokens": 88238,
"total_tokens": 627991,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 29,
"total_tokens": 3517,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 490439,
"output_tokens": 77388,
"total_tokens": 567827,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 28,
"total_tokens": 3512,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 342393,
"output_tokens": 63366,
"total_tokens": 405759,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3523,
"output_tokens": 66,
"total_tokens": 3589,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 509291,
"output_tokens": 99194,
"total_tokens": 608485,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3486,
"output_tokens": 28,
"total_tokens": 3514,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 165938,
"output_tokens": 28813,
"total_tokens": 194751,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3500,
"output_tokens": 41,
"total_tokens": 3541,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 295853,
"output_tokens": 44789,
"total_tokens": 340642,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3498,
"output_tokens": 39,
"total_tokens": 3537,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 660150,
"output_tokens": 108437,
"total_tokens": 768587,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3494,
"output_tokens": 35,
"total_tokens": 3529,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 789532,
"output_tokens": 114391,
"total_tokens": 903923,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3481,
"output_tokens": 67,
"total_tokens": 3548,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 31897,
"output_tokens": 5003,
"total_tokens": 36900,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 86,
"total_tokens": 3570,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4841,
"output_tokens": 284,
"total_tokens": 5125,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 70,
"total_tokens": 3554,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4976,
"output_tokens": 311,
"total_tokens": 5287,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3481,
"output_tokens": 23,
"total_tokens": 3504,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5424,
"output_tokens": 377,
"total_tokens": 5801,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3478,
"output_tokens": 39,
"total_tokens": 3517,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4859,
"output_tokens": 297,
"total_tokens": 5156,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3477,
"output_tokens": 39,
"total_tokens": 3516,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4846,
"output_tokens": 333,
"total_tokens": 5179,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3480,
"output_tokens": 43,
"total_tokens": 3523,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5312,
"output_tokens": 362,
"total_tokens": 5674,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3477,
"output_tokens": 48,
"total_tokens": 3525,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5178,
"output_tokens": 274,
"total_tokens": 5452,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3478,
"output_tokens": 41,
"total_tokens": 3519,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5345,
"output_tokens": 366,
"total_tokens": 5711,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3480,
"output_tokens": 36,
"total_tokens": 3516,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4872,
"output_tokens": 333,
"total_tokens": 5205,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3482,
"output_tokens": 37,
"total_tokens": 3519,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4337,
"output_tokens": 366,
"total_tokens": 4703,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3508,
"output_tokens": 223,
"total_tokens": 3731,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4980,
"output_tokens": 329,
"total_tokens": 5309,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3488,
"output_tokens": 97,
"total_tokens": 3585,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4447,
"output_tokens": 334,
"total_tokens": 4781,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3493,
"output_tokens": 81,
"total_tokens": 3574,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5223,
"output_tokens": 260,
"total_tokens": 5483,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3484,
"output_tokens": 111,
"total_tokens": 3595,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4997,
"output_tokens": 363,
"total_tokens": 5360,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 5583,
"output_tokens": 273,
"total_tokens": 5856,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5578,
"output_tokens": 345,
"total_tokens": 5923,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3489,
"output_tokens": 54,
"total_tokens": 3543,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 4703,
"output_tokens": 302,
"total_tokens": 5005,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3514,
"output_tokens": 104,
"total_tokens": 3618,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5271,
"output_tokens": 300,
"total_tokens": 5571,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
]
],
"model_costs": [
0.0394904,
0.0738141,
0.10297769999999999,
0.0809069,
0.110382,
0.127933,
0.09794059999999999,
0.1174952,
0.0883781,
0.0341879,
0.0525586,
0.1404292,
0.1423539,
0.040376499999999996,
0.054006700000000005,
0.0564544,
0.048040599999999996,
0.037021899999999996,
0.1057545,
0.13757729999999999,
0.1037503,
0.09387099999999998,
0.0903819,
0.09428239999999999,
0.06438550000000001,
0.1150361,
0.08660209999999999,
0.12876959999999998,
0.0720545,
0.0720831,
0.029312699999999997,
0.12045679999999999,
0.11735849999999999,
0.1318242,
0.06259429999999999,
0.0581838,
0.1184437,
0.090943,
0.11712070000000001,
0.08431009999999999,
0.0982305,
0.0890091,
0.0685757,
0.1000742,
0.037114,
0.0566609,
0.11852479999999999,
0.1337946,
0.0145634,
0.0101677,
0.010032000000000001,
0.0096257,
0.0096897,
0.0097003,
0.009806,
0.0097999,
0.009785900000000002,
0.0096804,
0.009655100000000002,
0.011629599999999999,
0.010268300000000001,
0.0101688,
0.010464900000000001,
0.0173833,
0.009853600000000002,
0.0104721
]
},
{
"task_name": "LitQA2_FullText_Search_validation",
"eval_spec": {
"solver": "astabench/solvers/search/paper_finder.py@ai2i_paper_finder",
"solver_args": "{\"base_url\": \"http://35.247.123.160:8000\"}",
"model": "openai/gpt-4o-mini",
"model_args": "{}",
"task_args": "{\"with_search_tools\": false}",
"revision": {
"type": "git",
"origin": "https://github.com/allenai/asta-bench.git",
"commit": "ca5b0ad"
},
"packages": "{\"inspect_ai\": \"0.3.114\"}"
},
"metrics": [
{
"name": "score_paper_finder/recall_at_30",
"value": 0.8
},
{
"name": "score_paper_finder/stderr",
"value": 0.13333333333333333
}
],
"model_usages": [
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3495,
"output_tokens": 39,
"total_tokens": 3534,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 678272,
"output_tokens": 119314,
"total_tokens": 797586,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3517,
"output_tokens": 65,
"total_tokens": 3582,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 627944,
"output_tokens": 97148,
"total_tokens": 725092,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3506,
"output_tokens": 44,
"total_tokens": 3550,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 811496,
"output_tokens": 66162,
"total_tokens": 877658,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3540,
"output_tokens": 71,
"total_tokens": 3611,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 779809,
"output_tokens": 108070,
"total_tokens": 887879,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3491,
"output_tokens": 35,
"total_tokens": 3526,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 677015,
"output_tokens": 65326,
"total_tokens": 742341,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3498,
"output_tokens": 41,
"total_tokens": 3539,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 5160,
"output_tokens": 395,
"total_tokens": 5555,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3494,
"output_tokens": 35,
"total_tokens": 3529,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 735209,
"output_tokens": 90043,
"total_tokens": 825252,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3495,
"output_tokens": 36,
"total_tokens": 3531,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 695687,
"output_tokens": 114082,
"total_tokens": 809769,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3491,
"output_tokens": 33,
"total_tokens": 3524,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 706809,
"output_tokens": 58363,
"total_tokens": 765172,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
],
[
{
"model": "openai/gpt-4o-2024-08-06",
"usage": {
"input_tokens": 3496,
"output_tokens": 40,
"total_tokens": 3536,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
},
{
"model": "gemini/gemini-2.0-flash",
"usage": {
"input_tokens": 323996,
"output_tokens": 63804,
"total_tokens": 387800,
"input_tokens_cache_write": null,
"input_tokens_cache_read": null,
"reasoning_tokens": null
}
}
]
],
"model_costs": [
0.12468029999999998,
0.1110961,
0.1168194,
0.1307689,
0.1029094,
0.009829000000000001,
0.1186231,
0.12429899999999999,
0.10308359999999998,
0.0670612
]
}
] |
{
"submit_time": "2025-08-07T19:06:21.862000",
"username": "Ai2",
"agent_name": "Asta Paper Finder",
"agent_description": null,
"agent_url": null,
"logs_url": null,
"logs_url_public": "hf://datasets/allenai/asta-bench-submissions/1.0.0-dev1/validation/aryeh_tiktinsky_ai2_Asta_Paper_Finder_2025-08-07T19-06-21",
"summary_url": null,
"openness": "Open source & closed weights",
"tool_usage": "Custom interface"
}
|
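The `suite_config` above pairs each task with a tag (`lit`, `data`, `code`, `discovery`) and a primary metric, and its `macro_average_weight_adjustments` down-weight the two LitQA2 tasks to 0.5 within the `lit` tag. The sketch below shows one way a per-tag macro average could be computed from a row's `suite_config` and `results`; the aggregation rule (weight 1.0 per task unless adjusted, weighted mean of primary metrics) is an assumption, not the leaderboard's confirmed formula.

```python
# Hedged sketch: per-tag macro average from one dataset row.
# Assumption (not confirmed by this page): each task contributes weight 1.0 within
# each of its tags unless listed in macro_average_weight_adjustments, and the tag
# score is the weighted mean of the tasks' primary metrics.
from collections import defaultdict


def tag_macro_averages(suite_split: dict, results: list[dict]) -> dict[str, float]:
    # Map task name -> (tags, primary metric name) from the suite config split.
    task_info = {t["name"]: (t["tags"], t["primary_metric"]) for t in suite_split["tasks"]}
    # Map (tag, task) -> adjusted weight, e.g. ("lit", "LitQA2_FullText_Search_validation") -> 0.5.
    weights = {(a["tag"], a["task"]): a["weight"]
               for a in suite_split.get("macro_average_weight_adjustments", [])}

    sums, totals = defaultdict(float), defaultdict(float)
    for result in results:
        tags, metric_name = task_info[result["task_name"]]
        value = next(m["value"] for m in result["metrics"] if m["name"] == metric_name)
        for tag in tags:
            w = weights.get((tag, result["task_name"]), 1.0)
            sums[tag] += w * value
            totals[tag] += w
    return {tag: sums[tag] / totals[tag] for tag in sums}


# Usage on one row, assuming the "validation" split is the first entry:
# scores = tag_macro_averages(row["suite_config"]["splits"][0], row["results"])
```

Applied to the first row above, which reports two `lit` tasks, this would return a weighted mean of the PaperFindingBench adjusted F1 and the down-weighted LitQA2_FullText_Search recall@30.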
{
"name": "asta-bench",
"version": "1.0.0",
"splits": [
{
"name": "validation",
"tasks": [
{
"name": "ArxivDIGESTables_Clean_validation",
"path": "astabench/arxivdigestables_validation",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_validation",
"path": "astabench/sqa_dev",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_validation",
"path": "astabench/litqa2_validation",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "PaperFindingBench_validation",
"path": "astabench/paper_finder_validation",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_validation",
"path": "astabench/paper_finder_litqa2_validation",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_validation",
"path": "astabench/discoverybench_validation",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_validation",
"path": "astabench/core_bench_validation",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_validation",
"path": "astabench/ds1000_validation",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_validation",
"path": "astabench/e2e_discovery_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_validation",
"path": "astabench/e2e_discovery_hard_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_validation",
"path": "astabench/super_validation",
"primary_metric": "entrypoint/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_validation",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_validation",
"weight": 0.5
}
]
},
{
"name": "test",
"tasks": [
{
"name": "PaperFindingBench_test",
"path": "astabench/paper_finder_test",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_test",
"path": "astabench/paper_finder_litqa2_test",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_test",
"path": "astabench/sqa_test",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "ArxivDIGESTables_Clean_test",
"path": "astabench/arxivdigestables_test",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_test",
"path": "astabench/litqa2_test",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_test",
"path": "astabench/discoverybench_test",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_test",
"path": "astabench/core_bench_test",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_test",
"path": "astabench/ds1000_test",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_test",
"path": "astabench/e2e_discovery_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_test",
"path": "astabench/e2e_discovery_hard_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_test",
"path": "astabench/super_test",
"primary_metric": "output_match/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_test",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_test",
"weight": 0.5
}
]
}
]
}
|
validation
|
[
{
"task_name": "PaperFindingBench_validation",
"eval_spec": {
"solver": "/home/aryeht/PycharmProjects/asta-bench/astabench/solvers/search/youcom_search.py@youcom_solver",
"solver_args": "{}",
"model": "openai/gpt-4o-mini",
"model_args": "{}",
"task_args": "{\"with_search_tools\": false}",
"revision": {
"type": "git",
"origin": "https://github.com/allenai/asta-bench.git",
"commit": "ce99d55"
},
"packages": "{\"inspect_ai\": \"0.3.106\"}"
},
"metrics": [
{
"name": "score_paper_finder/semantic_f1",
"value": 0.030522991829073212
},
{
"name": "score_paper_finder/specific_f1",
"value": 0.286475468975469
},
{
"name": "score_paper_finder/metadata_f1",
"value": 0
},
{
"name": "score_paper_finder/adjusted_f1_micro_avg",
"value": 0.06560391359924551
},
{
"name": "score_paper_finder/stderr",
"value": 0.014752214406566292
}
],
"model_usages": [
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1422,
"output_tokens": 179,
"total_tokens": 1601,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1447,
"output_tokens": 243,
"total_tokens": 1690,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1358,
"output_tokens": 224,
"total_tokens": 1582,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1476,
"output_tokens": 228,
"total_tokens": 1704,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1436,
"output_tokens": 202,
"total_tokens": 1638,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1405,
"output_tokens": 212,
"total_tokens": 1617,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1404,
"output_tokens": 222,
"total_tokens": 1626,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1474,
"output_tokens": 255,
"total_tokens": 1729,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1360,
"output_tokens": 210,
"total_tokens": 1570,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1348,
"output_tokens": 196,
"total_tokens": 1544,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1428,
"output_tokens": 186,
"total_tokens": 1614,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1496,
"output_tokens": 229,
"total_tokens": 1725,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1416,
"output_tokens": 228,
"total_tokens": 1644,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1386,
"output_tokens": 214,
"total_tokens": 1600,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1496,
"output_tokens": 214,
"total_tokens": 1710,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1488,
"output_tokens": 297,
"total_tokens": 1785,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1449,
"output_tokens": 223,
"total_tokens": 1672,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 7254,
"output_tokens": 1108,
"total_tokens": 8362,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1419,
"output_tokens": 262,
"total_tokens": 1681,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1443,
"output_tokens": 249,
"total_tokens": 1692,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1392,
"output_tokens": 184,
"total_tokens": 1576,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1493,
"output_tokens": 269,
"total_tokens": 1762,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1475,
"output_tokens": 212,
"total_tokens": 1687,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1457,
"output_tokens": 261,
"total_tokens": 1718,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1469,
"output_tokens": 201,
"total_tokens": 1670,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1464,
"output_tokens": 211,
"total_tokens": 1675,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1519,
"output_tokens": 274,
"total_tokens": 1793,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1426,
"output_tokens": 194,
"total_tokens": 1620,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1441,
"output_tokens": 194,
"total_tokens": 1635,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1488,
"output_tokens": 205,
"total_tokens": 1693,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1499,
"output_tokens": 225,
"total_tokens": 1724,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1481,
"output_tokens": 195,
"total_tokens": 1676,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1483,
"output_tokens": 211,
"total_tokens": 1694,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1365,
"output_tokens": 167,
"total_tokens": 1532,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1351,
"output_tokens": 216,
"total_tokens": 1567,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1451,
"output_tokens": 201,
"total_tokens": 1652,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1340,
"output_tokens": 165,
"total_tokens": 1505,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1474,
"output_tokens": 255,
"total_tokens": 1729,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1384,
"output_tokens": 224,
"total_tokens": 1608,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1462,
"output_tokens": 216,
"total_tokens": 1678,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1432,
"output_tokens": 244,
"total_tokens": 1676,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1368,
"output_tokens": 178,
"total_tokens": 1546,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1444,
"output_tokens": 236,
"total_tokens": 1680,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1478,
"output_tokens": 280,
"total_tokens": 1758,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1441,
"output_tokens": 207,
"total_tokens": 1648,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1523,
"output_tokens": 246,
"total_tokens": 1769,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1438,
"output_tokens": 204,
"total_tokens": 1642,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 6844,
"output_tokens": 1018,
"total_tokens": 7862,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1434,
"output_tokens": 257,
"total_tokens": 1691,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1413,
"output_tokens": 190,
"total_tokens": 1603,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1516,
"output_tokens": 240,
"total_tokens": 1756,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1468,
"output_tokens": 273,
"total_tokens": 1741,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1499,
"output_tokens": 211,
"total_tokens": 1710,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1224,
"output_tokens": 161,
"total_tokens": 1385,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1392,
"output_tokens": 230,
"total_tokens": 1622,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1479,
"output_tokens": 272,
"total_tokens": 1751,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1458,
"output_tokens": 157,
"total_tokens": 1615,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1367,
"output_tokens": 148,
"total_tokens": 1515,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1281,
"output_tokens": 136,
"total_tokens": 1417,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1040,
"output_tokens": 146,
"total_tokens": 1186,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1252,
"output_tokens": 171,
"total_tokens": 1423,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1476,
"output_tokens": 327,
"total_tokens": 1803,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1334,
"output_tokens": 158,
"total_tokens": 1492,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1387,
"output_tokens": 196,
"total_tokens": 1583,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1399,
"output_tokens": 193,
"total_tokens": 1592,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1494,
"output_tokens": 223,
"total_tokens": 1717,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
]
],
"model_costs": [
0.0003207,
0.0003628499999999999,
0.00033810000000000003,
0.00035820000000000003,
0.0003366,
0.00033795000000000006,
0.0003438,
0.00037410000000000004,
0.00033,
0.0003198,
0.0003258,
0.00036180000000000007,
0.0003492,
0.00033630000000000004,
0.0003528,
0.0004014,
0.00035114999999999994,
0.0017529000000000008,
0.0003700499999999999,
0.00036585,
0.0003191999999999999,
0.00038535,
0.00034844999999999993,
0.00037515,
0.00034094999999999997,
0.0003462,
0.00039225000000000013,
0.00033030000000000006,
0.00033255,
0.0003462,
0.00035985000000000005,
0.00033915,
0.0003490500000000001,
0.00030494999999999996,
0.00033225,
0.00033825,
0.00030000000000000003,
0.0003741,
0.000342,
0.0003489000000000001,
0.0003612,
0.000312,
0.0003582,
0.0003897,
0.00034034999999999995,
0.00037605,
0.0003381,
0.0016373999999999996,
0.0003693,
0.00032595,
0.0003714,
0.000384,
0.00035145,
0.0002802,
0.0003468,
0.00038505,
0.00031289999999999996,
0.00029385,
0.00027374999999999996,
0.0002436,
0.0002904,
0.00041759999999999996,
0.0002949,
0.00032565,
0.0003256499999999999,
0.00035789999999999997
]
},
{
"task_name": "LitQA2_FullText_Search_validation",
"eval_spec": {
"solver": "/home/aryeht/PycharmProjects/asta-bench/astabench/solvers/search/youcom_search.py@youcom_solver",
"solver_args": "{}",
"model": "openai/gpt-4o-mini",
"model_args": "{}",
"task_args": "{\"with_search_tools\": false}",
"revision": {
"type": "git",
"origin": "https://github.com/allenai/asta-bench.git",
"commit": "ce99d55"
},
"packages": "{\"inspect_ai\": \"0.3.106\"}"
},
"metrics": [
{
"name": "score_paper_finder/recall_at_30",
"value": 0.5
},
{
"name": "score_paper_finder/stderr",
"value": 0.16666666666666666
}
],
"model_usages": [
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1474,
"output_tokens": 288,
"total_tokens": 1762,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1486,
"output_tokens": 328,
"total_tokens": 1814,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1511,
"output_tokens": 288,
"total_tokens": 1799,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1373,
"output_tokens": 288,
"total_tokens": 1661,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1548,
"output_tokens": 338,
"total_tokens": 1886,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1558,
"output_tokens": 365,
"total_tokens": 1923,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1434,
"output_tokens": 186,
"total_tokens": 1620,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1517,
"output_tokens": 319,
"total_tokens": 1836,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1205,
"output_tokens": 209,
"total_tokens": 1414,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
],
[
{
"model": "gpt-4o-mini-2024-07-18",
"usage": {
"input_tokens": 1519,
"output_tokens": 400,
"total_tokens": 1919,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 0,
"reasoning_tokens": 0
}
}
]
],
"model_costs": [
0.0003938999999999999,
0.0004197,
0.00039944999999999987,
0.0003787499999999999,
0.000435,
0.0004527,
0.0003267,
0.0004189500000000001,
0.00030615000000000004,
0.00046784999999999985
]
}
] |
{
"submit_time": "2025-08-07T18:54:02.182000",
"username": "Ai2",
"agent_name": "You.com Search API",
"agent_description": null,
"agent_url": null,
"logs_url": null,
"logs_url_public": "hf://datasets/allenai/asta-bench-submissions/1.0.0-dev1/validation/aryeh_tiktinsky_ai2_You.com_Search_API_2025-08-07T18-54-02",
"summary_url": null,
"openness": "Closed source & API available",
"tool_usage": "Fully custom"
}
|
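The per-sample model_costs arrays above appear to track the corresponding usage entries directly. Below is a minimal sketch of that reconstruction, assuming standard gpt-4o-mini list prices (USD 0.15 per 1M input tokens, USD 0.60 per 1M output tokens, and a discounted rate for cached input); the prices and the cached-input discount are assumptions, not values stored in this dataset.

```python
# Sketch: reconstruct a per-sample cost from one usage record.
# The prices below are assumed list prices (USD per 1M tokens); they are not
# part of the dataset itself.
ASSUMED_PRICES = {
    "gpt-4o-mini-2024-07-18": {"input": 0.15, "cached_input": 0.075, "output": 0.60},
}

def estimate_cost(model: str, usage: dict) -> float:
    """Estimate the USD cost of one sample from its token usage."""
    prices = ASSUMED_PRICES[model]
    cached = usage.get("input_tokens_cache_read") or 0
    uncached = usage["input_tokens"] - cached
    return (
        uncached * prices["input"]
        + cached * prices["cached_input"]
        + usage["output_tokens"] * prices["output"]
    ) / 1_000_000

# Example: the first LitQA2_FullText_Search_validation usage record above
usage = {"input_tokens": 1474, "output_tokens": 288, "input_tokens_cache_read": 0}
print(estimate_cost("gpt-4o-mini-2024-07-18", usage))  # ~0.0003939, matching the first model_costs entry
```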
{
"name": "asta-bench",
"version": "1.0.0",
"splits": [
{
"name": "validation",
"tasks": [
{
"name": "ArxivDIGESTables_Clean_validation",
"path": "astabench/arxivdigestables_validation",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_validation",
"path": "astabench/sqa_dev",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_validation",
"path": "astabench/litqa2_validation",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "PaperFindingBench_validation",
"path": "astabench/paper_finder_validation",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_validation",
"path": "astabench/paper_finder_litqa2_validation",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_validation",
"path": "astabench/discoverybench_validation",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_validation",
"path": "astabench/core_bench_validation",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_validation",
"path": "astabench/ds1000_validation",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_validation",
"path": "astabench/e2e_discovery_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_validation",
"path": "astabench/e2e_discovery_hard_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_validation",
"path": "astabench/super_validation",
"primary_metric": "entrypoint/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_validation",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_validation",
"weight": 0.5
}
]
},
{
"name": "test",
"tasks": [
{
"name": "PaperFindingBench_test",
"path": "astabench/paper_finder_test",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_test",
"path": "astabench/paper_finder_litqa2_test",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_test",
"path": "astabench/sqa_test",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "ArxivDIGESTables_Clean_test",
"path": "astabench/arxivdigestables_test",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_test",
"path": "astabench/litqa2_test",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_test",
"path": "astabench/discoverybench_test",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_test",
"path": "astabench/core_bench_test",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_test",
"path": "astabench/ds1000_test",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_test",
"path": "astabench/e2e_discovery_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_test",
"path": "astabench/e2e_discovery_hard_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_test",
"path": "astabench/super_test",
"primary_metric": "output_match/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_test",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_test",
"weight": 0.5
}
]
}
]
}
|
validation
|
[
{
"task_name": "SUPER_Expert_validation",
"eval_spec": {
"solver": "astabench/solvers/code_agent/agent.py@code_agent",
"solver_args": "{\"json_output\": 1, \"max_context_tokens\": 1000000, \"max_tries\": 200}",
"model": "openai/gpt-5-mini-2025-08-07",
"model_args": "{}",
"task_args": "{}",
"revision": {
"type": "git",
"origin": "https://github.com/allenai/asta-bench.git",
"commit": "be252e2"
},
"packages": "{\"inspect_ai\": \"0.3.114\"}"
},
"metrics": [
{
"name": "submitted/mean",
"value": 0.7
},
{
"name": "submitted/stderr",
"value": 0.06546536707079771
},
{
"name": "entrypoint/mean",
"value": 0.26
},
{
"name": "entrypoint/stderr",
"value": 0.06266203485560375
}
],
"model_usages": [
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 478880,
"output_tokens": 35038,
"total_tokens": 513918,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 253312,
"reasoning_tokens": 24704
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 116247,
"output_tokens": 25429,
"total_tokens": 141676,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 53632,
"reasoning_tokens": 13568
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 146547,
"output_tokens": 18905,
"total_tokens": 165452,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 82816,
"reasoning_tokens": 9984
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 274905,
"output_tokens": 21113,
"total_tokens": 296018,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 166656,
"reasoning_tokens": 5888
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 89930,
"output_tokens": 12890,
"total_tokens": 102820,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 39040,
"reasoning_tokens": 6976
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 346542,
"output_tokens": 35303,
"total_tokens": 381845,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 215680,
"reasoning_tokens": 19136
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 65933,
"output_tokens": 9272,
"total_tokens": 75205,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 35584,
"reasoning_tokens": 4736
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 360892,
"output_tokens": 32589,
"total_tokens": 393481,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 188544,
"reasoning_tokens": 19968
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 157839,
"output_tokens": 18374,
"total_tokens": 176213,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 65408,
"reasoning_tokens": 12032
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 404249,
"output_tokens": 42858,
"total_tokens": 447107,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 183680,
"reasoning_tokens": 17984
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 57806,
"output_tokens": 11878,
"total_tokens": 69684,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 24192,
"reasoning_tokens": 7040
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 319958,
"output_tokens": 46134,
"total_tokens": 366092,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 201856,
"reasoning_tokens": 21760
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 184530,
"output_tokens": 9354,
"total_tokens": 193884,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 110976,
"reasoning_tokens": 5248
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 153897,
"output_tokens": 15312,
"total_tokens": 169209,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 82304,
"reasoning_tokens": 7104
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 442739,
"output_tokens": 40915,
"total_tokens": 483654,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 280704,
"reasoning_tokens": 21568
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 187181,
"output_tokens": 20143,
"total_tokens": 207324,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 88448,
"reasoning_tokens": 6848
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 450944,
"output_tokens": 37475,
"total_tokens": 488419,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 289408,
"reasoning_tokens": 15808
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 497282,
"output_tokens": 31254,
"total_tokens": 528536,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 297856,
"reasoning_tokens": 18368
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 557613,
"output_tokens": 42550,
"total_tokens": 600163,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 366464,
"reasoning_tokens": 24512
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 506368,
"output_tokens": 54428,
"total_tokens": 560796,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 329216,
"reasoning_tokens": 29824
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 1028138,
"output_tokens": 50655,
"total_tokens": 1078793,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 769152,
"reasoning_tokens": 27520
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 204715,
"output_tokens": 20559,
"total_tokens": 225274,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 110976,
"reasoning_tokens": 9792
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 552201,
"output_tokens": 30300,
"total_tokens": 582501,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 355456,
"reasoning_tokens": 13056
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 396381,
"output_tokens": 27921,
"total_tokens": 424302,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 259456,
"reasoning_tokens": 9408
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 68144,
"output_tokens": 9608,
"total_tokens": 77752,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 27904,
"reasoning_tokens": 4928
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 250268,
"output_tokens": 29482,
"total_tokens": 279750,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 139776,
"reasoning_tokens": 15680
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 46727,
"output_tokens": 8142,
"total_tokens": 54869,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 25344,
"reasoning_tokens": 3712
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 233282,
"output_tokens": 27157,
"total_tokens": 260439,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 119424,
"reasoning_tokens": 15872
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 187143,
"output_tokens": 17493,
"total_tokens": 204636,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 115328,
"reasoning_tokens": 10112
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 41857,
"output_tokens": 11162,
"total_tokens": 53019,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 19584,
"reasoning_tokens": 5120
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 320612,
"output_tokens": 27305,
"total_tokens": 347917,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 190336,
"reasoning_tokens": 14464
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 148426,
"output_tokens": 16272,
"total_tokens": 164698,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 82048,
"reasoning_tokens": 6592
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 210069,
"output_tokens": 18914,
"total_tokens": 228983,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 134656,
"reasoning_tokens": 10112
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 98063,
"output_tokens": 19571,
"total_tokens": 117634,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 46208,
"reasoning_tokens": 11904
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 199299,
"output_tokens": 22901,
"total_tokens": 222200,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 122240,
"reasoning_tokens": 10752
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 342733,
"output_tokens": 20596,
"total_tokens": 363329,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 211968,
"reasoning_tokens": 9344
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 99684,
"output_tokens": 17548,
"total_tokens": 117232,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 45184,
"reasoning_tokens": 9344
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 355034,
"output_tokens": 42397,
"total_tokens": 397431,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 194176,
"reasoning_tokens": 17216
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 508419,
"output_tokens": 28371,
"total_tokens": 536790,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 311296,
"reasoning_tokens": 13888
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 643578,
"output_tokens": 41398,
"total_tokens": 684976,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 447744,
"reasoning_tokens": 21184
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 628441,
"output_tokens": 46677,
"total_tokens": 675118,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 422656,
"reasoning_tokens": 25152
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 69632,
"output_tokens": 6638,
"total_tokens": 76270,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 19840,
"reasoning_tokens": 2880
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 733639,
"output_tokens": 62532,
"total_tokens": 796171,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 520832,
"reasoning_tokens": 35200
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 311594,
"output_tokens": 17581,
"total_tokens": 329175,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 182656,
"reasoning_tokens": 8640
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 166737,
"output_tokens": 14968,
"total_tokens": 181705,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 89472,
"reasoning_tokens": 7040
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 811449,
"output_tokens": 56228,
"total_tokens": 867677,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 547584,
"reasoning_tokens": 31744
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 612253,
"output_tokens": 38499,
"total_tokens": 650752,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 366208,
"reasoning_tokens": 18112
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 1019521,
"output_tokens": 54688,
"total_tokens": 1074209,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 821376,
"reasoning_tokens": 34560
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 59900,
"output_tokens": 6988,
"total_tokens": 66888,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 16896,
"reasoning_tokens": 3776
}
}
],
[
{
"model": "gpt-5-mini-2025-08-07",
"usage": {
"input_tokens": 420775,
"output_tokens": 34555,
"total_tokens": 455330,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 277760,
"reasoning_tokens": 16000
}
}
]
],
"model_costs": [
0.1328008,
0.06785255000000001,
0.05581315,
0.07345465,
0.0394785,
0.10871349999999998,
0.02702085,
0.11297859999999996,
0.061490949999999996,
0.14545025,
0.0327643,
0.12683989999999998,
0.0398709,
0.05057985,
0.12935635,
0.06718044999999999,
0.1225692,
0.1198109,
0.14204884999999995,
0.1613744,
0.18528529999999999,
0.06732715,
0.11867265000000002,
0.09655964999999998,
0.029973599999999996,
0.0900814,
0.02226335,
0.0857641,
0.05582295000000001,
0.028381849999999997,
0.09193739999999999,
0.05118969999999999,
0.06004764999999999,
0.05326095,
0.06812275,
0.07918244999999999,
0.0498506,
0.12986289999999998,
0.11380515000000002,
0.1429481,
0.15536665000000008,
0.02622,
0.19128655000000003,
0.07196290000000001,
0.05148905000000001,
0.19211185,
0.14766444999999997,
0.17944665,
0.0251494,
0.11180775
]
}
] |
{
"submit_time": "2025-08-14T19:05:15.098000",
"username": "Ai2",
"agent_name": "Asta Code",
"agent_description": null,
"agent_url": "https://github.com/allenai/asta-bench",
"logs_url": null,
"logs_url_public": "hf://datasets/allenai/asta-bench-submissions/1.0.0-dev1/validation/miked-ai_Asta_Code_GPT-5-mini_2025-08-14T19-05-15",
"summary_url": null,
"openness": "Open source & closed weights",
"tool_usage": "Custom interface"
}
|
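For the SUPER_Expert_validation record above, the reported stderr values are consistent with the usual sample standard error of a 0/1 mean taken over the 50 per-sample usage records. A quick check, assuming n = 50 and the formula sqrt(p * (1 - p) / (n - 1)); the formula is inferred from the numbers, not documented in the data:

```python
import math

def stderr_of_mean(p: float, n: int) -> float:
    """Sample standard error of a binary mean: sqrt(p * (1 - p) / (n - 1))."""
    return math.sqrt(p * (1 - p) / (n - 1))

# Reported means for the gpt-5-mini Asta Code run above (assumed n = 50 samples)
print(stderr_of_mean(0.70, 50))  # ~0.065465, matches submitted/stderr
print(stderr_of_mean(0.26, 50))  # ~0.062662, matches entrypoint/stderr
```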
{
"name": "asta-bench",
"version": "1.0.0",
"splits": [
{
"name": "validation",
"tasks": [
{
"name": "ArxivDIGESTables_Clean_validation",
"path": "astabench/arxivdigestables_validation",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_validation",
"path": "astabench/sqa_dev",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_validation",
"path": "astabench/litqa2_validation",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "PaperFindingBench_validation",
"path": "astabench/paper_finder_validation",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_validation",
"path": "astabench/paper_finder_litqa2_validation",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_validation",
"path": "astabench/discoverybench_validation",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_validation",
"path": "astabench/core_bench_validation",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_validation",
"path": "astabench/ds1000_validation",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_validation",
"path": "astabench/e2e_discovery_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_validation",
"path": "astabench/e2e_discovery_hard_validation",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_validation",
"path": "astabench/super_validation",
"primary_metric": "entrypoint/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_validation",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_validation",
"weight": 0.5
}
]
},
{
"name": "test",
"tasks": [
{
"name": "PaperFindingBench_test",
"path": "astabench/paper_finder_test",
"primary_metric": "score_paper_finder/adjusted_f1_micro_avg",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_Search_test",
"path": "astabench/paper_finder_litqa2_test",
"primary_metric": "score_paper_finder/recall_at_30",
"tags": [
"lit"
]
},
{
"name": "ScholarQA_CS2_test",
"path": "astabench/sqa_test",
"primary_metric": "global_avg/mean",
"tags": [
"lit"
]
},
{
"name": "ArxivDIGESTables_Clean_test",
"path": "astabench/arxivdigestables_test",
"primary_metric": "score_tables/mean",
"tags": [
"lit"
]
},
{
"name": "LitQA2_FullText_test",
"path": "astabench/litqa2_test",
"primary_metric": "is_correct/accuracy",
"tags": [
"lit"
]
},
{
"name": "DiscoveryBench_test",
"path": "astabench/discoverybench_test",
"primary_metric": "score_discoverybench/mean",
"tags": [
"data"
]
},
{
"name": "CORE_Bench_Hard_test",
"path": "astabench/core_bench_test",
"primary_metric": "score_with_stderr/accuracy",
"tags": [
"code"
]
},
{
"name": "DS_1000_test",
"path": "astabench/ds1000_test",
"primary_metric": "ds1000_scorer/accuracy",
"tags": [
"code"
]
},
{
"name": "E2E_Bench_test",
"path": "astabench/e2e_discovery_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "E2E_Bench_Hard_test",
"path": "astabench/e2e_discovery_hard_test",
"primary_metric": "score_rubric/accuracy",
"tags": [
"discovery"
]
},
{
"name": "SUPER_Expert_test",
"path": "astabench/super_test",
"primary_metric": "output_match/mean",
"tags": [
"code"
]
}
],
"macro_average_weight_adjustments": [
{
"tag": "lit",
"task": "LitQA2_FullText_test",
"weight": 0.5
},
{
"tag": "lit",
"task": "LitQA2_FullText_Search_test",
"weight": 0.5
}
]
}
]
}
|
validation
|
[
{
"task_name": "SUPER_Expert_validation",
"eval_spec": {
"solver": "astabench/solvers/code_agent/agent.py@code_agent",
"solver_args": "{\"json_output\": 1, \"max_context_tokens\": 1000000, \"max_tries\": 200}",
"model": "openai/gpt-5-2025-08-07",
"model_args": "{}",
"task_args": "{}",
"revision": {
"type": "git",
"origin": "https://github.com/allenai/asta-bench.git",
"commit": "be252e2"
},
"packages": "{\"inspect_ai\": \"0.3.114\"}"
},
"metrics": [
{
"name": "submitted/mean",
"value": 0.28
},
{
"name": "submitted/stderr",
"value": 0.06414269805898185
},
{
"name": "entrypoint/mean",
"value": 0.44
},
{
"name": "entrypoint/stderr",
"value": 0.07091242083423346
}
],
"model_usages": [
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 136744,
"output_tokens": 15302,
"total_tokens": 152046,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 31488,
"reasoning_tokens": 11456
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 28421,
"output_tokens": 4989,
"total_tokens": 33410,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 5888,
"reasoning_tokens": 2432
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 199653,
"output_tokens": 24212,
"total_tokens": 223865,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 69632,
"reasoning_tokens": 15168
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 53152,
"output_tokens": 5740,
"total_tokens": 58892,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 5120,
"reasoning_tokens": 1728
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 363554,
"output_tokens": 46459,
"total_tokens": 410013,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 130944,
"reasoning_tokens": 34944
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 167188,
"output_tokens": 15063,
"total_tokens": 182251,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 41600,
"reasoning_tokens": 9216
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 64583,
"output_tokens": 16745,
"total_tokens": 81328,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 19840,
"reasoning_tokens": 9088
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 1037980,
"output_tokens": 88855,
"total_tokens": 1126835,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 503936,
"reasoning_tokens": 60864
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 197761,
"output_tokens": 19316,
"total_tokens": 217077,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 64640,
"reasoning_tokens": 12992
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 356870,
"output_tokens": 45234,
"total_tokens": 402104,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 199168,
"reasoning_tokens": 28416
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 170834,
"output_tokens": 30811,
"total_tokens": 201645,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 81920,
"reasoning_tokens": 19712
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 869050,
"output_tokens": 83980,
"total_tokens": 953030,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 501504,
"reasoning_tokens": 50432
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 162257,
"output_tokens": 22929,
"total_tokens": 185186,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 58752,
"reasoning_tokens": 15296
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 752015,
"output_tokens": 66987,
"total_tokens": 819002,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 325760,
"reasoning_tokens": 32960
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 149359,
"output_tokens": 18055,
"total_tokens": 167414,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 59264,
"reasoning_tokens": 10368
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 857791,
"output_tokens": 72454,
"total_tokens": 930245,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 440192,
"reasoning_tokens": 41088
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 352618,
"output_tokens": 39802,
"total_tokens": 392420,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 198272,
"reasoning_tokens": 27840
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 503670,
"output_tokens": 60344,
"total_tokens": 564014,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 276352,
"reasoning_tokens": 41920
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 128348,
"output_tokens": 18194,
"total_tokens": 146542,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 36992,
"reasoning_tokens": 11264
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 229914,
"output_tokens": 42563,
"total_tokens": 272477,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 99072,
"reasoning_tokens": 24000
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 198515,
"output_tokens": 35487,
"total_tokens": 234002,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 94080,
"reasoning_tokens": 17920
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 107084,
"output_tokens": 20327,
"total_tokens": 127411,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 19328,
"reasoning_tokens": 15360
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 187068,
"output_tokens": 18419,
"total_tokens": 205487,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 43648,
"reasoning_tokens": 8128
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 97533,
"output_tokens": 19805,
"total_tokens": 117338,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 30208,
"reasoning_tokens": 15808
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 123425,
"output_tokens": 28354,
"total_tokens": 151779,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 47360,
"reasoning_tokens": 17280
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 1009044,
"output_tokens": 84204,
"total_tokens": 1093248,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 589696,
"reasoning_tokens": 51200
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 57288,
"output_tokens": 12961,
"total_tokens": 70249,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 30464,
"reasoning_tokens": 8576
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 183468,
"output_tokens": 24496,
"total_tokens": 207964,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 59904,
"reasoning_tokens": 18560
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 134643,
"output_tokens": 18605,
"total_tokens": 153248,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 70144,
"reasoning_tokens": 11392
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 570590,
"output_tokens": 62953,
"total_tokens": 633543,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 329216,
"reasoning_tokens": 45248
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 462648,
"output_tokens": 54029,
"total_tokens": 516677,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 154880,
"reasoning_tokens": 27264
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 308014,
"output_tokens": 34842,
"total_tokens": 342856,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 140416,
"reasoning_tokens": 22080
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 454892,
"output_tokens": 40897,
"total_tokens": 495789,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 156416,
"reasoning_tokens": 27648
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 373897,
"output_tokens": 52511,
"total_tokens": 426408,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 171008,
"reasoning_tokens": 34880
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 19526,
"output_tokens": 11915,
"total_tokens": 31441,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 8576,
"reasoning_tokens": 6400
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 99013,
"output_tokens": 12502,
"total_tokens": 111515,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 23808,
"reasoning_tokens": 7808
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 136398,
"output_tokens": 19103,
"total_tokens": 155501,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 54656,
"reasoning_tokens": 12864
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 468171,
"output_tokens": 45514,
"total_tokens": 513685,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 191360,
"reasoning_tokens": 26112
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 467080,
"output_tokens": 47081,
"total_tokens": 514161,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 210048,
"reasoning_tokens": 28096
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 114990,
"output_tokens": 12189,
"total_tokens": 127179,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 27648,
"reasoning_tokens": 6080
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 156958,
"output_tokens": 21860,
"total_tokens": 178818,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 40704,
"reasoning_tokens": 11712
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 77007,
"output_tokens": 12943,
"total_tokens": 89950,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 21120,
"reasoning_tokens": 6464
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 114500,
"output_tokens": 16263,
"total_tokens": 130763,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 41856,
"reasoning_tokens": 9536
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 174796,
"output_tokens": 23511,
"total_tokens": 198307,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 41600,
"reasoning_tokens": 11840
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 87910,
"output_tokens": 14166,
"total_tokens": 102076,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 24064,
"reasoning_tokens": 11008
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 432589,
"output_tokens": 44533,
"total_tokens": 477122,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 225408,
"reasoning_tokens": 27264
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 301286,
"output_tokens": 42348,
"total_tokens": 343634,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 118144,
"reasoning_tokens": 26048
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 393636,
"output_tokens": 45497,
"total_tokens": 439133,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 195840,
"reasoning_tokens": 22400
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 430452,
"output_tokens": 44435,
"total_tokens": 474887,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 223104,
"reasoning_tokens": 28480
}
}
],
[
{
"model": "gpt-5-2025-08-07",
"usage": {
"input_tokens": 274698,
"output_tokens": 46035,
"total_tokens": 320733,
"input_tokens_cache_write": null,
"input_tokens_cache_read": 96896,
"reasoning_tokens": 32256
}
}
]
],
"model_costs": [
0.288526,
0.07879225000000001,
0.41335025000000003,
0.11808,
0.7717205,
0.31281500000000007,
0.22585875000000002,
1.619097,
0.3676412499999999,
0.6743635000000001,
0.4294925000000001,
1.3619205000000003,
0.36601525,
1.2434087500000002,
0.30057675,
1.3015627499999998,
0.6157364999999998,
0.9221315000000002,
0.30075900000000005,
0.6015665,
0.49717375,
0.315381,
0.36892100000000005,
0.28598225000000005,
0.38454125,
1.4399369999999998,
0.16694799999999999,
0.40690300000000007,
0.27544175,
0.9723995,
0.9443600000000001,
0.5754695000000001,
0.801617,
0.8000972500000001,
0.13390950000000001,
0.22200225000000004,
0.30003950000000007,
0.82507375,
0.8183560000000001,
0.23452350000000002,
0.3690055,
0.20192875,
0.25866700000000004,
0.40680499999999997,
0.22447550000000002,
0.7324822500000001,
0.6671754999999999,
0.7266950000000001,
0.7314230000000002,
0.6947145
]
}
] |
{
"submit_time": "2025-08-14T19:06:15.866000",
"username": "Ai2",
"agent_name": "Asta Code",
"agent_description": null,
"agent_url": "https://github.com/allenai/asta-bench",
"logs_url": null,
"logs_url_public": "hf://datasets/allenai/asta-bench-submissions/1.0.0-dev1/validation/miked-ai_Asta_Code_GPT-5_2025-08-14T19-06-15",
"summary_url": null,
"openness": "Open source & closed weights",
"tool_usage": "Custom interface"
}
|
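Each row's suite_config also carries macro_average_weight_adjustments, which down-weight the two LitQA2 tasks inside the "lit" tag. One plausible reading is a weighted macro average per tag; the exact aggregation used by the leaderboard is not spelled out in this data, so the sketch below is an assumption, and the task scores in it are hypothetical.

```python
# Sketch: weighted macro average over one tag's tasks, applying the per-task
# weight adjustments from suite_config (default weight = 1.0).
# The aggregation rule itself is assumed, not taken from the dataset.
def tag_macro_average(primary_scores: dict, adjustments: list, tag: str = "lit") -> float:
    weights = {a["task"]: a["weight"] for a in adjustments if a["tag"] == tag}
    total = sum(weights.get(task, 1.0) * score for task, score in primary_scores.items())
    norm = sum(weights.get(task, 1.0) for task in primary_scores)
    return total / norm

adjustments = [
    {"tag": "lit", "task": "LitQA2_FullText_validation", "weight": 0.5},
    {"tag": "lit", "task": "LitQA2_FullText_Search_validation", "weight": 0.5},
]
# Hypothetical primary-metric scores for the five "lit" validation tasks
scores = {
    "ArxivDIGESTables_Clean_validation": 0.40,
    "ScholarQA_CS2_validation": 0.55,
    "LitQA2_FullText_validation": 0.60,
    "PaperFindingBench_validation": 0.30,
    "LitQA2_FullText_Search_validation": 0.50,
}
print(tag_macro_average(scores, adjustments))  # weighted "lit" macro average -> 0.45
```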
{"name":"asta-bench","version":"1.0.0","splits":[{"name":"validation","tasks":[{"name":"ArxivDIGESTa(...TRUNCATED) |
validation
| [{"task_name":"SUPER_Expert_validation","eval_spec":{"solver":"agent_baselines/solvers/react/basic_a(...TRUNCATED) | {"submit_time":"2025-08-26T06:45:15.968802Z","username":"Ai2","agent_name":"ReAct","agent_descriptio(...TRUNCATED) |
{"name":"asta-bench","version":"1.0.0","splits":[{"name":"validation","tasks":[{"name":"ArxivDIGESTa(...TRUNCATED) |
validation
| [{"task_name":"E2E_Bench_Hard_validation","eval_spec":{"solver":"astabench/evals/e2e_discovery/solve(...TRUNCATED) | {"submit_time":"2025-07-10T18:12:16.960575Z","username":"Ai2","agent_name":"Asta CodeScientist","age(...TRUNCATED) |
{"name":"asta-bench","version":"1.0.0","splits":[{"name":"validation","tasks":[{"name":"ArxivDIGESTa(...TRUNCATED) |
validation
| [{"task_name":"DiscoveryBench_validation","eval_spec":{"solver":"astabench/solvers/datavoyager/agent(...TRUNCATED) | {"submit_time":"2025-08-14T04:16:47.886330Z","username":"Ai2","agent_name":"Asta DataVoyager","agent(...TRUNCATED) |
{"name":"asta-bench","version":"1.0.0","splits":[{"name":"validation","tasks":[{"name":"ArxivDIGESTa(...TRUNCATED) |
validation
| [{"task_name":"DiscoveryBench_validation","eval_spec":{"solver":"astabench/solvers/datavoyager/agent(...TRUNCATED) | {"submit_time":"2025-08-14T19:32:30.253858Z","username":"Ai2","agent_name":"Asta DataVoyager","agent(...TRUNCATED) |
{"name":"asta-bench","version":"1.0.0","splits":[{"name":"validation","tasks":[{"name":"ArxivDIGESTa(...TRUNCATED) |
validation
| [{"task_name":"DiscoveryBench_validation","eval_spec":{"solver":"astabench/solvers/datavoyager/agent(...TRUNCATED) | {"submit_time":"2025-08-14T19:42:20.164747Z","username":"Ai2","agent_name":"Asta DataVoyager","agent(...TRUNCATED) |
{"name":"asta-bench","version":"1.0.0","splits":[{"name":"validation","tasks":[{"name":"ArxivDIGESTa(...TRUNCATED) |
validation
| [{"task_name":"DiscoveryBench_validation","eval_spec":{"solver":"astabench/solvers/datavoyager/agent(...TRUNCATED) | {"submit_time":"2025-08-14T21:44:09.404096Z","username":"Ai2","agent_name":"Asta DataVoyager","agent(...TRUNCATED) |
End of preview.
Downloads last month: 389
Size of downloaded dataset files: 9.46 MB
Size of the auto-converted Parquet files: 2.47 MB
Number of rows: 70