Listing schema (per-column type and value range):

| Column | Type | Min | Max |
|---|---|---|---|
| datasetId | string (length) | 5 | 121 |
| author | string (length) | 2 | 42 |
| last_modified | unknown | n/a | n/a |
| downloads | int64 | 0 | 2.93M |
| likes | int64 | 0 | 7.2k |
| tags | sequence (length) | 1 | 7.92k |
| task_categories | sequence (length) | 0 | 47 |
| createdAt | unknown | n/a | n/a |
| card | string (length) | 15 | 1.01M |
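Records with this schema can be filtered directly with the `datasets` library. A minimal sketch, assuming the listing is published as a Hugging Face dataset under the hypothetical repo id `example-org/dataset-cards-listing` (substitute the real one):

```python
from datasets import load_dataset

# Hypothetical repo id for this listing; replace with the actual repository.
listing = load_dataset("example-org/dataset-cards-listing", split="train")

# Keep only parquet-format datasets with at least 20 downloads.
popular_parquet = listing.filter(
    lambda row: row["downloads"] >= 20 and "format:parquet" in row["tags"]
)
print(popular_parquet["datasetId"][:5])
```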
**datasetId:** LangAGI-Lab/Mind2Web-cleaned-lite-reward-model-v2
**author:** LangAGI-Lab · **last_modified:** 2024-09-19T08:43:33Z · **createdAt:** 2024-09-19T08:42:54Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: action_uid dtype: string - name: operation dtype: string - name: pos_candidates sequence: string - name: neg_candidates sequence: string - name: website dtype: string - name: domain dtype: string - name: subdomain dtype: string - name: annotation_id dtype: string - name: confirmed_task dtype: string - name: action_reprs sequence: string - name: target_action_index dtype: string - name: target_action_reprs dtype: string - name: action dtype: string - name: original_action_repr dtype: string - name: original_pos_candidate struct: - name: attributes struct: - name: alt dtype: string - name: aria_description dtype: string - name: aria_label dtype: string - name: backend_node_id dtype: string - name: bounding_box_rect dtype: string - name: class dtype: string - name: data_pw_testid_buckeye_candidate dtype: string - name: id dtype: string - name: input_checked dtype: string - name: input_value dtype: string - name: is_clickable dtype: string - name: label dtype: string - name: name dtype: string - name: placeholder dtype: string - name: role dtype: string - name: text_value dtype: string - name: title dtype: string - name: type dtype: string - name: value dtype: string - name: backend_node_id dtype: string - name: is_original_target dtype: bool - name: is_top_level_target dtype: bool - name: tag dtype: string - name: match_type dtype: string - name: cleaned_accessibility_tree dtype: string - name: previous_actions sequence: string - name: cleaned_next_accessibility_tree dtype: string - name: next_state_tao dtype: string - name: new_items dtype: string - name: updated_items dtype: string - name: deleted_items dtype: string - name: refined_tao dtype: string - name: raw_prediction dtype: string - name: rationale dtype: string - name: next_state_description_with_tao dtype: string - name: reward_score dtype: string splits: - name: train num_bytes: 1367938090 num_examples: 6125 download_size: 274000502 dataset_size: 1367938090 configs: - config_name: default data_files: - split: train path: data/train-* ---
**datasetId:** LangAGI-Lab/step-wise-eval-description-with-refined-tao
**author:** LangAGI-Lab · **last_modified:** 2024-09-19T18:28:02Z · **createdAt:** 2024-09-19T18:27:59Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:n<1K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: task_id dtype: string - name: step_idx dtype: int64 - name: cur_observation dtype: string - name: prev_observation dtype: string - name: next_observation dtype: string - name: prev_actions sequence: string - name: gold_action dtype: string - name: objective dtype: string - name: url dtype: string - name: next_state_tao dtype: string - name: new_items dtype: string - name: updated_items dtype: string - name: deleted_items dtype: string - name: value_score dtype: float64 - name: refined_tao dtype: string - name: raw_prediction dtype: string - name: rationale dtype: string - name: next_state_description_with_tao dtype: string splits: - name: train num_bytes: 8342300 num_examples: 102 download_size: 2190847 dataset_size: 8342300 configs: - config_name: default data_files: - split: train path: data/train-* ---
**datasetId:** Goodnight7/full_MedC-K
**author:** Goodnight7 · **last_modified:** 2024-09-23T14:36:39Z · **createdAt:** 2024-09-23T14:36:34Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:100K<n<1M", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: question dtype: string - name: exp dtype: string - name: subject_name dtype: string - name: topic_name dtype: string - name: answer dtype: string splits: - name: train num_bytes: 109432229 num_examples: 182822 download_size: 68954063 dataset_size: 109432229 configs: - config_name: default data_files: - split: train path: data/train-* ---
**datasetId:** LangAGI-Lab/step-wise-eval-description-with-refined-tao-9neg-hj-hard-simple-gold-observation
**author:** LangAGI-Lab · **last_modified:** 2024-09-26T12:28:09Z · **createdAt:** 2024-09-26T12:28:05Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:n<1K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: task_id dtype: string - name: step_idx dtype: int64 - name: cur_observation dtype: string - name: prev_observation dtype: string - name: next_observation dtype: string - name: prev_actions sequence: string - name: gold_action dtype: string - name: objective dtype: string - name: url dtype: string - name: next_state_tao dtype: string - name: new_items dtype: string - name: updated_items dtype: string - name: deleted_items dtype: string - name: value_score dtype: float64 - name: refined_tao dtype: string - name: raw_prediction dtype: string - name: rationale dtype: string - name: next_state_description_with_tao dtype: string - name: raw_neg_actions dtype: string - name: neg_actions sequence: string - name: neg_actions_w_rationale sequence: string - name: neg_next_state_simple sequence: string - name: gold_observation_simple dtype: string splits: - name: train num_bytes: 8876595 num_examples: 102 download_size: 2672219 dataset_size: 8876595 configs: - config_name: default data_files: - split: train path: data/train-* ---
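The gold action plus the `neg_actions` list in this card naturally yield (positive, negative) pairs for contrastive reward-model training. A minimal sketch under that assumption (the dataset id is taken from this record; the pairing logic is illustrative, not the authors' pipeline):

```python
from datasets import load_dataset

ds = load_dataset(
    "LangAGI-Lab/step-wise-eval-description-with-refined-tao-9neg-hj-hard-simple-gold-observation",
    split="train",
)

pairs = []
for row in ds:
    # One positive (gold) action versus each sampled negative action.
    for neg in row["neg_actions"]:
        pairs.append(
            {"objective": row["objective"], "pos": row["gold_action"], "neg": neg}
        )
print(len(pairs), "contrastive pairs")
```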
**datasetId:** LangAGI-Lab/Mind2Web-HTML-cleaned-lite-with-refined-tao-former
**author:** LangAGI-Lab · **last_modified:** 2024-09-27T16:26:29Z · **createdAt:** 2024-09-27T15:55:28Z · **downloads:** 20 · **likes:** 1
**tags:** [ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: task_id dtype: string - name: step_idx dtype: int64 - name: prev_actions sequence: string - name: gold_action dtype: string - name: objective dtype: string - name: url dtype: string - name: action_uid dtype: string - name: operation dtype: string - name: pos_candidates sequence: string - name: neg_candidates sequence: string - name: website dtype: string - name: domain dtype: string - name: subdomain dtype: string - name: trajectory_action_seq sequence: string - name: match_type dtype: string - name: next_state_tao dtype: string - name: refined_tao dtype: string - name: next_observation dtype: string - name: cur_observation dtype: string splits: - name: train num_bytes: 11295309447 num_examples: 3475 download_size: 1894698151 dataset_size: 11295309447 configs: - config_name: default data_files: - split: train path: data/train-* ---
**datasetId:** ahmed275/SCOD
**author:** ahmed275 · **last_modified:** 2024-10-19T23:47:52Z · **createdAt:** 2024-10-17T12:22:56Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: id dtype: string - name: year dtype: int64 - name: url dtype: string - name: opinionOfTheCourt dtype: string - name: syllabus dtype: string - name: issueArea dtype: float64 - name: decisionDirection dtype: float64 - name: partyWinning dtype: float64 - name: voteDistribution dtype: float64 - name: respondentType dtype: int64 - name: respondent dtype: float64 - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 91860392 num_examples: 4270 - name: validation num_bytes: 22249439 num_examples: 541 - name: test num_bytes: 21353250 num_examples: 547 download_size: 70262465 dataset_size: 135463081 configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* ---
**datasetId:** ai2-adapt-dev/sft_v3.9_used_on_policy_p1_olmo2_7b
**author:** ai2-adapt-dev · **last_modified:** 2024-11-23T19:56:47Z · **createdAt:** 2024-11-23T19:56:32Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: prompt dtype: string - name: chosen list: - name: content dtype: string - name: role dtype: string - name: rejected list: - name: content dtype: string - name: role dtype: string - name: chosen_rating dtype: float64 - name: rejected_rating dtype: float64 - name: chosen_model dtype: string - name: rejected_model dtype: string splits: - name: train num_bytes: 602221562 num_examples: 97275 download_size: 315936846 dataset_size: 602221562 configs: - config_name: default data_files: - split: train path: data/train-* ---
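The `chosen`/`rejected` columns in this card are role-tagged message lists, the usual shape for preference tuning. A minimal sketch that flattens them into plain-text pairs (the dataset id comes from this record; the flattening is illustrative):

```python
from datasets import load_dataset

ds = load_dataset("ai2-adapt-dev/sft_v3.9_used_on_policy_p1_olmo2_7b", split="train")

def to_text(messages):
    # Each message is a dict with "role" and "content" keys, per the schema above.
    return "\n".join(f'{m["role"]}: {m["content"]}' for m in messages)

example = ds[0]
chosen_text = to_text(example["chosen"])
rejected_text = to_text(example["rejected"])
print(example["prompt"][:80], chosen_text[:80], rejected_text[:80], sep="\n---\n")
```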
**datasetId:** ai2-adapt-dev/sft_v3.9_if_taxonomy_olmo2_7b
**author:** ai2-adapt-dev · **last_modified:** 2024-11-23T20:00:00Z · **createdAt:** 2024-11-23T19:59:51Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: prompt dtype: string - name: chosen list: - name: content dtype: string - name: role dtype: string - name: rejected list: - name: content dtype: string - name: role dtype: string - name: chosen_rating dtype: float64 - name: rejected_rating dtype: float64 - name: chosen_model dtype: string - name: rejected_model dtype: string splits: - name: train num_bytes: 457266471 num_examples: 65796 download_size: 237088920 dataset_size: 457266471 configs: - config_name: default data_files: - split: train path: data/train-* ---
**datasetId:** AidanFerrara/attack_ICS
**author:** AidanFerrara · **last_modified:** 2024-11-25T14:03:26Z · **createdAt:** 2024-11-25T14:00:02Z · **downloads:** 20 · **likes:** 0
**tags:** [ "task_categories:text-classification", "language:en", "size_categories:n<1K", "format:csv", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** [ "text-classification" ]
**card:**
--- task_categories: - text-classification language: - en ---
**datasetId:** rui-qian/FP-refcoco
**author:** rui-qian · **last_modified:** 2025-01-21T10:01:51Z · **createdAt:** 2024-12-12T07:06:07Z · **downloads:** 20 · **likes:** 0
**tags:** [ "license:mit", "size_categories:n<1K", "format:imagefolder", "modality:image", "library:datasets", "library:mlcroissant", "region:us" ]
**task_categories:** null
**card:**
--- license: mit ---

READ results on the FP-RefCOCO(+/g) val split.
**datasetId:** FrancophonIA/Glossaire_genetique_moleculaire_genie_genetique
**author:** FrancophonIA · **last_modified:** 2025-01-11T18:50:29Z · **createdAt:** 2025-01-11T18:49:11Z · **downloads:** 20 · **likes:** 0
**tags:** [ "task_categories:translation", "multilinguality:multilingual", "language:en", "language:fr", "license:cc-by-4.0", "region:us" ]
**task_categories:** [ "translation" ]
**card:**
--- license: cc-by-4.0 language: - en - fr multilinguality: - multilingual viewer: false task_categories: - translation ---

> [!NOTE]
> Dataset origin: https://loterre-skosmos.loterre.fr/GGMGG/fr/

## Description

This glossary contains 391 terminological entries covering terms from molecular genetics and, more specifically, genetic engineering. It corresponds largely to the content of the book "Glossaire de génétique moléculaire et génie génétique", published by INRA Editions in 1991, and for the remainder to the entries published in this field in the Journal Officiel in 1990. Part of the entries was updated in 2005 following the work of the terminology and neology commission of the Ministry of Agriculture, validated by the Commission Générale de terminologie.
**datasetId:** FrancophonIA/KRoQ
**author:** FrancophonIA · **last_modified:** 2025-01-11T20:45:24Z · **createdAt:** 2025-01-11T20:42:08Z · **downloads:** 20 · **likes:** 0
**tags:** [ "multilinguality:multilingual", "language:fr", "language:en", "language:de", "language:el", "region:us" ]
**task_categories:** null
**card:**
--- language: - fr - en - de - el multilinguality: - multilingual viewer: false ---

> [!NOTE]
> Dataset origin: http://lrec2018.lrec-conf.org/en/shared-lrs/

## Description

In this paper we present the Konstanz Resource of Questions (KRoQ), the first dependency-parsed, parallel multilingual corpus of information-seeking and non-information-seeking questions. In creating the corpus, we employ a linguistically motivated rule-based system that uses linguistic cues from one language to help classify and annotate questions across other languages. Our current corpus includes German, French, Spanish and Koine Greek. Based on the linguistically motivated heuristics we identify, a two-step scoring mechanism assigns intra- and inter-language scores to each question. Based on these scores, each question is classified as being either information seeking or non-information seeking. An evaluation shows that this mechanism correctly classifies questions in 79% of the cases. We release our corpus as a basis for further work in the area of question classification. It can be utilized as training and testing data for machine-learning algorithms, as corpus-data for theoretical linguistic questions or as a resource for further rule-based approaches to question identification.

## Citation

```
@InProceedings{KALOULI18.13,
  author    = {Aikaterini-Lida Kalouli and Katharina Kaiser and Annette Hautli-Janisz and Georg A. Kaiser and Miriam Butt},
  title     = "{A Multilingual Approach to Question Classification}",
  booktitle = {Proceedings of the Eleventh International Conference on Language Resources and Evaluation (LREC 2018)},
  year      = {2018},
  month     = {May 7-12, 2018},
  address   = {Miyazaki, Japan},
  editor    = {Nicoletta Calzolari (Conference chair) and Khalid Choukri and Christopher Cieri and Thierry Declerck and Sara Goggi and Koiti Hasida and Hitoshi Isahara and Bente Maegaard and Joseph Mariani and Hélène Mazo and Asuncion Moreno and Jan Odijk and Stelios Piperidis and Takenobu Tokunaga},
  publisher = {European Language Resources Association (ELRA)},
  isbn      = {979-10-95546-00-9},
  language  = {english}
}
```
**datasetId:** FrancophonIA/SexIt
**author:** FrancophonIA · **last_modified:** 2025-01-11T20:55:08Z · **createdAt:** 2025-01-11T20:52:31Z · **downloads:** 20 · **likes:** 0
**tags:** [ "language:fr", "region:us" ]
**task_categories:** null
**card:**
--- language: - fr viewer: false ---

> [!NOTE]
> Dataset origin: https://www.jeuxdemots.org/sexit.php?action=list
**datasetId:** FrancophonIA/WMT12
**author:** FrancophonIA · **last_modified:** 2025-01-11T21:05:36Z · **createdAt:** 2025-01-11T21:01:36Z · **downloads:** 20 · **likes:** 0
**tags:** [ "multilinguality:multilingual", "language:fr", "language:en", "language:cs", "language:de", "language:es", "region:us" ]
**task_categories:** null
**card:**
--- language: - fr - en - cs - de - es multilinguality: - multilingual viewer: false ---

> [!NOTE]
> Dataset origin: http://lrec2014.lrec-conf.org/en/shared-lrs/current-list-shared-lrs/
**datasetId:** FrancophonIA/WMT13
**author:** FrancophonIA · **last_modified:** 2025-01-11T21:06:04Z · **createdAt:** 2025-01-11T21:01:41Z · **downloads:** 20 · **likes:** 0
**tags:** [ "multilinguality:multilingual", "language:fr", "language:en", "language:cs", "language:de", "language:es", "language:ru", "region:us" ]
**task_categories:** null
**card:**
--- language: - fr - en - cs - de - es - ru multilinguality: - multilingual viewer: false ---

> [!NOTE]
> Dataset origin: http://lrec2014.lrec-conf.org/en/shared-lrs/current-list-shared-lrs/
**datasetId:** LVSTCK/ultrachat-sft-mk
**author:** LVSTCK · **last_modified:** 2025-01-15T20:22:44Z · **createdAt:** 2025-01-12T00:53:38Z · **downloads:** 20 · **likes:** 0
**tags:** [ "task_categories:question-answering", "task_categories:text-generation", "language:mk", "license:mit", "size_categories:10K<n<100K", "format:json", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "region:us" ]
**task_categories:** [ "question-answering", "text-generation" ]
**card:**
--- license: mit task_categories: - question-answering - text-generation language: - mk ---

## Dataset Summary

This dataset consists of multi-turn chat conversations translated from the original Ultra-Chat dataset using the Google Translate API.

*Note:* This is a sample dataset (roughly 16k samples), as translating the entire dataset would be costly.
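A minimal sketch of the kind of translation call described above, using the Google Cloud Translation v2 client (the card does not say which client library was used, so the client choice, credential setup, and target language are assumptions):

```python
from google.cloud import translate_v2 as translate

# Assumes GOOGLE_APPLICATION_CREDENTIALS is configured in the environment.
client = translate.Client()

def translate_turn(text: str, target: str = "mk") -> str:
    # Translate one chat turn into Macedonian.
    result = client.translate(text, target_language=target)
    return result["translatedText"]

print(translate_turn("How do solar panels work?"))
```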
**datasetId:** IAlsace/affiche_charcuterie
**author:** IAlsace · **last_modified:** 2025-01-12T14:19:47Z · **createdAt:** 2025-01-12T14:10:33Z · **downloads:** 20 · **likes:** 0
**tags:** [ "task_categories:translation", "multilinguality:multilingual", "language:gsw", "language:fr", "region:us" ]
**task_categories:** [ "translation" ]
**card:**
--- language: - gsw - fr multilinguality: - multilingual viewer: false task_categories: - translation ---

> [!NOTE]
> Dataset origin: https://www.olcalsace.org/fr/affiches-cartes-et-signaletique

## Description

Bilingual charcuterie posters, produced in partnership with the Fédération des bouchers-charcutiers-traiteurs d'Alsace.
**datasetId:** guozhen0/eval_act_so100_rs
**author:** guozhen0 · **last_modified:** 2025-01-18T01:20:17Z · **createdAt:** 2025-01-15T01:59:50Z · **downloads:** 20 · **likes:** 0
**tags:** [ "task_categories:robotics", "license:apache-2.0", "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:timeseries", "modality:video", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us", "LeRobot", "so100", "tutorial", "eval" ]
**task_categories:** [ "robotics" ]
**card:**
--- license: apache-2.0 task_categories: - robotics tags: - LeRobot - so100 - tutorial - eval configs: - config_name: default data_files: data/*/*.parquet ---

This dataset was created using [LeRobot](https://github.com/huggingface/lerobot).

## Dataset Description

- **Homepage:** [More Information Needed]
- **Paper:** [More Information Needed]
- **License:** apache-2.0

## Dataset Structure

[meta/info.json](meta/info.json):

```json
{
  "codebase_version": "v2.0",
  "robot_type": "so100",
  "total_episodes": 5,
  "total_frames": 2980,
  "total_tasks": 1,
  "total_videos": 5,
  "total_chunks": 1,
  "chunks_size": 1000,
  "fps": 30,
  "splits": { "train": "0:5" },
  "data_path": "data/chunk-{episode_chunk:03d}/episode_{episode_index:06d}.parquet",
  "video_path": "videos/chunk-{episode_chunk:03d}/{video_key}/episode_{episode_index:06d}.mp4",
  "features": {
    "action": {
      "dtype": "float32",
      "shape": [6],
      "names": ["main_shoulder_pan", "main_shoulder_lift", "main_elbow_flex", "main_wrist_flex", "main_wrist_roll", "main_gripper"]
    },
    "observation.state": {
      "dtype": "float32",
      "shape": [6],
      "names": ["main_shoulder_pan", "main_shoulder_lift", "main_elbow_flex", "main_wrist_flex", "main_wrist_roll", "main_gripper"]
    },
    "observation.images.phone": {
      "dtype": "video",
      "shape": [480, 640, 3],
      "names": ["height", "width", "channels"],
      "info": {
        "video.fps": 30.0,
        "video.height": 480,
        "video.width": 640,
        "video.channels": 3,
        "video.codec": "av1",
        "video.pix_fmt": "yuv420p",
        "video.is_depth_map": false,
        "has_audio": false
      }
    },
    "timestamp": { "dtype": "float32", "shape": [1], "names": null },
    "frame_index": { "dtype": "int64", "shape": [1], "names": null },
    "episode_index": { "dtype": "int64", "shape": [1], "names": null },
    "index": { "dtype": "int64", "shape": [1], "names": null },
    "task_index": { "dtype": "int64", "shape": [1], "names": null }
  }
}
```

## Citation

**BibTeX:**

```bibtex
[More Information Needed]
```
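Given the `data_path` template in `info.json` above, a single episode can be inspected directly with pandas. A minimal sketch (the local path is an assumption derived from the template; column names come from the features listed above):

```python
import pandas as pd

# First chunk, first episode, per the data_path template in meta/info.json.
df = pd.read_parquet("data/chunk-000/episode_000000.parquet")

# Each row holds one 30 fps frame; "action" is a 6-dim vector
# (shoulder pan/lift, elbow flex, wrist flex/roll, gripper).
print(df[["timestamp", "frame_index"]].head())
print(df["action"].iloc[0])
```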
**datasetId:** Zeed11/temp_pic
**author:** Zeed11 · **last_modified:** 2025-01-15T11:47:20Z · **createdAt:** 2025-01-15T11:47:20Z · **downloads:** 20 · **likes:** 0
**tags:** [ "license:mit", "region:us" ]
**task_categories:** null
**card:**
--- license: mit ---
**datasetId:** LLMsForHepth/infer_hep-th
**author:** LLMsForHepth · **last_modified:** 2025-01-16T11:53:59Z · **createdAt:** 2025-01-16T11:53:54Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: index dtype: int64 - name: id dtype: string - name: submitter dtype: string - name: authors dtype: string - name: title dtype: string - name: comments dtype: string - name: journal-ref dtype: string - name: doi dtype: string - name: report-no dtype: string - name: categories dtype: string - name: license dtype: string - name: orig_abstract dtype: string - name: versions list: - name: created dtype: string - name: version dtype: string - name: update_date dtype: string - name: authors_parsed sequence: sequence: string - name: abstract dtype: string - name: prompt dtype: string - name: y_true dtype: string - name: comp_Llama-3.1-8B dtype: string - name: preds_Llama-3.1-8B dtype: string - name: comp_s1-L-3.1-8B-base dtype: string - name: preds_s1-L-3.1-8B-base dtype: string - name: comp_s2-L-3.1-8B-base dtype: string - name: preds_s2-L-3.1-8B-base dtype: string - name: comp_s3-L-3.1-8B-base_v3 dtype: string - name: preds_s3-L-3.1-8B-base_v3 dtype: string splits: - name: test num_bytes: 167700279 num_examples: 15808 download_size: 72853568 dataset_size: 167700279 configs: - config_name: default data_files: - split: test path: data/test-* ---
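The card above pairs a ground-truth completion (`y_true`) with per-model predictions (`preds_*`). A minimal sketch comparing them with a simple token-overlap score (the metric choice is illustrative; the column names come from the schema above):

```python
from datasets import load_dataset

ds = load_dataset("LLMsForHepth/infer_hep-th", split="test")

def token_overlap(a: str, b: str) -> float:
    # Crude Jaccard overlap between whitespace tokens.
    ta, tb = set(a.split()), set(b.split())
    return len(ta & tb) / max(len(ta | tb), 1)

row = ds[0]
for col in ["preds_Llama-3.1-8B", "preds_s1-L-3.1-8B-base"]:
    print(col, round(token_overlap(row["y_true"], row[col]), 3))
```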
**datasetId:** Denn231/pii_dataset_v0.82_errors_v11_valid
**author:** Denn231 · **last_modified:** 2025-01-16T12:16:34Z · **createdAt:** 2025-01-16T12:16:32Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- dataset_info: features: - name: source_text dtype: string - name: id dtype: string - name: processed_text dtype: string - name: privacy_mask list: - name: end dtype: int64 - name: label dtype: string - name: start dtype: int64 - name: value dtype: string - name: labels sequence: string - name: source dtype: int64 - name: answer_v_11_valid list: - name: end dtype: int64 - name: label dtype: string - name: start dtype: int64 - name: value dtype: string - name: answer_errors_v_11_valid struct: - name: fn dtype: int64 - name: fp dtype: int64 - name: tp dtype: int64 splits: - name: train num_bytes: 1685803 num_examples: 1643 download_size: 958181 dataset_size: 1685803 configs: - config_name: default data_files: - split: train path: data/train-* ---
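The `answer_errors_v_11_valid` struct above stores per-example true-positive, false-positive, and false-negative counts, which is enough to aggregate corpus-level precision and recall. A minimal sketch (the aggregation is illustrative):

```python
from datasets import load_dataset

ds = load_dataset("Denn231/pii_dataset_v0.82_errors_v11_valid", split="train")

tp = fp = fn = 0
for row in ds:
    err = row["answer_errors_v_11_valid"]
    tp += err["tp"]
    fp += err["fp"]
    fn += err["fn"]

precision = tp / (tp + fp) if tp + fp else 0.0
recall = tp / (tp + fn) if tp + fn else 0.0
print(f"precision={precision:.3f} recall={recall:.3f}")
```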
**datasetId:** OALL/details_tiiuae__Falcon3-1B-Instruct
**author:** OALL · **last_modified:** 2025-01-16T12:20:20Z · **createdAt:** 2025-01-16T12:20:07Z · **downloads:** 20 · **likes:** 0
**tags:** [ "size_categories:100K<n<1M", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
**task_categories:** null
**card:**
--- pretty_name: Evaluation run of tiiuae/Falcon3-1B-Instruct dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [tiiuae/Falcon3-1B-Instruct](https://huggingface.co/tiiuae/Falcon3-1B-Instruct).\n\ \nThe dataset is composed of 136 configuration, each one coresponding to one of\ \ the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can\ \ be found as a specific split in each configuration, the split being named using\ \ the timestamp of the run.The \"train\" split is always pointing to the latest\ \ results.\n\nAn additional configuration \"results\" store all the aggregated results\ \ of the run.\n\nTo load the details from a run, you can for instance do the following:\n\ ```python\nfrom datasets import load_dataset\ndata = load_dataset(\"OALL/details_tiiuae__Falcon3-1B-Instruct\"\ ,\n\t\"lighteval_xstory_cloze_ar_0_2025_01_16T12_15_09_430602_parquet\",\n\tsplit=\"\ train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2025-01-16T12:15:09.430602](https://huggingface.co/datasets/OALL/details_tiiuae__Falcon3-1B-Instruct/blob/main/results_2025-01-16T12-15-09.430602.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc_norm\": 0.34630602006843797,\n\ \ \"acc_norm_stderr\": 0.0368983486824361,\n \"acc\": 0.4831237590999338,\n\ \ \"acc_stderr\": 0.0128597939199776\n },\n \"community|acva:Algeria|0\"\ : {\n \"acc_norm\": 0.517948717948718,\n \"acc_norm_stderr\": 0.03587477098773825\n\ \ },\n \"community|acva:Ancient_Egypt|0\": {\n \"acc_norm\": 0.18412698412698414,\n\ \ \"acc_norm_stderr\": 0.021872840227741344\n },\n \"community|acva:Arab_Empire|0\"\ : {\n \"acc_norm\": 0.3132075471698113,\n \"acc_norm_stderr\": 0.02854479331905533\n\ \ },\n \"community|acva:Arabic_Architecture|0\": {\n \"acc_norm\":\ \ 0.46153846153846156,\n \"acc_norm_stderr\": 0.03579154352544571\n },\n\ \ \"community|acva:Arabic_Art|0\": {\n \"acc_norm\": 0.3384615384615385,\n\ \ \"acc_norm_stderr\": 0.03397280032734095\n },\n \"community|acva:Arabic_Astronomy|0\"\ : {\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.03581804596782233\n\ \ },\n \"community|acva:Arabic_Calligraphy|0\": {\n \"acc_norm\": 0.4980392156862745,\n\ \ \"acc_norm_stderr\": 0.031372549019607836\n },\n \"community|acva:Arabic_Ceremony|0\"\ : {\n \"acc_norm\": 0.5297297297297298,\n \"acc_norm_stderr\": 0.036795272555679256\n\ \ },\n \"community|acva:Arabic_Clothing|0\": {\n \"acc_norm\": 0.47692307692307695,\n\ \ \"acc_norm_stderr\": 0.03585965308947411\n },\n \"community|acva:Arabic_Culture|0\"\ : {\n \"acc_norm\": 0.2512820512820513,\n \"acc_norm_stderr\": 0.031141461571214363\n\ \ },\n \"community|acva:Arabic_Food|0\": {\n \"acc_norm\": 0.441025641025641,\n\ \ \"acc_norm_stderr\": 0.0356473293185358\n },\n \"community|acva:Arabic_Funeral|0\"\ : {\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.05092415229967329\n\ \ },\n \"community|acva:Arabic_Geography|0\": {\n \"acc_norm\": 0.5862068965517241,\n\ \ \"acc_norm_stderr\": 0.041042692118062316\n },\n \"community|acva:Arabic_History|0\"\ : {\n \"acc_norm\": 0.35384615384615387,\n \"acc_norm_stderr\": 0.03433004254147036\n\ \ },\n \"community|acva:Arabic_Language_Origin|0\": {\n \"acc_norm\"\ : 0.5578947368421052,\n \"acc_norm_stderr\": 0.051224183891818126\n },\n\ \ \"community|acva:Arabic_Literature|0\": {\n 
\"acc_norm\": 0.4827586206896552,\n\ \ \"acc_norm_stderr\": 0.04164188720169377\n },\n \"community|acva:Arabic_Math|0\"\ : {\n \"acc_norm\": 0.3641025641025641,\n \"acc_norm_stderr\": 0.034546538677863885\n\ \ },\n \"community|acva:Arabic_Medicine|0\": {\n \"acc_norm\": 0.4896551724137931,\n\ \ \"acc_norm_stderr\": 0.04165774775728763\n },\n \"community|acva:Arabic_Music|0\"\ : {\n \"acc_norm\": 0.30935251798561153,\n \"acc_norm_stderr\": 0.03934735112547112\n\ \ },\n \"community|acva:Arabic_Ornament|0\": {\n \"acc_norm\": 0.4666666666666667,\n\ \ \"acc_norm_stderr\": 0.03581804596782233\n },\n \"community|acva:Arabic_Philosophy|0\"\ : {\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n\ \ },\n \"community|acva:Arabic_Physics_and_Chemistry|0\": {\n \"acc_norm\"\ : 0.4256410256410256,\n \"acc_norm_stderr\": 0.03549871080367708\n },\n\ \ \"community|acva:Arabic_Wedding|0\": {\n \"acc_norm\": 0.40512820512820513,\n\ \ \"acc_norm_stderr\": 0.03524577495610961\n },\n \"community|acva:Bahrain|0\"\ : {\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.07216392363431012\n\ \ },\n \"community|acva:Comoros|0\": {\n \"acc_norm\": 0.35555555555555557,\n\ \ \"acc_norm_stderr\": 0.07216392363431014\n },\n \"community|acva:Egypt_modern|0\"\ : {\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.05092415229967328\n\ \ },\n \"community|acva:InfluenceFromAncientEgypt|0\": {\n \"acc_norm\"\ : 0.5846153846153846,\n \"acc_norm_stderr\": 0.03538013280575029\n },\n\ \ \"community|acva:InfluenceFromByzantium|0\": {\n \"acc_norm\": 0.6827586206896552,\n\ \ \"acc_norm_stderr\": 0.03878352372138622\n },\n \"community|acva:InfluenceFromChina|0\"\ : {\n \"acc_norm\": 0.30256410256410254,\n \"acc_norm_stderr\": 0.03298070870085618\n\ \ },\n \"community|acva:InfluenceFromGreece|0\": {\n \"acc_norm\":\ \ 0.6358974358974359,\n \"acc_norm_stderr\": 0.03454653867786389\n },\n\ \ \"community|acva:InfluenceFromIslam|0\": {\n \"acc_norm\": 0.3103448275862069,\n\ \ \"acc_norm_stderr\": 0.03855289616378947\n },\n \"community|acva:InfluenceFromPersia|0\"\ : {\n \"acc_norm\": 0.6971428571428572,\n \"acc_norm_stderr\": 0.03483414676585986\n\ \ },\n \"community|acva:InfluenceFromRome|0\": {\n \"acc_norm\": 0.5846153846153846,\n\ \ \"acc_norm_stderr\": 0.035380132805750295\n },\n \"community|acva:Iraq|0\"\ : {\n \"acc_norm\": 0.5529411764705883,\n \"acc_norm_stderr\": 0.054247803536170265\n\ \ },\n \"community|acva:Islam_Education|0\": {\n \"acc_norm\": 0.4666666666666667,\n\ \ \"acc_norm_stderr\": 0.03581804596782232\n },\n \"community|acva:Islam_branches_and_schools|0\"\ : {\n \"acc_norm\": 0.49142857142857144,\n \"acc_norm_stderr\": 0.0378993320697706\n\ \ },\n \"community|acva:Islamic_law_system|0\": {\n \"acc_norm\": 0.4153846153846154,\n\ \ \"acc_norm_stderr\": 0.03538013280575029\n },\n \"community|acva:Jordan|0\"\ : {\n \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.07309112127323451\n\ \ },\n \"community|acva:Kuwait|0\": {\n \"acc_norm\": 0.28888888888888886,\n\ \ \"acc_norm_stderr\": 0.06832943242540508\n },\n \"community|acva:Lebanon|0\"\ : {\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.06267511942419626\n\ \ },\n \"community|acva:Libya|0\": {\n \"acc_norm\": 0.4888888888888889,\n\ \ \"acc_norm_stderr\": 0.07535922203472523\n },\n \"community|acva:Mauritania|0\"\ : {\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.07491109582924915\n\ \ },\n \"community|acva:Mesopotamia_civilization|0\": {\n \"acc_norm\"\ : 0.5225806451612903,\n 
\"acc_norm_stderr\": 0.04025003948244411\n },\n\ \ \"community|acva:Morocco|0\": {\n \"acc_norm\": 0.2222222222222222,\n\ \ \"acc_norm_stderr\": 0.06267511942419628\n },\n \"community|acva:Oman|0\"\ : {\n \"acc_norm\": 0.24444444444444444,\n \"acc_norm_stderr\": 0.06478835438717\n\ \ },\n \"community|acva:Palestine|0\": {\n \"acc_norm\": 0.27058823529411763,\n\ \ \"acc_norm_stderr\": 0.048473144530236524\n },\n \"community|acva:Qatar|0\"\ : {\n \"acc_norm\": 0.4222222222222222,\n \"acc_norm_stderr\": 0.07446027270295806\n\ \ },\n \"community|acva:Saudi_Arabia|0\": {\n \"acc_norm\": 0.3435897435897436,\n\ \ \"acc_norm_stderr\": 0.034096273014098545\n },\n \"community|acva:Somalia|0\"\ : {\n \"acc_norm\": 0.4222222222222222,\n \"acc_norm_stderr\": 0.07446027270295805\n\ \ },\n \"community|acva:Sudan|0\": {\n \"acc_norm\": 0.4,\n \ \ \"acc_norm_stderr\": 0.07385489458759965\n },\n \"community|acva:Syria|0\"\ : {\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.07385489458759965\n\ \ },\n \"community|acva:Tunisia|0\": {\n \"acc_norm\": 0.35555555555555557,\n\ \ \"acc_norm_stderr\": 0.07216392363431012\n },\n \"community|acva:United_Arab_Emirates|0\"\ : {\n \"acc_norm\": 0.24705882352941178,\n \"acc_norm_stderr\": 0.047058823529411785\n\ \ },\n \"community|acva:Yemen|0\": {\n \"acc_norm\": 0.3,\n \ \ \"acc_norm_stderr\": 0.15275252316519466\n },\n \"community|acva:communication|0\"\ : {\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025974025974025955\n\ \ },\n \"community|acva:computer_and_phone|0\": {\n \"acc_norm\": 0.4542372881355932,\n\ \ \"acc_norm_stderr\": 0.029038197586234566\n },\n \"community|acva:daily_life|0\"\ : {\n \"acc_norm\": 0.19881305637982197,\n \"acc_norm_stderr\": 0.021773073762099044\n\ \ },\n \"community|acva:entertainment|0\": {\n \"acc_norm\": 0.23728813559322035,\n\ \ \"acc_norm_stderr\": 0.024811018803776317\n },\n \"community|alghafa:mcq_exams_test_ar|0\"\ : {\n \"acc_norm\": 0.26032315978456017,\n \"acc_norm_stderr\": 0.018609727684848365\n\ \ },\n \"community|alghafa:meta_ar_dialects|0\": {\n \"acc_norm\":\ \ 0.25338276181649677,\n \"acc_norm_stderr\": 0.005922186338909283\n },\n\ \ \"community|alghafa:meta_ar_msa|0\": {\n \"acc_norm\": 0.2670391061452514,\n\ \ \"acc_norm_stderr\": 0.014796502622562557\n },\n \"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0\"\ : {\n \"acc_norm\": 0.5333333333333333,\n \"acc_norm_stderr\": 0.05799451149344531\n\ \ },\n \"community|alghafa:multiple_choice_grounded_statement_soqal_task|0\"\ : {\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.03932313218491398\n\ \ },\n \"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0\"\ : {\n \"acc_norm\": 0.30666666666666664,\n \"acc_norm_stderr\": 0.03777558444306215\n\ \ },\n \"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0\"\ : {\n \"acc_norm\": 0.4930581613508443,\n \"acc_norm_stderr\": 0.00559172843848121\n\ \ },\n \"community|alghafa:multiple_choice_rating_sentiment_task|0\": {\n\ \ \"acc_norm\": 0.3312760633861551,\n \"acc_norm_stderr\": 0.006079391241463982\n\ \ },\n \"community|alghafa:multiple_choice_sentiment_task|0\": {\n \ \ \"acc_norm\": 0.3226744186046512,\n \"acc_norm_stderr\": 0.011275688483429172\n\ \ },\n \"community|arabic_exams|0\": {\n \"acc_norm\": 0.28677839851024206,\n\ \ \"acc_norm_stderr\": 0.01953453451048444\n },\n \"community|arabic_mmlu:abstract_algebra|0\"\ : {\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n\ \ },\n \"community|arabic_mmlu:anatomy|0\": {\n \"acc_norm\": 
0.24444444444444444,\n\ \ \"acc_norm_stderr\": 0.037125378336148665\n },\n \"community|arabic_mmlu:astronomy|0\"\ : {\n \"acc_norm\": 0.32894736842105265,\n \"acc_norm_stderr\": 0.03823428969926604\n\ \ },\n \"community|arabic_mmlu:business_ethics|0\": {\n \"acc_norm\"\ : 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n \"community|arabic_mmlu:clinical_knowledge|0\"\ : {\n \"acc_norm\": 0.30566037735849055,\n \"acc_norm_stderr\": 0.028353298073322666\n\ \ },\n \"community|arabic_mmlu:college_biology|0\": {\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"community|arabic_mmlu:college_chemistry|0\"\ : {\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n\ \ },\n \"community|arabic_mmlu:college_computer_science|0\": {\n \"\ acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \ \ \"community|arabic_mmlu:college_mathematics|0\": {\n \"acc_norm\": 0.27,\n\ \ \"acc_norm_stderr\": 0.044619604333847415\n },\n \"community|arabic_mmlu:college_medicine|0\"\ : {\n \"acc_norm\": 0.3236994219653179,\n \"acc_norm_stderr\": 0.0356760379963917\n\ \ },\n \"community|arabic_mmlu:college_physics|0\": {\n \"acc_norm\"\ : 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105655\n },\n\ \ \"community|arabic_mmlu:computer_security|0\": {\n \"acc_norm\": 0.2,\n\ \ \"acc_norm_stderr\": 0.04020151261036845\n },\n \"community|arabic_mmlu:conceptual_physics|0\"\ : {\n \"acc_norm\": 0.23829787234042554,\n \"acc_norm_stderr\": 0.02785125297388978\n\ \ },\n \"community|arabic_mmlu:econometrics|0\": {\n \"acc_norm\":\ \ 0.22807017543859648,\n \"acc_norm_stderr\": 0.03947152782669415\n },\n\ \ \"community|arabic_mmlu:electrical_engineering|0\": {\n \"acc_norm\"\ : 0.2482758620689655,\n \"acc_norm_stderr\": 0.036001056927277716\n },\n\ \ \"community|arabic_mmlu:elementary_mathematics|0\": {\n \"acc_norm\"\ : 0.2857142857142857,\n \"acc_norm_stderr\": 0.023266512213730564\n },\n\ \ \"community|arabic_mmlu:formal_logic|0\": {\n \"acc_norm\": 0.3253968253968254,\n\ \ \"acc_norm_stderr\": 0.041905964388711366\n },\n \"community|arabic_mmlu:global_facts|0\"\ : {\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n\ \ },\n \"community|arabic_mmlu:high_school_biology|0\": {\n \"acc_norm\"\ : 0.3193548387096774,\n \"acc_norm_stderr\": 0.026522709674667775\n },\n\ \ \"community|arabic_mmlu:high_school_chemistry|0\": {\n \"acc_norm\"\ : 0.24630541871921183,\n \"acc_norm_stderr\": 0.030315099285617715\n },\n\ \ \"community|arabic_mmlu:high_school_computer_science|0\": {\n \"acc_norm\"\ : 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"community|arabic_mmlu:high_school_european_history|0\"\ : {\n \"acc_norm\": 0.296969696969697,\n \"acc_norm_stderr\": 0.035679697722680474\n\ \ },\n \"community|arabic_mmlu:high_school_geography|0\": {\n \"acc_norm\"\ : 0.35353535353535354,\n \"acc_norm_stderr\": 0.03406086723547153\n },\n\ \ \"community|arabic_mmlu:high_school_government_and_politics|0\": {\n \ \ \"acc_norm\": 0.3626943005181347,\n \"acc_norm_stderr\": 0.03469713791704371\n\ \ },\n \"community|arabic_mmlu:high_school_macroeconomics|0\": {\n \ \ \"acc_norm\": 0.34102564102564104,\n \"acc_norm_stderr\": 0.024035489676335068\n\ \ },\n \"community|arabic_mmlu:high_school_mathematics|0\": {\n \"\ acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.027195934804085622\n\ \ },\n \"community|arabic_mmlu:high_school_microeconomics|0\": {\n \ \ \"acc_norm\": 0.3445378151260504,\n \"acc_norm_stderr\": 0.030868682604121633\n\ \ },\n 
\"community|arabic_mmlu:high_school_physics|0\": {\n \"acc_norm\"\ : 0.2980132450331126,\n \"acc_norm_stderr\": 0.037345356767871984\n },\n\ \ \"community|arabic_mmlu:high_school_psychology|0\": {\n \"acc_norm\"\ : 0.3174311926605505,\n \"acc_norm_stderr\": 0.0199571521984605\n },\n\ \ \"community|arabic_mmlu:high_school_statistics|0\": {\n \"acc_norm\"\ : 0.4166666666666667,\n \"acc_norm_stderr\": 0.033622774366080424\n },\n\ \ \"community|arabic_mmlu:high_school_us_history|0\": {\n \"acc_norm\"\ : 0.20098039215686275,\n \"acc_norm_stderr\": 0.028125972265654362\n },\n\ \ \"community|arabic_mmlu:high_school_world_history|0\": {\n \"acc_norm\"\ : 0.22362869198312235,\n \"acc_norm_stderr\": 0.027123298205229972\n },\n\ \ \"community|arabic_mmlu:human_aging|0\": {\n \"acc_norm\": 0.19282511210762332,\n\ \ \"acc_norm_stderr\": 0.026478240960489365\n },\n \"community|arabic_mmlu:human_sexuality|0\"\ : {\n \"acc_norm\": 0.26717557251908397,\n \"acc_norm_stderr\": 0.03880848301082395\n\ \ },\n \"community|arabic_mmlu:international_law|0\": {\n \"acc_norm\"\ : 0.18181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n\ \ \"community|arabic_mmlu:jurisprudence|0\": {\n \"acc_norm\": 0.26851851851851855,\n\ \ \"acc_norm_stderr\": 0.04284467968052191\n },\n \"community|arabic_mmlu:logical_fallacies|0\"\ : {\n \"acc_norm\": 0.2331288343558282,\n \"acc_norm_stderr\": 0.03322015795776741\n\ \ },\n \"community|arabic_mmlu:machine_learning|0\": {\n \"acc_norm\"\ : 0.17857142857142858,\n \"acc_norm_stderr\": 0.036352091215778065\n },\n\ \ \"community|arabic_mmlu:management|0\": {\n \"acc_norm\": 0.2912621359223301,\n\ \ \"acc_norm_stderr\": 0.044986763205729224\n },\n \"community|arabic_mmlu:marketing|0\"\ : {\n \"acc_norm\": 0.2094017094017094,\n \"acc_norm_stderr\": 0.026655699653922768\n\ \ },\n \"community|arabic_mmlu:medical_genetics|0\": {\n \"acc_norm\"\ : 0.26,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"community|arabic_mmlu:miscellaneous|0\"\ : {\n \"acc_norm\": 0.23499361430395913,\n \"acc_norm_stderr\": 0.015162024152278445\n\ \ },\n \"community|arabic_mmlu:moral_disputes|0\": {\n \"acc_norm\"\ : 0.23699421965317918,\n \"acc_norm_stderr\": 0.02289408248992599\n },\n\ \ \"community|arabic_mmlu:moral_scenarios|0\": {\n \"acc_norm\": 0.2748603351955307,\n\ \ \"acc_norm_stderr\": 0.014931316703220517\n },\n \"community|arabic_mmlu:nutrition|0\"\ : {\n \"acc_norm\": 0.3137254901960784,\n \"acc_norm_stderr\": 0.02656892101545714\n\ \ },\n \"community|arabic_mmlu:philosophy|0\": {\n \"acc_norm\": 0.2508038585209003,\n\ \ \"acc_norm_stderr\": 0.024619771956697168\n },\n \"community|arabic_mmlu:prehistory|0\"\ : {\n \"acc_norm\": 0.24691358024691357,\n \"acc_norm_stderr\": 0.02399350170904213\n\ \ },\n \"community|arabic_mmlu:professional_accounting|0\": {\n \"\ acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340460997\n\ \ },\n \"community|arabic_mmlu:professional_law|0\": {\n \"acc_norm\"\ : 0.24902216427640156,\n \"acc_norm_stderr\": 0.01104489226404077\n },\n\ \ \"community|arabic_mmlu:professional_medicine|0\": {\n \"acc_norm\"\ : 0.41911764705882354,\n \"acc_norm_stderr\": 0.029972807170464626\n },\n\ \ \"community|arabic_mmlu:professional_psychology|0\": {\n \"acc_norm\"\ : 0.22058823529411764,\n \"acc_norm_stderr\": 0.016774672365468517\n },\n\ \ \"community|arabic_mmlu:public_relations|0\": {\n \"acc_norm\": 0.2545454545454545,\n\ \ \"acc_norm_stderr\": 0.04172343038705383\n },\n \"community|arabic_mmlu:security_studies|0\"\ : {\n \"acc_norm\": 
0.40408163265306124,\n \"acc_norm_stderr\": 0.03141470802586588\n\ \ },\n \"community|arabic_mmlu:sociology|0\": {\n \"acc_norm\": 0.2537313432835821,\n\ \ \"acc_norm_stderr\": 0.030769444967296018\n },\n \"community|arabic_mmlu:us_foreign_policy|0\"\ : {\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n\ \ },\n \"community|arabic_mmlu:virology|0\": {\n \"acc_norm\": 0.23493975903614459,\n\ \ \"acc_norm_stderr\": 0.03300533186128922\n },\n \"community|arabic_mmlu:world_religions|0\"\ : {\n \"acc_norm\": 0.17543859649122806,\n \"acc_norm_stderr\": 0.029170885500727665\n\ \ },\n \"community|arc_challenge_okapi_ar|0\": {\n \"acc_norm\": 0.2706896551724138,\n\ \ \"acc_norm_stderr\": 0.013051195593979622\n },\n \"community|arc_easy_ar|0\"\ : {\n \"acc_norm\": 0.25676818950930624,\n \"acc_norm_stderr\": 0.008986713434390267\n\ \ },\n \"community|boolq_ar|0\": {\n \"acc_norm\": 0.6092024539877301,\n\ \ \"acc_norm_stderr\": 0.008547016763994043\n },\n \"community|copa_ext_ar|0\"\ : {\n \"acc_norm\": 0.4111111111111111,\n \"acc_norm_stderr\": 0.05215564061107555\n\ \ },\n \"community|hellaswag_okapi_ar|0\": {\n \"acc_norm\": 0.2546069130956275,\n\ \ \"acc_norm_stderr\": 0.004549284614264872\n },\n \"community|openbook_qa_ext_ar|0\"\ : {\n \"acc_norm\": 0.34545454545454546,\n \"acc_norm_stderr\": 0.02139448746962014\n\ \ },\n \"community|piqa_ar|0\": {\n \"acc_norm\": 0.5002727768685216,\n\ \ \"acc_norm_stderr\": 0.011681731099489213\n },\n \"community|race_ar|0\"\ : {\n \"acc_norm\": 0.2937715560965713,\n \"acc_norm_stderr\": 0.006488467368464929\n\ \ },\n \"community|sciq_ar|0\": {\n \"acc_norm\": 0.3527638190954774,\n\ \ \"acc_norm_stderr\": 0.015155847230169505\n },\n \"community|toxigen_ar|0\"\ : {\n \"acc_norm\": 0.4310160427807487,\n \"acc_norm_stderr\": 0.016204039390071805\n\ \ },\n \"lighteval|xstory_cloze:ar|0\": {\n \"acc\": 0.4831237590999338,\n\ \ \"acc_stderr\": 0.0128597939199776\n },\n \"community|acva:_average|0\"\ : {\n \"acc_norm\": 0.41509657499055447,\n \"acc_norm_stderr\": 0.04705029510032508\n\ \ },\n \"community|alghafa:_average|0\": {\n \"acc_norm\": 0.34752818567643984,\n\ \ \"acc_norm_stderr\": 0.02192982810345733\n },\n \"community|arabic_mmlu:_average|0\"\ : {\n \"acc_norm\": 0.2725530215266088,\n \"acc_norm_stderr\": 0.03293407974198066\n\ \ }\n}\n```" repo_url: https://huggingface.co/tiiuae/Falcon3-1B-Instruct configs: - config_name: community_acva_Algeria_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Algeria|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Algeria|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Ancient_Egypt_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Ancient_Egypt|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Ancient_Egypt|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arab_Empire_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arab_Empire|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arab_Empire|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Architecture_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - 
'**/details_community|acva:Arabic_Architecture|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Architecture|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Art_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Art|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Art|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Astronomy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Astronomy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Astronomy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Calligraphy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Calligraphy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Calligraphy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Ceremony_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Ceremony|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Ceremony|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Clothing_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Clothing|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Clothing|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Culture_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Culture|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Culture|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Food_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Food|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Food|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Funeral_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Funeral|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Funeral|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Geography_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Geography|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Geography|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_History_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_History|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_History|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Language_Origin_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 
2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Language_Origin|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Language_Origin|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Literature_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Literature|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Literature|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Math_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Math|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Math|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Medicine_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Medicine|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Medicine|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Music_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Music|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Music|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Ornament_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Ornament|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Ornament|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Philosophy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Philosophy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Philosophy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Physics_and_Chemistry_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Physics_and_Chemistry|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Physics_and_Chemistry|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Arabic_Wedding_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Arabic_Wedding|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Arabic_Wedding|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Bahrain_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Bahrain|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Bahrain|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Comoros_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Comoros|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Comoros|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Egypt_modern_0_2025_01_16T12_15_09_430602_parquet 
data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Egypt_modern|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Egypt_modern|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromAncientEgypt_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromAncientEgypt|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromAncientEgypt|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromByzantium_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromByzantium|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromByzantium|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromChina_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromChina|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromChina|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromGreece_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromGreece|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromGreece|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromIslam_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromIslam|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromIslam|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromPersia_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromPersia|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromPersia|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_InfluenceFromRome_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:InfluenceFromRome|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromRome|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Iraq_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Iraq|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Iraq|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Islam_Education_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Islam_Education|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Islam_Education|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Islam_branches_and_schools_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Islam_branches_and_schools|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - 
'**/details_community|acva:Islam_branches_and_schools|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Islamic_law_system_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Islamic_law_system|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Islamic_law_system|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Jordan_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Jordan|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Jordan|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Kuwait_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Kuwait|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Kuwait|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Lebanon_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Lebanon|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Lebanon|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Libya_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Libya|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Libya|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Mauritania_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Mauritania|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Mauritania|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Mesopotamia_civilization_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Mesopotamia_civilization|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Mesopotamia_civilization|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Morocco_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Morocco|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Morocco|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Oman_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Oman|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Oman|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Palestine_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Palestine|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Palestine|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Qatar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Qatar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Qatar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Saudi_Arabia_0_2025_01_16T12_15_09_430602_parquet 
data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Saudi_Arabia|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Saudi_Arabia|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Somalia_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Somalia|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Somalia|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Sudan_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Sudan|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Sudan|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Syria_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Syria|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Syria|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Tunisia_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Tunisia|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Tunisia|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_United_Arab_Emirates_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:United_Arab_Emirates|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:United_Arab_Emirates|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_Yemen_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:Yemen|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:Yemen|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_communication_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:communication|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:communication|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_computer_and_phone_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:computer_and_phone|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:computer_and_phone|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_daily_life_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:daily_life|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:daily_life|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_acva_entertainment_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|acva:entertainment|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|acva:entertainment|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_mcq_exams_test_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - 
'**/details_community|alghafa:mcq_exams_test_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:mcq_exams_test_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_meta_ar_dialects_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:meta_ar_dialects|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:meta_ar_dialects|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_meta_ar_msa_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:meta_ar_msa|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:meta_ar_msa|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_multiple_choice_facts_truefalse_balanced_task_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:multiple_choice_facts_truefalse_balanced_task|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_facts_truefalse_balanced_task|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_multiple_choice_grounded_statement_soqal_task_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:multiple_choice_grounded_statement_soqal_task|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_grounded_statement_soqal_task|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_multiple_choice_grounded_statement_xglue_mlqa_task_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_multiple_choice_rating_sentiment_no_neutral_task_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_multiple_choice_rating_sentiment_task_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_task|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_task|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_alghafa_multiple_choice_sentiment_task_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|alghafa:multiple_choice_sentiment_task|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_sentiment_task|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_exams_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - 
'**/details_community|arabic_exams|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_exams|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_abstract_algebra_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:abstract_algebra|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:abstract_algebra|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_anatomy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:anatomy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:anatomy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_astronomy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:astronomy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:astronomy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_business_ethics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:business_ethics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:business_ethics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_clinical_knowledge_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:clinical_knowledge|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:clinical_knowledge|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_college_biology_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:college_biology|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_biology|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_college_chemistry_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:college_chemistry|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_chemistry|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_college_computer_science_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:college_computer_science|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_computer_science|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_college_mathematics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:college_mathematics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_mathematics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_college_medicine_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:college_medicine|0_2025-01-16T12-15-09.430602.parquet' - split: latest 
path: - '**/details_community|arabic_mmlu:college_medicine|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_college_physics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:college_physics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_physics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_computer_security_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:computer_security|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:computer_security|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_conceptual_physics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:conceptual_physics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:conceptual_physics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_econometrics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:econometrics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:econometrics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_electrical_engineering_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:electrical_engineering|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:electrical_engineering|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_elementary_mathematics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:elementary_mathematics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:elementary_mathematics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_formal_logic_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:formal_logic|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:formal_logic|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_global_facts_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:global_facts|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:global_facts|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_biology_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_biology|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_biology|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_chemistry_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_chemistry|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - 
'**/details_community|arabic_mmlu:high_school_chemistry|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_computer_science_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_computer_science|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_computer_science|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_european_history_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_european_history|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_european_history|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_geography_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_geography|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_geography|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_government_and_politics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_government_and_politics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_government_and_politics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_macroeconomics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_macroeconomics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_macroeconomics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_mathematics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_mathematics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_mathematics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_microeconomics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_microeconomics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_microeconomics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_physics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_physics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_physics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_psychology_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_psychology|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_psychology|0_2025-01-16T12-15-09.430602.parquet' - config_name: 
community_arabic_mmlu_high_school_statistics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_statistics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_statistics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_us_history_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_us_history|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_us_history|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_high_school_world_history_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:high_school_world_history|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_world_history|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_human_aging_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:human_aging|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:human_aging|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_human_sexuality_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:human_sexuality|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:human_sexuality|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_international_law_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:international_law|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:international_law|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_jurisprudence_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:jurisprudence|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:jurisprudence|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_logical_fallacies_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:logical_fallacies|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:logical_fallacies|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_machine_learning_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:machine_learning|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:machine_learning|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_management_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:management|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:management|0_2025-01-16T12-15-09.430602.parquet' - config_name: 
community_arabic_mmlu_marketing_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:marketing|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:marketing|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_medical_genetics_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:medical_genetics|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:medical_genetics|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_miscellaneous_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:miscellaneous|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:miscellaneous|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_moral_disputes_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:moral_disputes|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:moral_disputes|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_moral_scenarios_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:moral_scenarios|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:moral_scenarios|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_nutrition_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:nutrition|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:nutrition|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_philosophy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:philosophy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:philosophy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_prehistory_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:prehistory|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:prehistory|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_professional_accounting_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:professional_accounting|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_accounting|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_professional_law_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:professional_law|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_law|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_professional_medicine_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - 
'**/details_community|arabic_mmlu:professional_medicine|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_medicine|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_professional_psychology_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:professional_psychology|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_psychology|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_public_relations_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:public_relations|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:public_relations|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_security_studies_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:security_studies|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:security_studies|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_sociology_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:sociology|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:sociology|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_us_foreign_policy_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:us_foreign_policy|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:us_foreign_policy|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_virology_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:virology|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:virology|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arabic_mmlu_world_religions_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arabic_mmlu:world_religions|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arabic_mmlu:world_religions|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arc_challenge_okapi_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arc_challenge_okapi_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arc_challenge_okapi_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_arc_easy_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|arc_easy_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|arc_easy_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_boolq_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|boolq_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|boolq_ar|0_2025-01-16T12-15-09.430602.parquet' - 
config_name: community_copa_ext_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|copa_ext_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|copa_ext_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_hellaswag_okapi_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|hellaswag_okapi_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|hellaswag_okapi_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_openbook_qa_ext_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|openbook_qa_ext_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|openbook_qa_ext_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_piqa_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|piqa_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|piqa_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_race_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|race_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|race_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_sciq_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|sciq_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|sciq_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: community_toxigen_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_community|toxigen_ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_community|toxigen_ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: lighteval_xstory_cloze_ar_0_2025_01_16T12_15_09_430602_parquet data_files: - split: 2025_01_16T12_15_09.430602 path: - '**/details_lighteval|xstory_cloze:ar|0_2025-01-16T12-15-09.430602.parquet' - split: latest path: - '**/details_lighteval|xstory_cloze:ar|0_2025-01-16T12-15-09.430602.parquet' - config_name: results data_files: - split: 2025_01_16T12_15_09.430602 path: - results_2025-01-16T12-15-09.430602.parquet - split: latest path: - results_2025-01-16T12-15-09.430602.parquet ---

# Dataset Card for Evaluation run of tiiuae/Falcon3-1B-Instruct

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [tiiuae/Falcon3-1B-Instruct](https://huggingface.co/tiiuae/Falcon3-1B-Instruct).

The dataset is composed of 136 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run.

To load the details from a run, you can for instance do the following:

```python
from datasets import load_dataset

# Each evaluated task has its own configuration; runs are stored as
# timestamped splits, and "train" always points to the latest results.
data = load_dataset(
    "OALL/details_tiiuae__Falcon3-1B-Instruct",
    "lighteval_xstory_cloze_ar_0_2025_01_16T12_15_09_430602_parquet",
    split="train",
)
```
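Similarly, the aggregated metrics can be loaded from the "results" configuration. The snippet below is a minimal sketch: it relies only on the configuration and split names declared in the YAML above (the timestamped split and "latest"), and simply inspects whatever columns the results parquet happens to expose, since the exact layout is not documented in this card.

```python
from datasets import load_dataset

# The "results" configuration aggregates all task scores for a run;
# per the YAML above it exposes a timestamped split and a "latest" split.
results = load_dataset(
    "OALL/details_tiiuae__Falcon3-1B-Instruct",
    "results",
    split="latest",
)

# The column layout is undocumented, so inspect it before relying on it.
print(results.column_names)
print(results[0])
```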
## Latest results

These are the [latest results from run 2025-01-16T12:15:09.430602](https://huggingface.co/datasets/OALL/details_tiiuae__Falcon3-1B-Instruct/blob/main/results_2025-01-16T12-15-09.430602.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each one in the results and the "latest" split for each eval):

```python
{ "all": { "acc_norm": 0.34630602006843797, "acc_norm_stderr": 0.0368983486824361, "acc": 0.4831237590999338, "acc_stderr": 0.0128597939199776 }, "community|acva:Algeria|0": { "acc_norm": 0.517948717948718, "acc_norm_stderr": 0.03587477098773825 }, "community|acva:Ancient_Egypt|0": { "acc_norm": 0.18412698412698414, "acc_norm_stderr": 0.021872840227741344 }, "community|acva:Arab_Empire|0": { "acc_norm": 0.3132075471698113, "acc_norm_stderr": 0.02854479331905533 }, "community|acva:Arabic_Architecture|0": { "acc_norm": 0.46153846153846156, "acc_norm_stderr": 0.03579154352544571 }, "community|acva:Arabic_Art|0": { "acc_norm": 0.3384615384615385, "acc_norm_stderr": 0.03397280032734095 }, "community|acva:Arabic_Astronomy|0": { "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03581804596782233 }, "community|acva:Arabic_Calligraphy|0": { "acc_norm": 0.4980392156862745, "acc_norm_stderr": 0.031372549019607836 }, "community|acva:Arabic_Ceremony|0": { "acc_norm": 0.5297297297297298, "acc_norm_stderr": 0.036795272555679256 }, "community|acva:Arabic_Clothing|0": { "acc_norm": 0.47692307692307695, "acc_norm_stderr": 0.03585965308947411 }, "community|acva:Arabic_Culture|0": { "acc_norm": 0.2512820512820513, "acc_norm_stderr": 0.031141461571214363 }, "community|acva:Arabic_Food|0": { "acc_norm": 0.441025641025641, "acc_norm_stderr": 0.0356473293185358 }, "community|acva:Arabic_Funeral|0": { "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.05092415229967329 }, "community|acva:Arabic_Geography|0": { "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.041042692118062316 }, "community|acva:Arabic_History|0": { "acc_norm": 0.35384615384615387, "acc_norm_stderr": 0.03433004254147036 }, "community|acva:Arabic_Language_Origin|0": { "acc_norm": 0.5578947368421052, "acc_norm_stderr": 0.051224183891818126 }, "community|acva:Arabic_Literature|0": { "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 }, "community|acva:Arabic_Math|0": { "acc_norm": 0.3641025641025641, "acc_norm_stderr": 0.034546538677863885 }, "community|acva:Arabic_Medicine|0": { "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763 }, "community|acva:Arabic_Music|0": { "acc_norm": 0.30935251798561153, "acc_norm_stderr": 0.03934735112547112 }, "community|acva:Arabic_Ornament|0": { "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03581804596782233 }, "community|acva:Arabic_Philosophy|0": { "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "acc_norm": 0.4256410256410256, "acc_norm_stderr": 0.03549871080367708 }, "community|acva:Arabic_Wedding|0": { "acc_norm": 0.40512820512820513, "acc_norm_stderr": 0.03524577495610961 }, "community|acva:Bahrain|0": { "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Comoros|0": { "acc_norm":
0.35555555555555557, "acc_norm_stderr": 0.07216392363431014 }, "community|acva:Egypt_modern|0": { "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.05092415229967328 }, "community|acva:InfluenceFromAncientEgypt|0": { "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.03538013280575029 }, "community|acva:InfluenceFromByzantium|0": { "acc_norm": 0.6827586206896552, "acc_norm_stderr": 0.03878352372138622 }, "community|acva:InfluenceFromChina|0": { "acc_norm": 0.30256410256410254, "acc_norm_stderr": 0.03298070870085618 }, "community|acva:InfluenceFromGreece|0": { "acc_norm": 0.6358974358974359, "acc_norm_stderr": 0.03454653867786389 }, "community|acva:InfluenceFromIslam|0": { "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.03855289616378947 }, "community|acva:InfluenceFromPersia|0": { "acc_norm": 0.6971428571428572, "acc_norm_stderr": 0.03483414676585986 }, "community|acva:InfluenceFromRome|0": { "acc_norm": 0.5846153846153846, "acc_norm_stderr": 0.035380132805750295 }, "community|acva:Iraq|0": { "acc_norm": 0.5529411764705883, "acc_norm_stderr": 0.054247803536170265 }, "community|acva:Islam_Education|0": { "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03581804596782232 }, "community|acva:Islam_branches_and_schools|0": { "acc_norm": 0.49142857142857144, "acc_norm_stderr": 0.0378993320697706 }, "community|acva:Islamic_law_system|0": { "acc_norm": 0.4153846153846154, "acc_norm_stderr": 0.03538013280575029 }, "community|acva:Jordan|0": { "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.07309112127323451 }, "community|acva:Kuwait|0": { "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.06832943242540508 }, "community|acva:Lebanon|0": { "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.06267511942419626 }, "community|acva:Libya|0": { "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.07535922203472523 }, "community|acva:Mauritania|0": { "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.07491109582924915 }, "community|acva:Mesopotamia_civilization|0": { "acc_norm": 0.5225806451612903, "acc_norm_stderr": 0.04025003948244411 }, "community|acva:Morocco|0": { "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.06267511942419628 }, "community|acva:Oman|0": { "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.06478835438717 }, "community|acva:Palestine|0": { "acc_norm": 0.27058823529411763, "acc_norm_stderr": 0.048473144530236524 }, "community|acva:Qatar|0": { "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.07446027270295806 }, "community|acva:Saudi_Arabia|0": { "acc_norm": 0.3435897435897436, "acc_norm_stderr": 0.034096273014098545 }, "community|acva:Somalia|0": { "acc_norm": 0.4222222222222222, "acc_norm_stderr": 0.07446027270295805 }, "community|acva:Sudan|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.07385489458759965 }, "community|acva:Syria|0": { "acc_norm": 0.4, "acc_norm_stderr": 0.07385489458759965 }, "community|acva:Tunisia|0": { "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:United_Arab_Emirates|0": { "acc_norm": 0.24705882352941178, "acc_norm_stderr": 0.047058823529411785 }, "community|acva:Yemen|0": { "acc_norm": 0.3, "acc_norm_stderr": 0.15275252316519466 }, "community|acva:communication|0": { "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025974025974025955 }, "community|acva:computer_and_phone|0": { "acc_norm": 0.4542372881355932, "acc_norm_stderr": 0.029038197586234566 }, "community|acva:daily_life|0": { "acc_norm": 0.19881305637982197, "acc_norm_stderr": 0.021773073762099044 }, 
"community|acva:entertainment|0": { "acc_norm": 0.23728813559322035, "acc_norm_stderr": 0.024811018803776317 }, "community|alghafa:mcq_exams_test_ar|0": { "acc_norm": 0.26032315978456017, "acc_norm_stderr": 0.018609727684848365 }, "community|alghafa:meta_ar_dialects|0": { "acc_norm": 0.25338276181649677, "acc_norm_stderr": 0.005922186338909283 }, "community|alghafa:meta_ar_msa|0": { "acc_norm": 0.2670391061452514, "acc_norm_stderr": 0.014796502622562557 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.05799451149344531 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "acc_norm": 0.36, "acc_norm_stderr": 0.03932313218491398 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "acc_norm": 0.30666666666666664, "acc_norm_stderr": 0.03777558444306215 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "acc_norm": 0.4930581613508443, "acc_norm_stderr": 0.00559172843848121 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "acc_norm": 0.3312760633861551, "acc_norm_stderr": 0.006079391241463982 }, "community|alghafa:multiple_choice_sentiment_task|0": { "acc_norm": 0.3226744186046512, "acc_norm_stderr": 0.011275688483429172 }, "community|arabic_exams|0": { "acc_norm": 0.28677839851024206, "acc_norm_stderr": 0.01953453451048444 }, "community|arabic_mmlu:abstract_algebra|0": { "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "community|arabic_mmlu:anatomy|0": { "acc_norm": 0.24444444444444444, "acc_norm_stderr": 0.037125378336148665 }, "community|arabic_mmlu:astronomy|0": { "acc_norm": 0.32894736842105265, "acc_norm_stderr": 0.03823428969926604 }, "community|arabic_mmlu:business_ethics|0": { "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "community|arabic_mmlu:clinical_knowledge|0": { "acc_norm": 0.30566037735849055, "acc_norm_stderr": 0.028353298073322666 }, "community|arabic_mmlu:college_biology|0": { "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "community|arabic_mmlu:college_chemistry|0": { "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "community|arabic_mmlu:college_computer_science|0": { "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "community|arabic_mmlu:college_mathematics|0": { "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "community|arabic_mmlu:college_medicine|0": { "acc_norm": 0.3236994219653179, "acc_norm_stderr": 0.0356760379963917 }, "community|arabic_mmlu:college_physics|0": { "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105655 }, "community|arabic_mmlu:computer_security|0": { "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "community|arabic_mmlu:conceptual_physics|0": { "acc_norm": 0.23829787234042554, "acc_norm_stderr": 0.02785125297388978 }, "community|arabic_mmlu:econometrics|0": { "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "community|arabic_mmlu:electrical_engineering|0": { "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.036001056927277716 }, "community|arabic_mmlu:elementary_mathematics|0": { "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.023266512213730564 }, "community|arabic_mmlu:formal_logic|0": { "acc_norm": 0.3253968253968254, "acc_norm_stderr": 0.041905964388711366 }, "community|arabic_mmlu:global_facts|0": { "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "community|arabic_mmlu:high_school_biology|0": { "acc_norm": 0.3193548387096774, 
"acc_norm_stderr": 0.026522709674667775 }, "community|arabic_mmlu:high_school_chemistry|0": { "acc_norm": 0.24630541871921183, "acc_norm_stderr": 0.030315099285617715 }, "community|arabic_mmlu:high_school_computer_science|0": { "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "community|arabic_mmlu:high_school_european_history|0": { "acc_norm": 0.296969696969697, "acc_norm_stderr": 0.035679697722680474 }, "community|arabic_mmlu:high_school_geography|0": { "acc_norm": 0.35353535353535354, "acc_norm_stderr": 0.03406086723547153 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "acc_norm": 0.3626943005181347, "acc_norm_stderr": 0.03469713791704371 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "acc_norm": 0.34102564102564104, "acc_norm_stderr": 0.024035489676335068 }, "community|arabic_mmlu:high_school_mathematics|0": { "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.027195934804085622 }, "community|arabic_mmlu:high_school_microeconomics|0": { "acc_norm": 0.3445378151260504, "acc_norm_stderr": 0.030868682604121633 }, "community|arabic_mmlu:high_school_physics|0": { "acc_norm": 0.2980132450331126, "acc_norm_stderr": 0.037345356767871984 }, "community|arabic_mmlu:high_school_psychology|0": { "acc_norm": 0.3174311926605505, "acc_norm_stderr": 0.0199571521984605 }, "community|arabic_mmlu:high_school_statistics|0": { "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.033622774366080424 }, "community|arabic_mmlu:high_school_us_history|0": { "acc_norm": 0.20098039215686275, "acc_norm_stderr": 0.028125972265654362 }, "community|arabic_mmlu:high_school_world_history|0": { "acc_norm": 0.22362869198312235, "acc_norm_stderr": 0.027123298205229972 }, "community|arabic_mmlu:human_aging|0": { "acc_norm": 0.19282511210762332, "acc_norm_stderr": 0.026478240960489365 }, "community|arabic_mmlu:human_sexuality|0": { "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.03880848301082395 }, "community|arabic_mmlu:international_law|0": { "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "community|arabic_mmlu:jurisprudence|0": { "acc_norm": 0.26851851851851855, "acc_norm_stderr": 0.04284467968052191 }, "community|arabic_mmlu:logical_fallacies|0": { "acc_norm": 0.2331288343558282, "acc_norm_stderr": 0.03322015795776741 }, "community|arabic_mmlu:machine_learning|0": { "acc_norm": 0.17857142857142858, "acc_norm_stderr": 0.036352091215778065 }, "community|arabic_mmlu:management|0": { "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.044986763205729224 }, "community|arabic_mmlu:marketing|0": { "acc_norm": 0.2094017094017094, "acc_norm_stderr": 0.026655699653922768 }, "community|arabic_mmlu:medical_genetics|0": { "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768078 }, "community|arabic_mmlu:miscellaneous|0": { "acc_norm": 0.23499361430395913, "acc_norm_stderr": 0.015162024152278445 }, "community|arabic_mmlu:moral_disputes|0": { "acc_norm": 0.23699421965317918, "acc_norm_stderr": 0.02289408248992599 }, "community|arabic_mmlu:moral_scenarios|0": { "acc_norm": 0.2748603351955307, "acc_norm_stderr": 0.014931316703220517 }, "community|arabic_mmlu:nutrition|0": { "acc_norm": 0.3137254901960784, "acc_norm_stderr": 0.02656892101545714 }, "community|arabic_mmlu:philosophy|0": { "acc_norm": 0.2508038585209003, "acc_norm_stderr": 0.024619771956697168 }, "community|arabic_mmlu:prehistory|0": { "acc_norm": 0.24691358024691357, "acc_norm_stderr": 0.02399350170904213 }, "community|arabic_mmlu:professional_accounting|0": { "acc_norm": 0.2765957446808511, 
"acc_norm_stderr": 0.026684564340460997 }, "community|arabic_mmlu:professional_law|0": { "acc_norm": 0.24902216427640156, "acc_norm_stderr": 0.01104489226404077 }, "community|arabic_mmlu:professional_medicine|0": { "acc_norm": 0.41911764705882354, "acc_norm_stderr": 0.029972807170464626 }, "community|arabic_mmlu:professional_psychology|0": { "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.016774672365468517 }, "community|arabic_mmlu:public_relations|0": { "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.04172343038705383 }, "community|arabic_mmlu:security_studies|0": { "acc_norm": 0.40408163265306124, "acc_norm_stderr": 0.03141470802586588 }, "community|arabic_mmlu:sociology|0": { "acc_norm": 0.2537313432835821, "acc_norm_stderr": 0.030769444967296018 }, "community|arabic_mmlu:us_foreign_policy|0": { "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "community|arabic_mmlu:virology|0": { "acc_norm": 0.23493975903614459, "acc_norm_stderr": 0.03300533186128922 }, "community|arabic_mmlu:world_religions|0": { "acc_norm": 0.17543859649122806, "acc_norm_stderr": 0.029170885500727665 }, "community|arc_challenge_okapi_ar|0": { "acc_norm": 0.2706896551724138, "acc_norm_stderr": 0.013051195593979622 }, "community|arc_easy_ar|0": { "acc_norm": 0.25676818950930624, "acc_norm_stderr": 0.008986713434390267 }, "community|boolq_ar|0": { "acc_norm": 0.6092024539877301, "acc_norm_stderr": 0.008547016763994043 }, "community|copa_ext_ar|0": { "acc_norm": 0.4111111111111111, "acc_norm_stderr": 0.05215564061107555 }, "community|hellaswag_okapi_ar|0": { "acc_norm": 0.2546069130956275, "acc_norm_stderr": 0.004549284614264872 }, "community|openbook_qa_ext_ar|0": { "acc_norm": 0.34545454545454546, "acc_norm_stderr": 0.02139448746962014 }, "community|piqa_ar|0": { "acc_norm": 0.5002727768685216, "acc_norm_stderr": 0.011681731099489213 }, "community|race_ar|0": { "acc_norm": 0.2937715560965713, "acc_norm_stderr": 0.006488467368464929 }, "community|sciq_ar|0": { "acc_norm": 0.3527638190954774, "acc_norm_stderr": 0.015155847230169505 }, "community|toxigen_ar|0": { "acc_norm": 0.4310160427807487, "acc_norm_stderr": 0.016204039390071805 }, "lighteval|xstory_cloze:ar|0": { "acc": 0.4831237590999338, "acc_stderr": 0.0128597939199776 }, "community|acva:_average|0": { "acc_norm": 0.41509657499055447, "acc_norm_stderr": 0.04705029510032508 }, "community|alghafa:_average|0": { "acc_norm": 0.34752818567643984, "acc_norm_stderr": 0.02192982810345733 }, "community|arabic_mmlu:_average|0": { "acc_norm": 0.2725530215266088, "acc_norm_stderr": 0.03293407974198066 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. 
## Dataset Details

### Dataset Description

<!-- Provide a longer summary of what this dataset is. -->

- **Curated by:** [More Information Needed]
- **Funded by [optional]:** [More Information Needed]
- **Shared by [optional]:** [More Information Needed]
- **Language(s) (NLP):** [More Information Needed]
- **License:** [More Information Needed]

### Dataset Sources [optional]

<!-- Provide the basic links for the dataset. -->

- **Repository:** [More Information Needed]
- **Paper [optional]:** [More Information Needed]
- **Demo [optional]:** [More Information Needed]

## Uses

<!-- Address questions around how the dataset is intended to be used. -->

### Direct Use

<!-- This section describes suitable use cases for the dataset. -->

[More Information Needed]

### Out-of-Scope Use

<!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. -->

[More Information Needed]

## Dataset Structure

<!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. -->

[More Information Needed]

## Dataset Creation

### Curation Rationale

<!-- Motivation for the creation of this dataset. -->

[More Information Needed]

### Source Data

<!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). -->

#### Data Collection and Processing

<!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. -->

[More Information Needed]

#### Who are the source data producers?

<!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. -->

[More Information Needed]

### Annotations [optional]

<!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. -->

#### Annotation process

<!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. -->

[More Information Needed]

#### Who are the annotators?

<!-- This section describes the people or systems who created the annotations. -->

[More Information Needed]

#### Personal and Sensitive Information

<!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. -->

[More Information Needed]

## Bias, Risks, and Limitations

<!-- This section is meant to convey both technical and sociotechnical limitations. -->

[More Information Needed]

### Recommendations

<!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

## Citation [optional]

<!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. -->

**BibTeX:**

[More Information Needed]

**APA:**

[More Information Needed]

## Glossary [optional]

<!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. -->

[More Information Needed]

## More Information [optional]

[More Information Needed]

## Dataset Card Authors [optional]

[More Information Needed]

## Dataset Card Contact

[More Information Needed]
spiralworks/raw_24_ds
spiralworks
"2025-01-16T12:33:51Z"
20
0
[ "size_categories:100K<n<1M", "format:parquet", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T12:30:27Z"
---
dataset_info:
  features:
  - name: forum_id
    dtype: string
  - name: forum_title
    dtype: string
  - name: forum_authors
    sequence: string
  - name: forum_abstract
    dtype: string
  - name: forum_keywords
    sequence: string
  - name: forum_pdf_url
    dtype: string
  - name: note_id
    dtype: string
  - name: note_type
    dtype: string
  - name: note_created
    dtype: int64
  - name: note_replyto
    dtype: string
  - name: note_readers
    sequence: string
  - name: note_signatures
    sequence: string
  - name: note_text
    dtype: string
  splits:
  - name: train
    num_bytes: 877296510
    num_examples: 219635
  download_size: 261211476
  dataset_size: 877296510
configs:
- config_name: default
  data_files:
  - split: train
    path: data/train-*
---
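Given the schema above (one row per forum note, with forum-level metadata alongside the note text), a quick way to sanity-check the dataset is to stream a single row rather than downloading the full ~261 MB of parquet shards. A minimal sketch, assuming only the default config and train split declared above:

```python
from datasets import load_dataset

# Stream the train split (219,635 rows per the card) lazily.
ds = load_dataset("spiralworks/raw_24_ds", split="train", streaming=True)

row = next(iter(ds))
# Fields declared in the schema: forum metadata plus the note itself.
print(row["forum_title"])
print(row["note_type"], len(row["note_text"]))
```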
OALL/details_tiiuae__Falcon3-1B-Base
OALL
"2025-01-16T12:31:39Z"
20
0
[ "size_categories:100K<n<1M", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T12:31:25Z"
--- pretty_name: Evaluation run of tiiuae/Falcon3-1B-Base dataset_summary: "Dataset automatically created during the evaluation run of model\ \ [tiiuae/Falcon3-1B-Base](https://huggingface.co/tiiuae/Falcon3-1B-Base).\n\nThe\ \ dataset is composed of 136 configuration, each one coresponding to one of the\ \ evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be\ \ found as a specific split in each configuration, the split being named using the\ \ timestamp of the run.The \"train\" split is always pointing to the latest results.\n\ \nAn additional configuration \"results\" store all the aggregated results of the\ \ run.\n\nTo load the details from a run, you can for instance do the following:\n\ ```python\nfrom datasets import load_dataset\ndata = load_dataset(\"OALL/details_tiiuae__Falcon3-1B-Base\"\ ,\n\t\"lighteval_xstory_cloze_ar_0_2025_01_16T12_26_22_423966_parquet\",\n\tsplit=\"\ train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2025-01-16T12:26:22.423966](https://huggingface.co/datasets/OALL/details_tiiuae__Falcon3-1B-Base/blob/main/results_2025-01-16T12-26-22.423966.json)(note\ \ that their might be results for other tasks in the repos if successive evals didn't\ \ cover the same tasks. You find each in the results and the \"latest\" split for\ \ each eval):\n\n```python\n{\n \"all\": {\n \"acc_norm\": 0.39784155146450983,\n\ \ \"acc_norm_stderr\": 0.03643606240527679,\n \"acc\": 0.47054930509596293,\n\ \ \"acc_stderr\": 0.012844785490016997\n },\n \"community|acva:Algeria|0\"\ : {\n \"acc_norm\": 0.4564102564102564,\n \"acc_norm_stderr\": 0.03576123096991214\n\ \ },\n \"community|acva:Ancient_Egypt|0\": {\n \"acc_norm\": 0.5841269841269842,\n\ \ \"acc_norm_stderr\": 0.02781436705129215\n },\n \"community|acva:Arab_Empire|0\"\ : {\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880263\n\ \ },\n \"community|acva:Arabic_Architecture|0\": {\n \"acc_norm\":\ \ 0.5384615384615384,\n \"acc_norm_stderr\": 0.0357915435254457\n },\n\ \ \"community|acva:Arabic_Art|0\": {\n \"acc_norm\": 0.4461538461538462,\n\ \ \"acc_norm_stderr\": 0.03568913546569233\n },\n \"community|acva:Arabic_Astronomy|0\"\ : {\n \"acc_norm\": 0.5282051282051282,\n \"acc_norm_stderr\": 0.035840746749208334\n\ \ },\n \"community|acva:Arabic_Calligraphy|0\": {\n \"acc_norm\": 0.48627450980392156,\n\ \ \"acc_norm_stderr\": 0.03136096744694241\n },\n \"community|acva:Arabic_Ceremony|0\"\ : {\n \"acc_norm\": 0.5081081081081081,\n \"acc_norm_stderr\": 0.036855642198496893\n\ \ },\n \"community|acva:Arabic_Clothing|0\": {\n \"acc_norm\": 0.441025641025641,\n\ \ \"acc_norm_stderr\": 0.0356473293185358\n },\n \"community|acva:Arabic_Culture|0\"\ : {\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.03384487217112063\n\ \ },\n \"community|acva:Arabic_Food|0\": {\n \"acc_norm\": 0.5897435897435898,\n\ \ \"acc_norm_stderr\": 0.03531493712326671\n },\n \"community|acva:Arabic_Funeral|0\"\ : {\n \"acc_norm\": 0.5578947368421052,\n \"acc_norm_stderr\": 0.05122418389181814\n\ \ },\n \"community|acva:Arabic_Geography|0\": {\n \"acc_norm\": 0.38620689655172413,\n\ \ \"acc_norm_stderr\": 0.04057324734419035\n },\n \"community|acva:Arabic_History|0\"\ : {\n \"acc_norm\": 0.6051282051282051,\n \"acc_norm_stderr\": 0.03509545602262036\n\ \ },\n \"community|acva:Arabic_Language_Origin|0\": {\n \"acc_norm\"\ : 0.5052631578947369,\n \"acc_norm_stderr\": 0.05156820511122477\n },\n\ \ \"community|acva:Arabic_Literature|0\": {\n \"acc_norm\": 
0.503448275862069,\n\ \ \"acc_norm_stderr\": 0.04166567577101579\n },\n \"community|acva:Arabic_Math|0\"\ : {\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03174930436412671\n\ \ },\n \"community|acva:Arabic_Medicine|0\": {\n \"acc_norm\": 0.5172413793103449,\n\ \ \"acc_norm_stderr\": 0.04164188720169375\n },\n \"community|acva:Arabic_Music|0\"\ : {\n \"acc_norm\": 0.4316546762589928,\n \"acc_norm_stderr\": 0.042163322608081595\n\ \ },\n \"community|acva:Arabic_Ornament|0\": {\n \"acc_norm\": 0.48205128205128206,\n\ \ \"acc_norm_stderr\": 0.035874770987738294\n },\n \"community|acva:Arabic_Philosophy|0\"\ : {\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n\ \ },\n \"community|acva:Arabic_Physics_and_Chemistry|0\": {\n \"acc_norm\"\ : 0.4256410256410256,\n \"acc_norm_stderr\": 0.035498710803677065\n },\n\ \ \"community|acva:Arabic_Wedding|0\": {\n \"acc_norm\": 0.558974358974359,\n\ \ \"acc_norm_stderr\": 0.03564732931853579\n },\n \"community|acva:Bahrain|0\"\ : {\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.07216392363431012\n\ \ },\n \"community|acva:Comoros|0\": {\n \"acc_norm\": 0.5333333333333333,\n\ \ \"acc_norm_stderr\": 0.0752101433090355\n },\n \"community|acva:Egypt_modern|0\"\ : {\n \"acc_norm\": 0.5684210526315789,\n \"acc_norm_stderr\": 0.05108592673308946\n\ \ },\n \"community|acva:InfluenceFromAncientEgypt|0\": {\n \"acc_norm\"\ : 0.441025641025641,\n \"acc_norm_stderr\": 0.0356473293185358\n },\n\ \ \"community|acva:InfluenceFromByzantium|0\": {\n \"acc_norm\": 0.4482758620689655,\n\ \ \"acc_norm_stderr\": 0.04144311810878152\n },\n \"community|acva:InfluenceFromChina|0\"\ : {\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.0317493043641267\n\ \ },\n \"community|acva:InfluenceFromGreece|0\": {\n \"acc_norm\":\ \ 0.3641025641025641,\n \"acc_norm_stderr\": 0.03454653867786389\n },\n\ \ \"community|acva:InfluenceFromIslam|0\": {\n \"acc_norm\": 0.6482758620689655,\n\ \ \"acc_norm_stderr\": 0.03979236637497412\n },\n \"community|acva:InfluenceFromPersia|0\"\ : {\n \"acc_norm\": 0.3485714285714286,\n \"acc_norm_stderr\": 0.03612473503503051\n\ \ },\n \"community|acva:InfluenceFromRome|0\": {\n \"acc_norm\": 0.4205128205128205,\n\ \ \"acc_norm_stderr\": 0.03544138389303483\n },\n \"community|acva:Iraq|0\"\ : {\n \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.0544600058689736\n\ \ },\n \"community|acva:Islam_Education|0\": {\n \"acc_norm\": 0.517948717948718,\n\ \ \"acc_norm_stderr\": 0.035874770987738246\n },\n \"community|acva:Islam_branches_and_schools|0\"\ : {\n \"acc_norm\": 0.5314285714285715,\n \"acc_norm_stderr\": 0.03782994654682181\n\ \ },\n \"community|acva:Islamic_law_system|0\": {\n \"acc_norm\": 0.5333333333333333,\n\ \ \"acc_norm_stderr\": 0.03581804596782233\n },\n \"community|acva:Jordan|0\"\ : {\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.07385489458759965\n\ \ },\n \"community|acva:Kuwait|0\": {\n \"acc_norm\": 0.6888888888888889,\n\ \ \"acc_norm_stderr\": 0.06979205927323111\n },\n \"community|acva:Lebanon|0\"\ : {\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.06979205927323111\n\ \ },\n \"community|acva:Libya|0\": {\n \"acc_norm\": 0.5777777777777777,\n\ \ \"acc_norm_stderr\": 0.07446027270295806\n },\n \"community|acva:Mauritania|0\"\ : {\n \"acc_norm\": 0.5333333333333333,\n \"acc_norm_stderr\": 0.0752101433090355\n\ \ },\n \"community|acva:Mesopotamia_civilization|0\": {\n \"acc_norm\"\ : 0.5032258064516129,\n \"acc_norm_stderr\": 0.04029030966708646\n 
},\n\ \ \"community|acva:Morocco|0\": {\n \"acc_norm\": 0.7333333333333333,\n\ \ \"acc_norm_stderr\": 0.06666666666666668\n },\n \"community|acva:Oman|0\"\ : {\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.06267511942419626\n\ \ },\n \"community|acva:Palestine|0\": {\n \"acc_norm\": 0.6823529411764706,\n\ \ \"acc_norm_stderr\": 0.05079691179733583\n },\n \"community|acva:Qatar|0\"\ : {\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.07491109582924915\n\ \ },\n \"community|acva:Saudi_Arabia|0\": {\n \"acc_norm\": 0.6717948717948717,\n\ \ \"acc_norm_stderr\": 0.03371243782413707\n },\n \"community|acva:Somalia|0\"\ : {\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.07216392363431012\n\ \ },\n \"community|acva:Sudan|0\": {\n \"acc_norm\": 0.6666666666666666,\n\ \ \"acc_norm_stderr\": 0.07106690545187012\n },\n \"community|acva:Syria|0\"\ : {\n \"acc_norm\": 0.6888888888888889,\n \"acc_norm_stderr\": 0.06979205927323111\n\ \ },\n \"community|acva:Tunisia|0\": {\n \"acc_norm\": 0.6444444444444445,\n\ \ \"acc_norm_stderr\": 0.07216392363431011\n },\n \"community|acva:United_Arab_Emirates|0\"\ : {\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.04628210543937907\n\ \ },\n \"community|acva:Yemen|0\": {\n \"acc_norm\": 0.8,\n \ \ \"acc_norm_stderr\": 0.13333333333333333\n },\n \"community|acva:communication|0\"\ : {\n \"acc_norm\": 0.6098901098901099,\n \"acc_norm_stderr\": 0.025601532524954076\n\ \ },\n \"community|acva:computer_and_phone|0\": {\n \"acc_norm\": 0.5457627118644067,\n\ \ \"acc_norm_stderr\": 0.02903819758623457\n },\n \"community|acva:daily_life|0\"\ : {\n \"acc_norm\": 0.7685459940652819,\n \"acc_norm_stderr\": 0.02300899468734538\n\ \ },\n \"community|acva:entertainment|0\": {\n \"acc_norm\": 0.6067796610169491,\n\ \ \"acc_norm_stderr\": 0.02848786016617071\n },\n \"community|alghafa:mcq_exams_test_ar|0\"\ : {\n \"acc_norm\": 0.2746858168761221,\n \"acc_norm_stderr\": 0.018929703300795454\n\ \ },\n \"community|alghafa:meta_ar_dialects|0\": {\n \"acc_norm\":\ \ 0.24559777571825764,\n \"acc_norm_stderr\": 0.005860817845144341\n },\n\ \ \"community|alghafa:meta_ar_msa|0\": {\n \"acc_norm\": 0.2581005586592179,\n\ \ \"acc_norm_stderr\": 0.014635185616527824\n },\n \"community|alghafa:multiple_choice_facts_truefalse_balanced_task|0\"\ : {\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.05807730170189531\n\ \ },\n \"community|alghafa:multiple_choice_grounded_statement_soqal_task|0\"\ : {\n \"acc_norm\": 0.31333333333333335,\n \"acc_norm_stderr\": 0.037999960751971595\n\ \ },\n \"community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0\"\ : {\n \"acc_norm\": 0.2733333333333333,\n \"acc_norm_stderr\": 0.036510752504862\n\ \ },\n \"community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0\"\ : {\n \"acc_norm\": 0.4945590994371482,\n \"acc_norm_stderr\": 0.005591936327342272\n\ \ },\n \"community|alghafa:multiple_choice_rating_sentiment_task|0\": {\n\ \ \"acc_norm\": 0.32827356130108426,\n \"acc_norm_stderr\": 0.006065349138244435\n\ \ },\n \"community|alghafa:multiple_choice_sentiment_task|0\": {\n \ \ \"acc_norm\": 0.3383720930232558,\n \"acc_norm_stderr\": 0.011412117593743184\n\ \ },\n \"community|arabic_exams|0\": {\n \"acc_norm\": 0.23649906890130354,\n\ \ \"acc_norm_stderr\": 0.018354269670319875\n },\n \"community|arabic_mmlu:abstract_algebra|0\"\ : {\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036845\n\ \ },\n \"community|arabic_mmlu:anatomy|0\": {\n \"acc_norm\": 0.18518518518518517,\n\ \ 
\"acc_norm_stderr\": 0.03355677216313142\n },\n \"community|arabic_mmlu:astronomy|0\"\ : {\n \"acc_norm\": 0.18421052631578946,\n \"acc_norm_stderr\": 0.0315469804508223\n\ \ },\n \"community|arabic_mmlu:business_ethics|0\": {\n \"acc_norm\"\ : 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"community|arabic_mmlu:clinical_knowledge|0\"\ : {\n \"acc_norm\": 0.2037735849056604,\n \"acc_norm_stderr\": 0.0247907845017754\n\ \ },\n \"community|arabic_mmlu:college_biology|0\": {\n \"acc_norm\"\ : 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"community|arabic_mmlu:college_chemistry|0\"\ : {\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n\ \ },\n \"community|arabic_mmlu:college_computer_science|0\": {\n \"\ acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \ \ \"community|arabic_mmlu:college_mathematics|0\": {\n \"acc_norm\": 0.22,\n\ \ \"acc_norm_stderr\": 0.04163331998932269\n },\n \"community|arabic_mmlu:college_medicine|0\"\ : {\n \"acc_norm\": 0.19653179190751446,\n \"acc_norm_stderr\": 0.030299574664788147\n\ \ },\n \"community|arabic_mmlu:college_physics|0\": {\n \"acc_norm\"\ : 0.30392156862745096,\n \"acc_norm_stderr\": 0.045766654032077636\n },\n\ \ \"community|arabic_mmlu:computer_security|0\": {\n \"acc_norm\": 0.22,\n\ \ \"acc_norm_stderr\": 0.04163331998932268\n },\n \"community|arabic_mmlu:conceptual_physics|0\"\ : {\n \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.028504856470514192\n\ \ },\n \"community|arabic_mmlu:econometrics|0\": {\n \"acc_norm\":\ \ 0.21929824561403508,\n \"acc_norm_stderr\": 0.03892431106518752\n },\n\ \ \"community|arabic_mmlu:electrical_engineering|0\": {\n \"acc_norm\"\ : 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n },\n\ \ \"community|arabic_mmlu:elementary_mathematics|0\": {\n \"acc_norm\"\ : 0.20899470899470898,\n \"acc_norm_stderr\": 0.02094048156533485\n },\n\ \ \"community|arabic_mmlu:formal_logic|0\": {\n \"acc_norm\": 0.29365079365079366,\n\ \ \"acc_norm_stderr\": 0.04073524322147128\n },\n \"community|arabic_mmlu:global_facts|0\"\ : {\n \"acc_norm\": 0.17,\n \"acc_norm_stderr\": 0.0377525168068637\n\ \ },\n \"community|arabic_mmlu:high_school_biology|0\": {\n \"acc_norm\"\ : 0.1967741935483871,\n \"acc_norm_stderr\": 0.022616409420742025\n },\n\ \ \"community|arabic_mmlu:high_school_chemistry|0\": {\n \"acc_norm\"\ : 0.1921182266009852,\n \"acc_norm_stderr\": 0.027719315709614775\n },\n\ \ \"community|arabic_mmlu:high_school_computer_science|0\": {\n \"acc_norm\"\ : 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"community|arabic_mmlu:high_school_european_history|0\"\ : {\n \"acc_norm\": 0.24848484848484848,\n \"acc_norm_stderr\": 0.033744026441394036\n\ \ },\n \"community|arabic_mmlu:high_school_geography|0\": {\n \"acc_norm\"\ : 0.18181818181818182,\n \"acc_norm_stderr\": 0.027479603010538787\n },\n\ \ \"community|arabic_mmlu:high_school_government_and_politics|0\": {\n \ \ \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.029778663037752943\n\ \ },\n \"community|arabic_mmlu:high_school_macroeconomics|0\": {\n \ \ \"acc_norm\": 0.2205128205128205,\n \"acc_norm_stderr\": 0.021020672680827916\n\ \ },\n \"community|arabic_mmlu:high_school_mathematics|0\": {\n \"\ acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.025348097468097835\n\ \ },\n \"community|arabic_mmlu:high_school_microeconomics|0\": {\n \ \ \"acc_norm\": 0.21428571428571427,\n \"acc_norm_stderr\": 0.026653531596715498\n\ \ },\n 
\"community|arabic_mmlu:high_school_physics|0\": {\n \"acc_norm\"\ : 0.1986754966887417,\n \"acc_norm_stderr\": 0.03257847384436776\n },\n\ \ \"community|arabic_mmlu:high_school_psychology|0\": {\n \"acc_norm\"\ : 0.23302752293577983,\n \"acc_norm_stderr\": 0.018125669180861486\n },\n\ \ \"community|arabic_mmlu:high_school_statistics|0\": {\n \"acc_norm\"\ : 0.19907407407407407,\n \"acc_norm_stderr\": 0.027232298462690232\n },\n\ \ \"community|arabic_mmlu:high_school_us_history|0\": {\n \"acc_norm\"\ : 0.2549019607843137,\n \"acc_norm_stderr\": 0.03058759135160425\n },\n\ \ \"community|arabic_mmlu:high_school_world_history|0\": {\n \"acc_norm\"\ : 0.20675105485232068,\n \"acc_norm_stderr\": 0.0263616516683891\n },\n\ \ \"community|arabic_mmlu:human_aging|0\": {\n \"acc_norm\": 0.28699551569506726,\n\ \ \"acc_norm_stderr\": 0.030360379710291936\n },\n \"community|arabic_mmlu:human_sexuality|0\"\ : {\n \"acc_norm\": 0.2595419847328244,\n \"acc_norm_stderr\": 0.03844876139785271\n\ \ },\n \"community|arabic_mmlu:international_law|0\": {\n \"acc_norm\"\ : 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n\ \ \"community|arabic_mmlu:jurisprudence|0\": {\n \"acc_norm\": 0.25925925925925924,\n\ \ \"acc_norm_stderr\": 0.042365112580946336\n },\n \"community|arabic_mmlu:logical_fallacies|0\"\ : {\n \"acc_norm\": 0.2085889570552147,\n \"acc_norm_stderr\": 0.031921934489347215\n\ \ },\n \"community|arabic_mmlu:machine_learning|0\": {\n \"acc_norm\"\ : 0.23214285714285715,\n \"acc_norm_stderr\": 0.04007341809755805\n },\n\ \ \"community|arabic_mmlu:management|0\": {\n \"acc_norm\": 0.18446601941747573,\n\ \ \"acc_norm_stderr\": 0.03840423627288276\n },\n \"community|arabic_mmlu:marketing|0\"\ : {\n \"acc_norm\": 0.28205128205128205,\n \"acc_norm_stderr\": 0.029480360549541194\n\ \ },\n \"community|arabic_mmlu:medical_genetics|0\": {\n \"acc_norm\"\ : 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"community|arabic_mmlu:miscellaneous|0\"\ : {\n \"acc_norm\": 0.24776500638569604,\n \"acc_norm_stderr\": 0.015438083080568965\n\ \ },\n \"community|arabic_mmlu:moral_disputes|0\": {\n \"acc_norm\"\ : 0.23410404624277456,\n \"acc_norm_stderr\": 0.022797110278071134\n },\n\ \ \"community|arabic_mmlu:moral_scenarios|0\": {\n \"acc_norm\": 0.23575418994413408,\n\ \ \"acc_norm_stderr\": 0.014196375686290804\n },\n \"community|arabic_mmlu:nutrition|0\"\ : {\n \"acc_norm\": 0.23202614379084968,\n \"acc_norm_stderr\": 0.024170840879341016\n\ \ },\n \"community|arabic_mmlu:philosophy|0\": {\n \"acc_norm\": 0.19292604501607716,\n\ \ \"acc_norm_stderr\": 0.022411516780911363\n },\n \"community|arabic_mmlu:prehistory|0\"\ : {\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n\ \ },\n \"community|arabic_mmlu:professional_accounting|0\": {\n \"\ acc_norm\": 0.24113475177304963,\n \"acc_norm_stderr\": 0.025518731049537755\n\ \ },\n \"community|arabic_mmlu:professional_law|0\": {\n \"acc_norm\"\ : 0.227509778357236,\n \"acc_norm_stderr\": 0.01070718857686424\n },\n\ \ \"community|arabic_mmlu:professional_medicine|0\": {\n \"acc_norm\"\ : 0.3272058823529412,\n \"acc_norm_stderr\": 0.028501452860396563\n },\n\ \ \"community|arabic_mmlu:professional_psychology|0\": {\n \"acc_norm\"\ : 0.24509803921568626,\n \"acc_norm_stderr\": 0.017401816711427657\n },\n\ \ \"community|arabic_mmlu:public_relations|0\": {\n \"acc_norm\": 0.20909090909090908,\n\ \ \"acc_norm_stderr\": 0.038950910157241364\n },\n \"community|arabic_mmlu:security_studies|0\"\ : {\n \"acc_norm\": 
0.22857142857142856,\n \"acc_norm_stderr\": 0.02688214492230774\n\ \ },\n \"community|arabic_mmlu:sociology|0\": {\n \"acc_norm\": 0.24378109452736318,\n\ \ \"acc_norm_stderr\": 0.03036049015401464\n },\n \"community|arabic_mmlu:us_foreign_policy|0\"\ : {\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n\ \ },\n \"community|arabic_mmlu:virology|0\": {\n \"acc_norm\": 0.27710843373493976,\n\ \ \"acc_norm_stderr\": 0.034843315926805875\n },\n \"community|arabic_mmlu:world_religions|0\"\ : {\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.03565079670708311\n\ \ },\n \"community|arc_challenge_okapi_ar|0\": {\n \"acc_norm\": 0.27155172413793105,\n\ \ \"acc_norm_stderr\": 0.01306423320277828\n },\n \"community|arc_easy_ar|0\"\ : {\n \"acc_norm\": 0.2491539763113367,\n \"acc_norm_stderr\": 0.00889769455700685\n\ \ },\n \"community|boolq_ar|0\": {\n \"acc_norm\": 0.6171779141104294,\n\ \ \"acc_norm_stderr\": 0.008514546087346945\n },\n \"community|copa_ext_ar|0\"\ : {\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.05298680599073449\n\ \ },\n \"community|hellaswag_okapi_ar|0\": {\n \"acc_norm\": 0.2597317631665031,\n\ \ \"acc_norm_stderr\": 0.004579018747523884\n },\n \"community|openbook_qa_ext_ar|0\"\ : {\n \"acc_norm\": 0.34949494949494947,\n \"acc_norm_stderr\": 0.02145271751103444\n\ \ },\n \"community|piqa_ar|0\": {\n \"acc_norm\": 0.5100927441352974,\n\ \ \"acc_norm_stderr\": 0.011679352711411064\n },\n \"community|race_ar|0\"\ : {\n \"acc_norm\": 0.29559748427672955,\n \"acc_norm_stderr\": 0.0065001812469421254\n\ \ },\n \"community|sciq_ar|0\": {\n \"acc_norm\": 0.32663316582914576,\n\ \ \"acc_norm_stderr\": 0.014875199838353862\n },\n \"community|toxigen_ar|0\"\ : {\n \"acc_norm\": 0.43529411764705883,\n \"acc_norm_stderr\": 0.01622292337449384\n\ \ },\n \"lighteval|xstory_cloze:ar|0\": {\n \"acc\": 0.47054930509596293,\n\ \ \"acc_stderr\": 0.012844785490016997\n },\n \"community|acva:_average|0\"\ : {\n \"acc_norm\": 0.5717927063276779,\n \"acc_norm_stderr\": 0.04719449311224505\n\ \ },\n \"community|alghafa:_average|0\": {\n \"acc_norm\": 0.3384728412979725,\n\ \ \"acc_norm_stderr\": 0.02167590275339183\n },\n \"community|arabic_mmlu:_average|0\"\ : {\n \"acc_norm\": 0.23610984407231933,\n \"acc_norm_stderr\": 0.031743469411994435\n\ \ }\n}\n```" repo_url: https://huggingface.co/tiiuae/Falcon3-1B-Base configs: - config_name: community_acva_Algeria_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Algeria|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Algeria|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Ancient_Egypt_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Ancient_Egypt|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Ancient_Egypt|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arab_Empire_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arab_Empire|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arab_Empire|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Architecture_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - 
'**/details_community|acva:Arabic_Architecture|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Architecture|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Art_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Art|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Art|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Astronomy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Astronomy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Astronomy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Calligraphy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Calligraphy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Calligraphy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Ceremony_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Ceremony|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Ceremony|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Clothing_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Clothing|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Clothing|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Culture_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Culture|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Culture|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Food_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Food|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Food|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Funeral_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Funeral|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Funeral|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Geography_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Geography|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Geography|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_History_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_History|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_History|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Language_Origin_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 
2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Language_Origin|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Language_Origin|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Literature_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Literature|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Literature|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Math_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Math|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Math|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Medicine_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Medicine|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Medicine|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Music_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Music|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Music|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Ornament_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Ornament|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Ornament|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Philosophy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Philosophy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Philosophy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Physics_and_Chemistry_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Physics_and_Chemistry|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Physics_and_Chemistry|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Arabic_Wedding_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Arabic_Wedding|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Arabic_Wedding|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Bahrain_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Bahrain|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Bahrain|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Comoros_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Comoros|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Comoros|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Egypt_modern_0_2025_01_16T12_26_22_423966_parquet 
data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Egypt_modern|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Egypt_modern|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromAncientEgypt_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromAncientEgypt|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromAncientEgypt|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromByzantium_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromByzantium|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromByzantium|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromChina_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromChina|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromChina|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromGreece_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromGreece|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromGreece|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromIslam_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromIslam|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromIslam|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromPersia_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromPersia|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromPersia|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_InfluenceFromRome_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:InfluenceFromRome|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:InfluenceFromRome|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Iraq_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Iraq|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Iraq|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Islam_Education_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Islam_Education|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Islam_Education|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Islam_branches_and_schools_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Islam_branches_and_schools|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - 
'**/details_community|acva:Islam_branches_and_schools|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Islamic_law_system_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Islamic_law_system|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Islamic_law_system|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Jordan_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Jordan|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Jordan|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Kuwait_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Kuwait|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Kuwait|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Lebanon_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Lebanon|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Lebanon|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Libya_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Libya|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Libya|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Mauritania_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Mauritania|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Mauritania|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Mesopotamia_civilization_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Mesopotamia_civilization|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Mesopotamia_civilization|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Morocco_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Morocco|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Morocco|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Oman_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Oman|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Oman|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Palestine_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Palestine|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Palestine|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Qatar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Qatar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Qatar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Saudi_Arabia_0_2025_01_16T12_26_22_423966_parquet 
data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Saudi_Arabia|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Saudi_Arabia|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Somalia_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Somalia|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Somalia|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Sudan_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Sudan|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Sudan|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Syria_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Syria|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Syria|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Tunisia_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Tunisia|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Tunisia|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_United_Arab_Emirates_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:United_Arab_Emirates|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:United_Arab_Emirates|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_Yemen_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:Yemen|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:Yemen|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_communication_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:communication|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:communication|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_computer_and_phone_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:computer_and_phone|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:computer_and_phone|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_daily_life_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:daily_life|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:daily_life|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_acva_entertainment_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|acva:entertainment|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|acva:entertainment|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_mcq_exams_test_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - 
'**/details_community|alghafa:mcq_exams_test_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:mcq_exams_test_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_meta_ar_dialects_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:meta_ar_dialects|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:meta_ar_dialects|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_meta_ar_msa_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:meta_ar_msa|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:meta_ar_msa|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_multiple_choice_facts_truefalse_balanced_task_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:multiple_choice_facts_truefalse_balanced_task|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_facts_truefalse_balanced_task|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_multiple_choice_grounded_statement_soqal_task_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:multiple_choice_grounded_statement_soqal_task|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_grounded_statement_soqal_task|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_multiple_choice_grounded_statement_xglue_mlqa_task_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_multiple_choice_rating_sentiment_no_neutral_task_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_multiple_choice_rating_sentiment_task_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_task|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_rating_sentiment_task|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_alghafa_multiple_choice_sentiment_task_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|alghafa:multiple_choice_sentiment_task|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|alghafa:multiple_choice_sentiment_task|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_exams_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - 
'**/details_community|arabic_exams|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_exams|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_abstract_algebra_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:abstract_algebra|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:abstract_algebra|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_anatomy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:anatomy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:anatomy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_astronomy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:astronomy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:astronomy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_business_ethics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:business_ethics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:business_ethics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_clinical_knowledge_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:clinical_knowledge|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:clinical_knowledge|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_college_biology_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:college_biology|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_biology|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_college_chemistry_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:college_chemistry|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_chemistry|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_college_computer_science_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:college_computer_science|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_computer_science|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_college_mathematics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:college_mathematics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_mathematics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_college_medicine_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:college_medicine|0_2025-01-16T12-26-22.423966.parquet' - split: latest 
path: - '**/details_community|arabic_mmlu:college_medicine|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_college_physics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:college_physics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:college_physics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_computer_security_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:computer_security|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:computer_security|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_conceptual_physics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:conceptual_physics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:conceptual_physics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_econometrics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:econometrics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:econometrics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_electrical_engineering_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:electrical_engineering|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:electrical_engineering|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_elementary_mathematics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:elementary_mathematics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:elementary_mathematics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_formal_logic_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:formal_logic|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:formal_logic|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_global_facts_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:global_facts|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:global_facts|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_biology_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_biology|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_biology|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_chemistry_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_chemistry|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - 
'**/details_community|arabic_mmlu:high_school_chemistry|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_computer_science_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_computer_science|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_computer_science|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_european_history_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_european_history|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_european_history|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_geography_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_geography|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_geography|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_government_and_politics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_government_and_politics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_government_and_politics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_macroeconomics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_macroeconomics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_macroeconomics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_mathematics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_mathematics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_mathematics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_microeconomics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_microeconomics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_microeconomics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_physics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_physics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_physics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_psychology_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_psychology|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_psychology|0_2025-01-16T12-26-22.423966.parquet' - config_name: 
community_arabic_mmlu_high_school_statistics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_statistics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_statistics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_us_history_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_us_history|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_us_history|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_high_school_world_history_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:high_school_world_history|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:high_school_world_history|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_human_aging_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:human_aging|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:human_aging|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_human_sexuality_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:human_sexuality|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:human_sexuality|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_international_law_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:international_law|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:international_law|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_jurisprudence_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:jurisprudence|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:jurisprudence|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_logical_fallacies_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:logical_fallacies|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:logical_fallacies|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_machine_learning_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:machine_learning|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:machine_learning|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_management_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:management|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:management|0_2025-01-16T12-26-22.423966.parquet' - config_name: 
community_arabic_mmlu_marketing_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:marketing|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:marketing|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_medical_genetics_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:medical_genetics|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:medical_genetics|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_miscellaneous_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:miscellaneous|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:miscellaneous|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_moral_disputes_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:moral_disputes|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:moral_disputes|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_moral_scenarios_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:moral_scenarios|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:moral_scenarios|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_nutrition_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:nutrition|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:nutrition|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_philosophy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:philosophy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:philosophy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_prehistory_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:prehistory|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:prehistory|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_professional_accounting_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:professional_accounting|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_accounting|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_professional_law_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:professional_law|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_law|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_professional_medicine_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - 
'**/details_community|arabic_mmlu:professional_medicine|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_medicine|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_professional_psychology_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:professional_psychology|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:professional_psychology|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_public_relations_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:public_relations|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:public_relations|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_security_studies_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:security_studies|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:security_studies|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_sociology_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:sociology|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:sociology|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_us_foreign_policy_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:us_foreign_policy|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:us_foreign_policy|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_virology_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:virology|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:virology|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arabic_mmlu_world_religions_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arabic_mmlu:world_religions|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arabic_mmlu:world_religions|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arc_challenge_okapi_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arc_challenge_okapi_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arc_challenge_okapi_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_arc_easy_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|arc_easy_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|arc_easy_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_boolq_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|boolq_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|boolq_ar|0_2025-01-16T12-26-22.423966.parquet' - 
config_name: community_copa_ext_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|copa_ext_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|copa_ext_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_hellaswag_okapi_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|hellaswag_okapi_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|hellaswag_okapi_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_openbook_qa_ext_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|openbook_qa_ext_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|openbook_qa_ext_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_piqa_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|piqa_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|piqa_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_race_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|race_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|race_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_sciq_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|sciq_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|sciq_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: community_toxigen_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_community|toxigen_ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_community|toxigen_ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: lighteval_xstory_cloze_ar_0_2025_01_16T12_26_22_423966_parquet data_files: - split: 2025_01_16T12_26_22.423966 path: - '**/details_lighteval|xstory_cloze:ar|0_2025-01-16T12-26-22.423966.parquet' - split: latest path: - '**/details_lighteval|xstory_cloze:ar|0_2025-01-16T12-26-22.423966.parquet' - config_name: results data_files: - split: 2025_01_16T12_26_22.423966 path: - results_2025-01-16T12-26-22.423966.parquet - split: latest path: - results_2025-01-16T12-26-22.423966.parquet --- # Dataset Card for Evaluation run of tiiuae/Falcon3-1B-Base <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [tiiuae/Falcon3-1B-Base](https://huggingface.co/tiiuae/Falcon3-1B-Base). The dataset is composed of 136 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "latest" split always points to the results of the most recent run. An additional configuration "results" stores all the aggregated results of the run. 
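The aggregated scores can be pulled from the "results" configuration declared in the YAML metadata above. A minimal sketch, assuming only that the `datasets` library is installed; the config and split names are taken verbatim from this card's metadata, while the variable name and the final print are purely illustrative:

```python
from datasets import load_dataset

# "results" is the aggregate configuration declared in the YAML metadata;
# its "latest" split points at results_2025-01-16T12-26-22.423966.parquet.
results = load_dataset("OALL/details_tiiuae__Falcon3-1B-Base",
	"results",
	split="latest")

# Quick inspection of what was stored for this run (illustrative only).
print(results[0])
```

Swapping "latest" for the timestamped split name (here `2025_01_16T12_26_22.423966`) pins the load to that specific run instead of whichever run is most recent.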
To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("OALL/details_tiiuae__Falcon3-1B-Base", "lighteval_xstory_cloze_ar_0_2025_01_16T12_26_22_423966_parquet", split="train") ``` ## Latest results These are the [latest results from run 2025-01-16T12:26:22.423966](https://huggingface.co/datasets/OALL/details_tiiuae__Falcon3-1B-Base/blob/main/results_2025-01-16T12-26-22.423966.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc_norm": 0.39784155146450983, "acc_norm_stderr": 0.03643606240527679, "acc": 0.47054930509596293, "acc_stderr": 0.012844785490016997 }, "community|acva:Algeria|0": { "acc_norm": 0.4564102564102564, "acc_norm_stderr": 0.03576123096991214 }, "community|acva:Ancient_Egypt|0": { "acc_norm": 0.5841269841269842, "acc_norm_stderr": 0.02781436705129215 }, "community|acva:Arab_Empire|0": { "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880263 }, "community|acva:Arabic_Architecture|0": { "acc_norm": 0.5384615384615384, "acc_norm_stderr": 0.0357915435254457 }, "community|acva:Arabic_Art|0": { "acc_norm": 0.4461538461538462, "acc_norm_stderr": 0.03568913546569233 }, "community|acva:Arabic_Astronomy|0": { "acc_norm": 0.5282051282051282, "acc_norm_stderr": 0.035840746749208334 }, "community|acva:Arabic_Calligraphy|0": { "acc_norm": 0.48627450980392156, "acc_norm_stderr": 0.03136096744694241 }, "community|acva:Arabic_Ceremony|0": { "acc_norm": 0.5081081081081081, "acc_norm_stderr": 0.036855642198496893 }, "community|acva:Arabic_Clothing|0": { "acc_norm": 0.441025641025641, "acc_norm_stderr": 0.0356473293185358 }, "community|acva:Arabic_Culture|0": { "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.03384487217112063 }, "community|acva:Arabic_Food|0": { "acc_norm": 0.5897435897435898, "acc_norm_stderr": 0.03531493712326671 }, "community|acva:Arabic_Funeral|0": { "acc_norm": 0.5578947368421052, "acc_norm_stderr": 0.05122418389181814 }, "community|acva:Arabic_Geography|0": { "acc_norm": 0.38620689655172413, "acc_norm_stderr": 0.04057324734419035 }, "community|acva:Arabic_History|0": { "acc_norm": 0.6051282051282051, "acc_norm_stderr": 0.03509545602262036 }, "community|acva:Arabic_Language_Origin|0": { "acc_norm": 0.5052631578947369, "acc_norm_stderr": 0.05156820511122477 }, "community|acva:Arabic_Literature|0": { "acc_norm": 0.503448275862069, "acc_norm_stderr": 0.04166567577101579 }, "community|acva:Arabic_Math|0": { "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03174930436412671 }, "community|acva:Arabic_Medicine|0": { "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "community|acva:Arabic_Music|0": { "acc_norm": 0.4316546762589928, "acc_norm_stderr": 0.042163322608081595 }, "community|acva:Arabic_Ornament|0": { "acc_norm": 0.48205128205128206, "acc_norm_stderr": 0.035874770987738294 }, "community|acva:Arabic_Philosophy|0": { "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "community|acva:Arabic_Physics_and_Chemistry|0": { "acc_norm": 0.4256410256410256, "acc_norm_stderr": 0.035498710803677065 }, "community|acva:Arabic_Wedding|0": { "acc_norm": 0.558974358974359, "acc_norm_stderr": 0.03564732931853579 }, "community|acva:Bahrain|0": { "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Comoros|0": { "acc_norm": 0.5333333333333333,
"acc_norm_stderr": 0.0752101433090355 }, "community|acva:Egypt_modern|0": { "acc_norm": 0.5684210526315789, "acc_norm_stderr": 0.05108592673308946 }, "community|acva:InfluenceFromAncientEgypt|0": { "acc_norm": 0.441025641025641, "acc_norm_stderr": 0.0356473293185358 }, "community|acva:InfluenceFromByzantium|0": { "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.04144311810878152 }, "community|acva:InfluenceFromChina|0": { "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.0317493043641267 }, "community|acva:InfluenceFromGreece|0": { "acc_norm": 0.3641025641025641, "acc_norm_stderr": 0.03454653867786389 }, "community|acva:InfluenceFromIslam|0": { "acc_norm": 0.6482758620689655, "acc_norm_stderr": 0.03979236637497412 }, "community|acva:InfluenceFromPersia|0": { "acc_norm": 0.3485714285714286, "acc_norm_stderr": 0.03612473503503051 }, "community|acva:InfluenceFromRome|0": { "acc_norm": 0.4205128205128205, "acc_norm_stderr": 0.03544138389303483 }, "community|acva:Iraq|0": { "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.0544600058689736 }, "community|acva:Islam_Education|0": { "acc_norm": 0.517948717948718, "acc_norm_stderr": 0.035874770987738246 }, "community|acva:Islam_branches_and_schools|0": { "acc_norm": 0.5314285714285715, "acc_norm_stderr": 0.03782994654682181 }, "community|acva:Islamic_law_system|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.03581804596782233 }, "community|acva:Jordan|0": { "acc_norm": 0.6, "acc_norm_stderr": 0.07385489458759965 }, "community|acva:Kuwait|0": { "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:Lebanon|0": { "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:Libya|0": { "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.07446027270295806 }, "community|acva:Mauritania|0": { "acc_norm": 0.5333333333333333, "acc_norm_stderr": 0.0752101433090355 }, "community|acva:Mesopotamia_civilization|0": { "acc_norm": 0.5032258064516129, "acc_norm_stderr": 0.04029030966708646 }, "community|acva:Morocco|0": { "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.06666666666666668 }, "community|acva:Oman|0": { "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.06267511942419626 }, "community|acva:Palestine|0": { "acc_norm": 0.6823529411764706, "acc_norm_stderr": 0.05079691179733583 }, "community|acva:Qatar|0": { "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.07491109582924915 }, "community|acva:Saudi_Arabia|0": { "acc_norm": 0.6717948717948717, "acc_norm_stderr": 0.03371243782413707 }, "community|acva:Somalia|0": { "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.07216392363431012 }, "community|acva:Sudan|0": { "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.07106690545187012 }, "community|acva:Syria|0": { "acc_norm": 0.6888888888888889, "acc_norm_stderr": 0.06979205927323111 }, "community|acva:Tunisia|0": { "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.07216392363431011 }, "community|acva:United_Arab_Emirates|0": { "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.04628210543937907 }, "community|acva:Yemen|0": { "acc_norm": 0.8, "acc_norm_stderr": 0.13333333333333333 }, "community|acva:communication|0": { "acc_norm": 0.6098901098901099, "acc_norm_stderr": 0.025601532524954076 }, "community|acva:computer_and_phone|0": { "acc_norm": 0.5457627118644067, "acc_norm_stderr": 0.02903819758623457 }, "community|acva:daily_life|0": { "acc_norm": 0.7685459940652819, "acc_norm_stderr": 0.02300899468734538 }, "community|acva:entertainment|0": { 
"acc_norm": 0.6067796610169491, "acc_norm_stderr": 0.02848786016617071 }, "community|alghafa:mcq_exams_test_ar|0": { "acc_norm": 0.2746858168761221, "acc_norm_stderr": 0.018929703300795454 }, "community|alghafa:meta_ar_dialects|0": { "acc_norm": 0.24559777571825764, "acc_norm_stderr": 0.005860817845144341 }, "community|alghafa:meta_ar_msa|0": { "acc_norm": 0.2581005586592179, "acc_norm_stderr": 0.014635185616527824 }, "community|alghafa:multiple_choice_facts_truefalse_balanced_task|0": { "acc_norm": 0.52, "acc_norm_stderr": 0.05807730170189531 }, "community|alghafa:multiple_choice_grounded_statement_soqal_task|0": { "acc_norm": 0.31333333333333335, "acc_norm_stderr": 0.037999960751971595 }, "community|alghafa:multiple_choice_grounded_statement_xglue_mlqa_task|0": { "acc_norm": 0.2733333333333333, "acc_norm_stderr": 0.036510752504862 }, "community|alghafa:multiple_choice_rating_sentiment_no_neutral_task|0": { "acc_norm": 0.4945590994371482, "acc_norm_stderr": 0.005591936327342272 }, "community|alghafa:multiple_choice_rating_sentiment_task|0": { "acc_norm": 0.32827356130108426, "acc_norm_stderr": 0.006065349138244435 }, "community|alghafa:multiple_choice_sentiment_task|0": { "acc_norm": 0.3383720930232558, "acc_norm_stderr": 0.011412117593743184 }, "community|arabic_exams|0": { "acc_norm": 0.23649906890130354, "acc_norm_stderr": 0.018354269670319875 }, "community|arabic_mmlu:abstract_algebra|0": { "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036845 }, "community|arabic_mmlu:anatomy|0": { "acc_norm": 0.18518518518518517, "acc_norm_stderr": 0.03355677216313142 }, "community|arabic_mmlu:astronomy|0": { "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "community|arabic_mmlu:business_ethics|0": { "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "community|arabic_mmlu:clinical_knowledge|0": { "acc_norm": 0.2037735849056604, "acc_norm_stderr": 0.0247907845017754 }, "community|arabic_mmlu:college_biology|0": { "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "community|arabic_mmlu:college_chemistry|0": { "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "community|arabic_mmlu:college_computer_science|0": { "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "community|arabic_mmlu:college_mathematics|0": { "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "community|arabic_mmlu:college_medicine|0": { "acc_norm": 0.19653179190751446, "acc_norm_stderr": 0.030299574664788147 }, "community|arabic_mmlu:college_physics|0": { "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.045766654032077636 }, "community|arabic_mmlu:computer_security|0": { "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "community|arabic_mmlu:conceptual_physics|0": { "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.028504856470514192 }, "community|arabic_mmlu:econometrics|0": { "acc_norm": 0.21929824561403508, "acc_norm_stderr": 0.03892431106518752 }, "community|arabic_mmlu:electrical_engineering|0": { "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "community|arabic_mmlu:elementary_mathematics|0": { "acc_norm": 0.20899470899470898, "acc_norm_stderr": 0.02094048156533485 }, "community|arabic_mmlu:formal_logic|0": { "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.04073524322147128 }, "community|arabic_mmlu:global_facts|0": { "acc_norm": 0.17, "acc_norm_stderr": 0.0377525168068637 }, "community|arabic_mmlu:high_school_biology|0": { "acc_norm": 0.1967741935483871, "acc_norm_stderr": 0.022616409420742025 }, 
"community|arabic_mmlu:high_school_chemistry|0": { "acc_norm": 0.1921182266009852, "acc_norm_stderr": 0.027719315709614775 }, "community|arabic_mmlu:high_school_computer_science|0": { "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "community|arabic_mmlu:high_school_european_history|0": { "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.033744026441394036 }, "community|arabic_mmlu:high_school_geography|0": { "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.027479603010538787 }, "community|arabic_mmlu:high_school_government_and_politics|0": { "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.029778663037752943 }, "community|arabic_mmlu:high_school_macroeconomics|0": { "acc_norm": 0.2205128205128205, "acc_norm_stderr": 0.021020672680827916 }, "community|arabic_mmlu:high_school_mathematics|0": { "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.025348097468097835 }, "community|arabic_mmlu:high_school_microeconomics|0": { "acc_norm": 0.21428571428571427, "acc_norm_stderr": 0.026653531596715498 }, "community|arabic_mmlu:high_school_physics|0": { "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.03257847384436776 }, "community|arabic_mmlu:high_school_psychology|0": { "acc_norm": 0.23302752293577983, "acc_norm_stderr": 0.018125669180861486 }, "community|arabic_mmlu:high_school_statistics|0": { "acc_norm": 0.19907407407407407, "acc_norm_stderr": 0.027232298462690232 }, "community|arabic_mmlu:high_school_us_history|0": { "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.03058759135160425 }, "community|arabic_mmlu:high_school_world_history|0": { "acc_norm": 0.20675105485232068, "acc_norm_stderr": 0.0263616516683891 }, "community|arabic_mmlu:human_aging|0": { "acc_norm": 0.28699551569506726, "acc_norm_stderr": 0.030360379710291936 }, "community|arabic_mmlu:human_sexuality|0": { "acc_norm": 0.2595419847328244, "acc_norm_stderr": 0.03844876139785271 }, "community|arabic_mmlu:international_law|0": { "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "community|arabic_mmlu:jurisprudence|0": { "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.042365112580946336 }, "community|arabic_mmlu:logical_fallacies|0": { "acc_norm": 0.2085889570552147, "acc_norm_stderr": 0.031921934489347215 }, "community|arabic_mmlu:machine_learning|0": { "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755805 }, "community|arabic_mmlu:management|0": { "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "community|arabic_mmlu:marketing|0": { "acc_norm": 0.28205128205128205, "acc_norm_stderr": 0.029480360549541194 }, "community|arabic_mmlu:medical_genetics|0": { "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "community|arabic_mmlu:miscellaneous|0": { "acc_norm": 0.24776500638569604, "acc_norm_stderr": 0.015438083080568965 }, "community|arabic_mmlu:moral_disputes|0": { "acc_norm": 0.23410404624277456, "acc_norm_stderr": 0.022797110278071134 }, "community|arabic_mmlu:moral_scenarios|0": { "acc_norm": 0.23575418994413408, "acc_norm_stderr": 0.014196375686290804 }, "community|arabic_mmlu:nutrition|0": { "acc_norm": 0.23202614379084968, "acc_norm_stderr": 0.024170840879341016 }, "community|arabic_mmlu:philosophy|0": { "acc_norm": 0.19292604501607716, "acc_norm_stderr": 0.022411516780911363 }, "community|arabic_mmlu:prehistory|0": { "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "community|arabic_mmlu:professional_accounting|0": { "acc_norm": 0.24113475177304963, "acc_norm_stderr": 
0.025518731049537755 }, "community|arabic_mmlu:professional_law|0": { "acc_norm": 0.227509778357236, "acc_norm_stderr": 0.01070718857686424 }, "community|arabic_mmlu:professional_medicine|0": { "acc_norm": 0.3272058823529412, "acc_norm_stderr": 0.028501452860396563 }, "community|arabic_mmlu:professional_psychology|0": { "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.017401816711427657 }, "community|arabic_mmlu:public_relations|0": { "acc_norm": 0.20909090909090908, "acc_norm_stderr": 0.038950910157241364 }, "community|arabic_mmlu:security_studies|0": { "acc_norm": 0.22857142857142856, "acc_norm_stderr": 0.02688214492230774 }, "community|arabic_mmlu:sociology|0": { "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.03036049015401464 }, "community|arabic_mmlu:us_foreign_policy|0": { "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "community|arabic_mmlu:virology|0": { "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.034843315926805875 }, "community|arabic_mmlu:world_religions|0": { "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.03565079670708311 }, "community|arc_challenge_okapi_ar|0": { "acc_norm": 0.27155172413793105, "acc_norm_stderr": 0.01306423320277828 }, "community|arc_easy_ar|0": { "acc_norm": 0.2491539763113367, "acc_norm_stderr": 0.00889769455700685 }, "community|boolq_ar|0": { "acc_norm": 0.6171779141104294, "acc_norm_stderr": 0.008514546087346945 }, "community|copa_ext_ar|0": { "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.05298680599073449 }, "community|hellaswag_okapi_ar|0": { "acc_norm": 0.2597317631665031, "acc_norm_stderr": 0.004579018747523884 }, "community|openbook_qa_ext_ar|0": { "acc_norm": 0.34949494949494947, "acc_norm_stderr": 0.02145271751103444 }, "community|piqa_ar|0": { "acc_norm": 0.5100927441352974, "acc_norm_stderr": 0.011679352711411064 }, "community|race_ar|0": { "acc_norm": 0.29559748427672955, "acc_norm_stderr": 0.0065001812469421254 }, "community|sciq_ar|0": { "acc_norm": 0.32663316582914576, "acc_norm_stderr": 0.014875199838353862 }, "community|toxigen_ar|0": { "acc_norm": 0.43529411764705883, "acc_norm_stderr": 0.01622292337449384 }, "lighteval|xstory_cloze:ar|0": { "acc": 0.47054930509596293, "acc_stderr": 0.012844785490016997 }, "community|acva:_average|0": { "acc_norm": 0.5717927063276779, "acc_norm_stderr": 0.04719449311224505 }, "community|alghafa:_average|0": { "acc_norm": 0.3384728412979725, "acc_norm_stderr": 0.02167590275339183 }, "community|arabic_mmlu:_average|0": { "acc_norm": 0.23610984407231933, "acc_norm_stderr": 0.031743469411994435 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. 
--> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
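The aggregated scores can be pulled the same way through the `results` configuration declared in this card's YAML; a minimal sketch (the config and split names below are taken directly from the card's metadata, not invented):

```python
from datasets import load_dataset

# Aggregated scores live in the "results" config; the "latest" split always
# points to the most recent timestamped run.
results = load_dataset(
    "OALL/details_tiiuae__Falcon3-1B-Base",
    "results",
    split="latest",
)
print(results[0])
```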
reddyvbhaskar/5.2.0.7
reddyvbhaskar
"2025-01-16T12:44:37Z"
20
0
[ "license:mit", "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T12:41:31Z"
--- license: mit configs: - config_name: default data_files: - split: train path: data/train-* dataset_info: features: - name: Q dtype: string - name: A dtype: string splits: - name: train num_bytes: 3858 num_examples: 20 download_size: 4929 dataset_size: 3858 ---
Octapod/aloha_hand
Octapod
"2025-01-16T13:28:27Z"
20
0
[ "task_categories:robotics", "region:us", "LeRobot", "tutorial" ]
[ "robotics" ]
"2025-01-16T12:48:43Z"
--- task_categories: - robotics tags: - LeRobot - tutorial --- This dataset was created using [LeRobot](https://github.com/huggingface/lerobot).
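A hedged usage sketch, not the project's documented API: the import path and constructor of `LeRobotDataset` vary across LeRobot versions, so treat the names below as assumptions and check the LeRobot repository for the current interface.

```python
# Assumed import path; it may differ in your installed LeRobot version.
from lerobot.common.datasets.lerobot_dataset import LeRobotDataset

dataset = LeRobotDataset("Octapod/aloha_hand")  # repo id on the Hugging Face Hub
print(len(dataset))  # number of frames in the dataset
```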
Wesamalnabki-bsc/my-distiset-d3dd466c
Wesamalnabki-bsc
"2025-01-16T13:13:38Z"
20
0
[ "task_categories:text-classification", "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "library:distilabel", "region:us", "synthetic", "distilabel", "rlaif", "datacraft" ]
[ "text-classification" ]
"2025-01-16T13:13:36Z"
--- size_categories: n<1K task_categories: - text-classification dataset_info: features: - name: text dtype: string - name: label dtype: class_label: names: '0': environment '1': technology '2': arts '3': culture '4': science '5': social-issues '6': sports '7': history '8': education '9': entertainment '10': health '11': politics '12': travel '13': business '14': economy '15': food splits: - name: train num_bytes: 3691 num_examples: 10 download_size: 5742 dataset_size: 3691 configs: - config_name: default data_files: - split: train path: data/train-* tags: - synthetic - distilabel - rlaif - datacraft --- <p align="left"> <a href="https://github.com/argilla-io/distilabel"> <img src="https://raw.githubusercontent.com/argilla-io/distilabel/main/docs/assets/distilabel-badge-light.png" alt="Built with Distilabel" width="200" height="32"/> </a> </p> # Dataset Card for my-distiset-d3dd466c This dataset has been created with [distilabel](https://distilabel.argilla.io/). ## Dataset Summary This dataset contains a `pipeline.yaml` which can be used to reproduce the pipeline that generated it in distilabel using the `distilabel` CLI: ```console distilabel pipeline run --config "https://huggingface.co/datasets/Wesamalnabki-bsc/my-distiset-d3dd466c/raw/main/pipeline.yaml" ``` or explore the configuration: ```console distilabel pipeline info --config "https://huggingface.co/datasets/Wesamalnabki-bsc/my-distiset-d3dd466c/raw/main/pipeline.yaml" ``` ## Dataset structure The examples have the following structure per configuration: <details><summary> Configuration: default </summary><hr> ```json { "label": 7, "text": "The Treaty of Versailles was signed on June 28, 1919, in the Hall of Mirrors at the Palace of Versailles in Versailles, France. The treaty imposed harsh penalties on Germany, including significant territorial losses and heavy reparations. The treaty officially ended World War I, but its terms have been widely criticized for contributing to the outbreak of World War II." } ``` This subset can be loaded as: ```python from datasets import load_dataset ds = load_dataset("Wesamalnabki-bsc/my-distiset-d3dd466c", "default") ``` Or simply as follows, since there's only one configuration and it is named `default`: ```python from datasets import load_dataset ds = load_dataset("Wesamalnabki-bsc/my-distiset-d3dd466c") ``` </details>
benchang1110/Taiwan-book-1B
benchang1110
"2025-01-16T15:18:26Z"
20
0
[ "language:zh", "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T13:50:20Z"
--- dataset_info: features: - name: category dtype: string - name: author dtype: string - name: book dtype: string - name: text dtype: string splits: - name: train num_bytes: 2301319885 num_examples: 4183 download_size: 1527950083 dataset_size: 2301319885 configs: - config_name: default data_files: - split: train path: data/train-* language: - zh --- ## Dataset summary This dataset is designed for Traditional Chinese (zh-tw) and comprises a collection of books from [好讀](https://www.haodoo.net). **Total tokens: 1.3B** (tokens are counted with the LLaMA 2 tokenizer) ## Usage ```python from datasets import load_dataset dataset = load_dataset("benchang1110/Taiwan-book-1B", split="train") ```
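As a hedged illustration of how the 1.3B figure could be reproduced: the card only says "tokenizer of LLaMA2", so the checkpoint name below is an assumption (one common choice on the Hub), and the loop simply sums token counts over the `text` column.

```python
from datasets import load_dataset
from transformers import AutoTokenizer

dataset = load_dataset("benchang1110/Taiwan-book-1B", split="train")

# Assumed tokenizer checkpoint; the card does not specify which LLaMA 2
# variant was used for counting.
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")

total_tokens = 0
for example in dataset:  # 4,183 books, so this pass takes a while
    total_tokens += len(tokenizer(example["text"])["input_ids"])
print(f"Total tokens: {total_tokens:,}")
```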
jusKnows/toxic_dataset_v3
jusKnows
"2025-01-16T15:12:31Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T15:12:30Z"
--- dataset_info: features: - name: text dtype: string - name: reason dtype: string - name: class dtype: string - name: toxic_level dtype: string splits: - name: train num_bytes: 140528 num_examples: 1200 download_size: 55459 dataset_size: 140528 configs: - config_name: default data_files: - split: train path: data/train-* ---
heekyo/airforce_academy_chatbot_dataset
heekyo
"2025-01-16T15:58:53Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T15:53:10Z"
--- dataset_info: features: - name: text dtype: string splits: - name: train num_bytes: 337074 num_examples: 54946 download_size: 113160 dataset_size: 337074 configs: - config_name: default data_files: - split: train path: data/train-* ---
emon-j/open_genmoji_data
emon-j
"2025-01-16T16:49:38Z"
20
0
[ "task_categories:image-to-image", "language:en", "size_categories:1K<n<10K", "format:parquet", "modality:image", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
[ "image-to-image" ]
"2025-01-16T16:30:17Z"
--- task_categories: - image-to-image language: - en size_categories: - 1K<n<10K --- # GenMoji Dataset This repository hosts the **GenMoji Dataset**, a collection of Apple emojis sourced from Emojigraph, along with their respective captions. ## Dataset Overview - **Total Examples:** 3,770 - **Features:** - `image`: An emoji image file. ![image/png](https://cdn-uploads.huggingface.co/production/uploads/64ed4009555440894012fa98/gFOxbIR9x0GTVzz4DUO4B.png) - `caption`: A short text description of the emoji, e.g. "grinning face emoji". ## Example Usage To load the dataset, use the Hugging Face `datasets` library: ```python from datasets import load_dataset dataset = load_dataset("emon-j/open_genmoji_data") print(dataset) example = dataset['train'][0] print("Caption:", example['caption']) example_image = example['image'] example_image.show() ```
1231czx/fixedbeta05_llama3_sft_math_dpo_type1_7ktype2__7ktype3_ver2_150_more_datatmp10_vllmexp_retest2
1231czx
"2025-01-16T16:31:21Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T16:31:17Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: level dtype: string - name: type dtype: string - name: solution dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool - name: my_prompt dtype: string - name: proxy_reward dtype: bool splits: - name: train num_bytes: 240219770 num_examples: 50000 download_size: 85619975 dataset_size: 240219770 configs: - config_name: default data_files: - split: train path: data/train-* --- # Dataset Card for "fixedbeta05_llama3_sft_math_dpo_type1_7ktype2__7ktype3_ver2_150_more_datatmp10_vllmexp_retest2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
1231czx/fixedbeta05_llama3_sft_math_dpo_type1_7ktype2__7ktype3_ver2_250_more_datatmp10_vllmexp_retest2
1231czx
"2025-01-16T19:00:32Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T19:00:27Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: level dtype: string - name: type dtype: string - name: solution dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool - name: my_prompt dtype: string - name: proxy_reward dtype: bool splits: - name: train num_bytes: 242230165 num_examples: 50000 download_size: 86604735 dataset_size: 242230165 configs: - config_name: default data_files: - split: train path: data/train-* ---
Petar-Uni-Freiburg/LLM_Time_Series
Petar-Uni-Freiburg
"2025-01-17T00:07:24Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T19:02:26Z"
--- dataset_info: features: - name: Date dtype: string - name: BCOMENTR Index dtype: float64 - name: BCOMINTR Index dtype: float64 - name: BCOMTR Index dtype: float64 - name: CAC Index dtype: float64 - name: CCMP Index dtype: float64 - name: DAX Index dtype: float64 - name: DJI Index dtype: float64 - name: EUCRBRDT Index dtype: float64 - name: EURAUD Curncy dtype: float64 - name: EURCAD Curncy dtype: float64 - name: EURGBP Curncy dtype: float64 - name: EURJPY Curncy dtype: float64 - name: EURUSD Curncy dtype: float64 - name: FTSEMIB Index dtype: float64 - name: GDBR10 Index dtype: float64 - name: GDBR2 Index dtype: string - name: GTGBP10Y Govt dtype: float64 - name: GTJPY10Y Govt dtype: string - name: HSI Index dtype: float64 - name: IBEX Index dtype: float64 - name: KOSPI Index dtype: float64 - name: LMCADS03 Comdty dtype: float64 - name: NDX Index dtype: float64 - name: NKY Index dtype: float64 - name: PUT Index dtype: float64 - name: SPX Index dtype: float64 - name: SX5E Index dtype: float64 - name: SX5T Index dtype: float64 - name: UKX Index dtype: float64 - name: USDJPY Curncy dtype: float64 - name: USGG10 Index dtype: float64 - name: USGG2YR Index dtype: float64 - name: VIX Index dtype: float64 - name: XAU Comdty dtype: float64 - name: Target dtype: int64 - name: line_text dtype: string splits: - name: train num_bytes: 2372637 num_examples: 6256 - name: test num_bytes: 264189 num_examples: 696 download_size: 2587639 dataset_size: 2636826 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* ---
1231czx/fixed_beta05_llama3_sft_math_type1_3ktype2__and_7ktype3_loss250_more_datatmp10_vllmexp_retest2
1231czx
"2025-01-16T19:04:07Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T19:04:02Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: level dtype: string - name: type dtype: string - name: solution dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool - name: my_prompt dtype: string - name: proxy_reward dtype: bool splits: - name: train num_bytes: 241596947 num_examples: 50000 download_size: 86058768 dataset_size: 241596947 configs: - config_name: default data_files: - split: train path: data/train-* ---
clembench-playpen/binary_dataset_wordle_wordlewithclue
clembench-playpen
"2025-01-16T22:13:41Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T20:00:38Z"
--- dataset_info: features: - name: game dtype: string - name: game_id dtype: int64 - name: model dtype: string - name: benchmark_version dtype: string - name: experiment dtype: string - name: episode dtype: string - name: Aborted dtype: int64 - name: Lose dtype: int64 - name: Success dtype: int64 - name: target dtype: string - name: player dtype: string - name: prompt list: - name: content dtype: string - name: role dtype: string - name: completion list: - name: content dtype: string - name: role dtype: string - name: turn_score dtype: int64 - name: label dtype: bool - name: __index_level_0__ dtype: int64 splits: - name: train num_bytes: 13717285 num_examples: 5670 download_size: 702098 dataset_size: 13717285 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/dpollama3_it_gsm8k_5ktype4_300tmp10
tmpmodelsave
"2025-01-16T20:19:09Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T20:19:08Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 14431287 num_examples: 5276 download_size: 4783372 dataset_size: 14431287 configs: - config_name: default data_files: - split: train path: data/train-* ---
Andrwyl/tokenized_grammar_transforms
Andrwyl
"2025-01-16T20:19:18Z"
20
0
[ "size_categories:100K<n<1M", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T20:19:13Z"
--- dataset_info: features: - name: id dtype: int64 - name: baseline dtype: string - name: topicalization dtype: string - name: vp_topicalization dtype: string - name: clefting dtype: string - name: passivization dtype: string - name: input_ids sequence: sequence: int32 - name: attention_mask sequence: sequence: int8 splits: - name: train num_bytes: 156629880 num_examples: 257400 download_size: 18775207 dataset_size: 156629880 --- # Dataset Card for "tokenized_grammar_transforms" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
RyanYr/reflect_omnimath-test_t3_crtc
RyanYr
"2025-01-17T15:39:17Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T20:30:38Z"
--- dataset_info: features: - name: domain sequence: string - name: difficulty dtype: float64 - name: problem dtype: string - name: solution dtype: string - name: answer dtype: string - name: source dtype: string - name: response@0 sequence: string - name: response@1 sequence: string - name: response@2 sequence: string - name: response@3 sequence: string - name: response@4 sequence: string - name: response@5 sequence: string splits: - name: train num_bytes: 61037620 num_examples: 4428 download_size: 23410653 dataset_size: 61037620 configs: - config_name: default data_files: - split: train path: data/train-* ---
RyanYr/reflect_omnimath-test_t4_binlabel
RyanYr
"2025-01-17T15:59:20Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T20:56:46Z"
--- dataset_info: features: - name: domain sequence: string - name: difficulty dtype: float64 - name: problem dtype: string - name: solution dtype: string - name: answer dtype: string - name: source dtype: string - name: response@0 sequence: string - name: response@1 sequence: string - name: response@2 sequence: string - name: response@3 sequence: string - name: response@4 sequence: string - name: response@5 sequence: string - name: response@6 sequence: string - name: response@7 sequence: string - name: response@8 sequence: string - name: response@0_ans sequence: string - name: response@0_correctness sequence: bool - name: response@2_ans sequence: string - name: response@2_correctness sequence: bool - name: response@4_ans sequence: string - name: response@4_correctness sequence: bool - name: response@6_ans sequence: string - name: response@6_correctness sequence: bool - name: response@8_ans sequence: string - name: response@8_correctness sequence: bool splits: - name: train num_bytes: 93300564 num_examples: 4428 download_size: 35471287 dataset_size: 93300564 configs: - config_name: default data_files: - split: train path: data/train-* ---
spiralworks/sample_5_per_v2
spiralworks
"2025-01-16T21:03:22Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T21:03:16Z"
--- dataset_info: features: - name: forum_id dtype: string - name: forum_title dtype: string - name: forum_authors sequence: string - name: forum_abstract dtype: string - name: forum_keywords sequence: string - name: forum_pdf_url dtype: string - name: note_id dtype: string - name: note_type dtype: string - name: note_created dtype: int64 - name: note_replyto dtype: string - name: note_readers sequence: string - name: note_signatures sequence: string - name: note_text dtype: string splits: - name: train num_bytes: 16348366 num_examples: 4599 download_size: 5383506 dataset_size: 16348366 configs: - config_name: default data_files: - split: train path: data/train-* ---
spiralworks/sample_5_per_v3
spiralworks
"2025-01-16T21:20:59Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T21:20:53Z"
--- dataset_info: features: - name: forum_id dtype: string - name: forum_title dtype: string - name: forum_authors sequence: string - name: forum_abstract dtype: string - name: forum_keywords sequence: string - name: forum_pdf_url dtype: string - name: forum_url dtype: string - name: note_id dtype: string - name: note_type dtype: string - name: note_created dtype: int64 - name: note_replyto dtype: string - name: note_readers sequence: string - name: note_signatures sequence: string - name: venue dtype: string - name: year dtype: string - name: note_text dtype: string splits: - name: train num_bytes: 16747756 num_examples: 4599 download_size: 5406194 dataset_size: 16747756 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/dpollama3_it_gsm8k_5ktype4_no_sft_loss_300tmp10
tmpmodelsave
"2025-01-16T22:40:05Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T22:40:04Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 13806683 num_examples: 5276 download_size: 4621589 dataset_size: 13806683 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/dpollama3_it_gsm8k_5ktype4_no_sft_loss_350tmp10
tmpmodelsave
"2025-01-16T22:46:32Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T22:46:31Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 14108033 num_examples: 5276 download_size: 4732159 dataset_size: 14108033 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/dpollama3_it_gsm8k_5ktype4_no_sft_loss_400tmp10
tmpmodelsave
"2025-01-16T22:53:51Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-16T22:53:37Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 14192864 num_examples: 5276 download_size: 4759789 dataset_size: 14192864 configs: - config_name: default data_files: - split: train path: data/train-* ---
Jasgui11/Hebrew
Jasgui11
"2025-01-17T01:06:25Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:audio", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T01:06:23Z"
--- dataset_info: features: - name: audio dtype: audio splits: - name: train num_bytes: 111207.0 num_examples: 1 download_size: 110119 dataset_size: 111207.0 configs: - config_name: default data_files: - split: train path: data/train-* ---
ankner/apps-rl-n-10-deepseek-7b-inst-labeled
ankner
"2025-01-17T01:15:37Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T01:15:03Z"
--- dataset_info: features: - name: input dtype: string - name: response dtype: string - name: test_cases dtype: string - name: difficulty dtype: string - name: id dtype: int64 - name: pass_rates sequence: float64 splits: - name: train num_bytes: 861915758 num_examples: 4249 - name: test num_bytes: 337254951 num_examples: 1001 download_size: 743858161 dataset_size: 1199170709 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* ---
spiralworks/openreview_2025_wildcard
spiralworks
"2025-01-17T02:03:38Z"
20
0
[ "size_categories:100K<n<1M", "format:parquet", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T01:53:26Z"
--- dataset_info: features: - name: forum_id dtype: string - name: forum_title dtype: string - name: forum_authors sequence: string - name: forum_abstract dtype: string - name: forum_keywords sequence: string - name: forum_pdf_url dtype: string - name: forum_url dtype: string - name: note_id dtype: string - name: note_type dtype: string - name: note_created dtype: int64 - name: note_replyto dtype: string - name: note_readers sequence: string - name: note_signatures sequence: string - name: venue dtype: string - name: year dtype: string - name: note_text dtype: string splits: - name: train num_bytes: 2565679898 num_examples: 626430 download_size: 758998779 dataset_size: 2565679898 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/beta05dpollama3_it_gsm8k_6ktype4_with_sft_loss_400tmp07
tmpmodelsave
"2025-01-17T02:33:24Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T02:33:22Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 14765047 num_examples: 5276 download_size: 4799740 dataset_size: 14765047 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/dpo_llama_type1_gsm8k_7ktype4_beta05_tmp10_250tmp10
tmpmodelsave
"2025-01-17T03:41:23Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T03:41:22Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 11235550 num_examples: 3957 download_size: 3666700 dataset_size: 11235550 configs: - config_name: default data_files: - split: train path: data/train-* ---
0xHorizon/MesrimShareGPT
0xHorizon
"2025-01-17T03:49:10Z"
20
0
[ "license:other", "size_categories:n<1K", "format:json", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T03:48:52Z"
--- license: other license_name: mesrim license_link: LICENSE ---
ntnu-smil/jacob-lttc-asr-manual-correction
ntnu-smil
"2025-01-17T03:49:26Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:audio", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T03:49:10Z"
--- dataset_info: features: - name: AUDIO_ID dtype: string - name: SCORE dtype: string - name: FORM dtype: string - name: ASR dtype: string - name: TEXT dtype: string - name: audio dtype: audio: sampling_rate: 16000 splits: - name: train.75 num_bytes: 167103535.0 num_examples: 62 - name: test.25 num_bytes: 58596008.0 num_examples: 22 download_size: 220603385 dataset_size: 225699543.0 configs: - config_name: default data_files: - split: train.75 path: data/train.75-* - split: test.25 path: data/test.25-* ---
tmpmodelsave/dpo_llama_type1_gsm8k_7ktype4_beta05_tmp10_350tmp10
tmpmodelsave
"2025-01-17T03:52:40Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T03:52:39Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 11149619 num_examples: 3957 download_size: 3640371 dataset_size: 11149619 configs: - config_name: default data_files: - split: train path: data/train-* ---
chiyuanhsiao/mmlu_stage1_no-replay
chiyuanhsiao
"2025-01-17T04:39:07Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T04:38:25Z"
--- dataset_info: features: - name: task_type dtype: string - name: task_name dtype: string - name: subtask_name dtype: string - name: input_question dtype: string - name: input_choice_list struct: - name: A dtype: string - name: B dtype: string - name: C dtype: string - name: D dtype: string - name: input_final_prompts sequence: string - name: input_correct_responses sequence: string - name: output_prediction_text sequence: string - name: output_parsed_answer dtype: string - name: output_choice_completions dtype: 'null' - name: output_choice_negative_log_likelihoods dtype: 'null' - name: output_metrics struct: - name: acc dtype: float64 - name: correct_format dtype: float64 - name: is_correct dtype: bool - name: input_question_hash dtype: string - name: input_final_prompts_hash sequence: string - name: benchmark_label dtype: string - name: eval_config struct: - name: max_gen_len dtype: string - name: max_prompt_len dtype: string - name: num_few_shot dtype: string - name: num_generations dtype: string - name: prompt_fn dtype: string - name: return_logprobs dtype: string - name: seed dtype: string - name: temperature dtype: string - name: top_k dtype: string - name: top_p dtype: string - name: my_prediction_text dtype: string splits: - name: latest num_bytes: 212758777 num_examples: 14042 download_size: 27889759 dataset_size: 212758777 configs: - config_name: default data_files: - split: latest path: data/latest-* ---
enjalot/ls-dataisplural
enjalot
"2025-01-17T04:43:55Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us", "latent-scope" ]
null
"2025-01-17T04:43:50Z"
--- tags: - latent-scope --- # ls-dataisplural This dataset contains the files necessary to view it in [latentscope](https://github.com/enjalot/latent-scope). The files in the `latentscope` directory are used by the app for viewing. You can also preview the scope (TODO). Total size of dataset files: 18.1 MB. TODO: download script inside latentscope
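For a quick look at the underlying rows without installing latentscope, a minimal sketch using the Hugging Face `datasets` library (this assumes the repo's parquet files are exposed through the default config, as the repo tags suggest):

```python
from datasets import load_dataset

# Assumption: the default config covers the scope's data files.
ds = load_dataset("enjalot/ls-dataisplural")
print(ds)
```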
amang1802/wildeweb_cls_1M
amang1802
"2025-01-17T04:58:43Z"
20
0
[ "size_categories:1M<n<10M", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T04:57:17Z"
--- dataset_info: features: - name: text dtype: string - name: id dtype: string - name: dump dtype: string - name: url dtype: string - name: file_path dtype: string - name: language dtype: string - name: language_score dtype: float64 - name: token_count dtype: int64 - name: score dtype: float64 - name: int_score dtype: int64 - name: justification dtype: string - name: classification_score dtype: int64 splits: - name: train num_bytes: 5446531825 num_examples: 1000000 download_size: 3071699657 dataset_size: 5446531825 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/beta05dpollama3_it_gsm8k_onlytype12_with_sft_loss_100tmp07
tmpmodelsave
"2025-01-17T05:00:18Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T05:00:16Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 10900135 num_examples: 3957 download_size: 3533081 dataset_size: 10900135 configs: - config_name: default data_files: - split: train path: data/train-* ---
chiyuanhsiao/mmlu_stage1_replay
chiyuanhsiao
"2025-01-17T05:13:08Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T05:13:00Z"
--- dataset_info: features: - name: task_type dtype: string - name: task_name dtype: string - name: subtask_name dtype: string - name: input_question dtype: string - name: input_choice_list struct: - name: A dtype: string - name: B dtype: string - name: C dtype: string - name: D dtype: string - name: input_final_prompts sequence: string - name: input_correct_responses sequence: string - name: output_prediction_text sequence: string - name: output_parsed_answer dtype: string - name: output_choice_completions dtype: 'null' - name: output_choice_negative_log_likelihoods dtype: 'null' - name: output_metrics struct: - name: acc dtype: float64 - name: correct_format dtype: float64 - name: is_correct dtype: bool - name: input_question_hash dtype: string - name: input_final_prompts_hash sequence: string - name: benchmark_label dtype: string - name: eval_config struct: - name: max_gen_len dtype: string - name: max_prompt_len dtype: string - name: num_few_shot dtype: string - name: num_generations dtype: string - name: prompt_fn dtype: string - name: return_logprobs dtype: string - name: seed dtype: string - name: temperature dtype: string - name: top_k dtype: string - name: top_p dtype: string - name: my_prediction_text dtype: string splits: - name: latest num_bytes: 200569720 num_examples: 14042 download_size: 31334931 dataset_size: 200569720 configs: - config_name: default data_files: - split: latest path: data/latest-* ---
tmpmodelsave/beta05dpollama3_it_gsm8k_onlytype12_with_sft_loss_300tmp07
tmpmodelsave
"2025-01-17T05:20:44Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T05:20:43Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 10857209 num_examples: 3957 download_size: 3540064 dataset_size: 10857209 configs: - config_name: default data_files: - split: train path: data/train-* ---
weqweasdas/prompt_numinamath_with_gts
weqweasdas
"2025-01-17T05:36:58Z"
20
0
[ "size_categories:100K<n<1M", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T05:36:51Z"
--- dataset_info: features: - name: source dtype: string - name: problem dtype: string - name: solution dtype: string - name: messages list: - name: content dtype: string - name: role dtype: string - name: gt sequence: string splits: - name: train num_bytes: 326922556 num_examples: 167874 download_size: 167082333 dataset_size: 326922556 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/llama3_it_gsm8k_type1_only_beta05_200tmp07
tmpmodelsave
"2025-01-17T06:16:08Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T06:16:07Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 10857351 num_examples: 3957 download_size: 3502178 dataset_size: 10857351 configs: - config_name: default data_files: - split: train path: data/train-* ---
mytestdpo/type12_7ktype3_5ktype4_beta05_sftloss_step450_dpo_scaling
mytestdpo
"2025-01-17T06:42:13Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T06:42:06Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 248586741 num_examples: 89692 download_size: 80721865 dataset_size: 248586741 --- # Dataset Card for "type12_7ktype3_5ktype4_beta05_sftloss_step450_dpo_scaling" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
budecosystem/GPT4-Mixtral-Judge-Battles-100K-Complexity-train
budecosystem
"2025-01-17T06:45:39Z"
20
0
[ "size_categories:100K<n<1M", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T06:45:28Z"
--- dataset_info: features: - name: id dtype: int64 - name: model_a dtype: string - name: model_b dtype: string - name: prompt dtype: string - name: response_a dtype: string - name: response_b dtype: string - name: winner_model_a dtype: int64 - name: winner_model_b dtype: int64 - name: winner_tie dtype: int64 - name: complexity dtype: float64 splits: - name: train num_bytes: 291530631 num_examples: 109101 download_size: 168033510 dataset_size: 291530631 configs: - config_name: default data_files: - split: train path: data/train-* ---
budecosystem/GPT4-Mixtral-GSM8K-MMLU-Preference-16K-Complexity-test
budecosystem
"2025-01-17T07:04:38Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T07:04:36Z"
--- dataset_info: features: - name: id dtype: int64 - name: domain dtype: string - name: prompt dtype: string - name: mixtral-8x7b-instruct-v0.1 dtype: bool - name: gpt-4-1106-preview dtype: bool - name: complexity dtype: float64 splits: - name: train num_bytes: 7564802 num_examples: 15361 download_size: 3819137 dataset_size: 7564802 configs: - config_name: default data_files: - split: train path: data/train-* ---
budecosystem/LMArena-Human-Preference-55K-Complexity-train
budecosystem
"2025-01-17T07:11:56Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T07:11:48Z"
--- dataset_info: features: - name: id dtype: int64 - name: model_a dtype: string - name: model_b dtype: string - name: prompt dtype: string - name: response_a dtype: string - name: response_b dtype: string - name: winner_model_a dtype: int64 - name: winner_model_b dtype: int64 - name: winner_tie dtype: int64 - name: complexity dtype: float64 splits: - name: train num_bytes: 185187324 num_examples: 57477 download_size: 101978865 dataset_size: 185187324 configs: - config_name: default data_files: - split: train path: data/train-* ---
birdsql/bird-critic-1.0-bigquery
birdsql
"2025-01-17T07:15:11Z"
20
0
[ "license:cc-by-sa-4.0", "region:us" ]
null
"2025-01-17T07:15:11Z"
--- license: cc-by-sa-4.0 ---
birdsql/bird-critic-1.5-user
birdsql
"2025-01-17T07:18:26Z"
20
0
[ "license:cc-by-sa-4.0", "region:us" ]
null
"2025-01-17T07:18:26Z"
--- license: cc-by-sa-4.0 ---
younanna/MRQA-CAI
younanna
"2025-01-21T05:32:43Z"
20
0
[ "language:en", "size_categories:100K<n<1M", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T07:34:09Z"
--- language: - en pretty_name: MRQA-CAI configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* dataset_info: features: - name: subset dtype: string - name: qid dtype: string - name: question dtype: string - name: original_context dtype: string - name: original_answers sequence: string - name: conflicting_context dtype: string - name: conflicting_answers sequence: string - name: random_irrelevant_context dtype: string - name: retrieved_irrelevant_context dtype: string splits: - name: train num_bytes: 387109602 num_examples: 165386 - name: test num_bytes: 78026005 num_examples: 32105 download_size: 291957901 dataset_size: 465135607 --- # MRQA-CAI: MRQA with Conflicting and Irrelevant Contexts ## Downloading the Dataset ```python # load the dataset from the Hugging Face Hub from datasets import load_dataset dataset = load_dataset("younanna/MRQA-CAI") ``` ## Reference This dataset is an extended version of the ["MRQA 2019 Shared Task: Evaluating Generalization in Reading Comprehension"](https://aclanthology.org/D19-5801/) dataset. ```bib @inproceedings{fisch-etal-2019-mrqa, title = "{MRQA} 2019 Shared Task: Evaluating Generalization in Reading Comprehension", author = "Fisch, Adam and Talmor, Alon and Jia, Robin and Seo, Minjoon and Choi, Eunsol and Chen, Danqi", editor = "Fisch, Adam and Talmor, Alon and Jia, Robin and Seo, Minjoon and Choi, Eunsol and Chen, Danqi", booktitle = "Proceedings of the 2nd Workshop on Machine Reading for Question Answering", month = nov, year = "2019", address = "Hong Kong, China", publisher = "Association for Computational Linguistics", url = "https://aclanthology.org/D19-5801/", doi = "10.18653/v1/D19-5801", pages = "1--13", abstract = "We present the results of the Machine Reading for Question Answering (MRQA) 2019 shared task on evaluating the generalization capabilities of reading comprehension systems. In this task, we adapted and unified 18 distinct question answering datasets into the same format. Among them, six datasets were made available for training, six datasets were made available for development, and the rest were hidden for final evaluation. Ten teams submitted systems, which explored various ideas including data sampling, multi-task learning, adversarial training and ensembling. The best system achieved an average F1 score of 72.5 on the 12 held-out datasets, 10.7 absolute points higher than our initial baseline based on BERT." } ```
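For quick inspection, here is a minimal sketch of reading the parallel context fields of one record; the field names come from the `dataset_info` block above, and the printed slices are illustrative only:

```python
# Minimal sketch: inspect the original, conflicting, and irrelevant contexts of one record.
from datasets import load_dataset

dataset = load_dataset("younanna/MRQA-CAI", split="train")
example = dataset[0]

print(example["question"])
print(example["original_answers"])       # answers supported by original_context
print(example["conflicting_answers"])    # answers implied by conflicting_context
print(example["random_irrelevant_context"][:200])     # context unrelated to the question
print(example["retrieved_irrelevant_context"][:200])  # retrieved but non-answering context
```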
tmpmodelsave/beta05dpollama3_it_gsm8k_type1_halftype2_with_sft_loss_100tmp10
tmpmodelsave
"2025-01-17T08:13:37Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T08:13:35Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 11030468 num_examples: 3957 download_size: 3637250 dataset_size: 11030468 configs: - config_name: default data_files: - split: train path: data/train-* ---
mytestdpo/llama3_it_onlytype12_step500_gsm8k_dpo_scalingtmp07
mytestdpo
"2025-01-17T08:26:08Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T08:26:03Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 238662140 num_examples: 85735 download_size: 77916432 dataset_size: 238662140 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/beta05dpollama3_it_gsm8k_type1_halftype2_with_sft_loss_300tmp10
tmpmodelsave
"2025-01-17T08:33:43Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T08:33:41Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: answer dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 11001602 num_examples: 3957 download_size: 3641543 dataset_size: 11001602 configs: - config_name: default data_files: - split: train path: data/train-* ---
ZhangShenao/math_gsm-gemma-1.1-7b-it-iter1_sample_7500_nsk_ml512
ZhangShenao
"2025-01-17T09:02:43Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T09:02:39Z"
--- dataset_info: features: - name: question dtype: string - name: answer dtype: string - name: rational_answer dtype: string splits: - name: train num_bytes: 7596412 num_examples: 7473 download_size: 3927158 dataset_size: 7596412 configs: - config_name: default data_files: - split: train path: data/train-* ---
mytestdpo/type2_llama3it_gsm8k
mytestdpo
"2025-01-17T09:04:13Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T09:04:10Z"
--- dataset_info: features: - name: chosen_txt dtype: string - name: rejected_txt dtype: string - name: gt dtype: string - name: prompt dtype: string - name: chosen dtype: string - name: rejected dtype: string splits: - name: train num_bytes: 77787006.0 num_examples: 9896 download_size: 26915187 dataset_size: 77787006.0 configs: - config_name: default data_files: - split: train path: data/train-* ---
marcomaccarini/DS_benigni_1
marcomaccarini
"2025-01-17T09:20:01Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T09:19:59Z"
--- dataset_info: features: - name: instruction dtype: string - name: input dtype: string - name: output dtype: string splits: - name: train num_bytes: 4323316 num_examples: 11127 download_size: 237893 dataset_size: 4323316 configs: - config_name: default data_files: - split: train path: data/train-* ---
selfcorrexp/llama3_non_delete_rr40k_3ep_dpo_newtype1
selfcorrexp
"2025-01-17T09:40:14Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T09:40:11Z"
--- dataset_info: features: - name: chosen_txt dtype: string - name: rejected_txt dtype: string - name: gt dtype: string - name: chosen dtype: string - name: rejected dtype: string - name: chosen_turn dtype: int64 - name: rejected_turn dtype: int64 - name: prompt dtype: string - name: margin dtype: float64 splits: - name: train num_bytes: 118808240 num_examples: 14510 download_size: 45342223 dataset_size: 118808240 configs: - config_name: default data_files: - split: train path: data/train-* ---
selfcorrexp/llama3_non_delete_rr40k_3ep_dpo_newtype1andtype2
selfcorrexp
"2025-01-17T09:40:49Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T09:40:43Z"
--- dataset_info: features: - name: chosen_txt dtype: string - name: rejected_txt dtype: string - name: gt dtype: string - name: chosen dtype: string - name: rejected dtype: string - name: chosen_turn dtype: int64 - name: rejected_turn dtype: int64 - name: prompt dtype: string - name: margin dtype: float64 splits: - name: train num_bytes: 194480988.0 num_examples: 23822 download_size: 74243598 dataset_size: 194480988.0 configs: - config_name: default data_files: - split: train path: data/train-* ---
SavyEIP/test
SavyEIP
"2025-01-17T12:13:21Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T10:11:51Z"
--- dataset_info: features: - name: messages dtype: string splits: - name: train num_bytes: 3692.1111111111113 num_examples: 21 - name: test num_bytes: 1054.888888888889 num_examples: 6 download_size: 4898 dataset_size: 4747.0 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* ---
kh4dien/chat-preference
kh4dien
"2025-01-17T10:17:39Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T10:17:28Z"
--- dataset_info: features: - name: chosen list: - name: role dtype: string - name: content dtype: string - name: rejected list: - name: role dtype: string - name: content dtype: string - name: source dtype: string splits: - name: train num_bytes: 136181803.0 num_examples: 51200 download_size: 74717149 dataset_size: 136181803.0 configs: - config_name: default data_files: - split: train path: data/train-* ---
Kavya-26/huggingface_dataset
Kavya-26
"2025-01-17T10:45:03Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T10:45:00Z"
--- dataset_info: features: - name: code dtype: string - name: explanation dtype: string splits: - name: train num_bytes: 934 num_examples: 6 download_size: 2880 dataset_size: 934 configs: - config_name: default data_files: - split: train path: data/train-* ---
AntZet/home_decoration_objects_images
AntZet
"2025-01-17T20:48:15Z"
20
0
[ "task_categories:image-to-text", "language:en", "license:mit", "size_categories:1K<n<10K", "format:imagefolder", "modality:image", "modality:text", "library:datasets", "library:mlcroissant", "region:us", "image-to-text", "computer-vision", "captioning" ]
[ "image-to-text" ]
"2025-01-17T11:00:53Z"
--- language: - en pretty_name: "Image Description Dataset" tags: - image-to-text - computer-vision - captioning license: "mit" task_categories: - image-to-text size_categories: - n<1K --- # Image Description Dataset ## Dataset Description This dataset contains 5125 images with their corresponding descriptions in both long and short formats. The descriptions were generated using the BLIP-large model. ### Dataset Statistics - Total images: 5125 - Average words in long description: 18.1 - Average words in short description: 9.4 ### Languages - English (en) ## Dataset Structure Each record in the dataset contains: - `file_name`: Relative path to the image file - `text`: Main description text used for training - `long_description`: Detailed description of the image - `short_description`: Concise description of the image ## Created Files ### Total Images - 5125 images ### Other Files - `dataset_infos.json`: 707 bytes - Contains metadata about the dataset structure and size. - `metadata.jsonl`: 1662073 bytes - Contains metadata for each image in JSON Lines format. ## Dataset Creation ### Source Data The source images were processed using AI image captioning to generate natural language descriptions. ### Annotations The descriptions were automatically generated using: - Model: Salesforce/blip-image-captioning-large - Two types of descriptions: 1. Long descriptions (max 150 tokens) 2. Short descriptions (max 10 tokens) ## Considerations for Using the Data ### Social Impact of Dataset This dataset can be used to train AI models for: - Automated image description generation - Image captioning systems - Accessibility features for visually impaired users - Content indexing and search ### Discussion of Biases The dataset may contain biases related to: - Types of images represented - AI model's training data biases - Language and cultural context ### Other Known Limitations - Descriptions are AI-generated and may contain inaccuracies - Limited dataset size (5125 images) - Model may miss subtle details or context ## Additional Information ### Dataset Curators This dataset was created using the py_ai_dataset_maker tool and JS image crawler plugin made by ANTHEZ in 2025. ### Licensing Information This dataset is released under the MIT License.
TianHongZXY/MATH-test-Tulu-3-8B-SFT-beam_search-completions-temp_0.8-range_400_to_500
TianHongZXY
"2025-01-19T20:54:02Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T11:11:09Z"
--- dataset_info: config_name: TianHongZXY_MATH--T-0.8--top_p-1.0--n-32--m-4--iters-20--look-1--seed-42--agg_strategy--last features: - name: problem dtype: string - name: level dtype: string - name: type dtype: string - name: solution dtype: string - name: completions sequence: string - name: pred dtype: string - name: completion_tokens sequence: int64 - name: scores sequence: sequence: float64 - name: agg_scores sequence: float64 - name: pred_weighted@1 dtype: string - name: pred_maj@1 dtype: string - name: pred_naive@1 dtype: string - name: pred_weighted@2 dtype: string - name: pred_maj@2 dtype: string - name: pred_naive@2 dtype: string - name: pred_weighted@4 dtype: string - name: pred_maj@4 dtype: string - name: pred_naive@4 dtype: string - name: pred_weighted@8 dtype: string - name: pred_maj@8 dtype: string - name: pred_naive@8 dtype: string - name: pred_weighted@16 dtype: string - name: pred_maj@16 dtype: string - name: pred_naive@16 dtype: string - name: pred_weighted@32 dtype: string - name: pred_maj@32 dtype: string - name: pred_naive@32 dtype: string splits: - name: train num_bytes: 3251192 num_examples: 100 download_size: 431203 dataset_size: 3251192 configs: - config_name: TianHongZXY_MATH--T-0.8--top_p-1.0--n-32--m-4--iters-20--look-1--seed-42--agg_strategy--last data_files: - split: train path: TianHongZXY_MATH--T-0.8--top_p-1.0--n-32--m-4--iters-20--look-1--seed-42--agg_strategy--last/train-* ---
sinan67/ReelSektor
sinan67
"2025-01-17T11:46:16Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T11:35:41Z"
--- dataset_info: features: - name: instruction dtype: string - name: input dtype: string - name: response dtype: string splits: - name: train num_bytes: 1040.0 num_examples: 4 - name: test num_bytes: 243 num_examples: 1 download_size: 7047 dataset_size: 1283.0 configs: - config_name: default data_files: - split: train path: data/train-* - split: test path: data/test-* ---
fikreanteneh/Amharic-News-Classification
fikreanteneh
"2025-01-17T12:07:44Z"
20
0
[ "license:mit", "size_categories:10K<n<100K", "format:csv", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T12:05:55Z"
--- license: mit ---
syllasgiorgos/commonVoice_greek_clean-tags
syllasgiorgos
"2025-01-17T12:06:06Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T12:06:04Z"
--- dataset_info: features: - name: file_name dtype: string - name: text dtype: string - name: transcription_normalised dtype: string - name: utterance_pitch_mean dtype: float32 - name: utterance_pitch_std dtype: float32 - name: snr dtype: float64 - name: c50 dtype: float64 - name: speaking_rate dtype: float64 - name: phonemes dtype: string - name: stoi dtype: float64 - name: si-sdr dtype: float64 - name: pesq dtype: float64 splits: - name: train num_bytes: 3963963 num_examples: 14312 download_size: 2438160 dataset_size: 3963963 configs: - config_name: default data_files: - split: train path: data/train-* ---
nkasmanoff/wikipedia-wildfires
nkasmanoff
"2025-01-17T12:52:30Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T12:52:29Z"
--- dataset_info: features: - name: id dtype: string - name: url dtype: string - name: title dtype: string - name: text dtype: string splits: - name: train num_bytes: 1137519 num_examples: 223 download_size: 639404 dataset_size: 1137519 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/beta05dpollama3_it_math_type12only_with_sft_loss_200tmp10
tmpmodelsave
"2025-01-17T13:11:52Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T13:11:49Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: level dtype: string - name: type dtype: string - name: solution dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 51284227 num_examples: 15000 download_size: 17631429 dataset_size: 51284227 configs: - config_name: default data_files: - split: train path: data/train-* ---
tmpmodelsave/beta05dpollama3_it_math_type12only_with_sft_loss_250tmp10
tmpmodelsave
"2025-01-17T13:28:13Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T13:28:10Z"
--- dataset_info: features: - name: idx dtype: int64 - name: gt dtype: string - name: prompt dtype: string - name: level dtype: string - name: type dtype: string - name: solution dtype: string - name: my_solu sequence: string - name: pred sequence: string - name: rewards sequence: bool splits: - name: train num_bytes: 51282906 num_examples: 15000 download_size: 17656479 dataset_size: 51282906 configs: - config_name: default data_files: - split: train path: data/train-* ---
Gorantlasuhas/sample_datetime_two
Gorantlasuhas
"2025-01-17T13:29:30Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T13:29:28Z"
--- dataset_info: features: - name: instruction dtype: string - name: context dtype: string - name: response dtype: string splits: - name: train num_bytes: 414836.8 num_examples: 320 - name: validation num_bytes: 103709.2 num_examples: 80 download_size: 144716 dataset_size: 518546.0 configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* ---
amiri1990/PPP
amiri1990
"2025-01-17T13:37:26Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T13:33:35Z"
--- dataset_info: features: - name: ID dtype: int64 - name: Type dtype: string - name: Title dtype: string - name: Abstract dtype: string - name: Keywords dtype: string splits: - name: train num_bytes: 7486 num_examples: 5 download_size: 15635 dataset_size: 7486 configs: - config_name: default data_files: - split: train path: data/train-* ---
workbrain/payroll-distill-data
workbrain
"2025-01-17T13:34:46Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T13:34:41Z"
--- dataset_info: features: - name: query dtype: string - name: answer dtype: string - name: tax period dtype: int64 - name: Gross Salary dtype: float64 - name: Tax dtype: int64 - name: NI Contribution dtype: float64 - name: Pension dtype: float64 - name: Net Salary dtype: float64 - name: YTD Gross dtype: float64 - name: YTD Tax dtype: float64 - name: YTD NI dtype: float64 - name: YTD Pension dtype: float64 - name: YTD Net dtype: float64 - name: Bonus dtype: int64 - name: Overtime dtype: int64 - name: Student Loan dtype: int64 - name: Other Deductions dtype: int64 - name: categories_Bonus and Overtime dtype: bool - name: categories_Employer Payment Summary (EPS) dtype: bool - name: categories_Full Payment Submission (FPS) dtype: bool - name: categories_HMRC Rules on NI dtype: bool - name: categories_HMRC Rules on Pension dtype: bool - name: categories_HMRC Rules on Tax dtype: bool - name: categories_National Insurance dtype: bool - name: categories_Other Deductions dtype: bool - name: categories_Payroll Process Explanation dtype: bool - name: categories_Pension Contributions dtype: bool - name: categories_Salary Calculation dtype: bool - name: categories_Tax Code Explanation dtype: bool - name: categories_Tax Deduction dtype: bool - name: categories_Tax Period Comparison dtype: bool - name: categories_YTD Calculations dtype: bool - name: frequency_4-Weekly dtype: bool - name: frequency_Fortnightly dtype: bool - name: frequency_Monthly dtype: bool - name: frequency_Quarterly dtype: bool - name: frequency_Weekly dtype: bool - name: frequency_Yearly dtype: bool splits: - name: train num_bytes: 36848 num_examples: 140 - name: validation num_bytes: 7772 num_examples: 30 - name: test num_bytes: 7626 num_examples: 30 download_size: 74861 dataset_size: 52246 configs: - config_name: default data_files: - split: train path: data/train-* - split: validation path: data/validation-* - split: test path: data/test-* ---
PleIAs/KaribuAI
PleIAs
"2025-01-17T14:05:17Z"
20
1
[ "size_categories:n<1K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T14:05:15Z"
--- dataset_info: features: - name: index dtype: int64 - name: category dtype: string - name: gen_text dtype: string - name: source dtype: string - name: toxicity_level dtype: string - name: score_composite dtype: float64 - name: CECR_level dtype: string - name: theme dtype: string splits: - name: train num_bytes: 642108 num_examples: 447 download_size: 373900 dataset_size: 642108 configs: - config_name: default data_files: - split: train path: data/train-* ---
JasonYN/tco-for-uvr
JasonYN
"2025-01-17T15:18:02Z"
20
0
[ "size_categories:n<1K", "format:parquet", "modality:audio", "library:datasets", "library:dask", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T15:00:11Z"
--- dataset_info: features: - name: audio dtype: audio: sampling_rate: 16000 splits: - name: train num_bytes: 6119790788.0 num_examples: 161 download_size: 6062121357 dataset_size: 6119790788.0 configs: - config_name: default data_files: - split: train path: data/train-* ---
Asarkar07/MP_fin2_resized_images
Asarkar07
"2025-01-17T15:17:03Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:image", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T15:12:02Z"
--- dataset_info: features: - name: image dtype: image - name: label dtype: int64 splits: - name: train num_bytes: 25603653.36 num_examples: 3192 download_size: 24151788 dataset_size: 25603653.36 configs: - config_name: default data_files: - split: train path: data/train-* ---
RyanYr/reflect_mmlumathpro-test_mv_binlabel
RyanYr
"2025-01-17T16:13:44Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T16:13:42Z"
--- dataset_info: features: - name: question_id dtype: int64 - name: original_question dtype: string - name: options sequence: string - name: answer dtype: string - name: answer_index dtype: int64 - name: cot_content dtype: string - name: category dtype: string - name: src dtype: string - name: problem dtype: string - name: alt_answer dtype: string - name: response@0 sequence: string - name: response@0_ans sequence: string - name: response@0_correctness sequence: bool splits: - name: train num_bytes: 11142923 num_examples: 1351 download_size: 3613580 dataset_size: 11142923 configs: - config_name: default data_files: - split: train path: data/train-* ---
InsultedByMathematics/infoNCA-fix-1_eval
InsultedByMathematics
"2025-01-17T16:24:24Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T16:24:21Z"
--- dataset_info: features: - name: response_0 dtype: string - name: response_1 dtype: string - name: response_2 dtype: string - name: response_3 dtype: string - name: response_4 dtype: string - name: response_0_reward dtype: float64 - name: response_1_reward dtype: float64 - name: response_2_reward dtype: float64 - name: response_3_reward dtype: float64 - name: response_4_reward dtype: float64 - name: prompt_id dtype: string - name: prompt dtype: string - name: llama_prompt_tokens sequence: int64 - name: llama_chosen_tokens sequence: int64 - name: chosen_reward dtype: float64 - name: llama_reject_tokens sequence: int64 - name: reject_reward dtype: float64 - name: llama_middle_tokens sequence: int64 - name: middle_reward dtype: float64 - name: chosen_logprob dtype: float64 - name: middle_logprob dtype: float64 - name: reject_logprob dtype: float64 - name: finetuned_response_0 dtype: string - name: finetuned_response_1 dtype: string - name: finetuned_response_2 dtype: string - name: finetuned_response_3 dtype: string - name: finetuned_response_4 dtype: string splits: - name: test_prefs num_bytes: 95035283 num_examples: 1801 download_size: 24000362 dataset_size: 95035283 configs: - config_name: default data_files: - split: test_prefs path: data/test_prefs-* ---
RyanYr/reflect_collegemath-test_t4_binlabel
RyanYr
"2025-01-19T04:14:57Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T17:13:53Z"
--- dataset_info: features: - name: data_source dtype: string - name: question_number dtype: string - name: problem dtype: string - name: answer dtype: string - name: license dtype: string - name: data_topic dtype: string - name: response@0 sequence: string - name: response@1 sequence: string - name: response@2 sequence: string - name: response@3 sequence: string - name: response@4 sequence: string - name: response@5 sequence: string - name: response@6 sequence: string - name: response@7 sequence: string - name: response@8 sequence: string - name: response@0_ans sequence: string - name: response@0_correctness sequence: bool - name: response@2_ans sequence: string - name: response@2_correctness sequence: bool - name: response@4_ans sequence: string - name: response@4_correctness sequence: bool - name: response@6_ans sequence: string - name: response@6_correctness sequence: bool - name: response@8_ans sequence: string - name: response@8_correctness sequence: bool splits: - name: train num_bytes: 46641052 num_examples: 2818 download_size: 17199555 dataset_size: 46641052 configs: - config_name: default data_files: - split: train path: data/train-* ---
Ayush-Singh/reward-bench-hacking-rewards-harmless-train-normal
Ayush-Singh
"2025-01-17T18:11:58Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T18:00:52Z"
--- dataset_info: features: - name: prompt dtype: string - name: chosen dtype: string - name: chosen_model dtype: string - name: rejected dtype: string - name: rejected_model dtype: string - name: subset dtype: string - name: id dtype: int64 - name: reward_chosen dtype: float64 - name: reward_rejected dtype: float64 splits: - name: xstest_should_respond num_bytes: 321610 num_examples: 250 - name: hep_go num_bytes: 179381 num_examples: 164 - name: hep_js num_bytes: 159504 num_examples: 164 - name: donotanswer num_bytes: 293380 num_examples: 136 - name: refusals_offensive num_bytes: 174198 num_examples: 100 - name: refusals_dangerous num_bytes: 275982 num_examples: 100 - name: hep_java num_bytes: 202340 num_examples: 164 - name: llmbar_adver_manual num_bytes: 78234 num_examples: 46 - name: xstest_should_refuse num_bytes: 238780 num_examples: 154 - name: alpacaeval_easy num_bytes: 2169328 num_examples: 805 - name: hep_cpp num_bytes: 168833 num_examples: 164 - name: mt_bench_hard num_bytes: 94718 num_examples: 45 - name: llmbar_adver_neighbor num_bytes: 131026 num_examples: 134 - name: mt_bench_med num_bytes: 127416 num_examples: 45 - name: mt_bench_easy num_bytes: 94345 num_examples: 28 - name: llmbar_natural num_bytes: 90090 num_examples: 100 - name: alpacaeval_hard num_bytes: 1600609 num_examples: 805 - name: llmbar_adver_GPTOut num_bytes: 32156 num_examples: 47 - name: hep_rust num_bytes: 174778 num_examples: 164 - name: hep_python num_bytes: 141969 num_examples: 164 - name: alpacaeval_length num_bytes: 3081227 num_examples: 805 - name: math_prm num_bytes: 878524 num_examples: 447 - name: llmbar_adver_GPTInst num_bytes: 210583 num_examples: 92 download_size: 5981687 dataset_size: 10919011 configs: - config_name: default data_files: - split: xstest_should_respond path: data/xstest_should_respond-* - split: hep_go path: data/hep_go-* - split: hep_js path: data/hep_js-* - split: donotanswer path: data/donotanswer-* - split: refusals_offensive path: data/refusals_offensive-* - split: refusals_dangerous path: data/refusals_dangerous-* - split: hep_java path: data/hep_java-* - split: llmbar_adver_manual path: data/llmbar_adver_manual-* - split: xstest_should_refuse path: data/xstest_should_refuse-* - split: alpacaeval_easy path: data/alpacaeval_easy-* - split: hep_cpp path: data/hep_cpp-* - split: mt_bench_hard path: data/mt_bench_hard-* - split: llmbar_adver_neighbor path: data/llmbar_adver_neighbor-* - split: mt_bench_med path: data/mt_bench_med-* - split: mt_bench_easy path: data/mt_bench_easy-* - split: llmbar_natural path: data/llmbar_natural-* - split: alpacaeval_hard path: data/alpacaeval_hard-* - split: llmbar_adver_GPTOut path: data/llmbar_adver_GPTOut-* - split: hep_rust path: data/hep_rust-* - split: hep_python path: data/hep_python-* - split: alpacaeval_length path: data/alpacaeval_length-* - split: math_prm path: data/math_prm-* - split: llmbar_adver_GPTInst path: data/llmbar_adver_GPTInst-* ---
RyanYr/reflect_collegemath-test_nonGenCritic_t4_binlabel
RyanYr
"2025-01-17T18:02:05Z"
20
0
[ "size_categories:1K<n<10K", "format:parquet", "modality:tabular", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T18:02:03Z"
--- dataset_info: features: - name: data_source dtype: string - name: question_number dtype: string - name: problem dtype: string - name: answer dtype: string - name: license dtype: string - name: data_topic dtype: string - name: response@0 sequence: string - name: response@1 dtype: float64 - name: response@2 sequence: string - name: response@3 dtype: float64 - name: response@4 sequence: string - name: response@5 dtype: float64 - name: response@6 sequence: string - name: response@7 dtype: float64 - name: response@8 sequence: string - name: response@0_ans sequence: string - name: response@0_correctness sequence: bool - name: response@2_ans sequence: string - name: response@2_correctness sequence: bool - name: response@4_ans sequence: string - name: response@4_correctness sequence: bool - name: response@6_ans sequence: string - name: response@6_correctness sequence: bool - name: response@8_ans sequence: string - name: response@8_correctness sequence: bool splits: - name: train num_bytes: 20763008 num_examples: 2818 download_size: 7270167 dataset_size: 20763008 configs: - config_name: default data_files: - split: train path: data/train-* ---
chiyuanhsiao/mmlu_stage2_no-replay
chiyuanhsiao
"2025-01-17T18:03:24Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T18:03:18Z"
--- dataset_info: features: - name: task_type dtype: string - name: task_name dtype: string - name: subtask_name dtype: string - name: input_question dtype: string - name: input_choice_list struct: - name: A dtype: string - name: B dtype: string - name: C dtype: string - name: D dtype: string - name: input_final_prompts sequence: string - name: input_correct_responses sequence: string - name: output_prediction_text sequence: string - name: output_parsed_answer dtype: string - name: output_choice_completions dtype: 'null' - name: output_choice_negative_log_likelihoods dtype: 'null' - name: output_metrics struct: - name: acc dtype: float64 - name: correct_format dtype: float64 - name: is_correct dtype: bool - name: input_question_hash dtype: string - name: input_final_prompts_hash sequence: string - name: benchmark_label dtype: string - name: eval_config struct: - name: max_gen_len dtype: string - name: max_prompt_len dtype: string - name: num_few_shot dtype: string - name: num_generations dtype: string - name: prompt_fn dtype: string - name: return_logprobs dtype: string - name: seed dtype: string - name: temperature dtype: string - name: top_k dtype: string - name: top_p dtype: string - name: my_prediction_text dtype: string splits: - name: latest num_bytes: 351250736 num_examples: 14042 download_size: 40399331 dataset_size: 351250736 configs: - config_name: default data_files: - split: latest path: data/latest-* ---
weqweasdas/filtered_numia_prompt30k
weqweasdas
"2025-01-17T18:57:46Z"
20
0
[ "size_categories:10K<n<100K", "format:parquet", "modality:text", "library:datasets", "library:pandas", "library:mlcroissant", "library:polars", "region:us" ]
null
"2025-01-17T18:57:43Z"
--- dataset_info: features: - name: idx dtype: int64 - name: prompt dtype: string - name: gt sequence: string splits: - name: train num_bytes: 11934951.222528767 num_examples: 30568 download_size: 3840107 dataset_size: 11934951.222528767 configs: - config_name: default data_files: - split: train path: data/train-* ---