{
  "default": {
    "description": "Wikipedia dataset containing cleaned articles of all languages.\nThe datasets are built from the Wikipedia dump\n(https://dumps.wikimedia.org/) with one split per language. Each example\ncontains the content of one full Wikipedia article with cleaning to strip\nmarkdown and unwanted sections (references, etc.).",
    "citation": "@ONLINE {wikidump,\n author = {Wikimedia Foundation},\n title = {Wikimedia Downloads},\n url = {https://dumps.wikimedia.org}\n}",
    "homepage": "https://dumps.wikimedia.org",
    "license": "",
    "features": {
      "example": {
        "dtype": "string",
        "_type": "Value"
      },
      "summary": {
        "dtype": "string",
        "_type": "Value"
      }
    },
    "splits": {
      "train": {
        "name": "train",
        "num_bytes": 7401808572,
        "num_examples": 6458670,
        "dataset_name": "pixelsum_wiki"
      }
    },
    "download_size": 4591048930,
    "dataset_size": 7401808572,
    "size_in_bytes": 11992857502
  }
}