Error Loading Catalan Dataset

#23
by MauroVazquez - opened

As soon as I try to load the Catalan dataset:

from datasets import load_dataset

ds = load_dataset(
    "mozilla-foundation/common_voice_17_0",
    "ca",
)

I get the following error:

---------------------------------------------------------------------------
HTTPError                                 Traceback (most recent call last)
File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_http.py:409, in hf_raise_for_status(response, endpoint_name)
    408 try:
--> 409     response.raise_for_status()
    410 except HTTPError as e:

File /home/.conda/envs/whisperx/lib/python3.10/site-packages/requests/models.py:1024, in Response.raise_for_status(self)
   1023 if http_error_msg:
-> 1024     raise HTTPError(http_error_msg, response=self)

HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/datasets/mozilla-foundation/common_voice_17_0/resolve/main/audio/ca/other/ca_other_10.tar

The above exception was the direct cause of the following exception:

EntryNotFoundError                        Traceback (most recent call last)
File ~/.local/lib/python3.10/site-packages/datasets/utils/file_utils.py:183, in cached_path(url_or_filename, download_config, **download_kwargs)
    182 try:
--> 183     output_path = huggingface_hub.HfApi(
    184         endpoint=config.HF_ENDPOINT,
    185         token=download_config.token,
    186         library_name="datasets",
    187         library_version=__version__,
    188         user_agent=get_datasets_user_agent(download_config.user_agent),
    189     ).hf_hub_download(
    190         repo_id=resolved_path.repo_id,
    191         repo_type=resolved_path.repo_type,
    192         revision=resolved_path.revision,
    193         filename=resolved_path.path_in_repo,
    194         force_download=download_config.force_download,
    195         proxies=download_config.proxies,
    196     )
    197 except (
    198     huggingface_hub.utils.RepositoryNotFoundError,
    199     huggingface_hub.utils.EntryNotFoundError,
    200     huggingface_hub.utils.RevisionNotFoundError,
    201     huggingface_hub.utils.GatedRepoError,
    202 ) as e:

File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
    112     kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)

File ~/.local/lib/python3.10/site-packages/huggingface_hub/hf_api.py:5248, in HfApi.hf_hub_download(self, repo_id, filename, subfolder, repo_type, revision, cache_dir, local_dir, force_download, proxies, etag_timeout, token, local_files_only, resume_download, force_filename, local_dir_use_symlinks)
   5246     token = self.token
-> 5248 return hf_hub_download(
   5249     repo_id=repo_id,
   5250     filename=filename,
   5251     subfolder=subfolder,
   5252     repo_type=repo_type,
   5253     revision=revision,
   5254     endpoint=self.endpoint,
   5255     library_name=self.library_name,
   5256     library_version=self.library_version,
   5257     cache_dir=cache_dir,
   5258     local_dir=local_dir,
   5259     local_dir_use_symlinks=local_dir_use_symlinks,
   5260     user_agent=self.user_agent,
   5261     force_download=force_download,
   5262     force_filename=force_filename,
   5263     proxies=proxies,
   5264     etag_timeout=etag_timeout,
   5265     resume_download=resume_download,
   5266     token=token,
   5267     headers=self.headers,
   5268     local_files_only=local_files_only,
   5269 )

File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
    112     kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)

File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:862, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, user_agent, force_download, proxies, etag_timeout, token, local_files_only, headers, endpoint, resume_download, force_filename, local_dir_use_symlinks)
    861 else:
--> 862     return _hf_hub_download_to_cache_dir(
    863         # Destination
    864         cache_dir=cache_dir,
    865         # File info
    866         repo_id=repo_id,
    867         filename=filename,
    868         repo_type=repo_type,
    869         revision=revision,
    870         # HTTP info
    871         endpoint=endpoint,
    872         etag_timeout=etag_timeout,
    873         headers=hf_headers,
    874         proxies=proxies,
    875         token=token,
    876         # Additional options
    877         local_files_only=local_files_only,
    878         force_download=force_download,
    879     )

File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:925, in _hf_hub_download_to_cache_dir(cache_dir, repo_id, filename, repo_type, revision, endpoint, etag_timeout, headers, proxies, token, local_files_only, force_download)
    923 # Try to get metadata (etag, commit_hash, url, size) from the server.
    924 # If we can't, a HEAD request error is returned.
--> 925 (url_to_download, etag, commit_hash, expected_size, head_call_error) = _get_metadata_or_catch_error(
    926     repo_id=repo_id,
    927     filename=filename,
    928     repo_type=repo_type,
    929     revision=revision,
    930     endpoint=endpoint,
    931     proxies=proxies,
    932     etag_timeout=etag_timeout,
    933     headers=headers,
    934     token=token,
    935     local_files_only=local_files_only,
    936     storage_folder=storage_folder,
    937     relative_filename=relative_filename,
    938 )
    940 # etag can be None for several reasons:
    941 # 1. we passed local_files_only.
    942 # 2. we don't have a connection
   (...)
    948 # If the specified revision is a commit hash, look inside "snapshots".
    949 # If the specified revision is a branch or tag, look inside "refs".

File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:1376, in _get_metadata_or_catch_error(repo_id, filename, repo_type, revision, endpoint, proxies, etag_timeout, headers, token, local_files_only, relative_filename, storage_folder)
   1375 try:
-> 1376     metadata = get_hf_file_metadata(
   1377         url=url, proxies=proxies, timeout=etag_timeout, headers=headers, token=token
   1378     )
   1379 except EntryNotFoundError as http_error:

File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_validators.py:114, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
    112     kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 114 return fn(*args, **kwargs)

File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:1296, in get_hf_file_metadata(url, token, proxies, timeout, library_name, library_version, user_agent, headers)
   1295 # Retrieve metadata
-> 1296 r = _request_wrapper(
   1297     method="HEAD",
   1298     url=url,
   1299     headers=hf_headers,
   1300     allow_redirects=False,
   1301     follow_relative_redirects=True,
   1302     proxies=proxies,
   1303     timeout=timeout,
   1304 )
   1305 hf_raise_for_status(r)

File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:280, in _request_wrapper(method, url, follow_relative_redirects, **params)
    279 if follow_relative_redirects:
--> 280     response = _request_wrapper(
    281         method=method,
    282         url=url,
    283         follow_relative_redirects=False,
    284         **params,
    285     )
    287     # If redirection, we redirect only relative paths.
    288     # This is useful in case of a renamed repository.

File ~/.local/lib/python3.10/site-packages/huggingface_hub/file_download.py:304, in _request_wrapper(method, url, follow_relative_redirects, **params)
    303 response = get_session().request(method=method, url=url, **params)
--> 304 hf_raise_for_status(response)
    305 return response

File ~/.local/lib/python3.10/site-packages/huggingface_hub/utils/_http.py:420, in hf_raise_for_status(response, endpoint_name)
    419     message = f"{response.status_code} Client Error." + "\n\n" + f"Entry Not Found for url: {response.url}."
--> 420     raise _format(EntryNotFoundError, message, response) from e
    422 elif error_code == "GatedRepo":

EntryNotFoundError: 404 Client Error. (Request ID: Root=1-67eaa113-6757d3441601b565700e6963;786e25d1-6160-4e1c-a0c1-6d171b5535ef)

Entry Not Found for url: https://huggingface.co/datasets/mozilla-foundation/common_voice_17_0/resolve/main/audio/ca/other/ca_other_10.tar.

The above exception was the direct cause of the following exception:

FileNotFoundError                         Traceback (most recent call last)
Cell In[31], line 2
      1 print("Loading Common Voice Aina Train")
----> 2 ds = load_dataset(
      3         "mozilla-foundation/common_voice_17_0",
      4         "ca",
      5     )
      6 print("Loading Common Voice Aina Test")
      7 common_voice["test"] = load_dataset("mozilla-foundation/common_voice_17_0", "ca", split="test", use_auth_token=HF_TOKEN)

File ~/.local/lib/python3.10/site-packages/datasets/load.py:2083, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, keep_in_memory, save_infos, revision, token, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)
   2080     return builder_instance.as_streaming_dataset(split=split)
   2082 # Download and prepare data
-> 2083 builder_instance.download_and_prepare(
   2084     download_config=download_config,
   2085     download_mode=download_mode,
   2086     verification_mode=verification_mode,
   2087     num_proc=num_proc,
   2088     storage_options=storage_options,
   2089 )
   2091 # Build dataset for splits
   2092 keep_in_memory = (
   2093     keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
   2094 )

File ~/.local/lib/python3.10/site-packages/datasets/builder.py:925, in DatasetBuilder.download_and_prepare(self, output_dir, download_config, download_mode, verification_mode, dl_manager, base_path, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs)
    923 if num_proc is not None:
    924     prepare_split_kwargs["num_proc"] = num_proc
--> 925 self._download_and_prepare(
    926     dl_manager=dl_manager,
    927     verification_mode=verification_mode,
    928     **prepare_split_kwargs,
    929     **download_and_prepare_kwargs,
    930 )
    931 # Sync info
    932 self.info.dataset_size = sum(split.num_bytes for split in self.info.splits.values())

File ~/.local/lib/python3.10/site-packages/datasets/builder.py:1649, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs)
   1648 def _download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs):
-> 1649     super()._download_and_prepare(
   1650         dl_manager,
   1651         verification_mode,
   1652         check_duplicate_keys=verification_mode == VerificationMode.BASIC_CHECKS
   1653         or verification_mode == VerificationMode.ALL_CHECKS,
   1654         **prepare_splits_kwargs,
   1655     )

File ~/.local/lib/python3.10/site-packages/datasets/builder.py:979, in DatasetBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_split_kwargs)
    977 split_dict = SplitDict(dataset_name=self.dataset_name)
    978 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs)
--> 979 split_generators = self._split_generators(dl_manager, **split_generators_kwargs)
    981 # Checksums verification
    982 if verification_mode == VerificationMode.ALL_CHECKS and dl_manager.record_checksums:

File /opt/huggingface_cache/modules/datasets_modules/datasets/mozilla-foundation--common_voice_17_0/9d10386a731ff6e6ed4ec973a4dc204a9820e8c842fbe388bdba0dd205ed5016/common_voice_17_0.py:145, in CommonVoice._split_generators(self, dl_manager)
    141 for split in splits:
    142     audio_urls[split] = [
    143         _AUDIO_URL.format(lang=lang, split=split, shard_idx=i) for i in range(n_shards[lang][split])
    144     ]
--> 145 archive_paths = dl_manager.download(audio_urls)
    146 local_extracted_archive_paths = dl_manager.extract(archive_paths) if not dl_manager.is_streaming else {}
    148 meta_urls = {split: _TRANSCRIPT_URL.format(lang=lang, split=split) for split in splits}

File ~/.local/lib/python3.10/site-packages/datasets/download/download_manager.py:159, in DownloadManager.download(self, url_or_urls)
    157 start_time = datetime.now()
    158 with stack_multiprocessing_download_progress_bars():
--> 159     downloaded_path_or_paths = map_nested(
    160         download_func,
    161         url_or_urls,
    162         map_tuple=True,
    163         num_proc=download_config.num_proc,
    164         desc="Downloading data files",
    165         batched=True,
    166         batch_size=-1,
    167     )
    168 duration = datetime.now() - start_time
    169 logger.info(f"Downloading took {duration.total_seconds() // 60} min")

File ~/.local/lib/python3.10/site-packages/datasets/utils/py_utils.py:496, in map_nested(function, data_struct, dict_only, map_list, map_tuple, map_numpy, num_proc, parallel_min_length, batched, batch_size, types, disable_tqdm, desc)
    494     num_proc = 1
    495 if any(isinstance(v, types) and len(v) > len(iterable) for v in iterable):
--> 496     mapped = [
    497         map_nested(
    498             function=function,
    499             data_struct=obj,
    500             num_proc=num_proc,
    501             parallel_min_length=parallel_min_length,
    502             batched=batched,
    503             batch_size=batch_size,
    504             types=types,
    505         )
    506         for obj in iterable
    507     ]
    508 elif num_proc != -1 and num_proc <= 1 or len(iterable) < parallel_min_length:
    509     if batched:

File ~/.local/lib/python3.10/site-packages/datasets/utils/py_utils.py:497, in <listcomp>(.0)
    494     num_proc = 1
    495 if any(isinstance(v, types) and len(v) > len(iterable) for v in iterable):
    496     mapped = [
--> 497         map_nested(
    498             function=function,
    499             data_struct=obj,
    500             num_proc=num_proc,
    501             parallel_min_length=parallel_min_length,
    502             batched=batched,
    503             batch_size=batch_size,
    504             types=types,
    505         )
    506         for obj in iterable
    507     ]
    508 elif num_proc != -1 and num_proc <= 1 or len(iterable) < parallel_min_length:
    509     if batched:

File ~/.local/lib/python3.10/site-packages/datasets/utils/py_utils.py:513, in map_nested(function, data_struct, dict_only, map_list, map_tuple, map_numpy, num_proc, parallel_min_length, batched, batch_size, types, disable_tqdm, desc)
    511         batch_size = max(len(iterable) // num_proc + int(len(iterable) % num_proc > 0), 1)
    512     iterable = list(iter_batched(iterable, batch_size))
--> 513 mapped = [
    514     _single_map_nested((function, obj, batched, batch_size, types, None, True, None))
    515     for obj in hf_tqdm(iterable, disable=disable_tqdm, desc=desc)
    516 ]
    517 if batched:
    518     mapped = [mapped_item for mapped_batch in mapped for mapped_item in mapped_batch]

File ~/.local/lib/python3.10/site-packages/datasets/utils/py_utils.py:514, in <listcomp>(.0)
    511         batch_size = max(len(iterable) // num_proc + int(len(iterable) % num_proc > 0), 1)
    512     iterable = list(iter_batched(iterable, batch_size))
    513 mapped = [
--> 514     _single_map_nested((function, obj, batched, batch_size, types, None, True, None))
    515     for obj in hf_tqdm(iterable, disable=disable_tqdm, desc=desc)
    516 ]
    517 if batched:
    518     mapped = [mapped_item for mapped_batch in mapped for mapped_item in mapped_batch]

File ~/.local/lib/python3.10/site-packages/datasets/utils/py_utils.py:382, in _single_map_nested(args)
    375         return function(data_struct)
    376 if (
    377     batched
    378     and not isinstance(data_struct, dict)
    379     and isinstance(data_struct, types)
    380     and all(not isinstance(v, (dict, types)) for v in data_struct)
    381 ):
--> 382     return [mapped_item for batch in iter_batched(data_struct, batch_size) for mapped_item in function(batch)]
    384 # Reduce logging to keep things readable in multiprocessing with tqdm
    385 if rank is not None and logging.get_verbosity() < logging.WARNING:

File ~/.local/lib/python3.10/site-packages/datasets/utils/py_utils.py:382, in <listcomp>(.0)
    375         return function(data_struct)
    376 if (
    377     batched
    378     and not isinstance(data_struct, dict)
    379     and isinstance(data_struct, types)
    380     and all(not isinstance(v, (dict, types)) for v in data_struct)
    381 ):
--> 382     return [mapped_item for batch in iter_batched(data_struct, batch_size) for mapped_item in function(batch)]
    384 # Reduce logging to keep things readable in multiprocessing with tqdm
    385 if rank is not None and logging.get_verbosity() < logging.WARNING:

File ~/.local/lib/python3.10/site-packages/datasets/download/download_manager.py:219, in DownloadManager._download_batched(self, url_or_filenames, download_config)
    206     return thread_map(
    207         download_func,
    208         url_or_filenames,
   (...)
    216         tqdm_class=tqdm,
    217     )
    218 else:
--> 219     return [
    220         self._download_single(url_or_filename, download_config=download_config)
    221         for url_or_filename in url_or_filenames
    222     ]

File ~/.local/lib/python3.10/site-packages/datasets/download/download_manager.py:220, in <listcomp>(.0)
    206     return thread_map(
    207         download_func,
    208         url_or_filenames,
   (...)
    216         tqdm_class=tqdm,
    217     )
    218 else:
    219     return [
--> 220         self._download_single(url_or_filename, download_config=download_config)
    221         for url_or_filename in url_or_filenames
    222     ]

File ~/.local/lib/python3.10/site-packages/datasets/download/download_manager.py:229, in DownloadManager._download_single(self, url_or_filename, download_config)
    226 if is_relative_path(url_or_filename):
    227     # append the relative path to the base_path
    228     url_or_filename = url_or_path_join(self._base_path, url_or_filename)
--> 229 out = cached_path(url_or_filename, download_config=download_config)
    230 out = tracked_str(out)
    231 out.set_origin(url_or_filename)

File ~/.local/lib/python3.10/site-packages/datasets/utils/file_utils.py:203, in cached_path(url_or_filename, download_config, **download_kwargs)
    183         output_path = huggingface_hub.HfApi(
    184             endpoint=config.HF_ENDPOINT,
    185             token=download_config.token,
   (...)
    195             proxies=download_config.proxies,
    196         )
    197     except (
    198         huggingface_hub.utils.RepositoryNotFoundError,
    199         huggingface_hub.utils.EntryNotFoundError,
    200         huggingface_hub.utils.RevisionNotFoundError,
    201         huggingface_hub.utils.GatedRepoError,
    202     ) as e:
--> 203         raise FileNotFoundError(str(e)) from e
    204 # Download external files
    205 else:
    206     output_path = get_from_cache(
    207         url_or_filename,
    208         cache_dir=cache_dir,
   (...)
    215         disable_tqdm=download_config.disable_tqdm,
    216     )

FileNotFoundError: 404 Client Error. (Request ID: Root=1-67eaa113-6757d3441601b565700e6963;786e25d1-6160-4e1c-a0c1-6d171b5535ef)

Entry Not Found for url: https://huggingface.co/datasets/mozilla-foundation/common_voice_17_0/resolve/main/audio/ca/other/ca_other_10.tar.

The loader fails to find the file "ca_other_10.tar", which does not appear to exist in the "other" folder of the repository. This issue does not happen with other languages, such as Spanish.
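
A quick way to confirm whether that shard is actually missing from the repository is to list the repo files with huggingface_hub. This is only a minimal sketch, assuming you are logged in with access to the gated dataset; the path is copied from the URL in the traceback:

from huggingface_hub import HfApi

api = HfApi()
# List every file in the dataset repo (can take a moment for a repo this large)
files = api.list_repo_files(
    "mozilla-foundation/common_voice_17_0",
    repo_type="dataset",
)

# Show the Catalan "other" shards that actually exist
print(sorted(f for f in files if f.startswith("audio/ca/other/")))

# Check the specific shard the loader is asking for
print("audio/ca/other/ca_other_10.tar" in files)

If the file really is absent while the loader's shard count expects it, that would explain the 404.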

Did you ever find a fix?
