Python ValueError: Invalid pattern: '**' can only be an entire path component

uplii1fm · asked 4 months ago · Python

I'm trying to fine-tune an LLM.
My code so far:

from datasets import load_dataset, DatasetDict, Dataset

from transformers import (
    AutoTokenizer,
    AutoConfig,
    AutoModelForSequenceClassification,
    DataCollatorWithPadding,
    TrainingArguments,
    Trainer,
)

from peft import PeftModel, PeftConfig, get_peft_model, LoraConfig
import evaluate
import torch
import numpy as np

# load dataset
dataset = load_dataset('TokenBender/code_instructions_122k_alpaca_style')
dataset

The error:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Cell In [12], line 2
      1 # load dataset
----> 2 dataset = load_dataset('TokenBender/code_instructions_122k_alpaca_style')
      3 dataset

File /usr/local/lib/python3.9/dist-packages/datasets/load.py:1664, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, **config_kwargs)
   1661 ignore_verifications = ignore_verifications or save_infos
   1663 # Create a dataset builder
-> 1664 builder_instance = load_dataset_builder(
   1665     path=path,
   1666     name=name,
   1667     data_dir=data_dir,
   1668     data_files=data_files,
   1669     cache_dir=cache_dir,
   1670     features=features,
   1671     download_config=download_config,
   1672     download_mode=download_mode,
   1673     revision=revision,
   1674     use_auth_token=use_auth_token,
   1675     **config_kwargs,
   1676 )
   1678 # Return iterable dataset in case of streaming
   1679 if streaming:

File /usr/local/lib/python3.9/dist-packages/datasets/load.py:1490, in load_dataset_builder(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, use_auth_token, **config_kwargs)
   1488     download_config = download_config.copy() if download_config else DownloadConfig()
   1489     download_config.use_auth_token = use_auth_token
-> 1490 dataset_module = dataset_module_factory(
   1491     path,
   1492     revision=revision,
   1493     download_config=download_config,
   1494     download_mode=download_mode,
   1495     data_dir=data_dir,
   1496     data_files=data_files,
   1497 )
   1499 # Get dataset builder class from the processing script
   1500 builder_cls = import_main_class(dataset_module.module_path)

File /usr/local/lib/python3.9/dist-packages/datasets/load.py:1242, in dataset_module_factory(path, revision, download_config, download_mode, force_local_path, dynamic_modules_path, data_dir, data_files, **download_kwargs)
   1237             if isinstance(e1, FileNotFoundError):
   1238                 raise FileNotFoundError(
   1239                     f"Couldn't find a dataset script at {relative_to_absolute_path(combined_path)} or any data file in the same directory. "
   1240                     f"Couldn't find '{path}' on the Hugging Face Hub either: {type(e1).__name__}: {e1}"
   1241                 ) from None
-> 1242             raise e1 from None
   1243 else:
   1244     raise FileNotFoundError(
   1245         f"Couldn't find a dataset script at {relative_to_absolute_path(combined_path)} or any data file in the same directory."
   1246     )

File /usr/local/lib/python3.9/dist-packages/datasets/load.py:1223, in dataset_module_factory(path, revision, download_config, download_mode, force_local_path, dynamic_modules_path, data_dir, data_files, **download_kwargs)
   1215             return HubDatasetModuleFactoryWithScript(
   1216                 path,
   1217                 revision=revision,
   (...)
   1220                 dynamic_modules_path=dynamic_modules_path,
   1221             ).get_module()
   1222         else:
-> 1223             return HubDatasetModuleFactoryWithoutScript(
   1224                 path,
   1225                 revision=revision,
   1226                 data_dir=data_dir,
   1227                 data_files=data_files,
   1228                 download_config=download_config,
   1229                 download_mode=download_mode,
   1230             ).get_module()
   1231 except Exception as e1:  # noqa: all the attempts failed, before raising the error we should check if the module is already cached.
   1232     try:

File /usr/local/lib/python3.9/dist-packages/datasets/load.py:846, in HubDatasetModuleFactoryWithoutScript.get_module(self)
    836     token = self.download_config.use_auth_token
    837 hfh_dataset_info = HfApi(config.HF_ENDPOINT).dataset_info(
    838     self.name,
    839     revision=self.revision,
    840     token=token,
    841     timeout=100.0,
    842 )
    843 patterns = (
    844     sanitize_patterns(self.data_files)
    845     if self.data_files is not None
--> 846     else get_patterns_in_dataset_repository(hfh_dataset_info)
    847 )
    848 data_files = DataFilesDict.from_hf_repo(
    849     patterns,
    850     dataset_info=hfh_dataset_info,
    851     allowed_extensions=ALL_ALLOWED_EXTENSIONS,
    852 )
    853 infered_module_names = {
    854     key: infer_module_for_data_files(data_files_list, use_auth_token=self.download_config.use_auth_token)
    855     for key, data_files_list in data_files.items()
    856 }

File /usr/local/lib/python3.9/dist-packages/datasets/data_files.py:471, in get_patterns_in_dataset_repository(dataset_info)
    469 resolver = partial(_resolve_single_pattern_in_dataset_repository, dataset_info)
    470 try:
--> 471     return _get_data_files_patterns(resolver)
    472 except FileNotFoundError:
    473     raise FileNotFoundError(
    474         f"The dataset repository at '{dataset_info.id}' doesn't contain any data file."
    475     ) from None

File /usr/local/lib/python3.9/dist-packages/datasets/data_files.py:99, in _get_data_files_patterns(pattern_resolver)
     97 try:
     98     for pattern in patterns:
---> 99         data_files = pattern_resolver(pattern)
    100         if len(data_files) > 0:
    101             non_empty_splits.append(split)

File /usr/local/lib/python3.9/dist-packages/datasets/data_files.py:303, in _resolve_single_pattern_in_dataset_repository(dataset_info, pattern, allowed_extensions)
    301 data_files_ignore = FILES_TO_IGNORE
    302 fs = HfFileSystem(repo_info=dataset_info)
--> 303 glob_iter = [PurePath(filepath) for filepath in fs.glob(PurePath(pattern).as_posix()) if fs.isfile(filepath)]
    304 matched_paths = [
    305     filepath
    306     for filepath in glob_iter
    307     if filepath.name not in data_files_ignore and not filepath.name.startswith(".")
    308 ]
    309 if allowed_extensions is not None:

File /usr/local/lib/python3.9/dist-packages/fsspec/spec.py:606, in AbstractFileSystem.glob(self, path, maxdepth, **kwargs)
    602         depth = None
    604 allpaths = self.find(root, maxdepth=depth, withdirs=True, detail=True, **kwargs)
--> 606 pattern = glob_translate(path + ("/" if ends_with_sep else ""))
    607 pattern = re.compile(pattern)
    609 out = {
    610     p: info
    611     for p, info in sorted(allpaths.items())
   (...)
    618     )
    619 }

File /usr/local/lib/python3.9/dist-packages/fsspec/utils.py:734, in glob_translate(pat)
    732     continue
    733 elif "**" in part:
--> 734     raise ValueError(
    735         "Invalid pattern: '**' can only be an entire path component"
    736     )
    737 if part:
    738     results.extend(_translate(part, f"{not_sep}*", not_sep))

ValueError: Invalid pattern: '**' can only be an entire path component
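
The last frame shows the check that actually fails: fsspec's glob_translate rejects any '**' that is not a whole path component and, per the answer below, the affected datasets releases pass glob patterns that do exactly that. A minimal sketch of the trigger, using only the function named in the traceback (assumes an fsspec version recent enough to ship glob_translate, as in the frames above):

from fsspec.utils import glob_translate

# '**' standing alone as a path component is accepted...
print(glob_translate("data/**/*.json"))

# ...but '**' glued to other characters inside a component hits the
# raise at fsspec/utils.py:734 shown in the traceback.
glob_translate("data/train**")  # ValueError: Invalid pattern: '**' can only be an entire path component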


I tried searching online, and the closest thing I found was this issue: https://github.com/coala/coala/issues/401
but I can't make sense of their solution. Could anyone help me understand how to resolve this error? Thanks.
My library versions (a quick way to print these is sketched after the list):

  • peft: '0.6.0'
  • torch: '2.1.2+cu121'
  • datasets: '2.1.0'
  • transformers: '4.21.3'
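
A minimal sketch for collecting such a version report, using only the standard library; the package list mirrors the bullets above plus fsspec, since that is where the error is raised:

import importlib.metadata as md

# Print the installed version of each relevant package.
for pkg in ("peft", "torch", "datasets", "transformers", "fsspec"):
    try:
        print(f"{pkg}: {md.version(pkg)}")
    except md.PackageNotFoundError:
        print(f"{pkg}: not installed")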

bvuwiixz1#

The error is most likely due to a change in fsspec that breaks datasets versions between roughly 2.1 and 2.14. It has been fixed in the latest datasets release (2.15.0); see the discussion in the issue.
Update your installation with pip install -U datasets to fix the fsspec ValueError.
This solution also works for datasets 2.10.1 on Python 3.10, since it should upgrade the package to a release (>= 2.15.0) that includes the fix.
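
A quick way to apply and verify the fix (a minimal sketch; the dataset id is the one from the question, and the >= 2.15.0 threshold is the one stated above):

# First, in a shell:  pip install -U "datasets>=2.15.0"
import datasets
import fsspec
from datasets import load_dataset

print("datasets:", datasets.__version__)  # expect >= 2.15.0 after the upgrade
print("fsspec:", fsspec.__version__)

# With a patched datasets, resolving the repo's data files no longer
# trips fsspec's '**' validation.
dataset = load_dataset("TokenBender/code_instructions_122k_alpaca_style")
print(dataset)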
