diff --git a/pyproject.toml b/pyproject.toml
index 977168c9789..53328fa65cc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,6 +13,9 @@ select = ["C", "E", "F", "I", "W"]
 lines-after-imports = 2
 known-first-party = ["datasets"]
 
+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["F401", "F403", "F405"]
+
 [tool.pytest.ini_options]
 # Test fails if a FutureWarning is thrown by `huggingface_hub`
 # Temporarily disabled because transformers 4.41.1 calls deprecated code from `huggingface_hub` that causes FutureWarning
diff --git a/src/datasets/__init__.py b/src/datasets/__init__.py
index f36fc4fd728..653743ec635 100644
--- a/src/datasets/__init__.py
+++ b/src/datasets/__init__.py
@@ -1,4 +1,3 @@
-# ruff: noqa
 # Copyright 2020 The HuggingFace Datasets Authors and the TensorFlow Datasets Authors.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -54,10 +53,13 @@ from .utils import logging
 
 
 
-# deprecated modules
-from datasets import arrow_dataset as _arrow_dataset  # isort:skip
-from datasets import utils as _utils  # isort:skip
-from datasets.utils import download_manager as _deprecated_download_manager  # isort:skip
+# isort: split
+
+# Deprecated modules
+from . import arrow_dataset as _arrow_dataset
+from . import utils as _utils
+from .utils import download_manager as _deprecated_download_manager
+
 
 _arrow_dataset.concatenate_datasets = concatenate_datasets
 _utils.DownloadConfig = DownloadConfig
diff --git a/src/datasets/features/__init__.py b/src/datasets/features/__init__.py
index 65ae879a2bb..b3c03fbfed7 100644
--- a/src/datasets/features/__init__.py
+++ b/src/datasets/features/__init__.py
@@ -1,5 +1,3 @@
-# ruff: noqa
-
 __all__ = [
     "Audio",
     "Array2D",
diff --git a/src/datasets/filesystems/__init__.py b/src/datasets/filesystems/__init__.py
index c2753e3d380..5c48c7069ce 100644
--- a/src/datasets/filesystems/__init__.py
+++ b/src/datasets/filesystems/__init__.py
@@ -14,7 +14,7 @@
 _has_s3fs = importlib.util.find_spec("s3fs") is not None
 
 if _has_s3fs:
-    from .s3filesystem import S3FileSystem  # noqa: F401
+    from .s3filesystem import S3FileSystem
 
 COMPRESSION_FILESYSTEMS: List[compression.BaseCompressedFileFileSystem] = [
     compression.Bz2FileSystem,
diff --git a/src/datasets/formatting/__init__.py b/src/datasets/formatting/__init__.py
index 78f64cfe912..8aa21d37bd2 100644
--- a/src/datasets/formatting/__init__.py
+++ b/src/datasets/formatting/__init__.py
@@ -12,8 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# ruff: noqa
-
 from typing import Dict, List, Optional, Type
 
 from .. import config
@@ -134,6 +132,4 @@ def get_formatter(format_type: Optional[str], **format_kwargs) -> Formatter:
     if format_type in _FORMAT_TYPES_ALIASES_UNAVAILABLE:
         raise _FORMAT_TYPES_ALIASES_UNAVAILABLE[format_type]
     else:
-        raise ValueError(
-            f"Return type should be None or selected in {list(type for type in _FORMAT_TYPES.keys() if type != None)}, but got '{format_type}'"
-        )
+        raise ValueError(f"Format type should be one of {list(_FORMAT_TYPES.keys())}, but got '{format_type}'")
diff --git a/src/datasets/packaged_modules/__init__.py b/src/datasets/packaged_modules/__init__.py
index bf1408eaf91..984dc0f03a3 100644
--- a/src/datasets/packaged_modules/__init__.py
+++ b/src/datasets/packaged_modules/__init__.py
@@ -6,13 +6,13 @@
 
 from .arrow import arrow
 from .audiofolder import audiofolder
-from .cache import cache  # noqa F401
+from .cache import cache
 from .csv import csv
 from .imagefolder import imagefolder
 from .json import json
 from .pandas import pandas
 from .parquet import parquet
-from .sql import sql  # noqa F401
+from .sql import sql
 from .text import text
 from .webdataset import webdataset
 
diff --git a/src/datasets/parallel/__init__.py b/src/datasets/parallel/__init__.py
index d8309358851..a8d527f4d09 100644
--- a/src/datasets/parallel/__init__.py
+++ b/src/datasets/parallel/__init__.py
@@ -1 +1 @@
-from .parallel import parallel_backend, parallel_map, ParallelBackendConfig  # noqa F401
+from .parallel import ParallelBackendConfig, parallel_backend, parallel_map
diff --git a/src/datasets/utils/__init__.py b/src/datasets/utils/__init__.py
index 5df74ff8cac..a1c3d293599 100644
--- a/src/datasets/utils/__init__.py
+++ b/src/datasets/utils/__init__.py
@@ -12,16 +12,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# ruff: noqa
-
 from . import tqdm as _tqdm  # _tqdm is the module
+from .experimental import experimental
 from .info_utils import VerificationMode
 from .logging import disable_progress_bar, enable_progress_bar, is_progress_bar_enabled
-from .version import Version
-from .experimental import experimental
 from .tqdm import (
+    are_progress_bars_disabled,
     disable_progress_bars,
     enable_progress_bars,
-    are_progress_bars_disabled,
     tqdm,
 )
+from .version import Version
diff --git a/tests/_test_patching.py b/tests/_test_patching.py
index dae9a419ec0..78aa45a2e35 100644
--- a/tests/_test_patching.py
+++ b/tests/_test_patching.py
@@ -1,13 +1,11 @@
-# isort: skip_file
-
+# ruff: noqa: F401
 # This is the module that test_patching.py uses to test patch_submodule()
-
-import os  # noqa: F401 - this is just for tests
-import os as renamed_os  # noqa: F401 - this is just for tests
-from os import path  # noqa: F401 - this is just for tests
-from os import path as renamed_path  # noqa: F401 - this is just for tests
-from os.path import join  # noqa: F401 - this is just for tests
-from os.path import join as renamed_join  # noqa: F401 - this is just for tests
+import os
+import os as renamed_os
+from os import path
+from os import path as renamed_path
+from os.path import join
+from os.path import join as renamed_join
 
 
-open = open  # noqa we just need to have a builtin inside this module to test it properly
+open = open  # we just need to have a builtin inside this module to test it properly