Re-enable import sorting disabled by flake8:noqa directive when using ruff linter (#6946)

* Replace wrong ruff:noqa directive with per-file-ignores setting

* Remove unnecessary noqa F401

* Replace isort:skip with isort:split

* Fix import sorting

* Replace inline noqa:F401 with per-file
albertvillanova authored Jun 4, 2024
1 parent 43fd659 commit 1b59c75
Showing 9 changed files with 26 additions and 31 deletions.
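
For context on what the fix addresses: a file-level "# ruff: noqa" (the flake8-style blanket directive) exempts a file from every selected rule, including the import-sorting rules enabled by "I" in pyproject.toml, so the files carrying it were silently excluded from import sorting. A minimal sketch of the effect (not repository code):

# ruff: noqa
# With the blanket directive above, ruff skips every rule in this file, so the
# unsorted and unused imports below are never reported, not even by the
# import-sorting rule I001.
import sys
import os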
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -13,6 +13,9 @@ select = ["C", "E", "F", "I", "W"]
lines-after-imports = 2
known-first-party = ["datasets"]

+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["F401", "F403", "F405"]

[tool.pytest.ini_options]
# Test fails if a FutureWarning is thrown by `huggingface_hub`
# Temporarily disabled because transformers 4.41.1 calls deprecated code from `huggingface_hub` that causes FutureWarning
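The three codes ignored for __init__.py files are the ones a re-exporting package init routinely triggers, while everything else, import sorting included, stays enforced. A runnable sketch with stdlib stand-ins (illustrative only, not repository code):

from json import dumps  # F401: imported but unused here, kept on purpose as a re-export
from os.path import *   # F403: star import
p = join("tmp", "x")    # F405: join may be undefined or defined by the star import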
12 changes: 7 additions & 5 deletions src/datasets/__init__.py
@@ -1,4 +1,3 @@
-# ruff: noqa
# Copyright 2020 The HuggingFace Datasets Authors and the TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -54,10 +53,13 @@
from .utils import logging


-# deprecated modules
-from datasets import arrow_dataset as _arrow_dataset  # isort:skip
-from datasets import utils as _utils  # isort:skip
-from datasets.utils import download_manager as _deprecated_download_manager  # isort:skip
+# isort: split
+
+# Deprecated modules
+from . import arrow_dataset as _arrow_dataset
+from . import utils as _utils
+from .utils import download_manager as _deprecated_download_manager


_arrow_dataset.concatenate_datasets = concatenate_datasets
_utils.DownloadConfig = DownloadConfig
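The isort: split action comment replaces the per-line isort:skip markers: it tells the import sorter (isort, and ruff's implementation of its rules) to treat the imports above and below the marker as separate blocks, so the deprecated-module aliases stay grouped after the public imports instead of being merged and re-sorted into them. A small standalone illustration with stdlib names (not repository code):

import json
import os

# isort: split

import collections  # kept in the second block even though "collections" sorts before "json"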
2 changes: 0 additions & 2 deletions src/datasets/features/__init__.py
@@ -1,5 +1,3 @@
-# ruff: noqa
-
__all__ = [
"Audio",
"Array2D",
2 changes: 1 addition & 1 deletion src/datasets/filesystems/__init__.py
@@ -14,7 +14,7 @@
_has_s3fs = importlib.util.find_spec("s3fs") is not None

if _has_s3fs:
-    from .s3filesystem import S3FileSystem  # noqa: F401
+    from .s3filesystem import S3FileSystem

COMPRESSION_FILESYSTEMS: List[compression.BaseCompressedFileFileSystem] = [
    compression.Bz2FileSystem,
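As an aside, the guarded import above is the usual optional-dependency pattern: importlib.util.find_spec checks whether s3fs is installed without importing it, and the re-export only happens when it is; the inline noqa becomes redundant because this file is an __init__.py, now covered by the per-file-ignores entry. A standalone sketch of the pattern (hypothetical, not repository code):

import importlib.util

_has_s3fs = importlib.util.find_spec("s3fs") is not None  # True only when s3fs is installed

if _has_s3fs:
    from s3fs import S3FileSystem  # imported only to re-export; unused in this module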
6 changes: 1 addition & 5 deletions src/datasets/formatting/__init__.py
@@ -12,8 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-# ruff: noqa
-
from typing import Dict, List, Optional, Type

from .. import config
@@ -134,6 +132,4 @@ def get_formatter(format_type: Optional[str], **format_kwargs) -> Formatter:
    if format_type in _FORMAT_TYPES_ALIASES_UNAVAILABLE:
        raise _FORMAT_TYPES_ALIASES_UNAVAILABLE[format_type]
    else:
-        raise ValueError(
-            f"Return type should be None or selected in {list(type for type in _FORMAT_TYPES.keys() if type != None)}, but got '{format_type}'"
-        )
+        raise ValueError(f"Format type should be one of {list(_FORMAT_TYPES.keys())}, but got '{format_type}'")
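Besides the rewording, the new message differs slightly in content: the old f-string filtered None out of the listed keys (with a generator expression that also shadowed the builtin type and compared with != None), while the new one lists every key of _FORMAT_TYPES, None included. A toy illustration (the mapping below is hypothetical; the real _FORMAT_TYPES maps format names to Formatter classes):

_FORMAT_TYPES = {None: "PythonFormatter", "numpy": "NumpyFormatter", "torch": "TorchFormatter"}

print(list(_FORMAT_TYPES.keys()))                   # [None, 'numpy', 'torch']  <- what the new message lists
print([t for t in _FORMAT_TYPES if t is not None])  # ['numpy', 'torch']        <- what the old message listed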
4 changes: 2 additions & 2 deletions src/datasets/packaged_modules/__init__.py
@@ -6,13 +6,13 @@

from .arrow import arrow
from .audiofolder import audiofolder
-from .cache import cache  # noqa F401
+from .cache import cache
from .csv import csv
from .imagefolder import imagefolder
from .json import json
from .pandas import pandas
from .parquet import parquet
-from .sql import sql  # noqa F401
+from .sql import sql
from .text import text
from .webdataset import webdataset

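These inline suppressions are disposable for two reasons: the module is an __init__.py, so F401 is now ignored for it via the per-file-ignores entry in pyproject.toml, and the colon-less "# noqa F401" spelling never did what it appears to say, since flake8 treats a noqa without a colon as a bare directive that silences every rule on the line. A single-file sketch of the difference (not repository code):

import json    # noqa: F401  -> targeted: only the unused-import check is silenced on this line
import pickle  # noqa F401   -> missing colon: read as a bare noqa, silencing every check on the line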
2 changes: 1 addition & 1 deletion src/datasets/parallel/__init__.py
@@ -1 +1 @@
-from .parallel import parallel_backend, parallel_map, ParallelBackendConfig  # noqa F401
+from .parallel import ParallelBackendConfig, parallel_backend, parallel_map
8 changes: 3 additions & 5 deletions src/datasets/utils/__init__.py
@@ -12,16 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-# ruff: noqa
-
from . import tqdm as _tqdm # _tqdm is the module
+from .experimental import experimental
from .info_utils import VerificationMode
from .logging import disable_progress_bar, enable_progress_bar, is_progress_bar_enabled
-from .version import Version
-from .experimental import experimental
from .tqdm import (
+    are_progress_bars_disabled,
    disable_progress_bars,
    enable_progress_bars,
-    are_progress_bars_disabled,
    tqdm,
)
+from .version import Version
18 changes: 8 additions & 10 deletions tests/_test_patching.py
@@ -1,13 +1,11 @@
-# isort: skip_file

+# ruff: noqa: F401
# This is the module that test_patching.py uses to test patch_submodule()

-import os  # noqa: F401 - this is just for tests
-import os as renamed_os  # noqa: F401 - this is just for tests
-from os import path  # noqa: F401 - this is just for tests
-from os import path as renamed_path  # noqa: F401 - this is just for tests
-from os.path import join  # noqa: F401 - this is just for tests
-from os.path import join as renamed_join  # noqa: F401 - this is just for tests
+import os
+import os as renamed_os
+from os import path
+from os import path as renamed_path
+from os.path import join
+from os.path import join as renamed_join


-open = open  # noqa we just need to have a builtin inside this module to test it properly
+open = open  # we just need to have a builtin inside this module to test it properly
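
Here the blanket markers give way to a code-scoped, file-level directive: "# ruff: noqa: F401" exempts the whole file from the unused-import check only, so the intentionally unused imports no longer need per-line comments while every other selected rule, import sorting included, keeps applying. A minimal standalone sketch (not repository code):

# ruff: noqa: F401
import sys
import os  # unused imports are not reported anywhere in this file, but this pair is still
           # flagged as unsorted (I001) when the "I" rules are selected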
