Enable more Ruff rules (#7930)
* bump version

* Add more checks

* add simplify rules

* Add RUF rules

* small perf improvements

* pylint checks

* more style fixes

* fix a number of A002 cases

* fix A001 cases

* disable unsafe fixes

* remove unneeded branches
fixes SIM102

* re-enable .keys for specific case

* Revert "remove unneeded branches"

This reverts commit f74d41b.

* fix reference
matmair authored Aug 26, 2024
1 parent bcbbae0 commit 1634258
Showing 127 changed files with 525 additions and 739 deletions.
4 changes: 2 additions & 2 deletions .github/scripts/check_js_templates.py
@@ -71,7 +71,7 @@ def check_prohibited_tags(data):
 for filename in pathlib.Path(js_i18n_dir).rglob('*.js'):
     print(f"Checking file 'translated/{os.path.basename(filename)}':")

-    with open(filename, 'r') as js_file:
+    with open(filename, encoding='utf-8') as js_file:
         data = js_file.readlines()

     errors += check_invalid_tag(data)
@@ -81,7 +81,7 @@ def check_prohibited_tags(data):
     print(f"Checking file 'dynamic/{os.path.basename(filename)}':")

     # Check that the 'dynamic' files do not contains any translated strings
-    with open(filename, 'r') as js_file:
+    with open(filename, encoding='utf-8') as js_file:
         data = js_file.readlines()

     invalid_tags = ['blocktrans', 'blocktranslate', 'trans', 'translate']
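This open() change repeats across most of the scripts touched in this commit, so a single standalone sketch may be useful. The temporary file below is hypothetical, and the exact Ruff rule codes are not stated in the diff — dropping the redundant 'r' mode and requiring an explicit text encoding are the kind of checks the newly enabled rule families cover:

    import os
    import tempfile

    # Create a throwaway file so the sketch runs end to end (hypothetical path).
    path = os.path.join(tempfile.mkdtemp(), 'example.txt')
    with open(path, 'w', encoding='utf-8') as f:
        f.write('hello\n')

    # Before: the explicit 'r' mode is redundant and the text encoding is
    # implicit (platform dependent).
    with open(path, 'r') as f:
        print(f.read())

    # After: drop the redundant mode and pass an explicit encoding, matching
    # the pattern applied throughout this commit.
    with open(path, encoding='utf-8') as f:
        print(f.read())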
4 changes: 2 additions & 2 deletions .github/scripts/check_migration_files.py
@@ -20,9 +20,9 @@
 if len(migrations) == 0:
     sys.exit(0)

-print('There are {n} unstaged migration files:'.format(n=len(migrations)))
+print(f'There are {len(migrations)} unstaged migration files:')

 for m in migrations:
-    print(' - {m}'.format(m=m))
+    print(f' - {m}')

 sys.exit(len(migrations))
11 changes: 4 additions & 7 deletions .github/scripts/version_check.py
@@ -89,7 +89,7 @@ def check_version_number(version_string, allow_duplicate=False):

         if release > version_tuple:
             highest_release = False
-            print(f'Found newer release: {str(release)}')
+            print(f'Found newer release: {release!s}')

     return highest_release

@@ -134,7 +134,7 @@ def check_version_number(version_string, allow_duplicate=False):

 version = None

-with open(version_file, 'r') as f:
+with open(version_file, encoding='utf-8') as f:
     text = f.read()

 # Extract the InvenTree software version

@@ -175,10 +175,7 @@ def check_version_number(version_string, allow_duplicate=False):
         print(f"Version number '{version}' does not match tag '{version_tag}'")
         sys.exit

-    if highest_release:
-        docker_tags = [version_tag, 'stable']
-    else:
-        docker_tags = [version_tag]
+    docker_tags = [version_tag, 'stable'] if highest_release else [version_tag]

 elif GITHUB_REF_TYPE == 'branch':
     # Otherwise we know we are targeting the 'master' branch
@@ -202,7 +199,7 @@ def check_version_number(version_string, allow_duplicate=False):
 target_repos = [REPO.lower(), f'ghcr.io/{REPO.lower()}']

 # Ref: https://getridbug.com/python/how-to-set-environment-variables-in-github-actions-using-python/
-with open(os.getenv('GITHUB_ENV'), 'a') as env_file:
+with open(os.getenv('GITHUB_ENV'), 'a', encoding='utf-8') as env_file:
     # Construct tag string
     tag_list = [[f'{r}:{t}' for t in docker_tags] for r in target_repos]
     tags = ','.join(itertools.chain(*tag_list))
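Two of the smaller rewrites above are worth a standalone look: the four-line if/else collapsed into a conditional expression, and str() inside an f-string replaced by the !s conversion flag. A minimal sketch with hypothetical values (the specific rule codes, presumably from the newly enabled SIM and RUF families, are my reading of the diff rather than something it states):

    release = (0, 16, 1)  # hypothetical version tuple, for illustration only
    version_tag = '0.16.0'
    highest_release = True

    # Before: an if/else used only to pick between two list literals, and
    # str() called inside an f-string replacement field.
    if highest_release:
        docker_tags = [version_tag, 'stable']
    else:
        docker_tags = [version_tag]
    print(f'Found newer release: {str(release)}')

    # After: a conditional expression and the !s conversion flag express the
    # same thing more directly.
    docker_tags = [version_tag, 'stable'] if highest_release else [version_tag]
    print(f'Found newer release: {release!s}')
    print(docker_tags)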
7 changes: 4 additions & 3 deletions .pre-commit-config.yaml
@@ -17,17 +17,18 @@ repos:
       - id: check-yaml
       - id: mixed-line-ending
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.1
+    rev: v0.6.1
     hooks:
       - id: ruff-format
         args: [--preview]
       - id: ruff
         args: [
           --fix,
+          # --unsafe-fixes,
           --preview
         ]
   - repo: https://github.com/astral-sh/uv-pre-commit
-    rev: 0.2.13
+    rev: 0.2.37
     hooks:
       - id: pip-compile
         name: pip-compile requirements-dev.in
@@ -77,7 +78,7 @@ repos:
           - "prettier@^2.4.1"
           - "@trivago/prettier-plugin-sort-imports"
   - repo: https://github.com/pre-commit/mirrors-eslint
-    rev: "v9.6.0"
+    rev: "v9.9.0"
     hooks:
       - id: eslint
         additional_dependencies:
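For reference, the two Ruff hooks above can be reproduced locally without pre-commit. The sketch below shows roughly the equivalent commands, using the args from the config; any extra arguments the pre-commit wrapper injects (such as explicit file lists) are omitted:

    import subprocess

    # Roughly what the 'ruff-format' and 'ruff' hooks run.
    subprocess.run(['ruff', 'format', '--preview', '.'], check=False)
    subprocess.run(['ruff', 'check', '--fix', '--preview', '.'], check=False)

    # Or let pre-commit drive all configured hooks at once.
    subprocess.run(['pre-commit', 'run', '--all-files'], check=False)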
2 changes: 1 addition & 1 deletion docs/ci/check_mkdocs_config.py
@@ -10,7 +10,7 @@

 config_file = os.path.join(tld, 'mkdocs.yml')

-with open(config_file, 'r') as f:
+with open(config_file, encoding='utf-8') as f:
     data = yaml.load(f, yaml.BaseLoader)

 assert data['strict'] == 'true'
12 changes: 6 additions & 6 deletions docs/docs/hooks.py
@@ -57,7 +57,7 @@ def make_request(url, headers):
     versions = sorted(versions, key=lambda x: StrictVersion(x['version']), reverse=True)

     # Add "latest" version first
-    if not any((x['title'] == 'latest' for x in versions)):
+    if not any(x['title'] == 'latest' for x in versions):
         versions.insert(
             0,
             {
@@ -70,7 +70,7 @@ def make_request(url, headers):
     # Ensure we have the 'latest' version
     current_version = os.environ.get('READTHEDOCS_VERSION', None)

-    if current_version and not any((x['title'] == current_version for x in versions)):
+    if current_version and not any(x['title'] == current_version for x in versions):
         versions.append({
             'version': current_version,
             'title': current_version,
@@ -82,7 +82,7 @@ def make_request(url, headers):
     print('Discovered the following versions:')
     print(versions)

-    with open(output_filename, 'w') as file:
+    with open(output_filename, 'w', encoding='utf-8') as file:
         json.dump(versions, file, indent=2)


@@ -100,7 +100,7 @@ def get_release_data():
        # Release information has been cached to file

        print("Loading release information from 'releases.json'")
-        with open(json_file) as f:
+        with open(json_file, encoding='utf-8') as f:
            return json.loads(f.read())

    # Download release information via the GitHub API
@@ -127,7 +127,7 @@ def get_release_data():
        page += 1

    # Cache these results to file
-    with open(json_file, 'w') as f:
+    with open(json_file, 'w', encoding='utf-8') as f:
        print("Saving release information to 'releases.json'")
        f.write(json.dumps(releases))

@@ -173,7 +173,7 @@ def on_config(config, *args, **kwargs):
    # Add *all* readthedocs related keys
    readthedocs = {}

-    for key in os.environ.keys():
+    for key in os.environ:
        if key.startswith('READTHEDOCS_'):
            k = key.replace('READTHEDOCS_', '').lower()
            readthedocs[k] = os.environ[key]
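The two patterns changed in this file — dropping .keys() when iterating a mapping, and dropping the redundant parentheses around a generator passed to any() — are simple enough to show in isolation. A hypothetical sketch (the dictionaries below are made up, and the rule attribution to the newly enabled SIM/C families is my inference):

    versions = [{'title': 'latest'}, {'title': '1.2.3'}]
    settings = {'title': 'latest', 'version': '1.0'}

    # Iterating a mapping yields its keys directly, so .keys() adds nothing.
    for key in settings:
        print(key, settings[key])

    # any()/all() accept a bare generator expression; the extra parentheses
    # that were removed above were purely redundant.
    has_latest = any(v['title'] == 'latest' for v in versions)
    print(has_latest)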
14 changes: 6 additions & 8 deletions docs/extract_schema.py
@@ -46,17 +46,15 @@ def top_level_path(path: str) -> str:

     key = path.split('/')[1]

-    if key in SPECIAL_PATHS.keys():
+    if key in SPECIAL_PATHS:
         return key

     return GENERAL_PATH


 def generate_schema_file(key: str) -> None:
     """Generate a schema file for the provided key."""
-    description = (
-        SPECIAL_PATHS[key] if key in SPECIAL_PATHS else 'General API Endpoints'
-    )
+    description = SPECIAL_PATHS.get(key, 'General API Endpoints')

     output = f"""
 ---
@@ -75,7 +73,7 @@ def generate_schema_file(key: str) -> None:

     print('Writing schema file to:', output_file)

-    with open(output_file, 'w') as f:
+    with open(output_file, 'w', encoding='utf-8') as f:
         f.write(output)


@@ -121,7 +119,7 @@ def generate_index_file(version: str):

     print('Writing index file to:', output_file)

-    with open(output_file, 'w') as f:
+    with open(output_file, 'w', encoding='utf-8') as f:
         f.write(output)


@@ -173,7 +171,7 @@ def parse_api_file(filename: str):

     The intent is to make the API schema easier to peruse on the documentation.
     """
-    with open(filename, 'r') as f:
+    with open(filename, encoding='utf-8') as f:
         data = yaml.safe_load(f)

     paths = data['paths']
@@ -213,7 +211,7 @@ def parse_api_file(filename: str):

     output_file = os.path.abspath(output_file)

-    with open(output_file, 'w') as f:
+    with open(output_file, 'w', encoding='utf-8') as f:
         yaml.dump(output, f)

     # Generate a markdown file for the schema
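Two related simplifications appear in this file: membership tests against a dict no longer go through .keys(), and the conditional expression that picked a description is replaced by dict.get() with a default. A self-contained sketch using hypothetical data:

    SPECIAL_PATHS = {'auth': 'Authentication endpoints', 'build': 'Build order endpoints'}
    GENERAL_PATH = 'general'


    def top_level_path(path: str) -> str:
        """Simplified mirror of the helper above, using made-up data."""
        key = path.split('/')[1]
        # 'in' on a dict already tests its keys.
        if key in SPECIAL_PATHS:
            return key
        return GENERAL_PATH


    # dict.get() with a default replaces the removed conditional expression.
    description = SPECIAL_PATHS.get('part', 'General API Endpoints')
    print(top_level_path('/auth/login/'), description)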
23 changes: 12 additions & 11 deletions docs/main.py
@@ -16,7 +16,7 @@
 here = os.path.dirname(__file__)
 settings_file = os.path.join(here, 'inventree_settings.json')

-with open(settings_file, 'r') as sf:
+with open(settings_file, encoding='utf-8') as sf:
     settings = json.load(sf)

 GLOBAL_SETTINGS = settings['global']
@@ -27,7 +27,7 @@ def get_repo_url(raw=False):
     """Return the repository URL for the current project."""
     mkdocs_yml = os.path.join(os.path.dirname(__file__), 'mkdocs.yml')

-    with open(mkdocs_yml, 'r') as f:
+    with open(mkdocs_yml, encoding='utf-8') as f:
         mkdocs_config = yaml.safe_load(f)
         repo_name = mkdocs_config['repo_name']

@@ -47,7 +47,7 @@ def check_link(url) -> bool:

     # Keep a local cache file of URLs we have already checked
     if os.path.exists(CACHE_FILE):
-        with open(CACHE_FILE, 'r') as f:
+        with open(CACHE_FILE, encoding='utf-8') as f:
             cache = f.read().splitlines()

         if url in cache:
@@ -59,7 +59,7 @@ def check_link(url) -> bool:
        response = requests.head(url, timeout=5000)
        if response.status_code == 200:
            # Update the cache file
-            with open(CACHE_FILE, 'a') as f:
+            with open(CACHE_FILE, 'a', encoding='utf-8') as f:
                f.write(f'{url}\n')

            return True
@@ -177,7 +177,7 @@ def invoke_commands():

     assert subprocess.call(command, shell=True) == 0

-    with open(output, 'r') as f:
+    with open(output, encoding='utf-8') as f:
         content = f.read()

     return content
@@ -200,12 +200,13 @@ def listimages(subdir):
         return assets

     @env.macro
-    def includefile(filename: str, title: str, format: str = ''):
+    def includefile(filename: str, title: str, fmt: str = ''):
         """Include a file in the documentation, in a 'collapse' block.

         Arguments:
         - filename: The name of the file to include (relative to the top-level directory)
         - title:
+        - fmt:
         """
         here = os.path.dirname(__file__)
         path = os.path.join(here, '..', filename)
@@ -214,11 +215,11 @@ def includefile(filename: str, title: str, format: str = ''):
         if not os.path.exists(path):
             raise FileNotFoundError(f'Required file {path} does not exist.')

-        with open(path, 'r') as f:
+        with open(path, encoding='utf-8') as f:
             content = f.read()

         data = f'??? abstract "{title}"\n\n'
-        data += f' ```{format}\n'
+        data += f' ```{fmt}\n'
         data += textwrap.indent(content, ' ')
         data += '\n\n'
         data += ' ```\n\n'
@@ -233,15 +234,15 @@ def templatefile(filename):
             'src', 'backend', 'InvenTree', 'report', 'templates', filename
         )

-        return includefile(fn, f'Template: {base}', format='html')
+        return includefile(fn, f'Template: {base}', fmt='html')

     @env.macro
     def rendersetting(setting: dict):
         """Render a provided setting object into a table row."""
         name = setting['name']
         description = setting['description']
-        default = setting.get('default', None)
-        units = setting.get('units', None)
+        default = setting.get('default')
+        units = setting.get('units')

         return f'| {name} | {description} | {default if default is not None else ""} | {units if units is not None else ""} |'

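The format → fmt rename above lines up with the 'fix a number of A002 cases' item in the commit message: a parameter named format shadows the format() builtin inside the function body. A hypothetical example of the pattern (not the project's code):

    # Before (shadows a builtin): calling format(value, spec) inside this
    # function would hit the string parameter, not the builtin.
    def render_block(text: str, format: str = '') -> str:
        return f'```{format}\n{text}\n```'


    # After: an unambiguous name keeps the builtin available.
    def render_block_fixed(text: str, fmt: str = '') -> str:
        return f'```{fmt}\n{text}\n```'


    print(render_block_fixed('print("hi")', fmt='python'))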
21 changes: 19 additions & 2 deletions pyproject.toml
@@ -20,13 +20,30 @@ src = ["src/backend/InvenTree"]
 "__init__.py" = ["D104"]

 [tool.ruff.lint]
-select = ["A", "B", "C4", "D", "I", "N", "F"]
+select = ["A", "B", "C", "C4", "D", "F", "I", "N", "SIM", "PIE", "PLE", "PLW", "RUF", "UP", "W"]
 # Things that should be enabled in the future:
 # - LOG
 # - DJ # for Django stuff
 # - S # for security stuff (bandit)

 ignore = [
+    "PLE1205",
+    # - PLE1205 - Too many arguments for logging format string
+    "PLW2901",
+    # - PLW2901 - Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
+    "PLW0602","PLW0603","PLW0604", # global variable things
+    "RUF015",
+    # - RUF015 - Prefer next({iterable}) over single element slice
+    "RUF012",
+    # - RUF012 - Mutable class attributes should be annotated with typing.ClassVar
+    "SIM117",
+    # - SIM117 - Use a single with statement with multiple contexts instead of nested with statements
+    "SIM102",
+    # - SIM102 - Use a single if statement instead of nested if statements
+    "SIM105",
+    # - SIM105 - Use contextlib.suppress({exception}) instead of try-except-pass
+    "C901",
+    # - C901 - function is too complex
     "N999",
     # - N802 - function name should be lowercase
     "N802",
@@ -42,7 +59,7 @@ ignore = [
     "B904",

     # Remove fast
-    "A001", "A002","A003","B018"
+    "A002", "B018"
 ]

 [tool.ruff.lint.pydocstyle]
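Note that "A001" and "A003" leave the temporary "Remove fast" ignore list here, so shadowing a builtin with an ordinary variable is now reported (the commit message's 'fix A001 cases' refers to changes of that kind). A hypothetical illustration of what that check catches:

    # Before (would now be flagged): 'list' shadows the builtin, making the
    # real list() unreachable for the rest of this scope.
    def collect_endpoints_shadowed():
        list = ['part', 'stock', 'build']
        return list


    # After: a distinct name leaves the builtin intact.
    def collect_endpoints():
        endpoint_names = ['part', 'stock', 'build']
        return endpoint_names


    print(collect_endpoints(), list(range(3)))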
12 changes: 7 additions & 5 deletions src/backend/InvenTree/InvenTree/admin.py
@@ -104,14 +104,16 @@ def before_import(self, dataset, using_transactions, dry_run, **kwargs):
             attribute = getattr(field, 'attribute', field_name)

             # Check if the associated database field is a non-nullable string
-            if db_field := db_fields.get(attribute):
-                if (
+            if (
+                (db_field := db_fields.get(attribute))
+                and (
                     isinstance(db_field, CharField)
                     and db_field.blank
                     and not db_field.null
-                ):
-                    if column not in self.CONVERT_NULL_FIELDS:
-                        self.CONVERT_NULL_FIELDS.append(column)
+                )
+                and column not in self.CONVERT_NULL_FIELDS
+            ):
+                self.CONVERT_NULL_FIELDS.append(column)

         return super().before_import(dataset, using_transactions, dry_run, **kwargs)
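The restructuring above folds a chain of nested if statements into a single condition, keeping the walrus assignment inside it — the same shape as the nested-if simplification (SIM102) named in the commit message, although that code is also listed under ignore in pyproject.toml, so treat the rule attribution as my inference. A standalone, simplified sketch with hypothetical data (the isinstance(CharField) check is dropped for brevity):

    from dataclasses import dataclass


    @dataclass
    class Field:
        blank: bool
        null: bool


    db_fields = {'name': Field(blank=True, null=False)}
    convert_null_fields = []
    column = 'name'

    # Before: three nested if statements.
    if db_field := db_fields.get(column):
        if db_field.blank and not db_field.null:
            if column not in convert_null_fields:
                convert_null_fields.append(column)

    # After: one condition, same behaviour, with the walrus folded in.
    convert_null_fields.clear()
    if (
        (db_field := db_fields.get(column))
        and db_field.blank
        and not db_field.null
        and column not in convert_null_fields
    ):
        convert_null_fields.append(column)

    print(convert_null_fields)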
(Diff truncated: the remaining changed files are not shown here.)
