Replace black with ruff-format (#5052)
kenodegard authored Nov 18, 2023
1 parent eb5ecc0 commit 3f1cecb
Showing 28 changed files with 163 additions and 202 deletions.
15 changes: 3 additions & 12 deletions .pre-commit-config.yaml
@@ -48,28 +48,19 @@ repos:
files: \.py$
args: [--license-filepath, .github/disclaimer.txt, --no-extra-eol]
exclude: ^conda_build/version.py
- repo: https://github.com/asottile/pyupgrade
rev: v3.15.0
hooks:
# upgrade standard Python codes
- id: pyupgrade
args: [--py38-plus]
- repo: https://github.com/psf/black
rev: 23.11.0
hooks:
# auto format Python codes
- id: black
- repo: https://github.com/asottile/blacken-docs
rev: 1.16.0
hooks:
# auto format Python codes within docstrings
- id: blacken-docs
additional_dependencies: [black]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.5
hooks:
# lint & attempt to correct failures (e.g. pyupgrade)
- id: ruff
args: [--fix]
# compatible replacement for black
- id: ruff-format
- repo: meta
# see https://pre-commit.com/#meta-hooks
hooks:
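This hunk drops the pyupgrade, black, and blacken-docs hooks and relies on ruff (with --fix) plus ruff-format instead; the in-file comments describe ruff as covering pyupgrade-style corrections and ruff-format as a compatible replacement for black. As a minimal sketch (hypothetical snippet, not from the repository), both formatters normalize quotes, spacing, and wrapping the same way:

# Hypothetical input written in a deliberately messy style:
def greet(name, greeting='hello', punctuation = "!"):
    return '{0} {1}{2}'.format( greeting,name,punctuation )


# What black (and ruff-format as its drop-in replacement) emits for the same function:
def greet(name, greeting="hello", punctuation="!"):
    return "{0} {1}{2}".format(greeting, name, punctuation)

With this config, a standard `pre-commit run ruff-format --all-files` (or a plain `pre-commit run --all-files`) should apply the new formatter repo-wide; that invocation is ordinary pre-commit usage, not something shown in the diff.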
6 changes: 3 additions & 3 deletions conda_build/_load_setup_py_data.py
@@ -140,18 +140,18 @@ def setup(**kw):
parser.add_argument("setup_file", help="path or filename of setup.py file")
parser.add_argument(
"--from-recipe-dir",
help=("look for setup.py file in recipe " "dir (as opposed to work dir)"),
help="look for setup.py file in recipe dir (as opposed to work dir)",
default=False,
action="store_true",
)
parser.add_argument(
"--recipe-dir",
help=("(optional) path to recipe dir, where " "setup.py should be found"),
help="(optional) path to recipe dir, where setup.py should be found",
)

parser.add_argument(
"--permit-undefined-jinja",
help=("look for setup.py file in recipe " "dir (as opposed to work dir)"),
help="look for setup.py file in recipe dir (as opposed to work dir)",
default=False,
action="store_true",
)
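The only change in this file is collapsing implicit string concatenation inside argparse help= values into single literals; Python joins adjacent string literals at compile time, so the rendered help text is unchanged. A small standalone sketch of the pattern (hypothetical parser, same idiom):

import argparse

parser = argparse.ArgumentParser()

# Before: two adjacent literals are silently joined by the compiler, which is
# easy to misread as two separate arguments.
parser.add_argument(
    "--old-style",
    help=("look for setup.py file in recipe " "dir (as opposed to work dir)"),
    action="store_true",
)

# After: one literal carries the whole sentence.
parser.add_argument(
    "--new-style",
    help="look for setup.py file in recipe dir (as opposed to work dir)",
    action="store_true",
)

parser.print_help()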
13 changes: 7 additions & 6 deletions conda_build/api.py
@@ -415,7 +415,7 @@ def convert(
)
elif package_file.endswith(".whl"):
raise RuntimeError(
"Conversion from wheel packages is not " "implemented yet, stay tuned."
"Conversion from wheel packages is not implemented yet, stay tuned."
)
else:
raise RuntimeError("cannot convert: %s" % package_file)
@@ -656,17 +656,18 @@ def debug(
]
if len(matched_outputs) > 1:
raise ValueError(
"Specified --output-id matches more than one output ({}). Please refine your output id so that only "
"a single output is found.".format(matched_outputs)
f"Specified --output-id matches more than one output ({matched_outputs}). "
"Please refine your output id so that only a single output is found."
)
elif not matched_outputs:
raise ValueError(
f"Specified --output-id did not match any outputs. Available outputs are: {outputs} Please check it and try again"
f"Specified --output-id did not match any outputs. Available outputs are: {outputs} "
"Please check it and try again"
)
if len(matched_outputs) > 1 and not path_is_build_dir:
raise ValueError(
"More than one output found for this recipe ({}). Please use the --output-id argument to filter down "
"to a single output.".format(outputs)
f"More than one output found for this recipe ({outputs}). "
"Please use the --output-id argument to filter down to a single output."
)
else:
matched_outputs = outputs
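These api.py hunks convert positional str.format() calls to f-strings and re-split the long messages across adjacent literals so each line fits the formatter's limit. A rough standalone sketch of the same rewrite (hypothetical values; both forms render identically):

matched_outputs = ["libfoo", "libfoo-dev"]  # hypothetical values

# Old style: a positional .format() call hangs off the end of a wrapped literal.
old = (
    "Specified --output-id matches more than one output ({}). Please refine "
    "your output id so that only a single output is found.".format(matched_outputs)
)

# New style: the f-string interpolates in place, and the adjacent literals are
# re-balanced so each physical line stays within the line length.
new = (
    f"Specified --output-id matches more than one output ({matched_outputs}). "
    "Please refine your output id so that only a single output is found."
)

assert old == new  # only the source layout differs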
26 changes: 15 additions & 11 deletions conda_build/build.py
@@ -498,7 +498,12 @@ def regex_files_py(
match_records[file] = {"type": type, "submatches": []}
# else:
# if match_records[file]['absolute_offset'] != absolute_offset:
# print("Dropping match.pos() of {}, neq {}".format(absolute_offset, match_records[file]['absolute_offset']))
# print(
# "Dropping match.pos() of {}, neq {}".format(
# absolute_offset,
# match_records[file]['absolute_offset'],
# )
# )
g_index = len(match.groups())
if g_index == 0:
# Complete match.
@@ -636,8 +641,9 @@ def have_regex_files(
return match_records
import copy

match_records_rg, match_records_re = copy.deepcopy(match_records), copy.deepcopy(
match_records
match_records_rg, match_records_re = (
copy.deepcopy(match_records),
copy.deepcopy(match_records),
)
if not isinstance(regex_re, (bytes, bytearray)):
regex_re = regex_re.encode("utf-8")
@@ -2254,7 +2260,9 @@ def _write_sh_activation_text(file_handle, m):
if value:
if not done_necessary_env:
# file_handle.write(
# 'export CCACHE_SLOPPINESS="pch_defines,time_macros${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n')
# 'export CCACHE_SLOPPINESS="pch_defines,time_macros'
# '${CCACHE_SLOPPINESS+,$CCACHE_SLOPPINESS}"\n'
# )
# file_handle.write('export CCACHE_CPP2=true\n')
done_necessary_env = True
if method == "symlinks":
@@ -2263,16 +2271,12 @@ def _write_sh_activation_text(file_handle, m):
file_handle.write(f"pushd {dirname_ccache_ln_bin}\n")
file_handle.write('if [ -n "$CC" ]; then\n')
file_handle.write(
" [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n".format(
ccache=ccache
)
f" [ -f {ccache} ] && [ ! -f $(basename $CC) ] && ln -s {ccache} $(basename $CC) || true\n"
)
file_handle.write("fi\n")
file_handle.write('if [ -n "$CXX" ]; then\n')
file_handle.write(
" [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n".format(
ccache=ccache
)
f" [ -f {ccache} ] && [ ! -f $(basename $CXX) ] && ln -s {ccache} $(basename $CXX) || true\n"
)
file_handle.write("fi\n")
file_handle.write("popd\n")
@@ -4084,7 +4088,7 @@ def handle_anaconda_upload(paths, config):
prompter = "$ "
if not upload or anaconda is None:
no_upload_message = (
"# If you want to upload package(s) to anaconda.org later, type:\n" "\n"
"# If you want to upload package(s) to anaconda.org later, type:\n\n"
)
no_upload_message += (
"\n"
2 changes: 1 addition & 1 deletion conda_build/cli/main_build.py
@@ -441,7 +441,7 @@ def parse_args(args):
)
p.add_argument(
"--stats-file",
help=("File path to save build statistics to. Stats are " "in JSON format"),
help="File path to save build statistics to. Stats are in JSON format",
)
p.add_argument(
"--extra-deps",
2 changes: 1 addition & 1 deletion conda_build/cli/main_develop.py
@@ -19,7 +19,7 @@ def parse_args(args):
Install a Python package in 'development mode'.
This works by creating a conda.pth file in site-packages."""
This works by creating a conda.pth file in site-packages.""",
# TODO: Use setup.py to determine any entry-points to install.
)

2 changes: 1 addition & 1 deletion conda_build/cli/main_render.py
@@ -66,7 +66,7 @@ def get_render_parser():
p.add_argument(
"--output",
action="store_true",
help="Output the conda package filename which would have been " "created",
help="Output the conda package filename which would have been created",
)
p.add_argument(
"--python",
4 changes: 2 additions & 2 deletions conda_build/config.py
@@ -868,11 +868,11 @@ def clean(self, remove_folders=True):
rm_rf(os.path.join(self.build_folder, "prefix_files"))
else:
print(
"\nLeaving build/test directories:" "\n Work:\n",
"\nLeaving build/test directories:\n Work:\n",
self.work_dir,
"\n Test:\n",
self.test_dir,
"\nLeaving build/test environments:" "\n Test:\nsource activate ",
"\nLeaving build/test environments:\n Test:\nsource activate ",
self.test_prefix,
"\n Build:\nsource activate ",
self.build_prefix,
3 changes: 1 addition & 2 deletions conda_build/exceptions.py
@@ -88,8 +88,7 @@ def __init__(
self.packages.append(pkg)
if not self.packages:
raise RuntimeError(
"failed to parse packages from exception:"
" {}".format(str(conda_exception))
f"failed to parse packages from exception: {conda_exception}"
)

def __str__(self):
3 changes: 2 additions & 1 deletion conda_build/index.py
@@ -362,7 +362,8 @@ def update_index(
if dirname in utils.DEFAULT_SUBDIRS:
if warn:
log.warn(
"The update_index function has changed to index all subdirs at once. You're pointing it at a single subdir. "
"The update_index function has changed to index all subdirs at once. "
"You're pointing it at a single subdir. "
"Please update your code to point it at the channel root, rather than a subdir."
)
return update_index(
14 changes: 2 additions & 12 deletions conda_build/jinja_context.py
@@ -97,18 +97,8 @@ def __init__(
__call__
) = (
__getitem__
) = (
__lt__
) = (
__le__
) = (
__gt__
) = (
__ge__
) = (
__complex__
) = __pow__ = __rpow__ = lambda self, *args, **kwargs: self._return_undefined(
self._undefined_name
) = __lt__ = __le__ = __gt__ = __ge__ = __complex__ = __pow__ = __rpow__ = (
lambda self, *args, **kwargs: self._return_undefined(self._undefined_name)
)

# Accessing an attribute of an Undefined variable
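The jinja_context.py hunk is pure reflow: a long chained assignment that binds many dunder methods to one lambda used to be wrapped with one parenthesized name per line, and ruff-format packs the chain into as few lines as the length limit allows. A toy illustration of the idiom (hypothetical class, not the project's Undefined subclass):

class Quiet:
    """Each listed operation silently returns None instead of raising."""

    def _swallow(self, *args, **kwargs):
        return None

    # One chained assignment points several special methods at the same
    # fallback; a formatter only chooses how to wrap the long line.
    __add__ = __sub__ = __mul__ = __call__ = __getitem__ = (
        lambda self, *args, **kwargs: self._swallow()
    )


q = Quiet()
print(q + 1, q["missing"], q(42))  # -> None None None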
5 changes: 3 additions & 2 deletions conda_build/metadata.py
@@ -321,7 +321,7 @@ def ensure_valid_fields(meta):
pin_depends = meta.get("build", {}).get("pin_depends", "")
if pin_depends and pin_depends not in ("", "record", "strict"):
raise RuntimeError(
"build/pin_depends must be 'record' or 'strict' - " "not '%s'" % pin_depends
f"build/pin_depends must be 'record' or 'strict' - not '{pin_depends}'"
)


@@ -1470,7 +1470,8 @@ def get_depends_top_and_out(self, typ):
meta_requirements = ensure_list(self.get_value("requirements/" + typ, []))[:]
req_names = {req.split()[0] for req in meta_requirements if req}
extra_reqs = []
# this is for the edge case of requirements for top-level being also partially defined in a similarly named output
# this is for the edge case of requirements for top-level being
# partially defined in a similarly named output
if not self.is_output:
matching_output = [
out
6 changes: 1 addition & 5 deletions conda_build/noarch_python.py
@@ -142,11 +142,7 @@ def transform(m, files, prefix):
"""\
@echo off
"%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py"
""".replace(
"\n", "\r\n"
).encode(
"utf-8"
)
""".replace("\n", "\r\n").encode("utf-8")
)

d = populate_files(m, files, prefix)
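noarch_python.py shows ruff-format pulling a short .replace().encode() chain back onto the line that closes a triple-quoted literal instead of wrapping every call separately. A standalone before/after sketch (same technique, illustrative content):

# Old wrapping: each chained call broke onto its own parenthesized block
# because the chain hangs off a multi-line string literal.
cmd_old = """\
@echo off
"%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py"
""".replace(
    "\n", "\r\n"
).encode(
    "utf-8"
)

# New wrapping: the whole chain fits on the closing line.
cmd_new = """\
@echo off
"%PREFIX%\\python.exe" "%SOURCE_DIR%\\link.py"
""".replace("\n", "\r\n").encode("utf-8")

assert cmd_old == cmd_new  # only the source layout differs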
56 changes: 25 additions & 31 deletions conda_build/os_utils/liefldd.py
@@ -588,8 +588,14 @@ def inspect_linkages_lief(
"""
if binary.format == lief.EXE_FORMATS.PE:
import random
path_fixed = os.path.dirname(path_fixed) + os.sep + \
''.join(random.choice((str.upper, str.lower))(c) for c in os.path.basename(path_fixed))
path_fixed = (
os.path.dirname(path_fixed)
+ os.sep
+ ''.join(
random.choice((str.upper, str.lower))(c)
for c in os.path.basename(path_fixed)
)
)
if random.getrandbits(1):
path_fixed = path_fixed.replace(os.sep + 'lib' + os.sep, os.sep + 'Lib' + os.sep)
else:
@@ -650,16 +656,11 @@ def get_linkages(
)
if debug and result_pyldd and set(result_lief) != set(result_pyldd):
print(
"WARNING: Disagreement in get_linkages(filename={}, resolve_filenames={}, recurse={}, sysroot={}, envroot={}, arch={}):\n lief: {}\npyldd: {}\n (using lief)".format(
filename,
resolve_filenames,
recurse,
sysroot,
envroot,
arch,
result_lief,
result_pyldd,
)
f"WARNING: Disagreement in get_linkages({filename=}, "
f"{resolve_filenames=}, {recurse=}, {sysroot=}, {envroot=}, {arch=}):\n"
f" lief: {result_lief}\n"
f"pyldd: {result_pyldd}\n"
" (using lief)"
)
return result_lief

@@ -689,7 +690,7 @@ def is_archive(file):


def get_static_lib_exports(file):
# file = '/Users/rdonnelly/conda/main-augmented-tmp/osx-64_14354bd0cd1882bc620336d9a69ae5b9/lib/python2.7/config/libpython2.7.a'
# file = '/Users/rdonnelly/conda/main-augmented-tmp/osx-64_14354bd0cd1882bc620336d9a69ae5b9/lib/python2.7/config/libpython2.7.a' # noqa: E501
# References:
# https://github.com/bminor/binutils-gdb/tree/master/bfd/archive.c
# https://en.wikipedia.org/wiki/Ar_(Unix)
@@ -737,7 +738,8 @@ def _parse_ar_hdr(content, index):
typ = "NORMAL"
if b"/" in name:
name = name[: name.find(b"/")]
# if debug_static_archives: print("index={}, name={}, ending={}, size={}, type={}".format(index, name, ending, size, typ))
# if debug_static_archives:
# print(f"index={index}, name={name}, ending={ending}, size={size}, type={typ}")
index += header_sz + name_len
return index, name, name_len, size, typ

@@ -813,9 +815,7 @@ def _parse_ar_hdr(content, index):
(size_string_table,) = struct.unpack(
"<" + toc_integers_fmt,
content[
index
+ toc_integers_sz
+ (nsymbols * ranlib_struct_sz) : index
index + toc_integers_sz + (nsymbols * ranlib_struct_sz) : index
+ 4
+ 4
+ (nsymbols * ranlib_struct_sz)
@@ -827,8 +827,7 @@
ran_off, ran_strx = struct.unpack(
"<" + ranlib_struct_field_fmt + ranlib_struct_field_fmt,
content[
ranlib_index
+ (i * ranlib_struct_sz) : ranlib_index
ranlib_index + (i * ranlib_struct_sz) : ranlib_index
+ ((i + 1) * ranlib_struct_sz)
],
)
@@ -845,8 +844,7 @@
)
)
string_table = content[
ranlib_index
+ (nsymbols * ranlib_struct_sz) : ranlib_index
ranlib_index + (nsymbols * ranlib_struct_sz) : ranlib_index
+ (nsymbols * ranlib_struct_sz)
+ size_string_table
]
@@ -958,7 +956,7 @@ def get_static_lib_exports_dumpbin(filename):
> 020 00000000 UNDEF notype () External | malloc
> vs
> 004 00000010 SECT1 notype () External | _ZN3gnu11autosprintfC1EPKcz
"""
""" # noqa: E501
dumpbin_exe = find_executable("dumpbin")
if not dumpbin_exe:
"""
@@ -1077,19 +1075,15 @@ def get_exports(filename, arch="native", enable_static=False):
print(f"errors: {error_count} (-{len(diff1)}, +{len(diff2)})")
if debug_static_archives:
print(
"WARNING :: Disagreement regarding static lib exports in {} between nm (nsyms={}) and lielfldd (nsyms={}):".format(
filename, len(exports), len(exports2)
)
"WARNING :: Disagreement regarding static lib exports in "
f"{filename} between nm (nsyms={len(exports)}) and "
"lielfldd (nsyms={len(exports2)}):"
)
print(
"** nm.diff(liefldd) [MISSING SYMBOLS] **\n{}".format(
"\n".join(diff1)
)
"\n".join(("** nm.diff(liefldd) [MISSING SYMBOLS] **", *diff1))
)
print(
"** liefldd.diff(nm) [ EXTRA SYMBOLS] **\n{}".format(
"\n".join(diff2)
)
"\n".join(("** liefldd.diff(nm) [ EXTRA SYMBOLS] **", *diff2))
)

if not result:
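The rebuilt get_linkages warning uses f-strings with the `=` specifier (available since Python 3.8), which prints the expression together with its repr and replaces the long positional .format() call. A quick sketch of the specifier with made-up values:

filename = "libexample.so"  # hypothetical values for illustration
recurse = True
arch = "x86_64"

# f"{name=}" expands to "name=<repr(value)>", so the label can never drift
# out of sync with the value it describes.
print(f"get_linkages({filename=}, {recurse=}, {arch=})")
# -> get_linkages(filename='libexample.so', recurse=True, arch='x86_64')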