PyLong_Export with PyLong_AsLongAndOverflow #6 (Closed)

@@ -0,0 +1 @@
Add capability to ignore entire files or directories in check warning CI tool
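For illustration, here is a hypothetical .warningignore snippet showing the syntax this change accepts, based on the parsing rules in Tools/build/check_warnings.py below: a per-file expected warning count, a count of * to ignore every warning in a file, and a path ending in / with a count of * to ignore an entire directory. The paths are examples only and are not part of this patch.

    # <file or directory> <expected warning count, or * to ignore all>
    Modules/expat/xmlparse.c 8
    Objects/longobject.c *
    Modules/_decimal/libmpdec/ *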
106 changes: 46 additions & 60 deletions Objects/longobject.c
@@ -1117,18 +1117,53 @@ _resolve_endianness(int *endianness)
return 0;
}


static Py_ssize_t
long_asnativebytes(PyLongObject *v, void* buffer, Py_ssize_t n,
int little_endian, int unsigned_buffer)
Py_ssize_t
PyLong_AsNativeBytes(PyObject* vv, void* buffer, Py_ssize_t n, int flags)
{
Py_ssize_t res = 0;
PyLongObject *v;
union {
Py_ssize_t v;
unsigned char b[sizeof(Py_ssize_t)];
} cv;
int do_decref = 0;
Py_ssize_t res = 0;

if (vv == NULL || n < 0) {
PyErr_BadInternalCall();
return -1;
}

int little_endian = flags;
if (_resolve_endianness(&little_endian) < 0) {
return -1;
}

if (PyLong_Check(vv)) {
v = (PyLongObject *)vv;
}
else if (flags != -1 && (flags & Py_ASNATIVEBYTES_ALLOW_INDEX)) {
v = (PyLongObject *)_PyNumber_Index(vv);
if (v == NULL) {
return -1;
}
do_decref = 1;
}
else {
PyErr_Format(PyExc_TypeError, "expect int, got %T", vv);
return -1;
}

if ((flags != -1 && (flags & Py_ASNATIVEBYTES_REJECT_NEGATIVE))
&& _PyLong_IsNegative(v)) {
PyErr_SetString(PyExc_ValueError, "Cannot convert negative int");
if (do_decref) {
Py_DECREF(v);
}
return -1;
}

if (_PyLong_IsCompact(v)) {
res = 0;
cv.v = _PyLong_CompactValue(v);
/* Most paths result in res = sizeof(compact value). Only the case
* where 0 < n < sizeof(compact value) do we need to check and adjust
@@ -1165,7 +1200,7 @@ long_asnativebytes(PyLongObject *v, void* buffer, Py_ssize_t n,
/* Positive values with the MSB set do not require an
* additional bit when the caller's intent is to treat them
* as unsigned. */
if (unsigned_buffer) {
if (flags == -1 || (flags & Py_ASNATIVEBYTES_UNSIGNED_BUFFER)) {
res = n;
} else {
res = n + 1;
@@ -1252,7 +1287,7 @@ long_asnativebytes(PyLongObject *v, void* buffer, Py_ssize_t n,
* as unsigned. */
unsigned char *b = (unsigned char *)buffer;
if (b[little_endian ? n - 1 : 0] & 0x80) {
if (unsigned_buffer) {
if (flags == -1 || (flags & Py_ASNATIVEBYTES_UNSIGNED_BUFFER)) {
res = n;
} else {
res = n + 1;
@@ -1261,54 +1296,6 @@ long_asnativebytes(PyLongObject *v, void* buffer, Py_ssize_t n,
}
}
}
return res;
}


Py_ssize_t
PyLong_AsNativeBytes(PyObject* vv, void* buffer, Py_ssize_t n, int flags)
{
PyLongObject *v;
int do_decref = 0;
Py_ssize_t res = 0;

if (vv == NULL || n < 0) {
PyErr_BadInternalCall();
return -1;
}

int little_endian = flags;
if (_resolve_endianness(&little_endian) < 0) {
return -1;
}

if (PyLong_Check(vv)) {
v = (PyLongObject *)vv;
}
else if (flags != -1 && (flags & Py_ASNATIVEBYTES_ALLOW_INDEX)) {
v = (PyLongObject *)_PyNumber_Index(vv);
if (v == NULL) {
return -1;
}
do_decref = 1;
}
else {
PyErr_Format(PyExc_TypeError, "expect int, got %T", vv);
return -1;
}

if ((flags != -1 && (flags & Py_ASNATIVEBYTES_REJECT_NEGATIVE))
&& _PyLong_IsNegative(v)) {
PyErr_SetString(PyExc_ValueError, "Cannot convert negative int");
if (do_decref) {
Py_DECREF(v);
}
return -1;
}

int unsigned_buffer = (flags == -1
|| (flags & Py_ASNATIVEBYTES_UNSIGNED_BUFFER));
res = long_asnativebytes(v, buffer, n, little_endian, unsigned_buffer);

if (do_decref) {
Py_DECREF(v);
@@ -6810,20 +6797,19 @@ PyLong_Export(PyObject *obj, PyLongExport *export_long)
PyErr_Format(PyExc_TypeError, "expect int, got %T", obj);
return -1;
}
PyLongObject *self = (PyLongObject*)obj;

int64_t value;
Py_ssize_t bytes = long_asnativebytes(self, &value, sizeof(value),
PY_LITTLE_ENDIAN, 0);
int overflow;
long value = PyLong_AsLongAndOverflow(obj, &overflow);

if ((size_t)bytes <= sizeof(value)) {
if (!overflow) {
export_long->value = value;
export_long->negative = 0;
export_long->ndigits = 0;
export_long->digits = 0;
export_long->_reserved = 0;
}
else {
PyLongObject *self = (PyLongObject*)obj;
export_long->value = 0;
export_long->negative = _PyLong_IsNegative(self);
export_long->ndigits = _PyLong_DigitCount(self);
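For context, a minimal sketch, not part of the patch, of the calling pattern the revised PyLong_Export relies on: PyLong_AsLongAndOverflow() returns the value directly when it fits in a C long and reports overflow through an out parameter instead of raising OverflowError, so the export code can fall back to the digits representation without having to clear an exception. The helper name below is hypothetical.

    #include <Python.h>

    /* Hypothetical helper: try to read an int as a C long.
     * Returns 1 and stores the value if it fits, 0 if it overflows
     * (caller falls back to exporting the digits array), -1 on error. */
    static int
    try_compact_long(PyObject *obj, long *out)
    {
        int overflow;
        long value = PyLong_AsLongAndOverflow(obj, &overflow);
        if (value == -1 && PyErr_Occurred()) {
            return -1;          /* not an int, or another conversion error */
        }
        if (overflow) {
            return 0;           /* magnitude does not fit in a C long */
        }
        *out = value;
        return 1;
    }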
2 changes: 1 addition & 1 deletion Tools/build/.warningignore_macos
@@ -6,4 +6,4 @@
Modules/expat/siphash.h 7
Modules/expat/xmlparse.c 8
Modules/expat/xmltok.c 3
Modules/expat/xmltok_impl.c 26
Modules/expat/xmltok_impl.c 26
1 change: 0 additions & 1 deletion Tools/build/.warningignore_ubuntu
@@ -3,4 +3,3 @@
# Keep lines sorted lexicographically to help avoid merge conflicts.
# Format example:
# /path/to/file (number of warnings in file)

148 changes: 103 additions & 45 deletions Tools/build/check_warnings.py
@@ -11,9 +11,49 @@
from typing import NamedTuple


class FileWarnings(NamedTuple):
name: str
class IgnoreRule(NamedTuple):
file_path: str
count: int
ignore_all: bool = False
is_directory: bool = False


def parse_warning_ignore_file(file_path: str) -> set[IgnoreRule]:
"""
Parses the warning ignore file and returns a set of IgnoreRules
"""
files_with_expected_warnings = set()
with Path(file_path).open(encoding="UTF-8") as ignore_rules_file:
files_with_expected_warnings = set()
for i, line in enumerate(ignore_rules_file):
line = line.strip()
if line and not line.startswith("#"):
line_parts = line.split()
if len(line_parts) >= 2:
file_name = line_parts[0]
count = line_parts[1]
ignore_all = count == "*"
is_directory = file_name.endswith("/")

# Directories must have a wildcard count
if is_directory and count != "*":
print(
f"Error parsing ignore file: {file_path} at line: {i}"
)
print(
f"Directory {file_name} must have count set to *"
)
sys.exit(1)
if ignore_all:
count = 0

files_with_expected_warnings.add(
IgnoreRule(
file_name, int(count), ignore_all, is_directory
)
)

return files_with_expected_warnings


def extract_warnings_from_compiler_output(
@@ -48,11 +88,15 @@ def extract_warnings_from_compiler_output(
"line": match.group("line"),
"column": match.group("column"),
"message": match.group("message"),
"option": match.group("option").lstrip("[").rstrip("]"),
"option": match.group("option")
.lstrip("[")
.rstrip("]"),
}
)
except:
print(f"Error parsing compiler output. Unable to extract warning on line {i}:\n{line}")
print(
f"Error parsing compiler output. Unable to extract warning on line {i}:\n{line}"
)
sys.exit(1)

return compiler_warnings
@@ -78,9 +122,24 @@ def get_warnings_by_file(warnings: list[dict]) -> dict[str, list[dict]]:
return warnings_by_file


def is_file_ignored(
file_path: str, ignore_rules: set[IgnoreRule]
) -> IgnoreRule | None:
"""
Returns the IgnoreRule object for the file path if there is a related rule for it
"""
for rule in ignore_rules:
if rule.is_directory:
if file_path.startswith(rule.file_path):
return rule
elif file_path == rule.file_path:
return rule
return None


def get_unexpected_warnings(
files_with_expected_warnings: set[FileWarnings],
files_with_warnings: set[FileWarnings],
ignore_rules: set[IgnoreRule],
files_with_warnings: set[IgnoreRule],
) -> int:
"""
Returns failure status if warnings discovered in list of warnings
@@ -89,14 +148,21 @@ def get_unexpected_warnings(
"""
unexpected_warnings = {}
for file in files_with_warnings.keys():
found_file_in_ignore_list = False
for ignore_file in files_with_expected_warnings:
if file == ignore_file.name:
if len(files_with_warnings[file]) > ignore_file.count:
unexpected_warnings[file] = (files_with_warnings[file], ignore_file.count)
found_file_in_ignore_list = True
break
if not found_file_in_ignore_list:

rule = is_file_ignored(file, ignore_rules)

if rule:
if rule.ignore_all:
continue

if len(files_with_warnings[file]) > rule.count:
unexpected_warnings[file] = (
files_with_warnings[file],
rule.count,
)
continue
elif rule is None:
# If the file is not in the ignore list, then it is unexpected
unexpected_warnings[file] = (files_with_warnings[file], 0)

if unexpected_warnings:
@@ -115,19 +181,27 @@ def get_unexpected_improvements(


def get_unexpected_improvements(
files_with_expected_warnings: set[FileWarnings],
files_with_warnings: set[FileWarnings],
ignore_rules: set[IgnoreRule],
files_with_warnings: set[IgnoreRule],
) -> int:
"""
Returns failure status if there are no warnings in the list of warnings
for a file that is in the list of files with expected warnings
Returns failure status if the number of warnings for a file is greater
than the expected number of warnings for that file based on the ignore
rules
"""
unexpected_improvements = []
for file in files_with_expected_warnings:
if file.name not in files_with_warnings.keys():
unexpected_improvements.append((file.name, file.count, 0))
elif len(files_with_warnings[file.name]) < file.count:
unexpected_improvements.append((file.name, file.count, len(files_with_warnings[file.name])))
for rule in ignore_rules:
if not rule.ignore_all and rule.file_path not in files_with_warnings.keys():
if rule.file_path not in files_with_warnings.keys():
unexpected_improvements.append((rule.file_path, rule.count, 0))
elif len(files_with_warnings[rule.file_path]) < rule.count:
unexpected_improvements.append(
(
rule.file_path,
rule.count,
len(files_with_warnings[rule.file_path]),
)
)

if unexpected_improvements:
print("Unexpected improvements:")
@@ -202,55 +276,39 @@ def main(argv: list[str] | None = None) -> int:
"Warning ignore file not specified."
" Continuing without it (no warnings ignored)."
)
files_with_expected_warnings = set()
ignore_rules = set()
else:
if not Path(args.warning_ignore_file_path).is_file():
print(
f"Warning ignore file does not exist:"
f" {args.warning_ignore_file_path}"
)
return 1
with Path(args.warning_ignore_file_path).open(
encoding="UTF-8"
) as clean_files:
# Files with expected warnings are stored as a set of tuples
# where the first element is the file name and the second element
# is the number of warnings expected in that file
files_with_expected_warnings = {
FileWarnings(
file.strip().split()[0], int(file.strip().split()[1])
)
for file in clean_files
if file.strip() and not file.startswith("#")
}
ignore_rules = parse_warning_ignore_file(args.warning_ignore_file_path)

with Path(args.compiler_output_file_path).open(encoding="UTF-8") as f:
compiler_output_file_contents = f.read()

warnings = extract_warnings_from_compiler_output(
compiler_output_file_contents,
args.compiler_output_type,
args.path_prefix
args.path_prefix,
)

files_with_warnings = get_warnings_by_file(warnings)

status = get_unexpected_warnings(
files_with_expected_warnings, files_with_warnings
)
status = get_unexpected_warnings(ignore_rules, files_with_warnings)
if args.fail_on_regression:
exit_code |= status

status = get_unexpected_improvements(
files_with_expected_warnings, files_with_warnings
)
status = get_unexpected_improvements(ignore_rules, files_with_warnings)
if args.fail_on_improvement:
exit_code |= status

print(
"For information about this tool and its configuration"
" visit https://devguide.python.org/development-tools/warnings/"
)
)

return exit_code
