diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 28ebc1643bd694..ffcfbac290b726 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -140,9 +140,6 @@ jobs: run: make regen-configure - name: Build CPython run: | - # Deepfreeze will usually cause global objects to be added or removed, - # so we run it before regen-global-objects gets rum (in regen-all). - make regen-deepfreeze make -j4 regen-all make regen-stdlib-module-names - name: Check for changes @@ -261,7 +258,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 1.1.1v + OPENSSL_VER: 3.0.11 PYTHONSTRICTEXTENSIONBUILD: 1 steps: - uses: actions/checkout@v4 @@ -330,7 +327,7 @@ jobs: strategy: fail-fast: false matrix: - openssl_ver: [1.1.1v, 3.0.10, 3.1.2] + openssl_ver: [1.1.1w, 3.0.11, 3.1.3] env: OPENSSL_VER: ${{ matrix.openssl_ver }} MULTISSL_DIR: ${{ github.workspace }}/multissl @@ -382,7 +379,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_hypothesis == 'true' env: - OPENSSL_VER: 1.1.1v + OPENSSL_VER: 3.0.11 PYTHONSTRICTEXTENSIONBUILD: 1 steps: - uses: actions/checkout@v4 @@ -491,7 +488,7 @@ jobs: needs: check_source if: needs.check_source.outputs.run_tests == 'true' env: - OPENSSL_VER: 1.1.1v + OPENSSL_VER: 3.0.11 PYTHONSTRICTEXTENSIONBUILD: 1 ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0 steps: diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst index 86137fb38c9b93..570435d48866d3 100644 --- a/Doc/howto/urllib2.rst +++ b/Doc/howto/urllib2.rst @@ -194,11 +194,11 @@ which comes after we have a look at what happens when things go wrong. Handling Exceptions =================== -*urlopen* raises :exc:`URLError` when it cannot handle a response (though as +*urlopen* raises :exc:`~urllib.error.URLError` when it cannot handle a response (though as usual with Python APIs, built-in exceptions such as :exc:`ValueError`, :exc:`TypeError` etc. may also be raised). -:exc:`HTTPError` is the subclass of :exc:`URLError` raised in the specific case of +:exc:`~urllib.error.HTTPError` is the subclass of :exc:`~urllib.error.URLError` raised in the specific case of HTTP URLs. The exception classes are exported from the :mod:`urllib.error` module. @@ -229,12 +229,12 @@ the status code indicates that the server is unable to fulfil the request. The default handlers will handle some of these responses for you (for example, if the response is a "redirection" that requests the client fetch the document from a different URL, urllib will handle that for you). For those it can't handle, -urlopen will raise an :exc:`HTTPError`. Typical errors include '404' (page not +urlopen will raise an :exc:`~urllib.error.HTTPError`. Typical errors include '404' (page not found), '403' (request forbidden), and '401' (authentication required). See section 10 of :rfc:`2616` for a reference on all the HTTP error codes. -The :exc:`HTTPError` instance raised will have an integer 'code' attribute, which +The :exc:`~urllib.error.HTTPError` instance raised will have an integer 'code' attribute, which corresponds to the error sent by the server. Error Codes @@ -317,7 +317,7 @@ dictionary is reproduced here for convenience :: } When an error is raised the server responds by returning an HTTP error code -*and* an error page. You can use the :exc:`HTTPError` instance as a response on the +*and* an error page. 
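As a concrete sketch of this pattern (the URL here is purely illustrative)::

    import urllib.request
    import urllib.error

    try:
        response = urllib.request.urlopen('http://www.example.com/missing')
    except urllib.error.HTTPError as e:
        # The exception itself behaves like a response object.
        print(e.code)         # e.g. 404
        print(e.read()[:80])  # start of the server's error page
    except urllib.error.URLError as e:
        print(e.reason)       # why the server could not be reached
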
You can use the :exc:`~urllib.error.HTTPError` instance as a response on the page returned. This means that as well as the code attribute, it also has read, geturl, and info, methods as returned by the ``urllib.response`` module:: @@ -338,7 +338,7 @@ geturl, and info, methods as returned by the ``urllib.response`` module:: Wrapping it Up -------------- -So if you want to be prepared for :exc:`HTTPError` *or* :exc:`URLError` there are two +So if you want to be prepared for :exc:`~urllib.error.HTTPError` *or* :exc:`~urllib.error.URLError` there are two basic approaches. I prefer the second approach. Number 1 @@ -365,7 +365,7 @@ Number 1 .. note:: The ``except HTTPError`` *must* come first, otherwise ``except URLError`` - will *also* catch an :exc:`HTTPError`. + will *also* catch an :exc:`~urllib.error.HTTPError`. Number 2 ~~~~~~~~ @@ -391,7 +391,7 @@ Number 2 info and geturl =============== -The response returned by urlopen (or the :exc:`HTTPError` instance) has two +The response returned by urlopen (or the :exc:`~urllib.error.HTTPError` instance) has two useful methods :meth:`info` and :meth:`geturl` and is defined in the module :mod:`urllib.response`.. diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst index bbac1c32b5695f..d8186b6ce75c79 100644 --- a/Doc/library/asyncio-stream.rst +++ b/Doc/library/asyncio-stream.rst @@ -157,8 +157,8 @@ and work with streams: .. versionchanged:: 3.10 Removed the *loop* parameter. - .. versionchanged:: 3.11 - Added the *ssl_shutdown_timeout* parameter. + .. versionchanged:: 3.11 + Added the *ssl_shutdown_timeout* parameter. .. coroutinefunction:: start_unix_server(client_connected_cb, path=None, \ diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst index 053bf64addb821..2db4a67d1973d5 100644 --- a/Doc/library/codecs.rst +++ b/Doc/library/codecs.rst @@ -1350,7 +1350,7 @@ encodings. +--------------------+---------+---------------------------+ | raw_unicode_escape | | Latin-1 encoding with | | | | :samp:`\\u{XXXX}` and | -| | | :samp:`\\U{XXXXXXXX}`` | +| | | :samp:`\\U{XXXXXXXX}` | | | | for other code points. | | | | Existing | | | | backslashes are not | diff --git a/Doc/library/collections.rst b/Doc/library/collections.rst index 03cb1dca8f816c..43f4ff077b40e0 100644 --- a/Doc/library/collections.rst +++ b/Doc/library/collections.rst @@ -358,7 +358,7 @@ Common patterns for working with :class:`Counter` objects:: list(c) # list unique elements set(c) # convert to a set dict(c) # convert to a regular dictionary - c.items() # convert to a list of (elem, cnt) pairs + c.items() # access the (elem, cnt) pairs Counter(dict(list_of_pairs)) # convert from a list of (elem, cnt) pairs c.most_common()[:-n-1:-1] # n least common elements +c # remove zero and negative counts diff --git a/Doc/library/copy.rst b/Doc/library/copy.rst index cc4ca034d07a00..74333b2e934814 100644 --- a/Doc/library/copy.rst +++ b/Doc/library/copy.rst @@ -87,28 +87,40 @@ pickle functions from the :mod:`copyreg` module. single: __copy__() (copy protocol) single: __deepcopy__() (copy protocol) +.. currentmodule:: None + In order for a class to define its own copy implementation, it can define -special methods :meth:`__copy__` and :meth:`__deepcopy__`. The former is called -to implement the shallow copy operation; no additional arguments are passed. -The latter is called to implement the deep copy operation; it is passed one -argument, the ``memo`` dictionary. 
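A minimal sketch of a class opting into this protocol (the ``Node`` class and its fields are invented for illustration)::

    import copy

    class Node:
        def __init__(self, payload, children=None):
            self.payload = payload
            self.children = children if children is not None else []

        def __copy__(self):
            # Shallow copy: the new Node shares the existing children list.
            return Node(self.payload, self.children)

        def __deepcopy__(self, memo):
            # Deep copy: delegate each component to copy.deepcopy, passing
            # the memo dictionary along so shared objects are copied once.
            return Node(copy.deepcopy(self.payload, memo),
                        copy.deepcopy(self.children, memo))
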
If the :meth:`__deepcopy__` implementation needs -to make a deep copy of a component, it should call the :func:`deepcopy` function -with the component as first argument and the memo dictionary as second argument. -The memo dictionary should be treated as an opaque object. +special methods :meth:`~object.__copy__` and :meth:`~object.__deepcopy__`. + +.. method:: object.__copy__(self) + :noindexentry: + + Called to implement the shallow copy operation; + no additional arguments are passed. + +.. method:: object.__deepcopy__(self, memo) + :noindexentry: + + Called to implement the deep copy operation; it is passed one + argument, the *memo* dictionary. If the ``__deepcopy__`` implementation needs + to make a deep copy of a component, it should call the :func:`~copy.deepcopy` function + with the component as first argument and the *memo* dictionary as second argument. + The *memo* dictionary should be treated as an opaque object. .. index:: single: __replace__() (replace protocol) -Function :func:`replace` is more limited than :func:`copy` and :func:`deepcopy`, +Function :func:`!copy.replace` is more limited +than :func:`~copy.copy` and :func:`~copy.deepcopy`, and only supports named tuples created by :func:`~collections.namedtuple`, -:mod:`dataclasses`, and other classes which define method :meth:`!__replace__`. +:mod:`dataclasses`, and other classes which define method :meth:`~object.__replace__`. - .. method:: __replace__(self, /, **changes) - :noindex: +.. method:: object.__replace__(self, /, **changes) + :noindexentry: -:meth:`!__replace__` should create a new object of the same type, -replacing fields with values from *changes*. + This method should create a new object of the same type, + replacing fields with values from *changes*. .. seealso:: diff --git a/Doc/library/devmode.rst b/Doc/library/devmode.rst index 914aa45cf9cbc3..5b8a9bd1908456 100644 --- a/Doc/library/devmode.rst +++ b/Doc/library/devmode.rst @@ -59,8 +59,9 @@ Effects of the Python Development Mode: ``default``. * Call :func:`faulthandler.enable` at Python startup to install handlers for - the :const:`SIGSEGV`, :const:`SIGFPE`, :const:`SIGABRT`, :const:`SIGBUS` and - :const:`SIGILL` signals to dump the Python traceback on a crash. + the :const:`~signal.SIGSEGV`, :const:`~signal.SIGFPE`, + :const:`~signal.SIGABRT`, :const:`~signal.SIGBUS` and + :const:`~signal.SIGILL` signals to dump the Python traceback on a crash. It behaves as if the :option:`-X faulthandler <-X>` command line option is used or if the :envvar:`PYTHONFAULTHANDLER` environment variable is set to diff --git a/Doc/library/difflib.rst b/Doc/library/difflib.rst index 5ee1f4a02c6816..c553611401d018 100644 --- a/Doc/library/difflib.rst +++ b/Doc/library/difflib.rst @@ -570,8 +570,8 @@ The :class:`SequenceMatcher` class has this constructor: The three methods that return the ratio of matching to total characters can give different results due to differing levels of approximation, although -:meth:`quick_ratio` and :meth:`real_quick_ratio` are always at least as large as -:meth:`ratio`: +:meth:`~SequenceMatcher.quick_ratio` and :meth:`~SequenceMatcher.real_quick_ratio` +are always at least as large as :meth:`~SequenceMatcher.ratio`: >>> s = SequenceMatcher(None, "abcd", "bcde") >>> s.ratio() @@ -593,15 +593,15 @@ This example compares two strings, considering blanks to be "junk": ... "private Thread currentThread;", ... "private volatile Thread currentThread;") -:meth:`ratio` returns a float in [0, 1], measuring the similarity of the -sequences. 
As a rule of thumb, a :meth:`ratio` value over 0.6 means the +:meth:`~SequenceMatcher.ratio` returns a float in [0, 1], measuring the similarity of the +sequences. As a rule of thumb, a :meth:`~SequenceMatcher.ratio` value over 0.6 means the sequences are close matches: >>> print(round(s.ratio(), 3)) 0.866 If you're only interested in where the sequences match, -:meth:`get_matching_blocks` is handy: +:meth:`~SequenceMatcher.get_matching_blocks` is handy: >>> for block in s.get_matching_blocks(): ... print("a[%d] and b[%d] match for %d elements" % block) @@ -609,12 +609,12 @@ If you're only interested in where the sequences match, a[8] and b[17] match for 21 elements a[29] and b[38] match for 0 elements -Note that the last tuple returned by :meth:`get_matching_blocks` is always a -dummy, ``(len(a), len(b), 0)``, and this is the only case in which the last +Note that the last tuple returned by :meth:`~SequenceMatcher.get_matching_blocks` +is always a dummy, ``(len(a), len(b), 0)``, and this is the only case in which the last tuple element (number of elements matched) is ``0``. If you want to know how to change the first sequence into the second, use -:meth:`get_opcodes`: +:meth:`~SequenceMatcher.get_opcodes`: >>> for opcode in s.get_opcodes(): ... print("%6s a[%d:%d] b[%d:%d]" % opcode) @@ -689,7 +689,7 @@ Differ Example This example compares two texts. First we set up the texts, sequences of individual single-line strings ending with newlines (such sequences can also be -obtained from the :meth:`~io.BaseIO.readlines` method of file-like objects): +obtained from the :meth:`~io.IOBase.readlines` method of file-like objects): >>> text1 = ''' 1. Beautiful is better than ugly. ... 2. Explicit is better than implicit. diff --git a/Doc/library/gettext.rst b/Doc/library/gettext.rst index 88a65b980d310f..7ebe91b372d35a 100644 --- a/Doc/library/gettext.rst +++ b/Doc/library/gettext.rst @@ -58,7 +58,7 @@ class-based API instead. Return the localized translation of *message*, based on the current global domain, language, and locale directory. This function is usually aliased as - :func:`_` in the local namespace (see examples below). + :func:`!_` in the local namespace (see examples below). .. function:: dgettext(domain, message) @@ -98,7 +98,7 @@ class-based API instead. .. versionadded:: 3.8 -Note that GNU :program:`gettext` also defines a :func:`dcgettext` method, but +Note that GNU :program:`gettext` also defines a :func:`!dcgettext` method, but this was deemed not useful and so it is currently unimplemented. Here's an example of typical usage for this API:: @@ -119,7 +119,7 @@ greater convenience than the GNU :program:`gettext` API. It is the recommended way of localizing your Python applications and modules. :mod:`!gettext` defines a :class:`GNUTranslations` class which implements the parsing of GNU :file:`.mo` format files, and has methods for returning strings. Instances of this class can also -install themselves in the built-in namespace as the function :func:`_`. +install themselves in the built-in namespace as the function :func:`!_`. .. function:: find(domain, localedir=None, languages=None, all=False) @@ -150,15 +150,12 @@ install themselves in the built-in namespace as the function :func:`_`. .. 
function:: translation(domain, localedir=None, languages=None, class_=None, fallback=False) - Return a :class:`*Translations` instance based on the *domain*, *localedir*, + Return a ``*Translations`` instance based on the *domain*, *localedir*, and *languages*, which are first passed to :func:`find` to get a list of the associated :file:`.mo` file paths. Instances with identical :file:`.mo` file names are cached. The actual class instantiated is *class_* if provided, otherwise :class:`GNUTranslations`. The class's constructor must - take a single :term:`file object` argument. If provided, *codeset* will change - the charset used to encode translated strings in the - :meth:`~NullTranslations.lgettext` and :meth:`~NullTranslations.lngettext` - methods. + take a single :term:`file object` argument. If multiple files are found, later files are used as fallbacks for earlier ones. To allow setting the fallback, :func:`copy.copy` is used to clone each @@ -177,19 +174,19 @@ install themselves in the built-in namespace as the function :func:`_`. .. function:: install(domain, localedir=None, *, names=None) - This installs the function :func:`_` in Python's builtins namespace, based on + This installs the function :func:`!_` in Python's builtins namespace, based on *domain* and *localedir* which are passed to the function :func:`translation`. For the *names* parameter, please see the description of the translation object's :meth:`~NullTranslations.install` method. As seen below, you usually mark the strings in your application that are - candidates for translation, by wrapping them in a call to the :func:`_` + candidates for translation, by wrapping them in a call to the :func:`!_` function, like this:: print(_('This string will be translated.')) - For convenience, you want the :func:`_` function to be installed in Python's + For convenience, you want the :func:`!_` function to be installed in Python's builtins namespace, so it is easily accessible in all modules of your application. @@ -276,20 +273,20 @@ are the methods of :class:`!NullTranslations`: If the *names* parameter is given, it must be a sequence containing the names of functions you want to install in the builtins namespace in - addition to :func:`_`. Supported names are ``'gettext'``, ``'ngettext'``, - ``'pgettext'``, ``'npgettext'``, ``'lgettext'``, and ``'lngettext'``. + addition to :func:`!_`. Supported names are ``'gettext'``, ``'ngettext'``, + ``'pgettext'``, and ``'npgettext'``. Note that this is only one way, albeit the most convenient way, to make - the :func:`_` function available to your application. Because it affects + the :func:`!_` function available to your application. Because it affects the entire application globally, and specifically the built-in namespace, - localized modules should never install :func:`_`. Instead, they should use - this code to make :func:`_` available to their module:: + localized modules should never install :func:`!_`. Instead, they should use + this code to make :func:`!_` available to their module:: import gettext t = gettext.translation('mymodule', ...) _ = t.gettext - This puts :func:`_` only in the module's global namespace and so only + This puts :func:`!_` only in the module's global namespace and so only affects calls within this module. .. versionchanged:: 3.8 @@ -314,7 +311,7 @@ initialize the "protected" :attr:`_charset` instance variable, defaulting to ids and message strings read from the catalog are converted to Unicode using this encoding, else ASCII is assumed. 
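As a brief, self-contained sketch of this class-based API (the ``'myapp'`` domain and the ``locales`` directory are invented names)::

    import gettext

    # Looks for locales/<language>/LC_MESSAGES/myapp.mo; with fallback=True
    # a NullTranslations instance is returned instead of raising OSError
    # when no catalog can be found.
    t = gettext.translation('myapp', localedir='locales',
                            languages=['de'], fallback=True)
    _ = t.gettext
    print(_('Hello, world'))
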
-Since message ids are read as Unicode strings too, all :meth:`*gettext` methods +Since message ids are read as Unicode strings too, all ``*gettext()`` methods will assume message ids as Unicode strings, not byte strings. The entire set of key/value pairs are placed into a dictionary and set as the @@ -404,7 +401,7 @@ version has a slightly different API. Its documented usage was:: _ = cat.gettext print(_('hello world')) -For compatibility with this older module, the function :func:`Catalog` is an +For compatibility with this older module, the function :func:`!Catalog` is an alias for the :func:`translation` function described above. One difference between this module and Henstridge's: his catalog objects @@ -432,7 +429,7 @@ take the following steps: In order to prepare your code for I18N, you need to look at all the strings in your files. Any string that needs to be translated should be marked by wrapping -it in ``_('...')`` --- that is, a call to the function :func:`_`. For example:: +it in ``_('...')`` --- that is, a call to the function :func:`_ <gettext.gettext>`. For example:: filename = 'mylog.txt' message = _('writing a log message') @@ -504,7 +501,7 @@ module:: Localizing your application ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -If you are localizing your application, you can install the :func:`_` function +If you are localizing your application, you can install the :func:`!_` function globally into the built-in namespace, usually in the main driver file of your application. This will let all your application-specific files just use ``_('...')`` without having to explicitly install it in each file. @@ -581,13 +578,13 @@ Here is one way you can handle this situation:: for a in animals: print(_(a)) -This works because the dummy definition of :func:`_` simply returns the string +This works because the dummy definition of :func:`!_` simply returns the string unchanged. And this dummy definition will temporarily override any definition -of :func:`_` in the built-in namespace (until the :keyword:`del` command). Take -care, though if you have a previous definition of :func:`_` in the local +of :func:`!_` in the built-in namespace (until the :keyword:`del` command). Take +care, though, if you have a previous definition of :func:`!_` in the local namespace. -Note that the second use of :func:`_` will not identify "a" as being +Note that the second use of :func:`!_` will not identify "a" as being translatable to the :program:`gettext` program, because the parameter is not a string literal. @@ -606,13 +603,13 @@ Another way to handle this is with the following example:: print(_(a)) In this case, you are marking translatable strings with the function -:func:`N_`, which won't conflict with any definition of :func:`_`. +:func:`!N_`, which won't conflict with any definition of :func:`!_`. However, you will need to teach your message extraction program to -look for translatable strings marked with :func:`N_`. :program:`xgettext`, +look for translatable strings marked with :func:`!N_`. :program:`xgettext`, :program:`pygettext`, ``pybabel extract``, and :program:`xpot` all support this through the use of the :option:`!-k` command-line switch. -The choice of :func:`N_` here is totally arbitrary; it could have just -as easily been :func:`MarkThisStringForTranslation`. 
Acknowledgements diff --git a/Doc/library/statistics.rst b/Doc/library/statistics.rst index a8a79012565321..f3c1bf20ae3ac8 100644 --- a/Doc/library/statistics.rst +++ b/Doc/library/statistics.rst @@ -14,6 +14,7 @@ .. testsetup:: * from statistics import * + import math __name__ = '' -------------- @@ -741,6 +742,24 @@ However, for reading convenience, most of the examples show sorted sequences. *y = slope \* x + noise* + Continuing the example from :func:`correlation`, we look to see + how well a model based on major planets can predict the orbital + distances for dwarf planets: + + .. doctest:: + + >>> model = linear_regression(period_squared, dist_cubed, proportional=True) + >>> slope = model.slope + + >>> # Dwarf planets: Pluto, Eris, Makemake, Haumea, Ceres + >>> orbital_periods = [90_560, 204_199, 111_845, 103_410, 1_680] # days + >>> predicted_dist = [math.cbrt(slope * (p * p)) for p in orbital_periods] + >>> list(map(round, predicted_dist)) + [5912, 10166, 6806, 6459, 414] + + >>> [5_906, 10_152, 6_796, 6_450, 414] # actual distance in million km + [5906, 10152, 6796, 6450, 414] + .. versionadded:: 3.10 .. versionchanged:: 3.11 diff --git a/Doc/library/symtable.rst b/Doc/library/symtable.rst index 65ff5bfe7abd61..85eae5f3822575 100644 --- a/Doc/library/symtable.rst +++ b/Doc/library/symtable.rst @@ -38,7 +38,13 @@ Examining Symbol Tables .. method:: get_type() Return the type of the symbol table. Possible values are ``'class'``, - ``'module'``, and ``'function'``. + ``'module'``, ``'function'``, ``'annotation'``, ``'TypeVar bound'``, + ``'type alias'``, and ``'type parameter'``. The latter four refer to + different flavors of :ref:`annotation scopes `. + + .. versionchanged:: 3.12 + Added ``'annotation'``, ``'TypeVar bound'``, ``'type alias'``, + and ``'type parameter'`` as possible return values. .. method:: get_id() @@ -49,6 +55,10 @@ Examining Symbol Tables Return the table's name. This is the name of the class if the table is for a class, the name of the function if the table is for a function, or ``'top'`` if the table is global (:meth:`get_type` returns ``'module'``). + For type parameter scopes (which are used for generic classes, functions, + and type aliases), it is the name of the underlying class, function, or + type alias. For type alias scopes, it is the name of the type alias. + For :class:`~typing.TypeVar` bound scopes, it is the name of the ``TypeVar``. .. method:: get_lineno() diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index ef818a7da016de..f9f556306f5827 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -22,6 +22,8 @@ always available. .. versionadded:: 3.2 + .. availability:: Unix. + .. function:: addaudithook(hook) diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst index e63b839931822c..8f691932201225 100644 --- a/Doc/library/typing.rst +++ b/Doc/library/typing.rst @@ -2404,6 +2404,13 @@ types. >>> Point3D.__total__ True + This attribute reflects *only* the value of the ``total`` argument + to the current ``TypedDict`` class, not whether the class is semantically + total. For example, a ``TypedDict`` with ``__total__`` set to True may + have keys marked with :data:`NotRequired`, or it may inherit from another + ``TypedDict`` with ``total=False``. Therefore, it is generally better to use + :attr:`__required_keys__` and :attr:`__optional_keys__` for introspection. + .. attribute:: __required_keys__ .. versionadded:: 3.9 @@ -2439,6 +2446,14 @@ types. .. versionadded:: 3.9 + .. 
note:: + + If ``from __future__ import annotations`` is used or if annotations + are given as strings, annotations are not evaluated when the + ``TypedDict`` is defined. Therefore, the runtime introspection that + ``__required_keys__`` and ``__optional_keys__`` rely on may not work + properly, and the values of the attributes may be incorrect. + See :pep:`589` for more examples and detailed rules of using ``TypedDict``. .. versionadded:: 3.8 diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst index 9fd80b1cb7f84c..e54e0ebb7fae96 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -582,7 +582,7 @@ Standard C. The recognized escape sequences are: +-------------------------+---------------------------------+-------+ | ``\v`` | ASCII Vertical Tab (VT) | | +-------------------------+---------------------------------+-------+ -| :samp:`\\{ooo}` | Character with octal value | (2,4) | +| :samp:`\\\\{ooo}` | Character with octal value | (2,4) | | | *ooo* | | +-------------------------+---------------------------------+-------+ | :samp:`\\x{hh}` | Character with hex value *hh* | (3,4) | diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index f260a571661b76..f217da9052ca78 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -48,13 +48,10 @@ Doc/library/collections.rst Doc/library/concurrent.futures.rst Doc/library/configparser.rst Doc/library/contextlib.rst -Doc/library/copy.rst Doc/library/csv.rst Doc/library/datetime.rst Doc/library/dbm.rst Doc/library/decimal.rst -Doc/library/devmode.rst -Doc/library/difflib.rst Doc/library/doctest.rst Doc/library/email.charset.rst Doc/library/email.compat32-message.rst @@ -149,11 +146,9 @@ Doc/reference/datamodel.rst Doc/reference/expressions.rst Doc/reference/import.rst Doc/reference/simple_stmts.rst -Doc/tutorial/controlflow.rst Doc/tutorial/datastructures.rst Doc/tutorial/introduction.rst Doc/using/cmdline.rst -Doc/using/configure.rst Doc/using/windows.rst Doc/whatsnew/2.0.rst Doc/whatsnew/2.1.rst diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index 4bcc3768111ccd..aa9caa101da40a 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -534,7 +534,7 @@ This example, as usual, demonstrates some new Python features: Different types define different methods. Methods of different types may have the same name without causing ambiguity. (It is possible to define your own object types and methods, using *classes*, see :ref:`tut-classes`) - The method :meth:`~list.append` shown in the example is defined for list objects; it + The method :meth:`!append` shown in the example is defined for list objects; it adds a new element at the end of the list. In this example it is equivalent to ``result = result + [a]``, but more efficient. @@ -1046,7 +1046,7 @@ Function Annotations information about the types used by user-defined functions (see :pep:`3107` and :pep:`484` for more information). -:term:`Annotations ` are stored in the :attr:`__annotations__` +:term:`Annotations ` are stored in the :attr:`!__annotations__` attribute of the function as a dictionary and have no effect on any other part of the function. Parameter annotations are defined by a colon after the parameter name, followed by an expression evaluating to the value of the annotation. 
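For instance, a small sketch of where annotations end up (the function is invented)::

    def greet(name: str, excited: bool = False) -> str:
        return name + '!' if excited else name

    print(greet.__annotations__)
    # {'name': <class 'str'>, 'excited': <class 'bool'>, 'return': <class 'str'>}
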
Return annotations are diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index a9555199a2ac24..83b4c7aa0481e9 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -99,8 +99,8 @@ General Options .. cmdoption:: --enable-loadable-sqlite-extensions - Support loadable extensions in the :mod:`_sqlite` extension module (default - is no). + Support loadable extensions in the :mod:`!_sqlite` extension module of the + :mod:`sqlite3` module (default is no). See the :meth:`sqlite3.Connection.enable_load_extension` method of the :mod:`sqlite3` module. @@ -198,7 +198,7 @@ General Options Some Linux distribution packaging policies recommend against bundling dependencies. For example, Fedora installs wheel packages in the ``/usr/share/python-wheels/`` directory and don't install the - :mod:`ensurepip._bundled` package. + :mod:`!ensurepip._bundled` package. .. versionadded:: 3.10 @@ -469,7 +469,7 @@ Install Options .. cmdoption:: --disable-test-modules Don't build nor install test modules, like the :mod:`test` package or the - :mod:`_testcapi` extension module (built and installed by default). + :mod:`!_testcapi` extension module (built and installed by default). .. versionadded:: 3.10 @@ -615,7 +615,7 @@ Effects of a debug build: * Display all warnings by default: the list of default warning filters is empty in the :mod:`warnings` module. * Add ``d`` to :data:`sys.abiflags`. -* Add :func:`sys.gettotalrefcount` function. +* Add :func:`!sys.gettotalrefcount` function. * Add :option:`-X showrefcount <-X>` command line option. * Add :option:`-d` command line option and :envvar:`PYTHONDEBUG` environment variable to debug the parser. @@ -637,7 +637,7 @@ Effects of a debug build: * Check that deallocator functions don't change the current exception. * The garbage collector (:func:`gc.collect` function) runs some basic checks on objects consistency. - * The :c:macro:`Py_SAFE_DOWNCAST()` macro checks for integer underflow and + * The :c:macro:`!Py_SAFE_DOWNCAST()` macro checks for integer underflow and overflow when downcasting from wide types to narrow types. See also the :ref:`Python Development Mode ` and the @@ -664,7 +664,7 @@ Debug options Effects: * Define the ``Py_TRACE_REFS`` macro. - * Add :func:`sys.getobjects` function. + * Add :func:`!sys.getobjects` function. * Add :envvar:`PYTHONDUMPREFS` environment variable. The :envvar:`PYTHONDUMPREFS` environment variable can be used to dump @@ -748,7 +748,7 @@ Libraries options .. cmdoption:: --with-system-expat - Build the :mod:`pyexpat` module using an installed ``expat`` library + Build the :mod:`!pyexpat` module using an installed ``expat`` library (default is no). .. cmdoption:: --with-system-libmpdec @@ -758,11 +758,12 @@ .. versionadded:: 3.3 -.. cmdoption:: --with-readline=editline +.. cmdoption:: --with-readline=readline|editline - Use ``editline`` library for backend of the :mod:`readline` module. + Designate a backend library for the :mod:`readline` module. - Define the ``WITH_EDITLINE`` macro. + * readline: Use readline as the backend. + * editline: Use editline as the backend. .. versionadded:: 3.10 diff --git a/Doc/whatsnew/2.6.rst b/Doc/whatsnew/2.6.rst index f3912d42180bfd..2f749dc40f7ee7 100644 --- a/Doc/whatsnew/2.6.rst +++ b/Doc/whatsnew/2.6.rst @@ -1850,8 +1850,8 @@ changes, or look through the Subversion logs for all the details. special values and floating-point exceptions in a manner consistent with Annex 'G' of the C99 standard. 
-* A new data type in the :mod:`collections` module: :class:`namedtuple(typename, - fieldnames)` is a factory function that creates subclasses of the standard tuple +* A new data type in the :mod:`collections` module: ``namedtuple(typename, fieldnames)`` + is a factory function that creates subclasses of the standard tuple whose fields are accessible by name as well as index. For example:: >>> var_type = collections.namedtuple('variable', @@ -1873,7 +1873,7 @@ changes, or look through the Subversion logs for all the details. variable(id=1, name='amplitude', type='int', size=4) Several places in the standard library that returned tuples have - been modified to return :class:`namedtuple` instances. For example, + been modified to return :func:`namedtuple` instances. For example, the :meth:`Decimal.as_tuple` method now returns a named tuple with :attr:`sign`, :attr:`digits`, and :attr:`exponent` fields. diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst index dfe2ea886a9209..ec39616d7c9d2b 100644 --- a/Doc/whatsnew/3.12.rst +++ b/Doc/whatsnew/3.12.rst @@ -59,36 +59,106 @@ Summary -- Release highlights .. This section singles out the most important changes in Python 3.12. Brevity is key. +Python 3.12 is the latest stable release of the Python programming language, +with a mix of changes to the language and the standard library. +The library changes focus on cleaning up deprecated APIs, usability, and correctness. +Of note, the :mod:`!distutils` package has been removed from the standard library. +Filesystem support in :mod:`os` and :mod:`pathlib` has seen a number of improvements, +and several modules have better performance. + +The language changes focus on usability, +as :term:`f-strings ` have had many limitations removed +and 'Did you mean ...' suggestions continue to improve. +The new :ref:`type parameter syntax ` +and :keyword:`type` statement improve ergonomics for using :term:`generic types +` and :term:`type aliases ` with static type checkers. + +This article doesn't attempt to provide a complete specification of all new features, +but instead gives a convenient overview. +For full details, you should refer to the documentation, +such as the :ref:`Library Reference ` +and :ref:`Language Reference `. +If you want to understand the complete implementation and design rationale for a change, +refer to the PEP for a particular new feature; +but note that PEPs usually are not kept up-to-date +once a feature has been fully implemented. + +-------------- .. PEP-sized items next. +New syntax features: + +* :ref:`PEP 695 `, type parameter syntax and the :keyword:`type` statement + New grammar features: -* :ref:`whatsnew312-pep701` +* :ref:`PEP 701 `, :term:`f-strings ` in the grammar Interpreter improvements: -* :ref:`whatsnew312-pep684` +* :ref:`PEP 684 `, a unique per-interpreter :term:`GIL + ` +* :ref:`PEP 669 `, low impact monitoring +* `Improved 'Did you mean ...' 
suggestions `_ + for :exc:`NameError`, :exc:`ImportError`, and :exc:`SyntaxError` exceptions -* :ref:`whatsnew312-pep669` +Python data model improvements: -New typing features: +* :ref:`PEP 688 <whatsnew312-pep688>`, using the :ref:`buffer protocol + ` from Python + +Significant improvements in the standard library: + +* The :class:`pathlib.Path` class now supports subclassing +* The :mod:`os` module received several improvements for Windows support +* A :ref:`command-line interface ` has been added to the + :mod:`sqlite3` module +* :func:`isinstance` checks against :func:`runtime-checkable protocols + ` enjoy a speed up of between two and 20 times +* The :mod:`asyncio` package has had a number of performance improvements, + with some benchmarks showing a 75% speed up. +* A :ref:`command-line interface ` has been added to the + :mod:`uuid` module +* Due to the changes in :ref:`PEP 701 <whatsnew312-pep701>`, + producing tokens via the :mod:`tokenize` module is up to 64% faster. + +Security improvements: -* :ref:`whatsnew312-pep688` +* Replace the builtin :mod:`hashlib` implementations of + SHA1, SHA3, SHA2-384, SHA2-512, and MD5 with formally verified code from the + `HACL* `__ project. + These builtin implementations remain as fallbacks that are only used when + OpenSSL does not provide them. -* :ref:`whatsnew312-pep692` +C API improvements: -* :ref:`whatsnew312-pep695` +* :ref:`PEP 697 <whatsnew312-pep697>`, unstable C API tier +* :ref:`PEP 683 <whatsnew312-pep683>`, immortal objects -* :ref:`whatsnew312-pep698` +CPython implementation improvements: + +* :ref:`PEP 709 <whatsnew312-pep709>`, comprehension inlining +* :ref:`CPython support ` for the Linux ``perf`` profiler +* Implement stack overflow protection on supported platforms + +New typing features: + +* :ref:`PEP 692 <whatsnew312-pep692>`, using :class:`~typing.TypedDict` to + annotate :term:`**kwargs ` +* :ref:`PEP 698 <whatsnew312-pep698>`, :func:`typing.override` decorator Important deprecations, removals or restrictions: -* :pep:`623`: Remove wstr from Unicode +* :pep:`623`: Remove ``wstr`` from Unicode objects in Python's C API, + reducing the size of every :class:`str` object by at least 8 bytes. -* :pep:`632`: Remove the ``distutils`` package. See - `the migration guide `_ - for advice on its replacement. +* :pep:`632`: Remove the :mod:`!distutils` package. + See `the migration guide `_ + for advice on replacing the APIs it provided. + The third-party `Setuptools `__ + package continues to provide :mod:`!distutils`, + if you still require it in Python 3.12 and beyond. * :gh:`95299`: Do not pre-install ``setuptools`` in virtual environments created with :mod:`venv`. @@ -97,60 +167,77 @@ Important deprecations, removals or restrictions: run ``pip install setuptools`` in the :ref:`activated ` virtual environment. -Improved Error Messages -======================= +* The :mod:`!asynchat`, :mod:`!asyncore`, and :mod:`!imp` modules have been + removed, along with several :class:`unittest.TestCase` + `method aliases <unittest-TestCase-removed-aliases>`_. -* Modules from the standard library are now potentially suggested as part of - the error messages displayed by the interpreter when a :exc:`NameError` is - raised to the top level. (Contributed by Pablo Galindo in :gh:`98254`.) - >>> sys.version_info - Traceback (most recent call last): - File "", line 1, in - NameError: name 'sys' is not defined. Did you forget to import 'sys'? +New Features +============ -* Improve the error suggestion for :exc:`NameError` exceptions for instances. 
- Now if a :exc:`NameError` is raised in a method and the instance has an - attribute that's exactly equal to the name in the exception, the suggestion - will include ``self.`` instead of the closest match in the method - scope. (Contributed by Pablo Galindo in :gh:`99139`.) +.. _whatsnew312-pep695: - >>> class A: - ... def __init__(self): - ... self.blech = 1 - ... - ... def foo(self): - ... somethin = blech - ... - >>> A().foo() - Traceback (most recent call last): - File "", line 1 - somethin = blech - ^^^^^ - NameError: name 'blech' is not defined. Did you mean: 'self.blech'? +PEP 695: Type Parameter Syntax +------------------------------ -* Improve the :exc:`SyntaxError` error message when the user types ``import x - from y`` instead of ``from y import x``. (Contributed by Pablo Galindo in :gh:`98931`.) +Generic classes and functions under :pep:`484` were declared using a verbose syntax +that left the scope of type parameters unclear and required explicit declarations of +variance. - >>> import a.y.z from b.y.z - Traceback (most recent call last): - File "", line 1 - import a.y.z from b.y.z - ^^^^^^^^^^^^^^^^^^^^^^^ - SyntaxError: Did you mean to use 'from ... import ...' instead? +:pep:`695` introduces a new, more compact and explicit way to create +:ref:`generic classes ` and :ref:`functions `:: -* :exc:`ImportError` exceptions raised from failed ``from import - `` statements now include suggestions for the value of ```` based on the - available names in ````. (Contributed by Pablo Galindo in :gh:`91058`.) + def max[T](args: Iterable[T]) -> T: + ... - >>> from collections import chainmap - Traceback (most recent call last): - File "", line 1, in - ImportError: cannot import name 'chainmap' from 'collections'. Did you mean: 'ChainMap'? + class list[T]: + def __getitem__(self, index: int, /) -> T: + ... + def append(self, element: T) -> None: + ... -New Features -============ +In addition, the PEP introduces a new way to declare :ref:`type aliases ` +using the :keyword:`type` statement, which creates an instance of +:class:`~typing.TypeAliasType`:: + + type Point = tuple[float, float] + +Type aliases can also be :ref:`generic `:: + + type Point[T] = tuple[T, T] + +The new syntax allows declaring :class:`~typing.TypeVarTuple` +and :class:`~typing.ParamSpec` parameters, as well as :class:`~typing.TypeVar` +parameters with bounds or constraints:: + + type IntFunc[**P] = Callable[P, int] # ParamSpec + type LabeledTuple[*Ts] = tuple[str, *Ts] # TypeVarTuple + type HashableSequence[T: Hashable] = Sequence[T] # TypeVar with bound + type IntOrStrSequence[T: (int, str)] = Sequence[T] # TypeVar with constraints + +The value of type aliases and the bound and constraints of type variables +created through this syntax are evaluated only on demand (see +:ref:`lazy evaluation `). This means type aliases are able to +refer to other types defined later in the file. + +Type parameters declared through a type parameter list are visible within the +scope of the declaration and any nested scopes, but not in the outer scope. For +example, they can be used in the type annotations for the methods of a generic +class or in the class body. However, they cannot be used in the module scope after +the class is defined. See :ref:`type-params` for a detailed description of the +runtime semantics of type parameters. + +In order to support these scoping semantics, a new kind of scope is introduced, +the :ref:`annotation scope `. 
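A short sketch of these visibility rules (the class is arbitrary)::

    class Stack[T]:
        def push(self, item: T) -> None:  # T is visible here...
            ...

        def pop(self) -> T:               # ...and anywhere in the class body
            ...

    # T is not defined at module scope; referring to it here raises NameError.
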
Annotation scopes behave for the +most part like function scopes, but interact differently with enclosing class scopes. +In Python 3.13, :term:`annotations ` will also be evaluated in +annotation scopes. + +See :pep:`695` for more details. + +(PEP written by Eric Traut. Implementation by Jelle Zijlstra, Eric Traut, +and others in :gh:`103764`.) .. _whatsnew312-pep701: @@ -244,52 +331,6 @@ are parsed with the PEG parser, error messages can be more precise and show the Maureira-Fredes and Marta Gómez in :gh:`102856`. PEP written by Pablo Galindo, Batuhan Taskaya, Lysandros Nikolaou and Marta Gómez). -.. _whatsnew312-pep709: - -PEP 709: Comprehension inlining -------------------------------- - -Dictionary, list, and set comprehensions are now inlined, rather than creating a -new single-use function object for each execution of the comprehension. This -speeds up execution of a comprehension by up to two times. -See :pep:`709` for further details. - -Comprehension iteration variables remain isolated and don't overwrite a -variable of the same name in the outer scope, nor are they visible after the -comprehension. Inlining does result in a few visible behavior changes: - -* There is no longer a separate frame for the comprehension in tracebacks, - and tracing/profiling no longer shows the comprehension as a function call. -* The :mod:`symtable` module will no longer produce child symbol tables for each - comprehension; instead, the comprehension's locals will be included in the - parent function's symbol table. -* Calling :func:`locals` inside a comprehension now includes variables - from outside the comprehension, and no longer includes the synthetic ``.0`` - variable for the comprehension "argument". -* A comprehension iterating directly over ``locals()`` (e.g. ``[k for k in - locals()]``) may see "RuntimeError: dictionary changed size during iteration" - when run under tracing (e.g. code coverage measurement). This is the same - behavior already seen in e.g. ``for k in locals():``. To avoid the error, first - create a list of keys to iterate over: ``keys = list(locals()); [k for k in - keys]``. - -(Contributed by Carl Meyer and Vladimir Matveev in :pep:`709`.) - -.. _whatsnew312-pep688: - -PEP 688: Making the buffer protocol accessible in Python --------------------------------------------------------- - -:pep:`688` introduces a way to use the :ref:`buffer protocol ` -from Python code. Classes that implement the :meth:`~object.__buffer__` method -are now usable as buffer types. - -The new :class:`collections.abc.Buffer` ABC provides a standard -way to represent buffer objects, for example in type annotations. -The new :class:`inspect.BufferFlags` enum represents the flags that -can be used to customize buffer creation. -(Contributed by Jelle Zijlstra in :gh:`102500`.) - .. _whatsnew312-pep684: PEP 684: A Per-Interpreter GIL @@ -333,7 +374,105 @@ This means that you only pay for what you use, providing support for near-zero overhead debuggers and coverage tools. See :mod:`sys.monitoring` for details. -(Contributed by Mark Shannon in :gh:`103083`.) +(Contributed by Mark Shannon in :gh:`103082`.) + +.. _whatsnew312-pep688: + +PEP 688: Making the buffer protocol accessible in Python +-------------------------------------------------------- + +:pep:`688` introduces a way to use the :ref:`buffer protocol ` +from Python code. Classes that implement the :meth:`~object.__buffer__` method +are now usable as buffer types. 
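For instance, a minimal sketch of a Python-level buffer type (the class is hypothetical)::

    class PacketBuffer:
        def __init__(self, data: bytes):
            self._data = bytearray(data)

        def __buffer__(self, flags: int) -> memoryview:
            # Called by buffer consumers such as memoryview().
            return memoryview(self._data)

    mv = memoryview(PacketBuffer(b'\x00\x01\x02'))
    print(mv[1])  # 1
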
+ +The new :class:`collections.abc.Buffer` ABC provides a standard +way to represent buffer objects, for example in type annotations. +The new :class:`inspect.BufferFlags` enum represents the flags that +can be used to customize buffer creation. +(Contributed by Jelle Zijlstra in :gh:`102500`.) + +.. _whatsnew312-pep709: + +PEP 709: Comprehension inlining +------------------------------- + +Dictionary, list, and set comprehensions are now inlined, rather than creating a +new single-use function object for each execution of the comprehension. This +speeds up execution of a comprehension by up to two times. +See :pep:`709` for further details. + +Comprehension iteration variables remain isolated and don't overwrite a +variable of the same name in the outer scope, nor are they visible after the +comprehension. Inlining does result in a few visible behavior changes: + +* There is no longer a separate frame for the comprehension in tracebacks, + and tracing/profiling no longer shows the comprehension as a function call. +* The :mod:`symtable` module will no longer produce child symbol tables for each + comprehension; instead, the comprehension's locals will be included in the + parent function's symbol table. +* Calling :func:`locals` inside a comprehension now includes variables + from outside the comprehension, and no longer includes the synthetic ``.0`` + variable for the comprehension "argument". +* A comprehension iterating directly over ``locals()`` (e.g. ``[k for k in + locals()]``) may see "RuntimeError: dictionary changed size during iteration" + when run under tracing (e.g. code coverage measurement). This is the same + behavior already seen in e.g. ``for k in locals():``. To avoid the error, first + create a list of keys to iterate over: ``keys = list(locals()); [k for k in + keys]``. + +(Contributed by Carl Meyer and Vladimir Matveev in :pep:`709`.) + +Improved Error Messages +----------------------- + +* Modules from the standard library are now potentially suggested as part of + the error messages displayed by the interpreter when a :exc:`NameError` is + raised to the top level. (Contributed by Pablo Galindo in :gh:`98254`.) + + >>> sys.version_info + Traceback (most recent call last): + File "", line 1, in + NameError: name 'sys' is not defined. Did you forget to import 'sys'? + +* Improve the error suggestion for :exc:`NameError` exceptions for instances. + Now if a :exc:`NameError` is raised in a method and the instance has an + attribute that's exactly equal to the name in the exception, the suggestion + will include ``self.`` instead of the closest match in the method + scope. (Contributed by Pablo Galindo in :gh:`99139`.) + + >>> class A: + ... def __init__(self): + ... self.blech = 1 + ... + ... def foo(self): + ... somethin = blech + ... + >>> A().foo() + Traceback (most recent call last): + File "", line 1 + somethin = blech + ^^^^^ + NameError: name 'blech' is not defined. Did you mean: 'self.blech'? + +* Improve the :exc:`SyntaxError` error message when the user types ``import x + from y`` instead of ``from y import x``. (Contributed by Pablo Galindo in :gh:`98931`.) + + >>> import a.y.z from b.y.z + Traceback (most recent call last): + File "", line 1 + import a.y.z from b.y.z + ^^^^^^^^^^^^^^^^^^^^^^^ + SyntaxError: Did you mean to use 'from ... import ...' instead? + +* :exc:`ImportError` exceptions raised from failed ``from import + `` statements now include suggestions for the value of ```` based on the + available names in ````. 
(Contributed by Pablo Galindo in :gh:`91058`.) + + >>> from collections import chainmap + Traceback (most recent call last): + File "", line 1, in + ImportError: cannot import name 'chainmap' from 'collections'. Did you mean: 'ChainMap'? + New Features Related to Type Hints ================================== @@ -398,70 +537,6 @@ See :pep:`698` for more details. (Contributed by Steven Troxler in :gh:`101561`.) -.. _whatsnew312-pep695: - -PEP 695: Type Parameter Syntax ------------------------------- - -Generic classes and functions under :pep:`484` were declared using a verbose syntax -that left the scope of type parameters unclear and required explicit declarations of -variance. - -:pep:`695` introduces a new, more compact and explicit way to create -:ref:`generic classes ` and :ref:`functions `:: - - def max[T](args: Iterable[T]) -> T: - ... - - class list[T]: - def __getitem__(self, index: int, /) -> T: - ... - - def append(self, element: T) -> None: - ... - -In addition, the PEP introduces a new way to declare :ref:`type aliases ` -using the :keyword:`type` statement, which creates an instance of -:class:`~typing.TypeAliasType`:: - - type Point = tuple[float, float] - -Type aliases can also be :ref:`generic `:: - - type Point[T] = tuple[T, T] - -The new syntax allows declaring :class:`~typing.TypeVarTuple` -and :class:`~typing.ParamSpec` parameters, as well as :class:`~typing.TypeVar` -parameters with bounds or constraints:: - - type IntFunc[**P] = Callable[P, int] # ParamSpec - type LabeledTuple[*Ts] = tuple[str, *Ts] # TypeVarTuple - type HashableSequence[T: Hashable] = Sequence[T] # TypeVar with bound - type IntOrStrSequence[T: (int, str)] = Sequence[T] # TypeVar with constraints - -The value of type aliases and the bound and constraints of type variables -created through this syntax are evaluated only on demand (see -:ref:`lazy evaluation `). This means type aliases are able to -refer to other types defined later in the file. - -Type parameters declared through a type parameter list are visible within the -scope of the declaration and any nested scopes, but not in the outer scope. For -example, they can be used in the type annotations for the methods of a generic -class or in the class body. However, they cannot be used in the module scope after -the class is defined. See :ref:`type-params` for a detailed description of the -runtime semantics of type parameters. - -In order to support these scoping semantics, a new kind of scope is introduced, -the :ref:`annotation scope `. Annotation scopes behave for the -most part like function scopes, but interact differently with enclosing class scopes. -In Python 3.13, :term:`annotations ` will also be evaluated in -annotation scopes. - -See :pep:`695` for more details. - -(PEP written by Eric Traut. Implementation by Jelle Zijlstra, Eric Traut, -and others in :gh:`103764`.) - Other Language Changes ====================== @@ -626,6 +701,9 @@ dis :data:`dis.hasarg` collection instead. (Contributed by Irit Katriel in :gh:`94216`.) +* Add the :data:`dis.hasexc` collection to signify instructions that set + an exception handler. (Contributed by Irit Katriel in :gh:`94216`.) + fractions --------- @@ -809,6 +887,10 @@ statistics sys --- +* Add the :mod:`sys.monitoring` namespace to expose the new :ref:`PEP 669 + ` monitoring API. + (Contributed by Mark Shannon in :gh:`103082`.) 
+ +* Add :func:`sys.activate_stack_trampoline` and :func:`sys.deactivate_stack_trampoline` for activating and deactivating stack profiler trampolines, @@ -1008,9 +1090,27 @@ CPython bytecode changes * Remove the :opcode:`!PRECALL` instruction. (Contributed by Mark Shannon in :gh:`92925`.) +* Add the :opcode:`BINARY_SLICE` and :opcode:`STORE_SLICE` instructions. + (Contributed by Mark Shannon in :gh:`94163`.) + +* Add the :opcode:`CALL_INTRINSIC_1` instruction. + (Contributed by Mark Shannon in :gh:`99005`.) + +* Add the :opcode:`CALL_INTRINSIC_2` instruction. + (Contributed by Irit Katriel in :gh:`101799`.) + +* Add the :opcode:`CLEANUP_THROW` instruction. + (Contributed by Brandt Bucher in :gh:`90997`.) + +* Add the :opcode:`!END_SEND` instruction. + (Contributed by Mark Shannon in :gh:`103082`.) + * Add the :opcode:`LOAD_FAST_AND_CLEAR` instruction as part of the implementation of :pep:`709`. (Contributed by Carl Meyer in :gh:`101441`.) +* Add the :opcode:`LOAD_FAST_CHECK` instruction. + (Contributed by Dennis Sweeney in :gh:`93143`.) + * Add the :opcode:`LOAD_FROM_DICT_OR_DEREF`, :opcode:`LOAD_FROM_DICT_OR_GLOBALS`, and :opcode:`LOAD_LOCALS` opcodes as part of the implementation of :pep:`695`. Remove the :opcode:`!LOAD_CLASSDEREF` opcode, which can be replaced with @@ -1020,6 +1120,8 @@ CPython bytecode changes * Add the :opcode:`LOAD_SUPER_ATTR` instruction. (Contributed by Carl Meyer and Vladimir Matveev in :gh:`103497`.) +* Add the :opcode:`RETURN_CONST` instruction. (Contributed by Wenyang Wang in :gh:`101632`.) + Demos and Tools =============== @@ -1114,12 +1216,6 @@ Deprecated volume and maintenance burden. (Contributed by Raymond Hettinger in :gh:`101588`.) -* :mod:`os`: The ``st_ctime`` fields return by :func:`os.stat` and :func:`os.lstat` on - Windows are deprecated. In a future release, they will contain the last - metadata change time, consistent with other platforms. For now, they still - contain the creation time, which is also available in the new ``st_birthtime`` - field. (Contributed by Steve Dower in :gh:`99726`.) - * :mod:`multiprocessing`: In Python 3.14, the default :mod:`multiprocessing` start method will change to a safer one on Linux, BSDs, and other non-macOS POSIX platforms where ``'fork'`` is currently @@ -1140,15 +1236,23 @@ Deprecated proper :exc:`DeprecationWarning` in 3.12. Remove them in 3.14. (Contributed by Soumendra Ganguly and Gregory P. Smith in :gh:`85984`.) -* :mod:`os`: On POSIX platforms, :func:`os.fork` can now raise a - :exc:`DeprecationWarning` when it can detect being called from a - multithreaded process. There has always been a fundamental incompatibility - with the POSIX platform when doing so. Even if such code *appeared* to work. - We added the warning to to raise awareness as issues encounted by code doing - this are becoming more frequent. See the :func:`os.fork` documentation for - more details along with `this discussion on fork being incompatible with threads - `_ for *why* we're now surfacing this - longstanding platform compatibility problem to developers. +* :mod:`os`: + + * The ``st_ctime`` fields returned by :func:`os.stat` and :func:`os.lstat` on + Windows are deprecated. In a future release, they will contain the last + metadata change time, consistent with other platforms. For now, they still + contain the creation time, which is also available in the new ``st_birthtime`` + field. (Contributed by Steve Dower in :gh:`99726`.) 
+ + * On POSIX platforms, :func:`os.fork` can now raise a + :exc:`DeprecationWarning` when it can detect being called from a + multithreaded process. There has always been a fundamental incompatibility + with the POSIX platform when doing so, even if such code *appeared* to work. + We added the warning to raise awareness, as issues encountered by code doing + this are becoming more frequent. See the :func:`os.fork` documentation for + more details along with `this discussion on fork being incompatible with threads + `_ for *why* we're now surfacing this + longstanding platform compatibility problem to developers. When this warning appears due to usage of :mod:`multiprocessing` or :mod:`concurrent.futures` the fix is to use a different @@ -1576,6 +1680,8 @@ unittest * Remove many long-deprecated :mod:`unittest` features: + .. _unittest-TestCase-removed-aliases: + * A number of :class:`~unittest.TestCase` method aliases: ============================ =============================== =============== @@ -1809,6 +1915,7 @@ C API Changes New Features ------------ +.. _whatsnew312-pep697: * :pep:`697`: Introduce the :ref:`Unstable C API tier `, intended for low-level tools like debuggers and JIT compilers. @@ -1936,6 +2043,8 @@ New Features to replace the legacy-api :c:func:`!PyErr_Display`. (Contributed by Irit Katriel in :gh:`102755`). +.. _whatsnew312-pep683: + * :pep:`683`: Introduce *Immortal Objects*, which allows objects to bypass reference counts, and related changes to the C-API: diff --git a/Include/cpython/pyatomic.h b/Include/cpython/pyatomic.h index ab182381b39f00..7a783058c173aa 100644 --- a/Include/cpython/pyatomic.h +++ b/Include/cpython/pyatomic.h @@ -501,3 +501,28 @@ static inline void _Py_atomic_fence_release(void); #else # error "no available pyatomic implementation for this platform/compiler" #endif + + +// --- aliases --------------------------------------------------------------- + +#if SIZEOF_LONG == 8 +# define _Py_atomic_load_ulong(p) \ _Py_atomic_load_uint64((uint64_t *)p) +# define _Py_atomic_load_ulong_relaxed(p) \ _Py_atomic_load_uint64_relaxed((uint64_t *)p) +# define _Py_atomic_store_ulong(p, v) \ _Py_atomic_store_uint64((uint64_t *)p, v) +# define _Py_atomic_store_ulong_relaxed(p, v) \ _Py_atomic_store_uint64_relaxed((uint64_t *)p, v) +#elif SIZEOF_LONG == 4 +# define _Py_atomic_load_ulong(p) \ _Py_atomic_load_uint32((uint32_t *)p) +# define _Py_atomic_load_ulong_relaxed(p) \ _Py_atomic_load_uint32_relaxed((uint32_t *)p) +# define _Py_atomic_store_ulong(p, v) \ _Py_atomic_store_uint32((uint32_t *)p, v) +# define _Py_atomic_store_ulong_relaxed(p, v) \ _Py_atomic_store_uint32_relaxed((uint32_t *)p, v) +#else +# error "long must be 4 or 8 bytes in size" +#endif // SIZEOF_LONG diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index ba5764e943e676..0912bd175fe4f7 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -93,6 +93,8 @@ struct _is { and _PyInterpreterState_SetFinalizing() to access it, don't access it directly. */ _Py_atomic_address _finalizing; + /* The ID of the OS thread in which we are finalizing. 
*/ + unsigned long _finalizing_id; struct _gc_runtime_state gc; @@ -215,9 +217,23 @@ _PyInterpreterState_GetFinalizing(PyInterpreterState *interp) { return (PyThreadState*)_Py_atomic_load_relaxed(&interp->_finalizing); } +static inline unsigned long +_PyInterpreterState_GetFinalizingID(PyInterpreterState *interp) { + return _Py_atomic_load_ulong_relaxed(&interp->_finalizing_id); +} + static inline void _PyInterpreterState_SetFinalizing(PyInterpreterState *interp, PyThreadState *tstate) { _Py_atomic_store_relaxed(&interp->_finalizing, (uintptr_t)tstate); + if (tstate == NULL) { + _Py_atomic_store_ulong_relaxed(&interp->_finalizing_id, 0); + } + else { + // XXX Re-enable this assert once gh-109860 is fixed. + //assert(tstate->thread_id == PyThread_get_thread_ident()); + _Py_atomic_store_ulong_relaxed(&interp->_finalizing_id, + tstate->thread_id); + } } diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index bb37e9a1d1b6b6..16c1637e496033 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -46,31 +46,40 @@ #define _GUARD_TYPE_VERSION 318 #define _CHECK_MANAGED_OBJECT_HAS_VALUES 319 #define _LOAD_ATTR_INSTANCE_VALUE 320 -#define _IS_NONE 321 -#define _ITER_CHECK_LIST 322 -#define _ITER_JUMP_LIST 323 -#define _IS_ITER_EXHAUSTED_LIST 324 -#define _ITER_NEXT_LIST 325 -#define _ITER_CHECK_TUPLE 326 -#define _ITER_JUMP_TUPLE 327 -#define _IS_ITER_EXHAUSTED_TUPLE 328 -#define _ITER_NEXT_TUPLE 329 -#define _ITER_CHECK_RANGE 330 -#define _ITER_JUMP_RANGE 331 -#define _IS_ITER_EXHAUSTED_RANGE 332 -#define _ITER_NEXT_RANGE 333 -#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 334 -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 335 -#define _CHECK_PEP_523 336 -#define _CHECK_FUNCTION_EXACT_ARGS 337 -#define _CHECK_STACK_SPACE 338 -#define _INIT_CALL_PY_EXACT_ARGS 339 -#define _PUSH_FRAME 340 -#define _POP_JUMP_IF_FALSE 341 -#define _POP_JUMP_IF_TRUE 342 -#define _JUMP_TO_TOP 343 -#define _SAVE_CURRENT_IP 344 -#define _INSERT 345 +#define _LOAD_ATTR_SLOT 321 +#define _GUARD_DORV_VALUES 322 +#define _STORE_ATTR_INSTANCE_VALUE 323 +#define _GUARD_TYPE_VERSION_STORE 324 +#define _STORE_ATTR_SLOT 325 +#define _IS_NONE 326 +#define _ITER_CHECK_LIST 327 +#define _ITER_JUMP_LIST 328 +#define _IS_ITER_EXHAUSTED_LIST 329 +#define _ITER_NEXT_LIST 330 +#define _ITER_CHECK_TUPLE 331 +#define _ITER_JUMP_TUPLE 332 +#define _IS_ITER_EXHAUSTED_TUPLE 333 +#define _ITER_NEXT_TUPLE 334 +#define _ITER_CHECK_RANGE 335 +#define _ITER_JUMP_RANGE 336 +#define _IS_ITER_EXHAUSTED_RANGE 337 +#define _ITER_NEXT_RANGE 338 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 339 +#define _GUARD_KEYS_VERSION 340 +#define _LOAD_ATTR_METHOD_WITH_VALUES 341 +#define _LOAD_ATTR_METHOD_NO_DICT 342 +#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 343 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 344 +#define _CHECK_PEP_523 345 +#define _CHECK_FUNCTION_EXACT_ARGS 346 +#define _CHECK_STACK_SPACE 347 +#define _INIT_CALL_PY_EXACT_ARGS 348 +#define _PUSH_FRAME 349 +#define _POP_JUMP_IF_FALSE 350 +#define _POP_JUMP_IF_TRUE 351 +#define _JUMP_TO_TOP 352 +#define _SAVE_CURRENT_IP 353 +#define _INSERT 354 extern int _PyOpcode_num_popped(int opcode, int oparg, bool jump); #ifdef NEED_OPCODE_METADATA @@ -356,6 +365,8 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 1; case LOAD_ATTR_WITH_HINT: return 1; + case _LOAD_ATTR_SLOT: + return 1; case LOAD_ATTR_SLOT: return 1; case LOAD_ATTR_CLASS: @@ -364,10 +375,18 @@ int _PyOpcode_num_popped(int opcode, 
int oparg, bool jump) { return 1; case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: return 1; + case _GUARD_DORV_VALUES: + return 1; + case _STORE_ATTR_INSTANCE_VALUE: + return 2; case STORE_ATTR_INSTANCE_VALUE: return 2; case STORE_ATTR_WITH_HINT: return 2; + case _GUARD_TYPE_VERSION_STORE: + return 1; + case _STORE_ATTR_SLOT: + return 2; case STORE_ATTR_SLOT: return 2; case COMPARE_OP: @@ -478,8 +497,16 @@ int _PyOpcode_num_popped(int opcode, int oparg, bool jump) { return 0; case PUSH_EXC_INFO: return 1; + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: + return 1; + case _GUARD_KEYS_VERSION: + return 1; + case _LOAD_ATTR_METHOD_WITH_VALUES: + return 1; case LOAD_ATTR_METHOD_WITH_VALUES: return 1; + case _LOAD_ATTR_METHOD_NO_DICT: + return 1; case LOAD_ATTR_METHOD_NO_DICT: return 1; case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: @@ -896,18 +923,28 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_WITH_HINT: return ((oparg & 1) ? 1 : 0) + 1; - case LOAD_ATTR_SLOT: + case _LOAD_ATTR_SLOT: return ((oparg & 1) ? 1 : 0) + 1; + case LOAD_ATTR_SLOT: + return (oparg & 1 ? 1 : 0) + 1; case LOAD_ATTR_CLASS: return ((oparg & 1) ? 1 : 0) + 1; case LOAD_ATTR_PROPERTY: return 1; case LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN: return 1; + case _GUARD_DORV_VALUES: + return 1; + case _STORE_ATTR_INSTANCE_VALUE: + return 0; case STORE_ATTR_INSTANCE_VALUE: return 0; case STORE_ATTR_WITH_HINT: return 0; + case _GUARD_TYPE_VERSION_STORE: + return 1; + case _STORE_ATTR_SLOT: + return 0; case STORE_ATTR_SLOT: return 0; case COMPARE_OP: @@ -1018,8 +1055,16 @@ int _PyOpcode_num_pushed(int opcode, int oparg, bool jump) { return 0; case PUSH_EXC_INFO: return 2; + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: + return 1; + case _GUARD_KEYS_VERSION: + return 1; + case _LOAD_ATTR_METHOD_WITH_VALUES: + return 2; case LOAD_ATTR_METHOD_WITH_VALUES: return 2; + case _LOAD_ATTR_METHOD_NO_DICT: + return 2; case LOAD_ATTR_METHOD_NO_DICT: return 2; case LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: @@ -1359,12 +1404,17 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG }, + [_LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG }, + [_GUARD_DORV_VALUES] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC, 0 }, [STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG }, [STORE_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG }, + [_GUARD_TYPE_VERSION_STORE] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, + [_STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC, 0 }, [STORE_ATTR_SLOT] = { true, INSTR_FMT_IXC000, HAS_DEOPT_FLAG }, [COMPARE_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [COMPARE_OP_FLOAT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, @@ -1420,7 +1470,11 @@ const struct opcode_metadata 
_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = { [SETUP_WITH] = { true, INSTR_FMT_IX, 0 }, [POP_BLOCK] = { true, INSTR_FMT_IX, 0 }, [PUSH_EXC_INFO] = { true, INSTR_FMT_IX, 0 }, + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG }, + [_GUARD_KEYS_VERSION] = { true, INSTR_FMT_IXC0, HAS_DEOPT_FLAG }, + [_LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, [LOAD_ATTR_METHOD_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, + [_LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG }, [LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, [LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG }, @@ -1583,6 +1637,9 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [LOAD_SUPER_ATTR_METHOD] = { .nuops = 1, .uops = { { LOAD_SUPER_ATTR_METHOD, 0, 0 } } }, [LOAD_ATTR] = { .nuops = 1, .uops = { { LOAD_ATTR, 0, 0 } } }, [LOAD_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _CHECK_MANAGED_OBJECT_HAS_VALUES, 0, 0 }, { _LOAD_ATTR_INSTANCE_VALUE, 1, 3 } } }, + [LOAD_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_SLOT, 1, 3 } } }, + [STORE_ATTR_INSTANCE_VALUE] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION_STORE, 2, 1 }, { _GUARD_DORV_VALUES, 0, 0 }, { _STORE_ATTR_INSTANCE_VALUE, 1, 3 } } }, + [STORE_ATTR_SLOT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION_STORE, 2, 1 }, { _STORE_ATTR_SLOT, 1, 3 } } }, [COMPARE_OP] = { .nuops = 1, .uops = { { COMPARE_OP, 0, 0 } } }, [COMPARE_OP_FLOAT] = { .nuops = 1, .uops = { { COMPARE_OP_FLOAT, 0, 0 } } }, [COMPARE_OP_INT] = { .nuops = 1, .uops = { { COMPARE_OP_INT, 0, 0 } } }, @@ -1600,6 +1657,8 @@ const struct opcode_macro_expansion _PyOpcode_macro_expansion[OPCODE_MACRO_EXPAN [GET_YIELD_FROM_ITER] = { .nuops = 1, .uops = { { GET_YIELD_FROM_ITER, 0, 0 } } }, [WITH_EXCEPT_START] = { .nuops = 1, .uops = { { WITH_EXCEPT_START, 0, 0 } } }, [PUSH_EXC_INFO] = { .nuops = 1, .uops = { { PUSH_EXC_INFO, 0, 0 } } }, + [LOAD_ATTR_METHOD_WITH_VALUES] = { .nuops = 4, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, 0, 0 }, { _GUARD_KEYS_VERSION, 2, 3 }, { _LOAD_ATTR_METHOD_WITH_VALUES, 4, 5 } } }, + [LOAD_ATTR_METHOD_NO_DICT] = { .nuops = 2, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_METHOD_NO_DICT, 4, 5 } } }, [CALL_BOUND_METHOD_EXACT_ARGS] = { .nuops = 9, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _INIT_CALL_BOUND_METHOD_EXACT_ARGS, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SET_IP, 7, 3 }, { _SAVE_CURRENT_IP, 0, 0 }, { _PUSH_FRAME, 0, 0 } } }, [CALL_PY_EXACT_ARGS] = { .nuops = 7, .uops = { { _CHECK_PEP_523, 0, 0 }, { _CHECK_FUNCTION_EXACT_ARGS, 2, 1 }, { _CHECK_STACK_SPACE, 0, 0 }, { _INIT_CALL_PY_EXACT_ARGS, 0, 0 }, { _SET_IP, 7, 3 }, { _SAVE_CURRENT_IP, 0, 0 }, { _PUSH_FRAME, 0, 0 } } }, [CALL_TYPE_1] = { .nuops = 1, .uops = { { CALL_TYPE_1, 0, 0 } } }, @@ -1652,6 +1711,11 @@ const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] = { [_GUARD_TYPE_VERSION] = "_GUARD_TYPE_VERSION", [_CHECK_MANAGED_OBJECT_HAS_VALUES] = "_CHECK_MANAGED_OBJECT_HAS_VALUES", [_LOAD_ATTR_INSTANCE_VALUE] = "_LOAD_ATTR_INSTANCE_VALUE", + [_LOAD_ATTR_SLOT] = 
"_LOAD_ATTR_SLOT", + [_GUARD_DORV_VALUES] = "_GUARD_DORV_VALUES", + [_STORE_ATTR_INSTANCE_VALUE] = "_STORE_ATTR_INSTANCE_VALUE", + [_GUARD_TYPE_VERSION_STORE] = "_GUARD_TYPE_VERSION_STORE", + [_STORE_ATTR_SLOT] = "_STORE_ATTR_SLOT", [_IS_NONE] = "_IS_NONE", [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", [_ITER_JUMP_LIST] = "_ITER_JUMP_LIST", @@ -1665,6 +1729,10 @@ const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] = { [_ITER_JUMP_RANGE] = "_ITER_JUMP_RANGE", [_IS_ITER_EXHAUSTED_RANGE] = "_IS_ITER_EXHAUSTED_RANGE", [_ITER_NEXT_RANGE] = "_ITER_NEXT_RANGE", + [_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT] = "_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT", + [_GUARD_KEYS_VERSION] = "_GUARD_KEYS_VERSION", + [_LOAD_ATTR_METHOD_WITH_VALUES] = "_LOAD_ATTR_METHOD_WITH_VALUES", + [_LOAD_ATTR_METHOD_NO_DICT] = "_LOAD_ATTR_METHOD_NO_DICT", [_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = "_CHECK_CALL_BOUND_METHOD_EXACT_ARGS", [_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = "_INIT_CALL_BOUND_METHOD_EXACT_ARGS", [_CHECK_PEP_523] = "_CHECK_PEP_523", diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 9fc8ae903b2ac0..2e568f8aeeb152 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -36,8 +36,12 @@ _Py_IsMainInterpreter(PyInterpreterState *interp) static inline int _Py_IsMainInterpreterFinalizing(PyInterpreterState *interp) { - return (_PyRuntimeState_GetFinalizing(interp->runtime) != NULL && - interp == &interp->runtime->_main_interpreter); + /* bpo-39877: Access _PyRuntime directly rather than using + tstate->interp->runtime to support calls from Python daemon threads. + After Py_Finalize() has been called, tstate can be a dangling pointer: + point to PyThreadState freed memory. */ + return (_PyRuntimeState_GetFinalizing(&_PyRuntime) != NULL && + interp == &_PyRuntime._main_interpreter); } diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 0ddc405f221a1c..cc3a3420befa3d 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -171,6 +171,8 @@ typedef struct pyruntimestate { Use _PyRuntimeState_GetFinalizing() and _PyRuntimeState_SetFinalizing() to access it, don't access it directly. */ _Py_atomic_address _finalizing; + /* The ID of the OS thread in which we are finalizing. */ + unsigned long _finalizing_id; struct pyinterpreters { PyThread_type_lock mutex; @@ -303,9 +305,23 @@ _PyRuntimeState_GetFinalizing(_PyRuntimeState *runtime) { return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->_finalizing); } +static inline unsigned long +_PyRuntimeState_GetFinalizingID(_PyRuntimeState *runtime) { + return _Py_atomic_load_ulong_relaxed(&runtime->_finalizing_id); +} + static inline void _PyRuntimeState_SetFinalizing(_PyRuntimeState *runtime, PyThreadState *tstate) { _Py_atomic_store_relaxed(&runtime->_finalizing, (uintptr_t)tstate); + if (tstate == NULL) { + _Py_atomic_store_ulong_relaxed(&runtime->_finalizing_id, 0); + } + else { + // XXX Re-enable this assert once gh-109860 is fixed. 
+ //assert(tstate->thread_id == PyThread_get_thread_ident()); + _Py_atomic_store_ulong_relaxed(&runtime->_finalizing_id, + tstate->thread_id); + } } #ifdef __cplusplus diff --git a/Include/internal/pycore_uops.h b/Include/internal/pycore_uops.h index 249f5c010e0092..d8a7d978f1304e 100644 --- a/Include/internal/pycore_uops.h +++ b/Include/internal/pycore_uops.h @@ -10,7 +10,7 @@ extern "C" { #include "pycore_frame.h" // _PyInterpreterFrame -#define _Py_UOP_MAX_TRACE_LENGTH 64 +#define _Py_UOP_MAX_TRACE_LENGTH 128 typedef struct { uint32_t opcode; diff --git a/Lib/_pydatetime.py b/Lib/_pydatetime.py index 88275481e7002b..bca2acf1fc88cf 100644 --- a/Lib/_pydatetime.py +++ b/Lib/_pydatetime.py @@ -1684,7 +1684,7 @@ class datetime(date): The year, month and day arguments are required. tzinfo may be None, or an instance of a tzinfo subclass. The remaining arguments may be ints. """ - __slots__ = date.__slots__ + time.__slots__ + __slots__ = time.__slots__ def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0, microsecond=0, tzinfo=None, *, fold=0): diff --git a/Lib/asyncio/streams.py b/Lib/asyncio/streams.py index b7ad365709b19e..bc84e53b8443cf 100644 --- a/Lib/asyncio/streams.py +++ b/Lib/asyncio/streams.py @@ -67,9 +67,8 @@ async def start_server(client_connected_cb, host=None, port=None, *, positional host and port, with various optional keyword arguments following. The return value is the same as loop.create_server(). - Additional optional keyword arguments are loop (to set the event loop - instance to use) and limit (to set the buffer limit passed to the - StreamReader). + An additional optional keyword argument is limit (to set the buffer + limit passed to the StreamReader). The return value is the same as loop.create_server(), i.e. a Server object which can be used to stop the service. diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 21a1b24194bcd8..72f4cc07173f0a 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -73,15 +73,25 @@ class Task(futures._PyFuture): # Inherit Python Task implementation """A coroutine wrapped in a Future.""" # An important invariant maintained while a Task not done: + # _fut_waiter is either None or a Future. The Future + # can be either done() or not done(). + # The task can be in any of 3 states: # - # - Either _fut_waiter is None, and _step() is scheduled; - # - or _fut_waiter is some Future, and _step() is *not* scheduled. + # - 1: _fut_waiter is not None and not _fut_waiter.done(): + # __step() is *not* scheduled and the Task is waiting for _fut_waiter. + # - 2: (_fut_waiter is None or _fut_waiter.done()) and __step() is scheduled: + # the Task is waiting for __step() to be executed. + # - 3: _fut_waiter is None and __step() is *not* scheduled: + # the Task is currently executing (in __step()). # - # The only transition from the latter to the former is through - # _wakeup(). When _fut_waiter is not None, one of its callbacks - # must be _wakeup(). - - # If False, don't log a message if the task is destroyed whereas its + # * In state 1, one of the callbacks of _fut_waiter must be __wakeup(). + # * The transition from 1 to 2 happens when _fut_waiter becomes done(), + # as it schedules __wakeup() to be called (which calls __step() so + # we say that __step() is scheduled). + # * It transitions from 2 to 3 when __step() is executed, and it clears + # _fut_waiter to None. 
+ + # If False, don't log a message if the task is destroyed while its + # status is still pending _log_destroy_pending = True diff --git a/Lib/codecs.py b/Lib/codecs.py index 82f23983e719c2..9b35b6127dd01c 100644 --- a/Lib/codecs.py +++ b/Lib/codecs.py @@ -111,6 +111,9 @@ def __repr__(self): (self.__class__.__module__, self.__class__.__qualname__, self.name, id(self)) + def __getnewargs__(self): + return tuple(self) + class Codec: """ Defines the interface for stateless encoders/decoders. diff --git a/Lib/concurrent/futures/process.py b/Lib/concurrent/futures/process.py index 73bdcbe8693991..3990e6b1833d78 100644 --- a/Lib/concurrent/futures/process.py +++ b/Lib/concurrent/futures/process.py @@ -341,7 +341,14 @@ def run(self): # Main loop for the executor manager thread. while True: - self.add_call_item_to_queue() + # gh-109047: During Python finalization, self.call_queue.put() can + # fail with RuntimeError when it has to start its feeder thread. + try: + self.add_call_item_to_queue() + except BaseException as exc: + cause = format_exception(exc) + self.terminate_broken(cause) + return result_item, is_broken, cause = self.wait_result_broken_or_wakeup() @@ -425,8 +432,8 @@ def wait_result_broken_or_wakeup(self): try: result_item = result_reader.recv() is_broken = False - except BaseException as e: - cause = format_exception(type(e), e, e.__traceback__) + except BaseException as exc: + cause = format_exception(exc) elif wakeup_reader in ready: is_broken = False @@ -463,7 +470,7 @@ def is_shutting_down(self): return (_global_shutdown or executor is None or executor._shutdown_thread) - def terminate_broken(self, cause): + def _terminate_broken(self, cause): # Terminate the executor because it is in a broken state. The cause # argument can be used to display more information on the error that # lead the executor into becoming broken. @@ -490,7 +497,7 @@ def terminate_broken(self, cause): for work_id, work_item in self.pending_work_items.items(): try: work_item.future.set_exception(bpe) - except _base.InvalidStateError as exc: + except _base.InvalidStateError: # set_exception() fails if the future is cancelled: ignore it. # Trying to check if the future is cancelled before calling # set_exception() would leave a race condition if the future is @@ -505,17 +512,14 @@ def terminate_broken(self, cause): for p in self.processes.values(): p.terminate() - # Prevent queue writing to a pipe which is no longer read. - # https://github.com/python/cpython/issues/94777 - self.call_queue._reader.close() - - # gh-107219: Close the connection writer which can unblock - # Queue._feed() if it was stuck in send_bytes(). - if sys.platform == 'win32': - self.call_queue._writer.close() + self.call_queue._terminate_broken() # clean up resources - self.join_executor_internals() + self._join_executor_internals(broken=True) + + def terminate_broken(self, cause): + with self.shutdown_lock: + self._terminate_broken(cause) def flag_executor_shutting_down(self): # Flag the executor as shutting down and cancel remaining tasks if @@ -558,15 +562,24 @@ def shutdown_workers(self): break def join_executor_internals(self): - self.shutdown_workers() + with self.shutdown_lock: + self._join_executor_internals() + + def _join_executor_internals(self, broken=False): + # If broken, call_queue was closed and so can no longer be used. + if not broken: + self.shutdown_workers() + + # Release the queue's resources as soon as possible. 
self.call_queue.close() self.call_queue.join_thread() - with self.shutdown_lock: - self.thread_wakeup.close() + self.thread_wakeup.close() + # If .join() is not called on the created processes then # some ctx.Queue methods may deadlock on Mac OS X. for p in self.processes.values(): + if broken: + p.terminate() p.join() def get_n_children_alive(self): diff --git a/Lib/copy.py b/Lib/copy.py index 6d7bb9a111b5b4..a69bc4e78c20b3 100644 --- a/Lib/copy.py +++ b/Lib/copy.py @@ -121,13 +121,13 @@ def deepcopy(x, memo=None, _nil=[]): See the module's __doc__ string for more info. """ + d = id(x) if memo is None: memo = {} - - d = id(x) - y = memo.get(d, _nil) - if y is not _nil: - return y + else: + y = memo.get(d, _nil) + if y is not _nil: + return y cls = type(x) diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 2d228e563094c8..eb7e020d1edfc0 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -159,12 +159,9 @@ def addLevelName(level, levelName): This is used when converting levels to text during message formatting. """ - _acquireLock() - try: #unlikely to cause an exception, but you never know... + with _lock: _levelToName[level] = levelName _nameToLevel[levelName] = level - finally: - _releaseLock() if hasattr(sys, "_getframe"): currentframe = lambda: sys._getframe(1) @@ -231,25 +228,27 @@ def _checkLevel(level): # _lock = threading.RLock() -def _acquireLock(): +def _prepareFork(): """ - Acquire the module-level lock for serializing access to shared data. + Prepare to fork a new child process by acquiring the module-level lock. - This should be released with _releaseLock(). + This should be used in conjunction with _afterFork(). """ - if _lock: - try: - _lock.acquire() - except BaseException: - _lock.release() - raise + # Wrap the lock acquisition in a try-except to prevent the lock from being + # abandoned in the event of an asynchronous exception. See gh-106238. + try: + _lock.acquire() + except BaseException: + _lock.release() + raise -def _releaseLock(): +def _afterFork(): """ - Release the module-level lock acquired by calling _acquireLock(). + After a new child process has been forked, release the module-level lock. + + This should be used in conjunction with _prepareFork(). """ - if _lock: - _lock.release() + _lock.release() # Prevent a held logging lock from blocking a child from logging. @@ -264,23 +263,20 @@ def _register_at_fork_reinit_lock(instance): _at_fork_reinit_lock_weakset = weakref.WeakSet() def _register_at_fork_reinit_lock(instance): - _acquireLock() - try: + with _lock: _at_fork_reinit_lock_weakset.add(instance) - finally: - _releaseLock() def _after_at_fork_child_reinit_locks(): for handler in _at_fork_reinit_lock_weakset: handler._at_fork_reinit() - # _acquireLock() was called in the parent before forking. + # _prepareFork() was called in the parent before forking. # The lock is reinitialized to unlocked state. _lock._at_fork_reinit() - os.register_at_fork(before=_acquireLock, + os.register_at_fork(before=_prepareFork, after_in_child=_after_at_fork_child_reinit_locks, - after_in_parent=_releaseLock) + after_in_parent=_afterFork) #--------------------------------------------------------------------------- @@ -883,25 +879,20 @@ def _removeHandlerRef(wr): # set to None. It can also be called from another thread. So we need to # pre-emptively grab the necessary globals and check if they're None, # to prevent race conditions and failures during interpreter shutdown. 
- acquire, release, handlers = _acquireLock, _releaseLock, _handlerList - if acquire and release and handlers: - acquire() - try: - handlers.remove(wr) - except ValueError: - pass - finally: - release() + handlers, lock = _handlerList, _lock + if lock and handlers: + with lock: + try: + handlers.remove(wr) + except ValueError: + pass def _addHandlerRef(handler): """ Add a handler to the internal cleanup list using a weak reference. """ - _acquireLock() - try: + with _lock: _handlerList.append(weakref.ref(handler, _removeHandlerRef)) - finally: - _releaseLock() def getHandlerByName(name): @@ -946,15 +937,12 @@ def get_name(self): return self._name def set_name(self, name): - _acquireLock() - try: + with _lock: if self._name in _handlers: del _handlers[self._name] self._name = name if name: _handlers[name] = self - finally: - _releaseLock() name = property(get_name, set_name) @@ -1026,11 +1014,8 @@ def handle(self, record): if isinstance(rv, LogRecord): record = rv if rv: - self.acquire() - try: + with self.lock: self.emit(record) - finally: - self.release() return rv def setFormatter(self, fmt): @@ -1058,13 +1043,10 @@ def close(self): methods. """ #get the module data lock, as we're updating a shared structure. - _acquireLock() - try: #unlikely to raise an exception, but you never know... + with _lock: self._closed = True if self._name and self._name in _handlers: del _handlers[self._name] - finally: - _releaseLock() def handleError(self, record): """ @@ -1141,12 +1123,9 @@ def flush(self): """ Flushes the stream. """ - self.acquire() - try: + with self.lock: if self.stream and hasattr(self.stream, "flush"): self.stream.flush() - finally: - self.release() def emit(self, record): """ @@ -1182,12 +1161,9 @@ def setStream(self, stream): result = None else: result = self.stream - self.acquire() - try: + with self.lock: self.flush() self.stream = stream - finally: - self.release() return result def __repr__(self): @@ -1237,8 +1213,7 @@ def close(self): """ Closes the stream. """ - self.acquire() - try: + with self.lock: try: if self.stream: try: @@ -1254,8 +1229,6 @@ def close(self): # Also see Issue #42378: we also rely on # self._closed being set to True there StreamHandler.close(self) - finally: - self.release() def _open(self): """ @@ -1391,8 +1364,7 @@ def getLogger(self, name): rv = None if not isinstance(name, str): raise TypeError('A logger name must be a string') - _acquireLock() - try: + with _lock: if name in self.loggerDict: rv = self.loggerDict[name] if isinstance(rv, PlaceHolder): @@ -1407,8 +1379,6 @@ def getLogger(self, name): rv.manager = self self.loggerDict[name] = rv self._fixupParents(rv) - finally: - _releaseLock() return rv def setLoggerClass(self, klass): @@ -1471,12 +1441,11 @@ def _clear_cache(self): Called when level changes are made """ - _acquireLock() - for logger in self.loggerDict.values(): - if isinstance(logger, Logger): - logger._cache.clear() - self.root._cache.clear() - _releaseLock() + with _lock: + for logger in self.loggerDict.values(): + if isinstance(logger, Logger): + logger._cache.clear() + self.root._cache.clear() #--------------------------------------------------------------------------- # Logger classes and functions @@ -1701,23 +1670,17 @@ def addHandler(self, hdlr): """ Add the specified handler to this logger. """ - _acquireLock() - try: + with _lock: if not (hdlr in self.handlers): self.handlers.append(hdlr) - finally: - _releaseLock() def removeHandler(self, hdlr): """ Remove the specified handler from this logger. 
""" - _acquireLock() - try: + with _lock: if hdlr in self.handlers: self.handlers.remove(hdlr) - finally: - _releaseLock() def hasHandlers(self): """ @@ -1795,16 +1758,13 @@ def isEnabledFor(self, level): try: return self._cache[level] except KeyError: - _acquireLock() - try: + with _lock: if self.manager.disable >= level: is_enabled = self._cache[level] = False else: is_enabled = self._cache[level] = ( level >= self.getEffectiveLevel() ) - finally: - _releaseLock() return is_enabled def getChild(self, suffix): @@ -1834,16 +1794,13 @@ def _hierlevel(logger): return 1 + logger.name.count('.') d = self.manager.loggerDict - _acquireLock() - try: + with _lock: # exclude PlaceHolders - the last check is to ensure that lower-level # descendants aren't returned - if there are placeholders, a logger's # parent field might point to a grandparent or ancestor thereof. return set(item for item in d.values() if isinstance(item, Logger) and item.parent is self and _hierlevel(item) == 1 + _hierlevel(item.parent)) - finally: - _releaseLock() def __repr__(self): level = getLevelName(self.getEffectiveLevel()) @@ -2102,8 +2059,7 @@ def basicConfig(**kwargs): """ # Add thread safety in case someone mistakenly calls # basicConfig() from multiple threads - _acquireLock() - try: + with _lock: force = kwargs.pop('force', False) encoding = kwargs.pop('encoding', None) errors = kwargs.pop('errors', 'backslashreplace') @@ -2152,8 +2108,6 @@ def basicConfig(**kwargs): if kwargs: keys = ', '.join(kwargs.keys()) raise ValueError('Unrecognised argument(s): %s' % keys) - finally: - _releaseLock() #--------------------------------------------------------------------------- # Utility functions at module level. diff --git a/Lib/logging/config.py b/Lib/logging/config.py index 41283f4d627267..951bba73913cb3 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -83,15 +83,12 @@ def fileConfig(fname, defaults=None, disable_existing_loggers=True, encoding=Non formatters = _create_formatters(cp) # critical section - logging._acquireLock() - try: + with logging._lock: _clearExistingHandlers() # Handlers add themselves to logging._handlers handlers = _install_handlers(cp, formatters) _install_loggers(cp, handlers, disable_existing_loggers) - finally: - logging._releaseLock() def _resolve(name): @@ -516,8 +513,7 @@ def configure(self): raise ValueError("Unsupported version: %s" % config['version']) incremental = config.pop('incremental', False) EMPTY_DICT = {} - logging._acquireLock() - try: + with logging._lock: if incremental: handlers = config.get('handlers', EMPTY_DICT) for name in handlers: @@ -661,8 +657,6 @@ def configure(self): except Exception as e: raise ValueError('Unable to configure root ' 'logger') from e - finally: - logging._releaseLock() def configure_formatter(self, config): """Configure a formatter from a dictionary.""" @@ -988,9 +982,8 @@ class ConfigSocketReceiver(ThreadingTCPServer): def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT, handler=None, ready=None, verify=None): ThreadingTCPServer.__init__(self, (host, port), handler) - logging._acquireLock() - self.abort = 0 - logging._releaseLock() + with logging._lock: + self.abort = 0 self.timeout = 1 self.ready = ready self.verify = verify @@ -1004,9 +997,8 @@ def serve_until_stopped(self): self.timeout) if rd: self.handle_request() - logging._acquireLock() - abort = self.abort - logging._releaseLock() + with logging._lock: + abort = self.abort self.server_close() class Server(threading.Thread): @@ -1027,9 +1019,8 @@ def 
run(self): self.port = server.server_address[1] self.ready.set() global _listener - logging._acquireLock() - _listener = server - logging._releaseLock() + with logging._lock: + _listener = server server.serve_until_stopped() return Server(ConfigSocketReceiver, ConfigStreamHandler, port, verify) @@ -1039,10 +1030,7 @@ def stopListening(): Stop the listening server which was created with a call to listen(). """ global _listener - logging._acquireLock() - try: + with logging._lock: if _listener: _listener.abort = 1 _listener = None - finally: - logging._releaseLock() diff --git a/Lib/logging/handlers.py b/Lib/logging/handlers.py index 671cc9596b02dd..e75da9b7b1de64 100644 --- a/Lib/logging/handlers.py +++ b/Lib/logging/handlers.py @@ -683,15 +683,12 @@ def close(self): """ Closes the socket. """ - self.acquire() - try: + with self.lock: sock = self.sock if sock: self.sock = None sock.close() logging.Handler.close(self) - finally: - self.release() class DatagramHandler(SocketHandler): """ @@ -953,15 +950,12 @@ def close(self): """ Closes the socket. """ - self.acquire() - try: + with self.lock: sock = self.socket if sock: self.socket = None sock.close() logging.Handler.close(self) - finally: - self.release() def mapPriority(self, levelName): """ @@ -1333,11 +1327,8 @@ def flush(self): This version just zaps the buffer to empty. """ - self.acquire() - try: + with self.lock: self.buffer.clear() - finally: - self.release() def close(self): """ @@ -1387,11 +1378,8 @@ def setTarget(self, target): """ Set the target handler for this handler. """ - self.acquire() - try: + with self.lock: self.target = target - finally: - self.release() def flush(self): """ @@ -1401,14 +1389,11 @@ def flush(self): The record buffer is only cleared if a target has been set. """ - self.acquire() - try: + with self.lock: if self.target: for record in self.buffer: self.target.handle(record) self.buffer.clear() - finally: - self.release() def close(self): """ @@ -1419,12 +1404,9 @@ def close(self): if self.flushOnClose: self.flush() finally: - self.acquire() - try: + with self.lock: self.target = None BufferingHandler.close(self) - finally: - self.release() class QueueHandler(logging.Handler): diff --git a/Lib/multiprocessing/popen_spawn_win32.py b/Lib/multiprocessing/popen_spawn_win32.py index 4d60ffc030bea6..af044305709e56 100644 --- a/Lib/multiprocessing/popen_spawn_win32.py +++ b/Lib/multiprocessing/popen_spawn_win32.py @@ -14,6 +14,7 @@ # # +# Exit code used by Popen.terminate() TERMINATE = 0x10000 WINEXE = (sys.platform == 'win32' and getattr(sys, 'frozen', False)) WINSERVICE = sys.executable.lower().endswith("pythonservice.exe") @@ -122,9 +123,15 @@ def terminate(self): if self.returncode is None: try: _winapi.TerminateProcess(int(self._handle), TERMINATE) - except OSError: - if self.wait(timeout=1.0) is None: + except PermissionError: + # ERROR_ACCESS_DENIED (winerror 5) is received when the + # process already died. + code = _winapi.GetExitCodeProcess(int(self._handle)) + if code == _winapi.STILL_ACTIVE: raise + self.returncode = code + else: + self.returncode = -signal.SIGTERM kill = terminate diff --git a/Lib/multiprocessing/queues.py b/Lib/multiprocessing/queues.py index daf9ee94a19431..852ae87b276861 100644 --- a/Lib/multiprocessing/queues.py +++ b/Lib/multiprocessing/queues.py @@ -158,6 +158,20 @@ def cancel_join_thread(self): except AttributeError: pass + def _terminate_broken(self): + # Close a Queue on error. + + # gh-94777: Prevent queue writing to a pipe which is no longer read. 
+ self._reader.close() + + # gh-107219: Close the connection writer which can unblock + # Queue._feed() if it was stuck in send_bytes(). + if sys.platform == 'win32': + self._writer.close() + + self.close() + self.join_thread() + def _start_thread(self): debug('Queue._start_thread()') @@ -169,13 +183,19 @@ def _start_thread(self): self._wlock, self._reader.close, self._writer.close, self._ignore_epipe, self._on_queue_feeder_error, self._sem), - name='QueueFeederThread' + name='QueueFeederThread', + daemon=True, ) - self._thread.daemon = True - debug('doing self._thread.start()') - self._thread.start() - debug('... done self._thread.start()') + try: + debug('doing self._thread.start()') + self._thread.start() + debug('... done self._thread.start()') + except: + # gh-109047: During Python finalization, creating a thread + # can fail with RuntimeError. + self._thread = None + raise if not self._joincancelled: self._jointhread = Finalize( diff --git a/Lib/multiprocessing/util.py b/Lib/multiprocessing/util.py index 6ee0d33e88a060..28c77df1c32ea8 100644 --- a/Lib/multiprocessing/util.py +++ b/Lib/multiprocessing/util.py @@ -64,8 +64,7 @@ def get_logger(): global _logger import logging - logging._acquireLock() - try: + with logging._lock: if not _logger: _logger = logging.getLogger(LOGGER_NAME) @@ -79,9 +78,6 @@ def get_logger(): atexit._exithandlers.remove((_exit_function, (), {})) atexit._exithandlers.append((_exit_function, (), {})) - finally: - logging._releaseLock() - return _logger def log_to_stderr(level=None): diff --git a/Lib/pathlib.py b/Lib/pathlib.py index f7750764a70334..cad4ca8d15eeec 100644 --- a/Lib/pathlib.py +++ b/Lib/pathlib.py @@ -5,6 +5,7 @@ operating systems. """ +import contextlib import functools import glob import io @@ -15,10 +16,19 @@ import sys import warnings from _collections_abc import Sequence -from errno import ENOENT, ENOTDIR, EBADF, ELOOP +from errno import ENOENT, ENOTDIR, EBADF, ELOOP, EINVAL from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO from urllib.parse import quote_from_bytes as urlquote_from_bytes +try: + import pwd +except ImportError: + pwd = None +try: + import grp +except ImportError: + grp = None + __all__ = [ "UnsupportedOperation", @@ -30,6 +40,9 @@ # Internals # +# Maximum number of symlinks to follow in _PathBase.resolve() +_MAX_SYMLINKS = 40 + # Reference for Windows paths can be found at # https://learn.microsoft.com/en-gb/windows/win32/fileio/naming-a-file . _WIN_RESERVED_NAMES = frozenset( @@ -223,6 +236,11 @@ class PurePath: # The `_hash` slot stores the hash of the case-normalized string # path. It's set when `__hash__()` is called for the first time. '_hash', + + # The '_resolving' slot stores a boolean indicating whether the path + # is being processed by `_PathBase.resolve()`. This prevents duplicate + # work from occurring when `resolve()` calls `stat()` or `readlink()`. + '_resolving', ) pathmod = os.path @@ -262,6 +280,7 @@ def __init__(self, *args): f"not {type(path).__name__!r}") paths.append(path) self._raw_paths = paths + self._resolving = False def with_segments(self, *pathsegments): """Construct a new path object from any number of path-like objects. 
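The ``with_segments()`` method that closes the hunk above is the hook through which every derived path (``parent``, the results of ``iterdir()``, and so on) is constructed, so a subclass can override it to carry extra state along; a minimal sketch, where the ``session_id`` attribute is a hypothetical example::

    from pathlib import PurePosixPath

    class TaggedPath(PurePosixPath):
        def __init__(self, *pathsegments, session_id=None):
            super().__init__(*pathsegments)
            self.session_id = session_id

        def with_segments(self, *pathsegments):
            # Propagate the extra state into every derived path.
            return type(self)(*pathsegments, session_id=self.session_id)

    p = TaggedPath("/srv/data/file.txt", session_id=42)
    print(p.parent.session_id)  # 42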
@@ -347,7 +366,7 @@ def __repr__(self): return "{}({!r})".format(self.__class__.__name__, self.as_posix()) def as_uri(self): - """Return the path as a 'file' URI.""" + """Return the path as a URI.""" if not self.is_absolute(): raise ValueError("relative path can't be expressed as a file URI") @@ -608,7 +627,9 @@ def parent(self): tail = self._tail if not tail: return self - return self._from_parsed_parts(drv, root, tail[:-1]) + path = self._from_parsed_parts(drv, root, tail[:-1]) + path._resolving = self._resolving + return path @property def parents(self): @@ -696,23 +717,35 @@ class PureWindowsPath(PurePath): # Filesystem-accessing classes -class Path(PurePath): - """PurePath subclass that can make system calls. +class _PathBase(PurePath): + """Base class for concrete path objects. - Path represents a filesystem path but unlike PurePath, also offers - methods to do system calls on path objects. Depending on your system, - instantiating a Path will return either a PosixPath or a WindowsPath - object. You can also instantiate a PosixPath or WindowsPath directly, - but cannot instantiate a WindowsPath on a POSIX system or vice versa. + This class provides dummy implementations for many methods that derived + classes can override selectively; the default implementations raise + UnsupportedOperation. The most basic methods, such as stat() and open(), + directly raise UnsupportedOperation; these basic methods are called by + other methods such as is_dir() and read_text(). + + The Path class derives this class to implement local filesystem paths. + Users may derive their own classes to implement virtual filesystem paths, + such as paths in archive files or on remote storage systems. """ __slots__ = () + __bytes__ = None + __fspath__ = None # virtual paths have no local file system representation + + def _unsupported(self, method_name): + msg = f"{type(self).__name__}.{method_name}() is unsupported" + if isinstance(self, Path): + msg += " on this system" + raise UnsupportedOperation(msg) def stat(self, *, follow_symlinks=True): """ Return the result of the stat() system call on this path, like os.stat() does. """ - return os.stat(self, follow_symlinks=follow_symlinks) + self._unsupported("stat") def lstat(self): """ @@ -779,7 +812,21 @@ def is_mount(self): """ Check if this path is a mount point """ - return os.path.ismount(self) + # Need to exist and be a dir + if not self.exists() or not self.is_dir(): + return False + + try: + parent_dev = self.parent.stat().st_dev + except OSError: + return False + + dev = self.stat().st_dev + if dev != parent_dev: + return True + ino = self.stat().st_ino + parent_ino = self.parent.stat().st_ino + return ino == parent_ino def is_symlink(self): """ @@ -800,7 +847,10 @@ def is_junction(self): """ Whether this path is a junction. """ - return os.path.isjunction(self) + # Junctions are a Windows-only feature, not present in POSIX nor the + # majority of virtual filesystems. There is no cross-platform idiom + # to check for junctions (using stat().st_mode). + return False def is_block_device(self): """ @@ -884,9 +934,7 @@ def open(self, mode='r', buffering=-1, encoding=None, Open the file pointed by this path and return a file object, as the built-in open() function does. 
""" - if "b" not in mode: - encoding = io.text_encoding(encoding) - return io.open(self, mode, buffering, encoding, errors, newline) + self._unsupported("open") def read_bytes(self): """ @@ -929,13 +977,12 @@ def iterdir(self): The children are yielded in arbitrary order, and the special entries '.' and '..' are not included. """ - return (self._make_child_relpath(name) for name in os.listdir(self)) + self._unsupported("iterdir") def _scandir(self): - # bpo-24132: a future version of pathlib will support subclassing of - # pathlib.Path to customize how the filesystem is accessed. This - # includes scandir(), which is used to implement glob(). - return os.scandir(self) + # Emulate os.scandir(), which returns an object that can be used as a + # context manager. This method is called by walk() and glob(). + return contextlib.nullcontext(self.iterdir()) def _make_child_relpath(self, name): path_str = str(self) @@ -1058,13 +1105,13 @@ def walk(self, top_down=True, on_error=None, follow_symlinks=False): # blow up for a minor reason when (say) a thousand readable # directories are still left to visit. That logic is copied here. try: - scandir_it = path._scandir() + scandir_obj = path._scandir() except OSError as error: if on_error is not None: on_error(error) continue - with scandir_it: + with scandir_obj as scandir_it: dirnames = [] filenames = [] for entry in scandir_it: @@ -1086,17 +1133,13 @@ def walk(self, top_down=True, on_error=None, follow_symlinks=False): paths += [path._make_child_relpath(d) for d in reversed(dirnames)] - def __init__(self, *args, **kwargs): - if kwargs: - msg = ("support for supplying keyword arguments to pathlib.PurePath " - "is deprecated and scheduled for removal in Python {remove}") - warnings._deprecated("pathlib.PurePath(**kwargs)", msg, remove=(3, 14)) - super().__init__(*args) + def absolute(self): + """Return an absolute version of this path + No normalization or symlink resolution is performed. - def __new__(cls, *args, **kwargs): - if cls is Path: - cls = WindowsPath if os.name == 'nt' else PosixPath - return object.__new__(cls) + Use resolve() to resolve symlinks and remove '..' segments. + """ + self._unsupported("absolute") @classmethod def cwd(cls): @@ -1107,18 +1150,264 @@ def cwd(cls): # os.path.abspath('.') == os.getcwd(). return cls().absolute() + def expanduser(self): + """ Return a new path with expanded ~ and ~user constructs + (as returned by os.path.expanduser) + """ + self._unsupported("expanduser") + @classmethod def home(cls): - """Return a new path pointing to the user's home directory (as - returned by os.path.expanduser('~')). + """Return a new path pointing to expanduser('~'). """ return cls("~").expanduser() + def readlink(self): + """ + Return the path to which the symbolic link points. + """ + self._unsupported("readlink") + readlink._supported = False + + def _split_stack(self): + """ + Split the path into a 2-tuple (anchor, parts), where *anchor* is the + uppermost parent of the path (equivalent to path.parents[-1]), and + *parts* is a reversed list of parts following the anchor. + """ + return self._from_parsed_parts(self.drive, self.root, []), self._tail[::-1] + + def resolve(self, strict=False): + """ + Make the path absolute, resolving all symlinks on the way and also + normalizing it. 
+ """ + if self._resolving: + return self + try: + path = self.absolute() + except UnsupportedOperation: + path = self + + # If the user has *not* overridden the `readlink()` method, then symlinks are unsupported + # and (in non-strict mode) we can improve performance by not calling `stat()`. + querying = strict or getattr(self.readlink, '_supported', True) + link_count = 0 + stat_cache = {} + target_cache = {} + path, parts = path._split_stack() + while parts: + part = parts.pop() + if part == '..': + if not path._tail: + if path.root: + # Delete '..' segment immediately following root + continue + elif path._tail[-1] != '..': + # Delete '..' segment and its predecessor + path = path.parent + continue + # Join the current part onto the path. + path_parent = path + path = path._make_child_relpath(part) + if querying and part != '..': + path._resolving = True + try: + st = stat_cache.get(path) + if st is None: + st = stat_cache[path] = path.stat(follow_symlinks=False) + if S_ISLNK(st.st_mode): + # Like Linux and macOS, raise OSError(errno.ELOOP) if too many symlinks are + # encountered during resolution. + link_count += 1 + if link_count >= _MAX_SYMLINKS: + raise OSError(ELOOP, "Too many symbolic links in path", str(path)) + target = target_cache.get(path) + if target is None: + target = target_cache[path] = path.readlink() + target, target_parts = target._split_stack() + # If the symlink target is absolute (like '/etc/hosts'), set the current + # path to its uppermost parent (like '/'). If not, the symlink target is + # relative to the symlink parent, which we recorded earlier. + path = target if target.root else path_parent + # Add the symlink target's reversed tail parts (like ['hosts', 'etc']) to + # the stack of unresolved path parts. + parts.extend(target_parts) + elif parts and not S_ISDIR(st.st_mode): + raise NotADirectoryError(ENOTDIR, "Not a directory", str(path)) + except OSError: + if strict: + raise + else: + querying = False + path._resolving = False + return path + + def symlink_to(self, target, target_is_directory=False): + """ + Make this path a symlink pointing to the target path. + Note the order of arguments (link, target) is the reverse of os.symlink. + """ + self._unsupported("symlink_to") + + def hardlink_to(self, target): + """ + Make this path a hard link pointing to the same file as *target*. + + Note the order of arguments (self, target) is the reverse of os.link's. + """ + self._unsupported("hardlink_to") + + def touch(self, mode=0o666, exist_ok=True): + """ + Create this file with the given access mode, if it doesn't exist. + """ + self._unsupported("touch") + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + """ + Create a new directory at this given path. + """ + self._unsupported("mkdir") + + def rename(self, target): + """ + Rename this path to the target path. + + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. + + Returns the new Path instance pointing to the target path. + """ + self._unsupported("rename") + + def replace(self, target): + """ + Rename this path to the target path, overwriting if that path exists. + + The target path may be absolute or relative. Relative paths are + interpreted relative to the current working directory, *not* the + directory of the Path object. + + Returns the new Path instance pointing to the target path. 
+ """ + self._unsupported("replace") + + def chmod(self, mode, *, follow_symlinks=True): + """ + Change the permissions of the path, like os.chmod(). + """ + self._unsupported("chmod") + + def lchmod(self, mode): + """ + Like chmod(), except if the path points to a symlink, the symlink's + permissions are changed, rather than its target's. + """ + self.chmod(mode, follow_symlinks=False) + + def unlink(self, missing_ok=False): + """ + Remove this file or link. + If the path is a directory, use rmdir() instead. + """ + self._unsupported("unlink") + + def rmdir(self): + """ + Remove this directory. The directory must be empty. + """ + self._unsupported("rmdir") + + def owner(self): + """ + Return the login name of the file owner. + """ + self._unsupported("owner") + + def group(self): + """ + Return the group name of the file gid. + """ + self._unsupported("group") + + def as_uri(self): + """Return the path as a URI.""" + self._unsupported("as_uri") + + +class Path(_PathBase): + """PurePath subclass that can make system calls. + + Path represents a filesystem path but unlike PurePath, also offers + methods to do system calls on path objects. Depending on your system, + instantiating a Path will return either a PosixPath or a WindowsPath + object. You can also instantiate a PosixPath or WindowsPath directly, + but cannot instantiate a WindowsPath on a POSIX system or vice versa. + """ + __slots__ = () + __bytes__ = PurePath.__bytes__ + __fspath__ = PurePath.__fspath__ + as_uri = PurePath.as_uri + + def __init__(self, *args, **kwargs): + if kwargs: + msg = ("support for supplying keyword arguments to pathlib.PurePath " + "is deprecated and scheduled for removal in Python {remove}") + warnings._deprecated("pathlib.PurePath(**kwargs)", msg, remove=(3, 14)) + super().__init__(*args) + + def __new__(cls, *args, **kwargs): + if cls is Path: + cls = WindowsPath if os.name == 'nt' else PosixPath + return object.__new__(cls) + + def stat(self, *, follow_symlinks=True): + """ + Return the result of the stat() system call on this path, like + os.stat() does. + """ + return os.stat(self, follow_symlinks=follow_symlinks) + + def is_mount(self): + """ + Check if this path is a mount point + """ + return os.path.ismount(self) + + def is_junction(self): + """ + Whether this path is a junction. + """ + return os.path.isjunction(self) + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + """ + Open the file pointed by this path and return a file object, as + the built-in open() function does. + """ + if "b" not in mode: + encoding = io.text_encoding(encoding) + return io.open(self, mode, buffering, encoding, errors, newline) + + def iterdir(self): + """Yield path objects of the directory contents. + + The children are yielded in arbitrary order, and the + special entries '.' and '..' are not included. + """ + return (self._make_child_relpath(name) for name in os.listdir(self)) + + def _scandir(self): + return os.scandir(self) + def absolute(self): - """Return an absolute version of this path by prepending the current - working directory. No normalization or symlink resolution is performed. + """Return an absolute version of this path + No normalization or symlink resolution is performed. - Use resolve() to get the canonical path to a file. + Use resolve() to resolve symlinks and remove '..' segments. 
""" if self.is_absolute(): return self @@ -1146,34 +1435,26 @@ def resolve(self, strict=False): return self.with_segments(os.path.realpath(self, strict=strict)) - def owner(self): - """ - Return the login name of the file owner. - """ - try: - import pwd + if pwd: + def owner(self): + """ + Return the login name of the file owner. + """ return pwd.getpwuid(self.stat().st_uid).pw_name - except ImportError: - raise UnsupportedOperation("Path.owner() is unsupported on this system") - - def group(self): - """ - Return the group name of the file gid. - """ - try: - import grp + if grp: + def group(self): + """ + Return the group name of the file gid. + """ return grp.getgrgid(self.stat().st_gid).gr_name - except ImportError: - raise UnsupportedOperation("Path.group() is unsupported on this system") - def readlink(self): - """ - Return the path to which the symbolic link points. - """ - if not hasattr(os, "readlink"): - raise UnsupportedOperation("os.readlink() not available on this system") - return self.with_segments(os.readlink(self)) + if hasattr(os, "readlink"): + def readlink(self): + """ + Return the path to which the symbolic link points. + """ + return self.with_segments(os.readlink(self)) def touch(self, mode=0o666, exist_ok=True): """ @@ -1220,13 +1501,6 @@ def chmod(self, mode, *, follow_symlinks=True): """ os.chmod(self, mode, follow_symlinks=follow_symlinks) - def lchmod(self, mode): - """ - Like chmod(), except if the path points to a symlink, the symlink's - permissions are changed, rather than its target's. - """ - self.chmod(mode, follow_symlinks=False) - def unlink(self, missing_ok=False): """ Remove this file or link. @@ -1270,24 +1544,22 @@ def replace(self, target): os.replace(self, target) return self.with_segments(target) - def symlink_to(self, target, target_is_directory=False): - """ - Make this path a symlink pointing to the target path. - Note the order of arguments (link, target) is the reverse of os.symlink. - """ - if not hasattr(os, "symlink"): - raise UnsupportedOperation("os.symlink() not available on this system") - os.symlink(target, self, target_is_directory) - - def hardlink_to(self, target): - """ - Make this path a hard link pointing to the same file as *target*. - - Note the order of arguments (self, target) is the reverse of os.link's. - """ - if not hasattr(os, "link"): - raise UnsupportedOperation("os.link() not available on this system") - os.link(target, self) + if hasattr(os, "symlink"): + def symlink_to(self, target, target_is_directory=False): + """ + Make this path a symlink pointing to the target path. + Note the order of arguments (link, target) is the reverse of os.symlink. + """ + os.symlink(target, self, target_is_directory) + + if hasattr(os, "link"): + def hardlink_to(self, target): + """ + Make this path a hard link pointing to the same file as *target*. + + Note the order of arguments (self, target) is the reverse of os.link's. 
+ """ + os.link(target, self) def expanduser(self): """ Return a new path with expanded ~ and ~user constructs diff --git a/Lib/reprlib.py b/Lib/reprlib.py index 840dd0e20132b1..05bb1a0eb01795 100644 --- a/Lib/reprlib.py +++ b/Lib/reprlib.py @@ -29,6 +29,7 @@ def wrapper(self): wrapper.__name__ = getattr(user_function, '__name__') wrapper.__qualname__ = getattr(user_function, '__qualname__') wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + wrapper.__type_params__ = getattr(user_function, '__type_params__', ()) wrapper.__wrapped__ = user_function return wrapper diff --git a/Lib/selectors.py b/Lib/selectors.py index 20367c9152f331..b8e5f6a4f77d89 100644 --- a/Lib/selectors.py +++ b/Lib/selectors.py @@ -491,6 +491,7 @@ class KqueueSelector(_BaseSelectorImpl): def __init__(self): super().__init__() self._selector = select.kqueue() + self._max_events = 0 def fileno(self): return self._selector.fileno() @@ -502,10 +503,12 @@ def register(self, fileobj, events, data=None): kev = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_ADD) self._selector.control([kev], 0, 0) + self._max_events += 1 if events & EVENT_WRITE: kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_ADD) self._selector.control([kev], 0, 0) + self._max_events += 1 except: super().unregister(fileobj) raise @@ -516,6 +519,7 @@ def unregister(self, fileobj): if key.events & EVENT_READ: kev = select.kevent(key.fd, select.KQ_FILTER_READ, select.KQ_EV_DELETE) + self._max_events -= 1 try: self._selector.control([kev], 0, 0) except OSError: @@ -525,6 +529,7 @@ def unregister(self, fileobj): if key.events & EVENT_WRITE: kev = select.kevent(key.fd, select.KQ_FILTER_WRITE, select.KQ_EV_DELETE) + self._max_events -= 1 try: self._selector.control([kev], 0, 0) except OSError: @@ -537,7 +542,7 @@ def select(self, timeout=None): # If max_ev is 0, kqueue will ignore the timeout. For consistent # behavior with the other selector classes, we prevent that here # (using max). See https://bugs.python.org/issue29255 - max_ev = len(self._fd_to_key) or 1 + max_ev = self._max_events or 1 ready = [] try: kev_list = self._selector.control(None, max_ev, timeout) diff --git a/Lib/symtable.py b/Lib/symtable.py index 5dd71ffc6b4f19..4b0bc6f497a553 100644 --- a/Lib/symtable.py +++ b/Lib/symtable.py @@ -62,8 +62,8 @@ def __repr__(self): def get_type(self): """Return the type of the symbol table. - The values returned are 'class', 'module' and - 'function'. + The values returned are 'class', 'module', 'function', + 'annotation', 'TypeVar bound', 'type alias', and 'type parameter'. """ if self._table.type == _symtable.TYPE_MODULE: return "module" @@ -71,8 +71,15 @@ def get_type(self): return "function" if self._table.type == _symtable.TYPE_CLASS: return "class" - assert self._table.type in (1, 2, 3), \ - "unexpected type: {0}".format(self._table.type) + if self._table.type == _symtable.TYPE_ANNOTATION: + return "annotation" + if self._table.type == _symtable.TYPE_TYPE_VAR_BOUND: + return "TypeVar bound" + if self._table.type == _symtable.TYPE_TYPE_ALIAS: + return "type alias" + if self._table.type == _symtable.TYPE_TYPE_PARAM: + return "type parameter" + assert False, f"unexpected type: {self._table.type}" def get_id(self): """Return an identifier for the table. 
diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py index 006581f7cc6a9a..15586f15dfab30 100644 --- a/Lib/test/_test_eintr.py +++ b/Lib/test/_test_eintr.py @@ -25,6 +25,12 @@ from test.support import os_helper from test.support import socket_helper + +# gh-109592: Tolerate a difference of 20 ms when comparing timings +# (clock resolution) +CLOCK_RES = 0.020 + + @contextlib.contextmanager def kill_on_error(proc): """Context manager killing the subprocess if a Python exception is raised.""" @@ -75,6 +81,9 @@ def subprocess(self, *args, **kw): cmd_args = (sys.executable, '-c') + args return subprocess.Popen(cmd_args, **kw) + def check_elapsed_time(self, elapsed): + self.assertGreaterEqual(elapsed, self.sleep_time - CLOCK_RES) + @unittest.skipUnless(hasattr(signal, "setitimer"), "requires setitimer()") class OSEINTRTest(EINTRBaseTest): @@ -373,7 +382,7 @@ def test_sleep(self): time.sleep(self.sleep_time) self.stop_alarm() dt = time.monotonic() - t0 - self.assertGreaterEqual(dt, self.sleep_time) + self.check_elapsed_time(dt) @unittest.skipUnless(hasattr(signal, "setitimer"), "requires setitimer()") @@ -435,7 +444,7 @@ def test_select(self): select.select([], [], [], self.sleep_time) dt = time.monotonic() - t0 self.stop_alarm() - self.assertGreaterEqual(dt, self.sleep_time) + self.check_elapsed_time(dt) @unittest.skipIf(sys.platform == "darwin", "poll may fail on macOS; see issue #28087") @@ -447,7 +456,7 @@ def test_poll(self): poller.poll(self.sleep_time * 1e3) dt = time.monotonic() - t0 self.stop_alarm() - self.assertGreaterEqual(dt, self.sleep_time) + self.check_elapsed_time(dt) @unittest.skipUnless(hasattr(select, 'epoll'), 'need select.epoll') def test_epoll(self): @@ -458,7 +467,7 @@ def test_epoll(self): poller.poll(self.sleep_time) dt = time.monotonic() - t0 self.stop_alarm() - self.assertGreaterEqual(dt, self.sleep_time) + self.check_elapsed_time(dt) @unittest.skipUnless(hasattr(select, 'kqueue'), 'need select.kqueue') def test_kqueue(self): @@ -469,7 +478,7 @@ def test_kqueue(self): kqueue.control(None, 1, self.sleep_time) dt = time.monotonic() - t0 self.stop_alarm() - self.assertGreaterEqual(dt, self.sleep_time) + self.check_elapsed_time(dt) @unittest.skipUnless(hasattr(select, 'devpoll'), 'need select.devpoll') def test_devpoll(self): @@ -480,7 +489,7 @@ def test_devpoll(self): poller.poll(self.sleep_time * 1e3) dt = time.monotonic() - t0 self.stop_alarm() - self.assertGreaterEqual(dt, self.sleep_time) + self.check_elapsed_time(dt) class FNTLEINTRTest(EINTRBaseTest): @@ -512,8 +521,8 @@ def _lock(self, lock_func, lock_name): # potential context switch delay lock_func(f, fcntl.LOCK_EX) dt = time.monotonic() - start_time - self.assertGreaterEqual(dt, self.sleep_time) self.stop_alarm() + self.check_elapsed_time(dt) proc.wait() # Issue 35633: See https://bugs.python.org/issue35633#msg333662 diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 756d6808518fc4..39666dd331db0b 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -557,13 +557,14 @@ def handler(*args): def test_terminate(self): exitcode = self._kill_process(multiprocessing.Process.terminate) - if os.name != 'nt': - self.assertEqual(exitcode, -signal.SIGTERM) + self.assertEqual(exitcode, -signal.SIGTERM) def test_kill(self): exitcode = self._kill_process(multiprocessing.Process.kill) if os.name != 'nt': self.assertEqual(exitcode, -signal.SIGKILL) + else: + self.assertEqual(exitcode, -signal.SIGTERM) def test_cpu_count(self): try: diff --git 
a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index c180bb76222a89..8562a48446b4a7 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -4,8 +4,6 @@ import sys from test.support import os_helper -from .utils import MS_WINDOWS - USAGE = """\ python -m test [options] [test_name1 [test_name2 ...]] @@ -414,17 +412,16 @@ def _parse_args(args, **kwargs): # Similar to options: # # -j0 --randomize --fail-env-changed --fail-rerun --rerun - # --slowest --verbose3 --nowindows + # --slowest --verbose3 if ns.use_mp is None: ns.use_mp = 0 ns.randomize = True ns.fail_env_changed = True ns.fail_rerun = True - ns.rerun = True + if ns.python is None: + ns.rerun = True ns.print_slow = True ns.verbose3 = True - if MS_WINDOWS: - ns.nowindows = True # Silence alerts under Windows else: ns._add_python_opts = False diff --git a/Lib/test/libregrtest/findtests.py b/Lib/test/libregrtest/findtests.py index 60f21980c10dd0..96cc3e0d021184 100644 --- a/Lib/test/libregrtest/findtests.py +++ b/Lib/test/libregrtest/findtests.py @@ -19,6 +19,7 @@ "test_asyncio", "test_concurrent_futures", "test_future_stmt", + "test_gdb", "test_multiprocessing_fork", "test_multiprocessing_forkserver", "test_multiprocessing_spawn", diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index c31d5ff187c56a..dcb2c5870de176 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -11,18 +11,18 @@ from .cmdline import _parse_args, Namespace from .findtests import findtests, split_test_packages, list_cases from .logger import Logger +from .pgo import setup_pgo_tests from .result import State +from .results import TestResults, EXITCODE_INTERRUPTED from .runtests import RunTests, HuntRefleak from .setup import setup_process, setup_test_dir from .single import run_single_test, PROGRESS_MIN_TIME -from .pgo import setup_pgo_tests -from .results import TestResults from .utils import ( StrPath, StrJSON, TestName, TestList, TestTuple, FilterTuple, strip_py_suffix, count, format_duration, printlist, get_temp_dir, get_work_dir, exit_timeout, display_header, cleanup_temp_dir, print_warning, - MS_WINDOWS) + MS_WINDOWS, EXIT_TIMEOUT) class Regrtest: @@ -504,9 +504,12 @@ def _add_python_opts(self): if sys.flags.bytes_warning < 2: python_opts.append('-bb') - # Ignore PYTHON* environment variables - if not sys.flags.ignore_environment: - python_opts.append('-E') + # WASM/WASI buildbot builders pass multiple PYTHON environment + # variables such as PYTHONPATH and _PYTHON_HOSTRUNNER. + if not self.python_cmd: + # Ignore PYTHON* environment variables + if not sys.flags.ignore_environment: + python_opts.append('-E') if not python_opts: return @@ -522,10 +525,23 @@ def _add_python_opts(self): try: if hasattr(os, 'execv') and not MS_WINDOWS: os.execv(cmd[0], cmd) - # execv() do no return and so we don't get to this line on success + # On success, execv() does not return. + # On error, it raises an OSError. else: import subprocess - proc = subprocess.run(cmd) + with subprocess.Popen(cmd) as proc: + try: + proc.wait() + except KeyboardInterrupt: + # There is no need to call proc.terminate(): on CTRL+C, + # SIGTERM is also sent to the child process.
+ try: + proc.wait(timeout=EXIT_TIMEOUT) + except subprocess.TimeoutExpired: + proc.kill() + proc.wait() + sys.exit(EXITCODE_INTERRUPTED) + sys.exit(proc.returncode) except Exception as exc: print_warning(f"Failed to change Python options: {exc!r}\n" diff --git a/Lib/test/libregrtest/run_workers.py b/Lib/test/libregrtest/run_workers.py index 89cc50b7c158d2..41ed7b0bac01ad 100644 --- a/Lib/test/libregrtest/run_workers.py +++ b/Lib/test/libregrtest/run_workers.py @@ -42,7 +42,10 @@ assert MAIN_PROCESS_TIMEOUT >= PROGRESS_UPDATE # Time to wait until a worker completes: should be immediate -JOIN_TIMEOUT = 30.0 # seconds +WAIT_COMPLETED_TIMEOUT = 30.0 # seconds + +# Time to wait for a killed process (in seconds) +WAIT_KILLED_TIMEOUT = 60.0 # We do not use a generator so multiple threads can call next(). @@ -138,7 +141,7 @@ def _kill(self) -> None: if USE_PROCESS_GROUP: what = f"{self} process group" else: - what = f"{self}" + what = f"{self} process" print(f"Kill {what}", file=sys.stderr, flush=True) try: @@ -390,10 +393,10 @@ def _wait_completed(self) -> None: popen = self._popen try: - popen.wait(JOIN_TIMEOUT) + popen.wait(WAIT_COMPLETED_TIMEOUT) except (subprocess.TimeoutExpired, OSError) as exc: print_warning(f"Failed to wait for {self} completion " - f"(timeout={format_duration(JOIN_TIMEOUT)}): " + f"(timeout={format_duration(WAIT_COMPLETED_TIMEOUT)}): " f"{exc!r}") def wait_stopped(self, start_time: float) -> None: @@ -414,7 +417,7 @@ def wait_stopped(self, start_time: float) -> None: break dt = time.monotonic() - start_time self.log(f"Waiting for {self} thread for {format_duration(dt)}") - if dt > JOIN_TIMEOUT: + if dt > WAIT_KILLED_TIMEOUT: print_warning(f"Failed to join {self} in {format_duration(dt)}") break diff --git a/Lib/test/libregrtest/utils.py b/Lib/test/libregrtest/utils.py index acf35723a1abb0..46451152b8859f 100644 --- a/Lib/test/libregrtest/utils.py +++ b/Lib/test/libregrtest/utils.py @@ -355,11 +355,13 @@ def get_temp_dir(tmp_dir: StrPath | None = None) -> StrPath: if not support.is_wasi: tmp_dir = sysconfig.get_config_var('abs_builddir') if tmp_dir is None: - # bpo-30284: On Windows, only srcdir is available. Using - # abs_builddir mostly matters on UNIX when building Python - # out of the source tree, especially when the source tree - # is read only. - tmp_dir = sysconfig.get_config_var('srcdir') + tmp_dir = sysconfig.get_config_var('abs_srcdir') + if not tmp_dir: + # gh-74470: On Windows, only srcdir is available. Using + # abs_builddir mostly matters on UNIX when building + # Python out of the source tree, especially when the + # source tree is read only. + tmp_dir = sysconfig.get_config_var('srcdir') tmp_dir = os.path.join(tmp_dir, 'build') else: # WASI platform @@ -539,7 +541,7 @@ def display_header(use_resources: tuple[str, ...]): if use_resources: print(f"== resources ({len(use_resources)}): " f"{', '.join(sorted(use_resources))}") else: - print(f"== resources: (all disabled, use -u option)") + print("== resources: (all disabled, use -u option)") # This makes it easier to remember what to set in your local # environment when trying to reproduce a sanitizer failure.
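The main.py hunk above switches from subprocess.run() to an explicit Popen so that a Ctrl+C in the parent can be handled gracefully: wait for the child, and if interrupted, give it a grace period before force-killing it. A minimal sketch of the same shape, with GRACE_PERIOD and the exit code as illustrative stand-ins for libregrtest's EXIT_TIMEOUT and EXITCODE_INTERRUPTED:

import subprocess
import sys

GRACE_PERIOD = 10.0  # illustrative; libregrtest defines its own EXIT_TIMEOUT

def run_child(cmd):
    with subprocess.Popen(cmd) as proc:
        try:
            return proc.wait()
        except KeyboardInterrupt:
            # The child normally receives the interrupt as well, so no
            # terminate() call is needed: let it exit on its own within the
            # grace period, then force-kill it.
            try:
                proc.wait(timeout=GRACE_PERIOD)
            except subprocess.TimeoutExpired:
                proc.kill()
                proc.wait()
            sys.exit(130)  # illustrative "interrupted" exit code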
diff --git a/Lib/test/libregrtest/worker.py b/Lib/test/libregrtest/worker.py index 610e0a8437839d..a9c8be0bb65d08 100644 --- a/Lib/test/libregrtest/worker.py +++ b/Lib/test/libregrtest/worker.py @@ -22,11 +22,15 @@ def create_worker_process(runtests: RunTests, output_fd: int, python_cmd = runtests.python_cmd worker_json = runtests.as_json() + python_opts = support.args_from_interpreter_flags() if python_cmd is not None: executable = python_cmd + # Remove -E option, since --python=COMMAND can set PYTHON environment + # variables, such as PYTHONPATH, in the worker process. + python_opts = [opt for opt in python_opts if opt != "-E"] else: executable = (sys.executable,) - cmd = [*executable, *support.args_from_interpreter_flags(), + cmd = [*executable, *python_opts, '-u', # Unbuffered stdout and stderr '-m', 'test.libregrtest.worker', worker_json] @@ -37,14 +41,15 @@ def create_worker_process(runtests: RunTests, output_fd: int, env['TEMP'] = tmp_dir env['TMP'] = tmp_dir + # Running the child from the same working directory as regrtest's original + # invocation ensures that TEMPDIR for the child is the same when + # sysconfig.is_python_build() is true. See issue 15300. + # # Emscripten and WASI Python must start in the Python source code directory # to get 'python.js' or 'python.wasm' file. Then worker_process() changes # to a temporary directory created to run tests. work_dir = os_helper.SAVEDCWD - # Running the child from the same working directory as regrtest's original - # invocation ensures that TEMPDIR for the child is the same when - # sysconfig.is_python_build() is true. See issue 15300. kwargs: dict[str, Any] = dict( env=env, stdout=output_fd, @@ -54,6 +59,8 @@ def create_worker_process(runtests: RunTests, output_fd: int, close_fds=True, cwd=work_dir, ) + if USE_PROCESS_GROUP: + kwargs['start_new_session'] = True # Pass json_file to the worker process json_file = runtests.json_file diff --git a/Lib/test/lock_tests.py b/Lib/test/lock_tests.py index e53e24b18f2760..024c6debcd4a54 100644 --- a/Lib/test/lock_tests.py +++ b/Lib/test/lock_tests.py @@ -19,54 +19,74 @@ "(no _at_fork_reinit method)") -def _wait(): - # A crude wait/yield function not relying on synchronization primitives. - time.sleep(0.01) +def wait_threads_blocked(nthread): + # Arbitrary sleep to wait until N threads are blocked, + # like waiting for a lock. + time.sleep(0.010 * nthread) + class Bunch(object): """ A bunch of threads. """ - def __init__(self, f, n, wait_before_exit=False): + def __init__(self, func, nthread, wait_before_exit=False): """ - Construct a bunch of `n` threads running the same function `f`. + Construct a bunch of `nthread` threads running the same function `func`. If `wait_before_exit` is True, the threads won't terminate until do_finish() is called. 
""" - self.f = f - self.n = n + self.func = func + self.nthread = nthread self.started = [] self.finished = [] + self.exceptions = [] self._can_exit = not wait_before_exit - self.wait_thread = threading_helper.wait_threads_exit() - self.wait_thread.__enter__() + self._wait_thread = None - def task(): - tid = threading.get_ident() - self.started.append(tid) - try: - f() - finally: - self.finished.append(tid) - while not self._can_exit: - _wait() + def task(self): + tid = threading.get_ident() + self.started.append(tid) + try: + self.func() + except BaseException as exc: + self.exceptions.append(exc) + finally: + self.finished.append(tid) + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if self._can_exit: + break + + def __enter__(self): + self._wait_thread = threading_helper.wait_threads_exit(support.SHORT_TIMEOUT) + self._wait_thread.__enter__() try: - for i in range(n): - start_new_thread(task, ()) + for _ in range(self.nthread): + start_new_thread(self.task, ()) except: self._can_exit = True raise - def wait_for_started(self): - while len(self.started) < self.n: - _wait() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(self.started) >= self.nthread: + break + + return self + + def __exit__(self, exc_type, exc_value, traceback): + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(self.finished) >= self.nthread: + break - def wait_for_finished(self): - while len(self.finished) < self.n: - _wait() - # Wait for threads exit - self.wait_thread.__exit__(None, None, None) + # Wait until threads completely exit according to _thread._count() + self._wait_thread.__exit__(None, None, None) + + # Break reference cycle + exceptions = self.exceptions + self.exceptions = None + if exceptions: + raise ExceptionGroup(f"{self.func} threads raised exceptions", + exceptions) def do_finish(self): self._can_exit = True @@ -94,6 +114,12 @@ class BaseLockTests(BaseTestCase): Tests for both recursive and non-recursive locks. 
""" + def wait_phase(self, phase, expected): + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(phase) >= expected: + break + self.assertEqual(len(phase), expected) + def test_constructor(self): lock = self.locktype() del lock @@ -131,41 +157,57 @@ def test_try_acquire_contended(self): result = [] def f(): result.append(lock.acquire(False)) - Bunch(f, 1).wait_for_finished() + with Bunch(f, 1): + pass self.assertFalse(result[0]) lock.release() def test_acquire_contended(self): lock = self.locktype() lock.acquire() - N = 5 def f(): lock.acquire() lock.release() - b = Bunch(f, N) - b.wait_for_started() - _wait() - self.assertEqual(len(b.finished), 0) - lock.release() - b.wait_for_finished() - self.assertEqual(len(b.finished), N) + N = 5 + with Bunch(f, N) as bunch: + # Threads block on lock.acquire() + wait_threads_blocked(N) + self.assertEqual(len(bunch.finished), 0) + + # Threads unblocked + lock.release() + + self.assertEqual(len(bunch.finished), N) def test_with(self): lock = self.locktype() def f(): lock.acquire() lock.release() - def _with(err=None): + + def with_lock(err=None): with lock: if err is not None: raise err - _with() - # Check the lock is unacquired - Bunch(f, 1).wait_for_finished() - self.assertRaises(TypeError, _with, TypeError) - # Check the lock is unacquired - Bunch(f, 1).wait_for_finished() + + # Acquire the lock, do nothing, with releases the lock + with lock: + pass + + # Check that the lock is unacquired + with Bunch(f, 1): + pass + + # Acquire the lock, raise an exception, with releases the lock + with self.assertRaises(TypeError): + with lock: + raise TypeError + + # Check that the lock is unacquired even if after an exception + # was raised in the previous "with lock:" block + with Bunch(f, 1): + pass def test_thread_leak(self): # The lock shouldn't leak a Thread instance when used from a foreign @@ -174,17 +216,11 @@ def test_thread_leak(self): def f(): lock.acquire() lock.release() - n = len(threading.enumerate()) + # We run many threads in the hope that existing threads ids won't # be recycled. - Bunch(f, 15).wait_for_finished() - if len(threading.enumerate()) != n: - # There is a small window during which a Thread instance's - # target function has finished running, but the Thread is still - # alive and registered. Avoid spurious failures by waiting a - # bit more (seen on a buildbot). - time.sleep(0.4) - self.assertEqual(n, len(threading.enumerate())) + with Bunch(f, 15): + pass def test_timeout(self): lock = self.locktype() @@ -208,7 +244,8 @@ def f(): results.append(lock.acquire(timeout=0.5)) t2 = time.monotonic() results.append(t2 - t1) - Bunch(f, 1).wait_for_finished() + with Bunch(f, 1): + pass self.assertFalse(results[0]) self.assertTimeout(results[1], 0.5) @@ -242,15 +279,13 @@ def f(): phase.append(None) with threading_helper.wait_threads_exit(): + # Thread blocked on lock.acquire() start_new_thread(f, ()) - while len(phase) == 0: - _wait() - _wait() - self.assertEqual(len(phase), 1) + self.wait_phase(phase, 1) + + # Thread unblocked lock.release() - while len(phase) == 1: - _wait() - self.assertEqual(len(phase), 2) + self.wait_phase(phase, 2) def test_different_thread(self): # Lock can be released from a different thread. 
@@ -258,8 +293,8 @@ def test_different_thread(self): lock.acquire() def f(): lock.release() - b = Bunch(f, 1) - b.wait_for_finished() + with Bunch(f, 1): + pass lock.acquire() lock.release() @@ -349,21 +384,20 @@ def test_recursion_count(self): def f(): lock.acquire() phase.append(None) - while len(phase) == 1: - _wait() + + self.wait_phase(phase, 2) lock.release() phase.append(None) with threading_helper.wait_threads_exit(): + # Thread blocked on lock.acquire() start_new_thread(f, ()) - while len(phase) == 0: - _wait() - self.assertEqual(len(phase), 1) + self.wait_phase(phase, 1) self.assertEqual(0, lock._recursion_count()) + + # Thread unblocked phase.append(None) - while len(phase) == 2: - _wait() - self.assertEqual(len(phase), 3) + self.wait_phase(phase, 3) self.assertEqual(0, lock._recursion_count()) def test_different_thread(self): @@ -371,12 +405,12 @@ def test_different_thread(self): lock = self.locktype() def f(): lock.acquire() - b = Bunch(f, 1, True) - try: - self.assertRaises(RuntimeError, lock.release) - finally: - b.do_finish() - b.wait_for_finished() + + with Bunch(f, 1, True) as bunch: + try: + self.assertRaises(RuntimeError, lock.release) + finally: + bunch.do_finish() def test__is_owned(self): lock = self.locktype() @@ -388,7 +422,8 @@ def test__is_owned(self): result = [] def f(): result.append(lock._is_owned()) - Bunch(f, 1).wait_for_finished() + with Bunch(f, 1): + pass self.assertFalse(result[0]) lock.release() self.assertTrue(lock._is_owned()) @@ -421,12 +456,15 @@ def _check_notify(self, evt): def f(): results1.append(evt.wait()) results2.append(evt.wait()) - b = Bunch(f, N) - b.wait_for_started() - _wait() - self.assertEqual(len(results1), 0) - evt.set() - b.wait_for_finished() + + with Bunch(f, N): + # Threads blocked on first evt.wait() + wait_threads_blocked(N) + self.assertEqual(len(results1), 0) + + # Threads unblocked + evt.set() + self.assertEqual(results1, [True] * N) self.assertEqual(results2, [True] * N) @@ -449,35 +487,43 @@ def f(): r = evt.wait(0.5) t2 = time.monotonic() results2.append((r, t2 - t1)) - Bunch(f, N).wait_for_finished() + + with Bunch(f, N): + pass + self.assertEqual(results1, [False] * N) for r, dt in results2: self.assertFalse(r) self.assertTimeout(dt, 0.5) + # The event is set results1 = [] results2 = [] evt.set() - Bunch(f, N).wait_for_finished() + with Bunch(f, N): + pass + self.assertEqual(results1, [True] * N) for r, dt in results2: self.assertTrue(r) def test_set_and_clear(self): - # Issue #13502: check that wait() returns true even when the event is + # gh-57711: check that wait() returns true even when the event is # cleared before the waiting thread is woken up. - evt = self.eventtype() + event = self.eventtype() results = [] - timeout = 0.250 - N = 5 def f(): - results.append(evt.wait(timeout * 4)) - b = Bunch(f, N) - b.wait_for_started() - time.sleep(timeout) - evt.set() - evt.clear() - b.wait_for_finished() + results.append(event.wait(support.LONG_TIMEOUT)) + + N = 5 + with Bunch(f, N): + # Threads blocked on event.wait() + wait_threads_blocked(N) + + # Threads unblocked + event.set() + event.clear() + self.assertEqual(results, [True] * N) @requires_fork @@ -533,15 +579,14 @@ def _check_notify(self, cond): # Note that this test is sensitive to timing. If the worker threads # don't execute in a timely fashion, the main thread may think they # are further along then they are. The main thread therefore issues - # _wait() statements to try to make sure that it doesn't race ahead - # of the workers. 
+ # wait_threads_blocked() statements to try to make sure that it doesn't + # race ahead of the workers. # Secondly, this test assumes that condition variables are not subject # to spurious wakeups. The absence of spurious wakeups is an implementation # detail of Condition Variables in current CPython, but in general, not # a guaranteed property of condition variables as a programming # construct. In particular, it is possible that this can no longer # be conveniently guaranteed should their implementation ever change. - N = 5 ready = [] results1 = [] results2 = [] @@ -550,58 +595,83 @@ def f(): cond.acquire() ready.append(phase_num) result = cond.wait() + cond.release() results1.append((result, phase_num)) + cond.acquire() ready.append(phase_num) + result = cond.wait() cond.release() results2.append((result, phase_num)) - b = Bunch(f, N) - b.wait_for_started() - # first wait, to ensure all workers settle into cond.wait() before - # we continue. See issues #8799 and #30727. - while len(ready) < 5: - _wait() - ready.clear() - self.assertEqual(results1, []) - # Notify 3 threads at first - cond.acquire() - cond.notify(3) - _wait() - phase_num = 1 - cond.release() - while len(results1) < 3: - _wait() - self.assertEqual(results1, [(True, 1)] * 3) - self.assertEqual(results2, []) - # make sure all awaken workers settle into cond.wait() - while len(ready) < 3: - _wait() - # Notify 5 threads: they might be in their first or second wait - cond.acquire() - cond.notify(5) - _wait() - phase_num = 2 - cond.release() - while len(results1) + len(results2) < 8: - _wait() - self.assertEqual(results1, [(True, 1)] * 3 + [(True, 2)] * 2) - self.assertEqual(results2, [(True, 2)] * 3) - # make sure all workers settle into cond.wait() - while len(ready) < 5: - _wait() - # Notify all threads: they are all in their second wait - cond.acquire() - cond.notify_all() - _wait() - phase_num = 3 - cond.release() - while len(results2) < 5: - _wait() - self.assertEqual(results1, [(True, 1)] * 3 + [(True,2)] * 2) - self.assertEqual(results2, [(True, 2)] * 3 + [(True, 3)] * 2) - b.wait_for_finished() + + N = 5 + with Bunch(f, N): + # first wait, to ensure all workers settle into cond.wait() before + # we continue. See issues #8799 and #30727. 
+ for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(ready) >= N: + break + + ready.clear() + self.assertEqual(results1, []) + + # Notify 3 threads at first + count1 = 3 + cond.acquire() + cond.notify(count1) + wait_threads_blocked(count1) + + # Phase 1 + phase_num = 1 + cond.release() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(results1) >= count1: + break + + self.assertEqual(results1, [(True, 1)] * count1) + self.assertEqual(results2, []) + + # Wait until awakened workers are blocked on cond.wait() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(ready) >= count1: + break + + # Notify 5 threads: they might be in their first or second wait + cond.acquire() + cond.notify(5) + wait_threads_blocked(N) + + # Phase 2 + phase_num = 2 + cond.release() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(results1) + len(results2) >= (N + count1): + break + + count2 = N - count1 + self.assertEqual(results1, [(True, 1)] * count1 + [(True, 2)] * count2) + self.assertEqual(results2, [(True, 2)] * count1) + + # Make sure all workers settle into cond.wait() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(ready) >= N: + break + + # Notify all threads: they are all in their second wait + cond.acquire() + cond.notify_all() + wait_threads_blocked(N) + + # Phase 3 + phase_num = 3 + cond.release() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(results2) >= N: + break + self.assertEqual(results1, [(True, 1)] * count1 + [(True, 2)] * count2) + self.assertEqual(results2, [(True, 2)] * count1 + [(True, 3)] * count2) def test_notify(self): cond = self.condtype() @@ -611,19 +681,23 @@ def test_notify(self): def test_timeout(self): cond = self.condtype() + timeout = 0.5 results = [] - N = 5 def f(): cond.acquire() t1 = time.monotonic() - result = cond.wait(0.5) + result = cond.wait(timeout) t2 = time.monotonic() cond.release() results.append((t2 - t1, result)) - Bunch(f, N).wait_for_finished() + + N = 5 + with Bunch(f, N): + pass self.assertEqual(len(results), N) + for dt, result in results: - self.assertTimeout(dt, 0.5) + self.assertTimeout(dt, timeout) # Note that conceptually (that"s the condition variable protocol) # a wait() may succeed even if no one notifies us and before any # timeout occurs. Spurious wakeups can occur. @@ -636,17 +710,16 @@ def test_waitfor(self): state = 0 def f(): with cond: - result = cond.wait_for(lambda : state==4) + result = cond.wait_for(lambda: state == 4) self.assertTrue(result) self.assertEqual(state, 4) - b = Bunch(f, 1) - b.wait_for_started() - for i in range(4): - time.sleep(0.01) - with cond: - state += 1 - cond.notify() - b.wait_for_finished() + + with Bunch(f, 1): + for i in range(4): + time.sleep(0.010) + with cond: + state += 1 + cond.notify() def test_waitfor_timeout(self): cond = self.condtype() @@ -660,15 +733,15 @@ def f(): self.assertFalse(result) self.assertTimeout(dt, 0.1) success.append(None) - b = Bunch(f, 1) - b.wait_for_started() - # Only increment 3 times, so state == 4 is never reached. - for i in range(3): - time.sleep(0.01) - with cond: - state += 1 - cond.notify() - b.wait_for_finished() + + with Bunch(f, 1): + # Only increment 3 times, so state == 4 is never reached.
+ for i in range(3): + time.sleep(0.010) + with cond: + state += 1 + cond.notify() + self.assertEqual(len(success), 1) @@ -697,73 +770,107 @@ def test_acquire_destroy(self): del sem def test_acquire_contended(self): - sem = self.semtype(7) + sem_value = 7 + sem = self.semtype(sem_value) sem.acquire() - N = 10 + sem_results = [] results1 = [] results2 = [] phase_num = 0 - def f(): + + def func(): sem_results.append(sem.acquire()) results1.append(phase_num) + sem_results.append(sem.acquire()) results2.append(phase_num) - b = Bunch(f, 10) - b.wait_for_started() - while len(results1) + len(results2) < 6: - _wait() - self.assertEqual(results1 + results2, [0] * 6) - phase_num = 1 - for i in range(7): - sem.release() - while len(results1) + len(results2) < 13: - _wait() - self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7) - phase_num = 2 - for i in range(6): + + def wait_count(count): + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(results1) + len(results2) >= count: + break + + N = 10 + with Bunch(func, N): + # Phase 0 + count1 = sem_value - 1 + wait_count(count1) + self.assertEqual(results1 + results2, [0] * count1) + + # Phase 1 + phase_num = 1 + for i in range(sem_value): + sem.release() + count2 = sem_value + wait_count(count1 + count2) + self.assertEqual(sorted(results1 + results2), + [0] * count1 + [1] * count2) + + # Phase 2 + phase_num = 2 + count3 = (sem_value - 1) + for i in range(count3): + sem.release() + wait_count(count1 + count2 + count3) + self.assertEqual(sorted(results1 + results2), + [0] * count1 + [1] * count2 + [2] * count3) + # The semaphore is still locked + self.assertFalse(sem.acquire(False)) + + # Final release, to let the last thread finish + count4 = 1 sem.release() - while len(results1) + len(results2) < 19: - _wait() - self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7 + [2] * 6) - # The semaphore is still locked - self.assertFalse(sem.acquire(False)) - # Final release, to let the last thread finish - sem.release() - b.wait_for_finished() - self.assertEqual(sem_results, [True] * (6 + 7 + 6 + 1)) + + self.assertEqual(sem_results, + [True] * (count1 + count2 + count3 + count4)) def test_multirelease(self): - sem = self.semtype(7) + sem_value = 7 + sem = self.semtype(sem_value) sem.acquire() + results1 = [] results2 = [] phase_num = 0 - def f(): + def func(): sem.acquire() results1.append(phase_num) + sem.acquire() results2.append(phase_num) - b = Bunch(f, 10) - b.wait_for_started() - while len(results1) + len(results2) < 6: - _wait() - self.assertEqual(results1 + results2, [0] * 6) - phase_num = 1 - sem.release(7) - while len(results1) + len(results2) < 13: - _wait() - self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7) - phase_num = 2 - sem.release(6) - while len(results1) + len(results2) < 19: - _wait() - self.assertEqual(sorted(results1 + results2), [0] * 6 + [1] * 7 + [2] * 6) - # The semaphore is still locked - self.assertFalse(sem.acquire(False)) - # Final release, to let the last thread finish - sem.release() - b.wait_for_finished() + + def wait_count(count): + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if len(results1) + len(results2) >= count: + break + + with Bunch(func, 10): + # Phase 0 + count1 = sem_value - 1 + wait_count(count1) + self.assertEqual(results1 + results2, [0] * count1) + + # Phase 1 + phase_num = 1 + count2 = sem_value + sem.release(count2) + wait_count(count1 + count2) + self.assertEqual(sorted(results1 + results2), + [0] * count1 + [1] * count2) + + # Phase 2 + 
phase_num = 2 + count3 = sem_value - 1 + sem.release(count3) + wait_count(count1 + count2 + count3) + self.assertEqual(sorted(results1 + results2), + [0] * count1 + [1] * count2 + [2] * count3) + # The semaphore is still locked + self.assertFalse(sem.acquire(False)) + + # Final release, to let the last thread finish + sem.release() def test_try_acquire(self): sem = self.semtype(2) @@ -780,7 +887,8 @@ def test_try_acquire_contended(self): def f(): results.append(sem.acquire(False)) results.append(sem.acquire(False)) - Bunch(f, 5).wait_for_finished() + with Bunch(f, 5): + pass # There can be a thread switch between acquiring the semaphore and # appending the result, therefore results will not necessarily be # ordered. @@ -806,12 +914,14 @@ def test_default_value(self): def f(): sem.acquire() sem.release() - b = Bunch(f, 1) - b.wait_for_started() - _wait() - self.assertFalse(b.finished) - sem.release() - b.wait_for_finished() + + with Bunch(f, 1) as bunch: + # Thread blocked on sem.acquire() + wait_threads_blocked(1) + self.assertFalse(bunch.finished) + + # Thread unblocked + sem.release() def test_with(self): sem = self.semtype(2) @@ -882,13 +992,13 @@ class BarrierTests(BaseTestCase): def setUp(self): self.barrier = self.barriertype(self.N, timeout=self.defaultTimeout) + def tearDown(self): self.barrier.abort() def run_threads(self, f): - b = Bunch(f, self.N-1) - f() - b.wait_for_finished() + with Bunch(f, self.N): + pass def multipass(self, results, n): m = self.barrier.parties @@ -979,8 +1089,9 @@ def f(): i = self.barrier.wait() if i == self.N//2: # Wait until the other threads are all in the barrier. - while self.barrier.n_waiting < self.N-1: - time.sleep(0.001) + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if self.barrier.n_waiting >= (self.N - 1): + break self.barrier.reset() else: try: @@ -1040,27 +1151,27 @@ def f(): i = self.barrier.wait() if i == self.N // 2: # One thread is late! - time.sleep(1.0) + time.sleep(self.defaultTimeout / 2) # Default timeout is 2.0, so this is shorter. self.assertRaises(threading.BrokenBarrierError, - self.barrier.wait, 0.5) + self.barrier.wait, self.defaultTimeout / 4) self.run_threads(f) def test_default_timeout(self): """ Test the barrier's default timeout """ - # gh-109401: Barrier timeout should be long enough - # to create 4 threads on a slow CI. - timeout = 1.0 - barrier = self.barriertype(self.N, timeout=timeout) + timeout = 0.100 + barrier = self.barriertype(2, timeout=timeout) def f(): - i = barrier.wait() - if i == self.N // 2: - # One thread is later than the default timeout. 
- time.sleep(timeout * 2) - self.assertRaises(threading.BrokenBarrierError, barrier.wait) - self.run_threads(f) + self.assertRaises(threading.BrokenBarrierError, + barrier.wait) + + start_time = time.monotonic() + with Bunch(f, 1): + pass + dt = time.monotonic() - start_time + self.assertGreaterEqual(dt, timeout) def test_single_thread(self): b = self.barriertype(1) @@ -1068,16 +1179,28 @@ def test_single_thread(self): b.wait() def test_repr(self): - b = self.barriertype(3) - self.assertRegex(repr(b), r"<\w+\.Barrier at .*: waiters=0/3>") + barrier = self.barriertype(3) + timeout = support.LONG_TIMEOUT + self.assertRegex(repr(barrier), r"<\w+\.Barrier at .*: waiters=0/3>") def f(): - b.wait(3) - bunch = Bunch(f, 2) - bunch.wait_for_started() - time.sleep(0.2) - self.assertRegex(repr(b), r"<\w+\.Barrier at .*: waiters=2/3>") - b.wait(3) - bunch.wait_for_finished() - self.assertRegex(repr(b), r"<\w+\.Barrier at .*: waiters=0/3>") - b.abort() - self.assertRegex(repr(b), r"<\w+\.Barrier at .*: broken>") + barrier.wait(timeout) + + N = 2 + with Bunch(f, N): + # Threads blocked on barrier.wait() + for _ in support.sleeping_retry(support.SHORT_TIMEOUT): + if barrier.n_waiting >= N: + break + self.assertRegex(repr(barrier), + r"<\w+\.Barrier at .*: waiters=2/3>") + + # Threads unblocked + barrier.wait(timeout) + + self.assertRegex(repr(barrier), + r"<\w+\.Barrier at .*: waiters=0/3>") + + # Abort the barrier + barrier.abort() + self.assertRegex(repr(barrier), + r"<\w+\.Barrier at .*: broken>") diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py index c372efaedd313b..0e7528ef97c5f6 100644 --- a/Lib/test/pythoninfo.py +++ b/Lib/test/pythoninfo.py @@ -520,6 +520,7 @@ def collect_sysconfig(info_add): 'SHELL', 'SOABI', 'abs_builddir', + 'abs_srcdir', 'prefix', 'srcdir', ): diff --git a/Lib/test/signalinterproctester.py b/Lib/test/signalinterproctester.py index cdcd92a8baace6..073c078f45f6d7 100644 --- a/Lib/test/signalinterproctester.py +++ b/Lib/test/signalinterproctester.py @@ -1,3 +1,4 @@ +import gc import os import signal import subprocess @@ -59,6 +60,13 @@ def test_interprocess_signal(self): self.assertEqual(self.got_signals, {'SIGHUP': 1, 'SIGUSR1': 0, 'SIGALRM': 0}) + # gh-110033: Make sure that the subprocess.Popen is deleted before + # the next test which raises an exception. Otherwise, the exception + # may be raised when Popen.__del__() is executed and so be logged + # as "Exception ignored in: ". + child = None + gc.collect() + with self.assertRaises(SIGUSR1Exception): with self.subprocess_send_signal(pid, "SIGUSR1") as child: self.wait_signal(child, 'SIGUSR1') diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 8a4555ce16fbb6..38d5012ba46c08 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -777,14 +777,17 @@ def check_cflags_pgo(): # Check if Python was built with ./configure --enable-optimizations: # with Profile Guided Optimization (PGO). 
cflags_nodist = sysconfig.get_config_var('PY_CFLAGS_NODIST') or '' - pgo_options = ( + pgo_options = [ # GCC '-fprofile-use', # clang: -fprofile-instr-use=code.profclangd '-fprofile-instr-use', # ICC "-prof-use", - ) + ] + PGO_PROF_USE_FLAG = sysconfig.get_config_var('PGO_PROF_USE_FLAG') + if PGO_PROF_USE_FLAG: + pgo_options.append(PGO_PROF_USE_FLAG) return any(option in cflags_nodist for option in pgo_options) @@ -2565,3 +2568,30 @@ def wrapper(*args, **kwargs): finally: _testinternalcapi.set_optimizer(save_opt) return wrapper + + +_BASE_COPY_SRC_DIR_IGNORED_NAMES = frozenset({ + # SRC_DIR/.git + '.git', + # ignore all __pycache__/ sub-directories + '__pycache__', +}) + +# Ignore function for shutil.copytree(), used when copying the Python source code. +def copy_python_src_ignore(path, names): + ignored = _BASE_COPY_SRC_DIR_IGNORED_NAMES + if os.path.basename(path) == 'Doc': + ignored |= { + # SRC_DIR/Doc/build/ + 'build', + # SRC_DIR/Doc/venv/ + 'venv', + } + + # check if we are at the root of the source code + elif 'Modules' in names: + ignored |= { + # SRC_DIR/build/ + 'build', + } + return ignored diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 3b4026cb73869a..abcb6f55c4b04e 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -273,7 +273,7 @@ def cb(): self.loop.stop() self.loop._process_events = mock.Mock() - delay = 0.1 + delay = 0.100 when = self.loop.time() + delay self.loop.call_at(when, cb) @@ -282,10 +282,7 @@ def cb(): dt = self.loop.time() - t0 # 50 ms: maximum granularity of the event loop - self.assertGreaterEqual(dt, delay - 0.050, dt) - # tolerate a difference of +800 ms because some Python buildbots - # are really slow - self.assertLessEqual(dt, 0.9, dt) + self.assertGreaterEqual(dt, delay - test_utils.CLOCK_RES) with self.assertRaises(TypeError, msg="when cannot be None"): self.loop.call_at(None, cb) diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index f22cb5e58bba62..3ee6565b2b65ad 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -293,10 +293,11 @@ async def coro2(): # 15.6 msec, we use fairly long sleep times here (~100 msec).
def test_run_until_complete(self): + delay = 0.100 t0 = self.loop.time() - self.loop.run_until_complete(asyncio.sleep(0.1)) - t1 = self.loop.time() - self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0) + self.loop.run_until_complete(asyncio.sleep(delay)) + dt = self.loop.time() - t0 + self.assertGreaterEqual(dt, delay - test_utils.CLOCK_RES) def test_run_until_complete_stopped(self): @@ -1717,7 +1718,6 @@ def _run_once(): self.loop._run_once = _run_once async def wait(): - loop = self.loop await asyncio.sleep(1e-2) await asyncio.sleep(1e-4) await asyncio.sleep(1e-6) diff --git a/Lib/test/test_asyncio/test_timeouts.py b/Lib/test/test_asyncio/test_timeouts.py index 8b6b9a1fea0be8..e9b59b953518b3 100644 --- a/Lib/test/test_asyncio/test_timeouts.py +++ b/Lib/test/test_asyncio/test_timeouts.py @@ -46,7 +46,6 @@ async def test_nested_timeouts(self): self.assertTrue(cm2.expired()) async def test_waiter_cancelled(self): - loop = asyncio.get_running_loop() cancelled = False with self.assertRaises(TimeoutError): async with asyncio.timeout(0.01): @@ -59,39 +58,26 @@ async def test_waiter_cancelled(self): async def test_timeout_not_called(self): loop = asyncio.get_running_loop() - t0 = loop.time() async with asyncio.timeout(10) as cm: await asyncio.sleep(0.01) t1 = loop.time() self.assertFalse(cm.expired()) - # 2 sec for slow CI boxes - self.assertLess(t1-t0, 2) self.assertGreater(cm.when(), t1) async def test_timeout_disabled(self): - loop = asyncio.get_running_loop() - t0 = loop.time() async with asyncio.timeout(None) as cm: await asyncio.sleep(0.01) - t1 = loop.time() self.assertFalse(cm.expired()) self.assertIsNone(cm.when()) - # 2 sec for slow CI boxes - self.assertLess(t1-t0, 2) async def test_timeout_at_disabled(self): - loop = asyncio.get_running_loop() - t0 = loop.time() async with asyncio.timeout_at(None) as cm: await asyncio.sleep(0.01) - t1 = loop.time() self.assertFalse(cm.expired()) self.assertIsNone(cm.when()) - # 2 sec for slow CI boxes - self.assertLess(t1-t0, 2) async def test_timeout_zero(self): loop = asyncio.get_running_loop() @@ -101,8 +87,6 @@ async def test_timeout_zero(self): await asyncio.sleep(10) t1 = loop.time() self.assertTrue(cm.expired()) - # 2 sec for slow CI boxes - self.assertLess(t1-t0, 2) self.assertTrue(t0 <= cm.when() <= t1) async def test_timeout_zero_sleep_zero(self): @@ -113,8 +97,6 @@ async def test_timeout_zero_sleep_zero(self): await asyncio.sleep(0) t1 = loop.time() self.assertTrue(cm.expired()) - # 2 sec for slow CI boxes - self.assertLess(t1-t0, 2) self.assertTrue(t0 <= cm.when() <= t1) async def test_timeout_in_the_past_sleep_zero(self): @@ -125,8 +107,6 @@ async def test_timeout_in_the_past_sleep_zero(self): await asyncio.sleep(0) t1 = loop.time() self.assertTrue(cm.expired()) - # 2 sec for slow CI boxes - self.assertLess(t1-t0, 2) self.assertTrue(t0 >= cm.when() <= t1) async def test_foreign_exception_passed(self): diff --git a/Lib/test/test_asyncio/test_waitfor.py b/Lib/test/test_asyncio/test_waitfor.py index e714b154c5cadf..d52f32534a0cfe 100644 --- a/Lib/test/test_asyncio/test_waitfor.py +++ b/Lib/test/test_asyncio/test_waitfor.py @@ -66,17 +66,12 @@ async def test_wait_for_timeout_less_then_0_or_0_future_done(self): fut = loop.create_future() fut.set_result('done') - t0 = loop.time() ret = await asyncio.wait_for(fut, 0) - t1 = loop.time() self.assertEqual(ret, 'done') self.assertTrue(fut.done()) - self.assertLess(t1 - t0, 0.1) async def test_wait_for_timeout_less_then_0_or_0_coroutine_do_not_started(self): - loop = asyncio.get_running_loop() - 
foo_started = False async def foo(): @@ -84,12 +79,9 @@ async def foo(): foo_started = True with self.assertRaises(asyncio.TimeoutError): - t0 = loop.time() await asyncio.wait_for(foo(), 0) - t1 = loop.time() self.assertEqual(foo_started, False) - self.assertLess(t1 - t0, 0.1) async def test_wait_for_timeout_less_then_0_or_0(self): loop = asyncio.get_running_loop() @@ -113,18 +105,14 @@ async def foo(): await started with self.assertRaises(asyncio.TimeoutError): - t0 = loop.time() await asyncio.wait_for(fut, timeout) - t1 = loop.time() self.assertTrue(fut.done()) # it should have been cancelled due to the timeout self.assertTrue(fut.cancelled()) self.assertEqual(foo_running, False) - self.assertLess(t1 - t0, 0.1) async def test_wait_for(self): - loop = asyncio.get_running_loop() foo_running = None async def foo(): @@ -139,13 +127,10 @@ async def foo(): fut = asyncio.create_task(foo()) with self.assertRaises(asyncio.TimeoutError): - t0 = loop.time() await asyncio.wait_for(fut, 0.1) - t1 = loop.time() self.assertTrue(fut.done()) # it should have been cancelled due to the timeout self.assertTrue(fut.cancelled()) - self.assertLess(t1 - t0, support.SHORT_TIMEOUT) self.assertEqual(foo_running, False) async def test_wait_for_blocking(self): diff --git a/Lib/test/test_asyncio/test_windows_events.py b/Lib/test/test_asyncio/test_windows_events.py index a36119a8004f9d..6e6c90a247b291 100644 --- a/Lib/test/test_asyncio/test_windows_events.py +++ b/Lib/test/test_asyncio/test_windows_events.py @@ -163,29 +163,25 @@ def test_wait_for_handle(self): # Wait for unset event with 0.5s timeout; # result should be False at timeout - fut = self.loop._proactor.wait_for_handle(event, 0.5) + timeout = 0.5 + fut = self.loop._proactor.wait_for_handle(event, timeout) start = self.loop.time() done = self.loop.run_until_complete(fut) elapsed = self.loop.time() - start self.assertEqual(done, False) self.assertFalse(fut.result()) - # bpo-31008: Tolerate only 450 ms (at least 500 ms expected), - # because of bad clock resolution on Windows - self.assertTrue(0.45 <= elapsed <= 0.9, elapsed) + self.assertGreaterEqual(elapsed, timeout - test_utils.CLOCK_RES) _overlapped.SetEvent(event) # Wait for set event; # result should be True immediately fut = self.loop._proactor.wait_for_handle(event, 10) - start = self.loop.time() done = self.loop.run_until_complete(fut) - elapsed = self.loop.time() - start self.assertEqual(done, True) self.assertTrue(fut.result()) - self.assertTrue(0 <= elapsed < 0.3, elapsed) # asyncio issue #195: cancelling a done _WaitHandleFuture # must not crash @@ -199,11 +195,8 @@ def test_wait_for_handle_cancel(self): # CancelledError should be raised immediately fut = self.loop._proactor.wait_for_handle(event, 10) fut.cancel() - start = self.loop.time() with self.assertRaises(asyncio.CancelledError): self.loop.run_until_complete(fut) - elapsed = self.loop.time() - start - self.assertTrue(0 <= elapsed < 0.1, elapsed) # asyncio issue #195: cancelling a _WaitHandleFuture twice # must not crash diff --git a/Lib/test/test_asyncio/utils.py b/Lib/test/test_asyncio/utils.py index 1e5ab6eb935ef1..e1101bf42eb24e 100644 --- a/Lib/test/test_asyncio/utils.py +++ b/Lib/test/test_asyncio/utils.py @@ -36,6 +36,12 @@ from test.support import threading_helper +# Use the maximum known clock resolution (gh-75191, gh-110088): Windows +# GetTickCount64() has a resolution of 15.6 ms. Use 20 ms to tolerate rounding +# issues. 
+CLOCK_RES = 0.020 + + def data_file(*filename): fullname = os.path.join(support.TEST_HOME_DIR, *filename) if os.path.isfile(fullname): diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py index b5e9271ac0c3cd..c899111e1e4c16 100644 --- a/Lib/test/test_codecs.py +++ b/Lib/test/test_codecs.py @@ -1762,6 +1762,76 @@ def test_file_closes_if_lookup_error_raised(self): file().close.assert_called() + def test_copy(self): + orig = codecs.lookup('utf-8') + dup = copy.copy(orig) + self.assertIsNot(dup, orig) + self.assertEqual(dup, orig) + self.assertTrue(orig._is_text_encoding) + self.assertEqual(dup.encode, orig.encode) + self.assertEqual(dup.name, orig.name) + self.assertEqual(dup.incrementalencoder, orig.incrementalencoder) + + # Test a CodecInfo with _is_text_encoding equal to false. + orig = codecs.lookup("base64") + dup = copy.copy(orig) + self.assertIsNot(dup, orig) + self.assertEqual(dup, orig) + self.assertFalse(orig._is_text_encoding) + self.assertEqual(dup.encode, orig.encode) + self.assertEqual(dup.name, orig.name) + self.assertEqual(dup.incrementalencoder, orig.incrementalencoder) + + def test_deepcopy(self): + orig = codecs.lookup('utf-8') + dup = copy.deepcopy(orig) + self.assertIsNot(dup, orig) + self.assertEqual(dup, orig) + self.assertTrue(orig._is_text_encoding) + self.assertEqual(dup.encode, orig.encode) + self.assertEqual(dup.name, orig.name) + self.assertEqual(dup.incrementalencoder, orig.incrementalencoder) + + # Test a CodecInfo with _is_text_encoding equal to false. + orig = codecs.lookup("base64") + dup = copy.deepcopy(orig) + self.assertIsNot(dup, orig) + self.assertEqual(dup, orig) + self.assertFalse(orig._is_text_encoding) + self.assertEqual(dup.encode, orig.encode) + self.assertEqual(dup.name, orig.name) + self.assertEqual(dup.incrementalencoder, orig.incrementalencoder) + + def test_pickle(self): + codec_info = codecs.lookup('utf-8') + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(protocol=proto): + pickled_codec_info = pickle.dumps(codec_info) + unpickled_codec_info = pickle.loads(pickled_codec_info) + self.assertIsNot(codec_info, unpickled_codec_info) + self.assertEqual(codec_info, unpickled_codec_info) + self.assertEqual(codec_info.name, unpickled_codec_info.name) + self.assertEqual( + codec_info.incrementalencoder, + unpickled_codec_info.incrementalencoder + ) + self.assertTrue(unpickled_codec_info._is_text_encoding) + + # Test a CodecInfo with _is_text_encoding equal to false. 
+ codec_info = codecs.lookup('base64') + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + with self.subTest(protocol=proto): + pickled_codec_info = pickle.dumps(codec_info) + unpickled_codec_info = pickle.loads(pickled_codec_info) + self.assertIsNot(codec_info, unpickled_codec_info) + self.assertEqual(codec_info, unpickled_codec_info) + self.assertEqual(codec_info.name, unpickled_codec_info.name) + self.assertEqual( + codec_info.incrementalencoder, + unpickled_codec_info.incrementalencoder + ) + self.assertFalse(unpickled_codec_info._is_text_encoding) + class StreamReaderTest(unittest.TestCase): diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 53e3e8f75aa766..c4452e38934cf8 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -1278,6 +1278,11 @@ def f(x): while x: 0 if 1 else 0 + def test_remove_redundant_nop_edge_case(self): + # See gh-109889 + def f(): + a if (1 if b else c) else d + @requires_debug_ranges() class TestSourcePositions(unittest.TestCase): # Ensure that compiled code snippets have correct line and column numbers diff --git a/Lib/test/test_concurrent_futures/test_as_completed.py b/Lib/test/test_concurrent_futures/test_as_completed.py index 2b3bec8cafbcb0..c90b0021d85fc7 100644 --- a/Lib/test/test_concurrent_futures/test_as_completed.py +++ b/Lib/test/test_concurrent_futures/test_as_completed.py @@ -42,11 +42,14 @@ def test_future_times_out(self): EXCEPTION_FUTURE, SUCCESSFUL_FUTURE} - for timeout in (0, 0.01): + # Windows clock resolution is around 15.6 ms + short_timeout = 0.100 + for timeout in (0, short_timeout): with self.subTest(timeout): - future = self.executor.submit(time.sleep, 0.1) completed_futures = set() + future = self.executor.submit(time.sleep, short_timeout * 10) + try: for f in futures.as_completed( already_completed | {future}, diff --git a/Lib/test/test_concurrent_futures/test_process_pool.py b/Lib/test/test_concurrent_futures/test_process_pool.py index 7763a4946f110c..c73c2da1a01088 100644 --- a/Lib/test/test_concurrent_futures/test_process_pool.py +++ b/Lib/test/test_concurrent_futures/test_process_pool.py @@ -1,5 +1,6 @@ import os import sys +import threading import time import unittest from concurrent import futures @@ -187,6 +188,34 @@ def test_max_tasks_early_shutdown(self): for i, future in enumerate(futures): self.assertEqual(future.result(), mul(i, i)) + def test_python_finalization_error(self): + # gh-109047: Catch RuntimeError on thread creation + # during Python finalization. + + context = self.get_context() + + # gh-109047: Mock the threading.start_new_thread() function to inject + # RuntimeError: simulate the error raised during Python finalization. + # Block the second creation: create _ExecutorManagerThread, but block + # QueueFeederThread. 
+ orig_start_new_thread = threading._start_new_thread + nthread = 0 + def mock_start_new_thread(func, *args): + nonlocal nthread + if nthread >= 1: + raise RuntimeError("can't create new thread at " + "interpreter shutdown") + nthread += 1 + return orig_start_new_thread(func, *args) + + with support.swap_attr(threading, '_start_new_thread', + mock_start_new_thread): + executor = self.executor_type(max_workers=2, mp_context=context) + with executor: + with self.assertRaises(BrokenProcessPool): + list(executor.map(mul, [(2, 3)] * 10)) + executor.shutdown() + create_executor_tests(globals(), ProcessPoolExecutorTest, executor_mixins=(ProcessPoolForkMixin, diff --git a/Lib/test/test_concurrent_futures/test_wait.py b/Lib/test/test_concurrent_futures/test_wait.py index 3f64ca173c02f6..ff486202092c81 100644 --- a/Lib/test/test_concurrent_futures/test_wait.py +++ b/Lib/test/test_concurrent_futures/test_wait.py @@ -112,24 +112,25 @@ def test_all_completed(self): future2]), finished) self.assertEqual(set(), pending) - @support.requires_resource('walltime') def test_timeout(self): - future1 = self.executor.submit(mul, 6, 7) - future2 = self.executor.submit(time.sleep, 6) + short_timeout = 0.050 + long_timeout = short_timeout * 10 + + future = self.executor.submit(time.sleep, long_timeout) finished, pending = futures.wait( [CANCELLED_AND_NOTIFIED_FUTURE, EXCEPTION_FUTURE, SUCCESSFUL_FUTURE, - future1, future2], - timeout=5, + future], + timeout=short_timeout, return_when=futures.ALL_COMPLETED) self.assertEqual(set([CANCELLED_AND_NOTIFIED_FUTURE, EXCEPTION_FUTURE, - SUCCESSFUL_FUTURE, - future1]), finished) - self.assertEqual(set([future2]), pending) + SUCCESSFUL_FUTURE]), + finished) + self.assertEqual(set([future]), pending) class ThreadPoolWaitTests(ThreadPoolMixin, WaitTests, BaseTestCase): diff --git a/Lib/test/test_cppext/__init__.py b/Lib/test/test_cppext/__init__.py index 74bf420900367e..25b6fc64a03a51 100644 --- a/Lib/test/test_cppext/__init__.py +++ b/Lib/test/test_cppext/__init__.py @@ -14,6 +14,10 @@ SETUP = os.path.join(os.path.dirname(__file__), 'setup.py') +# gh-110119: pip does not currently support 't' in the ABI flag used by +# --disable-gil builds. Once it does, we can remove this skip.
+@unittest.skipIf(sysconfig.get_config_var('Py_NOGIL') == 1, + 'test does not work with --disable-gil') @support.requires_subprocess() class TestCPPExt(unittest.TestCase): @support.requires_resource('cpu') diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py index d104e5dd904999..8ab0e1ecbc4a7f 100644 --- a/Lib/test/test_dis.py +++ b/Lib/test/test_dis.py @@ -524,10 +524,8 @@ async def _asyncwith(c): dis_asyncwith = """\ %4d RETURN_GENERATOR - -None POP_TOP - -%4d RESUME 0 + POP_TOP + RESUME 0 %4d LOAD_FAST 0 (c) BEFORE_ASYNC_WITH @@ -598,7 +596,6 @@ async def _asyncwith(c): ExceptionTable: 12 rows """ % (_asyncwith.__code__.co_firstlineno, - _asyncwith.__code__.co_firstlineno, _asyncwith.__code__.co_firstlineno + 1, _asyncwith.__code__.co_firstlineno + 2, _asyncwith.__code__.co_firstlineno + 1, @@ -757,10 +754,8 @@ def foo(x): None COPY_FREE_VARS 1 %4d RETURN_GENERATOR - -None POP_TOP - -%4d RESUME 0 + POP_TOP + RESUME 0 LOAD_FAST 0 (.0) >> FOR_ITER 10 (to 34) STORE_FAST 1 (z) @@ -782,7 +777,6 @@ def foo(x): __file__, _h.__code__.co_firstlineno + 3, _h.__code__.co_firstlineno + 3, - _h.__code__.co_firstlineno + 3, ) def load_test(x, y=0): diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py deleted file mode 100644 index 5a4394a0993c8d..00000000000000 --- a/Lib/test/test_gdb.py +++ /dev/null @@ -1,1065 +0,0 @@ -# Verify that gdb can pretty-print the various PyObject* types -# -# The code for testing gdb was adapted from similar work in Unladen Swallow's -# Lib/test/test_jit_gdb.py - -import os -import re -import subprocess -import sys -import sysconfig -import textwrap -import unittest - -from test import support -from test.support import findfile, python_is_optimized - -def get_gdb_version(): - try: - cmd = ["gdb", "-nx", "--version"] - proc = subprocess.Popen(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True) - with proc: - version, stderr = proc.communicate() - - if proc.returncode: - raise Exception(f"Command {' '.join(cmd)!r} failed " - f"with exit code {proc.returncode}: " - f"stdout={version!r} stderr={stderr!r}") - except OSError: - # This is what "no gdb" looks like. There may, however, be other - # errors that manifest this way too. - raise unittest.SkipTest("Couldn't find gdb on the path") - - # Regex to parse: - # 'GNU gdb (GDB; SUSE Linux Enterprise 12) 7.7\n' -> 7.7 - # 'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n' -> 7.9 - # 'GNU gdb 6.1.1 [FreeBSD]\n' -> 6.1 - # 'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n' -> 7.5 - # 'HP gdb 6.7 for HP Itanium (32 or 64 bit) and target HP-UX 11iv2 and 11iv3.\n' -> 6.7 - match = re.search(r"^(?:GNU|HP) gdb.*?\b(\d+)\.(\d+)", version) - if match is None: - raise Exception("unable to parse GDB version: %r" % version) - return (version, int(match.group(1)), int(match.group(2))) - -gdb_version, gdb_major_version, gdb_minor_version = get_gdb_version() -if gdb_major_version < 7: - raise unittest.SkipTest("gdb versions before 7.0 didn't support python " - "embedding. Saw %s.%s:\n%s" - % (gdb_major_version, gdb_minor_version, - gdb_version)) - -if not sysconfig.is_python_build(): - raise unittest.SkipTest("test_gdb only works on source builds at the moment.") - -if ((sysconfig.get_config_var('PGO_PROF_USE_FLAG') or 'xxx') in - (sysconfig.get_config_var('PY_CORE_CFLAGS') or '')): - raise unittest.SkipTest("test_gdb is not reliable on PGO builds") - -# Location of custom hooks file in a repository checkout. 
-checkout_hook_path = os.path.join(os.path.dirname(sys.executable), - 'python-gdb.py') - -PYTHONHASHSEED = '123' - - -def cet_protection(): - cflags = sysconfig.get_config_var('CFLAGS') - if not cflags: - return False - flags = cflags.split() - # True if "-mcet -fcf-protection" options are found, but false - # if "-fcf-protection=none" or "-fcf-protection=return" is found. - return (('-mcet' in flags) - and any((flag.startswith('-fcf-protection') - and not flag.endswith(("=none", "=return"))) - for flag in flags)) - -# Control-flow enforcement technology -CET_PROTECTION = cet_protection() - - -def run_gdb(*args, **env_vars): - """Runs gdb in --batch mode with the additional arguments given by *args. - - Returns its (stdout, stderr) decoded from utf-8 using the replace handler. - """ - if env_vars: - env = os.environ.copy() - env.update(env_vars) - else: - env = None - # -nx: Do not execute commands from any .gdbinit initialization files - # (issue #22188) - base_cmd = ('gdb', '--batch', '-nx') - if (gdb_major_version, gdb_minor_version) >= (7, 4): - base_cmd += ('-iex', 'add-auto-load-safe-path ' + checkout_hook_path) - proc = subprocess.Popen(base_cmd + args, - # Redirect stdin to prevent GDB from messing with - # the terminal settings - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env=env) - with proc: - out, err = proc.communicate() - return out.decode('utf-8', 'replace'), err.decode('utf-8', 'replace') - -# Verify that "gdb" was built with the embedded python support enabled: -gdbpy_version, _ = run_gdb("--eval-command=python import sys; print(sys.version_info)") -if not gdbpy_version: - raise unittest.SkipTest("gdb not built with embedded python support") - -if "major=2" in gdbpy_version: - raise unittest.SkipTest("gdb built with Python 2") - -# Verify that "gdb" can load our custom hooks, as OS security settings may -# disallow this without a customized .gdbinit. -_, gdbpy_errors = run_gdb('--args', sys.executable) -if "auto-loading has been declined" in gdbpy_errors: - msg = "gdb security settings prevent use of custom hooks: " - raise unittest.SkipTest(msg + gdbpy_errors.rstrip()) - -def gdb_has_frame_select(): - # Does this build of gdb have gdb.Frame.select ? - stdout, _ = run_gdb("--eval-command=python print(dir(gdb.Frame))") - m = re.match(r'.*\[(.*)\].*', stdout) - if not m: - raise unittest.SkipTest("Unable to parse output from gdb.Frame.select test") - gdb_frame_dir = m.group(1).split(', ') - return "'select'" in gdb_frame_dir - -HAS_PYUP_PYDOWN = gdb_has_frame_select() - -BREAKPOINT_FN='builtin_id' - -@unittest.skipIf(support.PGO, "not useful for PGO") -class DebuggerTests(unittest.TestCase): - - """Test that the debugger can debug Python.""" - - def get_stack_trace(self, source=None, script=None, - breakpoint=BREAKPOINT_FN, - cmds_after_breakpoint=None, - import_site=False, - ignore_stderr=False): - ''' - Run 'python -c SOURCE' under gdb with a breakpoint. - - Support injecting commands after the breakpoint is reached - - Returns the stdout from gdb - - cmds_after_breakpoint: if provided, a list of strings: gdb commands - ''' - # We use "set breakpoint pending yes" to avoid blocking with a: - # Function "foo" not defined. - # Make breakpoint pending on future shared library load? (y or [n]) - # error, which typically happens python is dynamically linked (the - # breakpoints of interest are to be found in the shared library) - # When this happens, we still get: - # Function "textiowrapper_write" not defined. 
- # emitted to stderr each time, alas. - - # Initially I had "--eval-command=continue" here, but removed it to - # avoid repeated print breakpoints when traversing hierarchical data - # structures - - # Generate a list of commands in gdb's language: - commands = ['set breakpoint pending yes', - 'break %s' % breakpoint, - - # The tests assume that the first frame of printed - # backtrace will not contain program counter, - # that is however not guaranteed by gdb - # therefore we need to use 'set print address off' to - # make sure the counter is not there. For example: - # #0 in PyObject_Print ... - # is assumed, but sometimes this can be e.g. - # #0 0x00003fffb7dd1798 in PyObject_Print ... - 'set print address off', - - 'run'] - - # GDB as of 7.4 onwards can distinguish between the - # value of a variable at entry vs current value: - # http://sourceware.org/gdb/onlinedocs/gdb/Variables.html - # which leads to the selftests failing with errors like this: - # AssertionError: 'v@entry=()' != '()' - # Disable this: - if (gdb_major_version, gdb_minor_version) >= (7, 4): - commands += ['set print entry-values no'] - - if cmds_after_breakpoint: - if CET_PROTECTION: - # bpo-32962: When Python is compiled with -mcet - # -fcf-protection, function arguments are unusable before - # running the first instruction of the function entry point. - # The 'next' command makes the required first step. - commands += ['next'] - commands += cmds_after_breakpoint - else: - commands += ['backtrace'] - - # print commands - - # Use "commands" to generate the arguments with which to invoke "gdb": - args = ['--eval-command=%s' % cmd for cmd in commands] - args += ["--args", - sys.executable] - args.extend(subprocess._args_from_interpreter_flags()) - - if not import_site: - # -S suppresses the default 'import site' - args += ["-S"] - - if source: - args += ["-c", source] - elif script: - args += [script] - - # Use "args" to invoke gdb, capturing stdout, stderr: - out, err = run_gdb(*args, PYTHONHASHSEED=PYTHONHASHSEED) - - if not ignore_stderr: - for line in err.splitlines(): - print(line, file=sys.stderr) - - # bpo-34007: Sometimes some versions of the shared libraries that - # are part of the traceback are compiled in optimised mode and the - # Program Counter (PC) is not present, not allowing gdb to walk the - # frames back. When this happens, the Python bindings of gdb raise - # an exception, making the test impossible to succeed. - if "PC not saved" in err: - raise unittest.SkipTest("gdb cannot walk the frame object" - " because the Program Counter is" - " not present") - - # bpo-40019: Skip the test if gdb failed to read debug information - # because the Python binary is optimized. - for pattern in ( - '(frame information optimized out)', - 'Unable to read information on python frame', - # gh-91960: On Python built with "clang -Og", gdb gets - # "frame=" for _PyEval_EvalFrameDefault() parameter - '(unable to read python frame information)', - # gh-104736: On Python built with "clang -Og" on ppc64le, - # "py-bt" displays a truncated or not traceback, but "where" - # logs this error message: - 'Backtrace stopped: frame did not save the PC', - # gh-104736: When "bt" command displays something like: - # "#1 0x0000000000000000 in ?? ()", the traceback is likely - # truncated or wrong. - ' ?? 
()', - ): - if pattern in out: - raise unittest.SkipTest(f"{pattern!r} found in gdb output") - - return out - - def get_gdb_repr(self, source, - cmds_after_breakpoint=None, - import_site=False): - # Given an input python source representation of data, - # run "python -c'id(DATA)'" under gdb with a breakpoint on - # builtin_id and scrape out gdb's representation of the "op" - # parameter, and verify that the gdb displays the same string - # - # Verify that the gdb displays the expected string - # - # For a nested structure, the first time we hit the breakpoint will - # give us the top-level structure - - # NOTE: avoid decoding too much of the traceback as some - # undecodable characters may lurk there in optimized mode - # (issue #19743). - cmds_after_breakpoint = cmds_after_breakpoint or ["backtrace 1"] - gdb_output = self.get_stack_trace(source, breakpoint=BREAKPOINT_FN, - cmds_after_breakpoint=cmds_after_breakpoint, - import_site=import_site) - # gdb can insert additional '\n' and space characters in various places - # in its output, depending on the width of the terminal it's connected - # to (using its "wrap_here" function) - m = re.search( - # Match '#0 builtin_id(self=..., v=...)' - r'#0\s+builtin_id\s+\(self\=.*,\s+v=\s*(.*?)?\)' - # Match ' at Python/bltinmodule.c'. - # bpo-38239: builtin_id() is defined in Python/bltinmodule.c, - # but accept any "Directory\file.c" to support Link Time - # Optimization (LTO). - r'\s+at\s+\S*[A-Za-z]+/[A-Za-z0-9_-]+\.c', - gdb_output, re.DOTALL) - if not m: - self.fail('Unexpected gdb output: %r\n%s' % (gdb_output, gdb_output)) - return m.group(1), gdb_output - - def assertEndsWith(self, actual, exp_end): - '''Ensure that the given "actual" string ends with "exp_end"''' - self.assertTrue(actual.endswith(exp_end), - msg='%r did not end with %r' % (actual, exp_end)) - - def assertMultilineMatches(self, actual, pattern): - m = re.match(pattern, actual, re.DOTALL) - if not m: - self.fail(msg='%r did not match %r' % (actual, pattern)) - - def get_sample_script(self): - return findfile('gdb_sample.py') - -class PrettyPrintTests(DebuggerTests): - def test_getting_backtrace(self): - gdb_output = self.get_stack_trace('id(42)') - self.assertTrue(BREAKPOINT_FN in gdb_output) - - def assertGdbRepr(self, val, exp_repr=None): - # Ensure that gdb's rendering of the value in a debugged process - # matches repr(value) in this process: - gdb_repr, gdb_output = self.get_gdb_repr('id(' + ascii(val) + ')') - if not exp_repr: - exp_repr = repr(val) - self.assertEqual(gdb_repr, exp_repr, - ('%r did not equal expected %r; full output was:\n%s' - % (gdb_repr, exp_repr, gdb_output))) - - @support.requires_resource('cpu') - def test_int(self): - 'Verify the pretty-printing of various int values' - self.assertGdbRepr(42) - self.assertGdbRepr(0) - self.assertGdbRepr(-7) - self.assertGdbRepr(1000000000000) - self.assertGdbRepr(-1000000000000000) - - def test_singletons(self): - 'Verify the pretty-printing of True, False and None' - self.assertGdbRepr(True) - self.assertGdbRepr(False) - self.assertGdbRepr(None) - - def test_dicts(self): - 'Verify the pretty-printing of dictionaries' - self.assertGdbRepr({}) - self.assertGdbRepr({'foo': 'bar'}, "{'foo': 'bar'}") - # Python preserves insertion order since 3.6 - self.assertGdbRepr({'foo': 'bar', 'douglas': 42}, "{'foo': 'bar', 'douglas': 42}") - - def test_lists(self): - 'Verify the pretty-printing of lists' - self.assertGdbRepr([]) - self.assertGdbRepr(list(range(5))) - - @support.requires_resource('cpu') - def test_bytes(self): - 
'Verify the pretty-printing of bytes' - self.assertGdbRepr(b'') - self.assertGdbRepr(b'And now for something hopefully the same') - self.assertGdbRepr(b'string with embedded NUL here \0 and then some more text') - self.assertGdbRepr(b'this is a tab:\t' - b' this is a slash-N:\n' - b' this is a slash-R:\r' - ) - - self.assertGdbRepr(b'this is byte 255:\xff and byte 128:\x80') - - self.assertGdbRepr(bytes([b for b in range(255)])) - - @support.requires_resource('cpu') - def test_strings(self): - 'Verify the pretty-printing of unicode strings' - # We cannot simply call locale.getpreferredencoding() here, - # as GDB might have been linked against a different version - # of Python with a different encoding and coercion policy - # with respect to PEP 538 and PEP 540. - out, err = run_gdb( - '--eval-command', - 'python import locale; print(locale.getpreferredencoding())') - - encoding = out.rstrip() - if err or not encoding: - raise RuntimeError( - f'unable to determine the preferred encoding ' - f'of embedded Python in GDB: {err}') - - def check_repr(text): - try: - text.encode(encoding) - except UnicodeEncodeError: - self.assertGdbRepr(text, ascii(text)) - else: - self.assertGdbRepr(text) - - self.assertGdbRepr('') - self.assertGdbRepr('And now for something hopefully the same') - self.assertGdbRepr('string with embedded NUL here \0 and then some more text') - - # Test printing a single character: - # U+2620 SKULL AND CROSSBONES - check_repr('\u2620') - - # Test printing a Japanese unicode string - # (I believe this reads "mojibake", using 3 characters from the CJK - # Unified Ideographs area, followed by U+3051 HIRAGANA LETTER KE) - check_repr('\u6587\u5b57\u5316\u3051') - - # Test a character outside the BMP: - # U+1D121 MUSICAL SYMBOL C CLEF - # This is: - # UTF-8: 0xF0 0x9D 0x84 0xA1 - # UTF-16: 0xD834 0xDD21 - check_repr(chr(0x1D121)) - - def test_tuples(self): - 'Verify the pretty-printing of tuples' - self.assertGdbRepr(tuple(), '()') - self.assertGdbRepr((1,), '(1,)') - self.assertGdbRepr(('foo', 'bar', 'baz')) - - @support.requires_resource('cpu') - def test_sets(self): - 'Verify the pretty-printing of sets' - if (gdb_major_version, gdb_minor_version) < (7, 3): - self.skipTest("pretty-printing of sets needs gdb 7.3 or later") - self.assertGdbRepr(set(), "set()") - self.assertGdbRepr(set(['a']), "{'a'}") - # PYTHONHASHSEED is need to get the exact frozenset item order - if not sys.flags.ignore_environment: - self.assertGdbRepr(set(['a', 'b']), "{'a', 'b'}") - self.assertGdbRepr(set([4, 5, 6]), "{4, 5, 6}") - - # Ensure that we handle sets containing the "dummy" key value, - # which happens on deletion: - gdb_repr, gdb_output = self.get_gdb_repr('''s = set(['a','b']) -s.remove('a') -id(s)''') - self.assertEqual(gdb_repr, "{'b'}") - - @support.requires_resource('cpu') - def test_frozensets(self): - 'Verify the pretty-printing of frozensets' - if (gdb_major_version, gdb_minor_version) < (7, 3): - self.skipTest("pretty-printing of frozensets needs gdb 7.3 or later") - self.assertGdbRepr(frozenset(), "frozenset()") - self.assertGdbRepr(frozenset(['a']), "frozenset({'a'})") - # PYTHONHASHSEED is need to get the exact frozenset item order - if not sys.flags.ignore_environment: - self.assertGdbRepr(frozenset(['a', 'b']), "frozenset({'a', 'b'})") - self.assertGdbRepr(frozenset([4, 5, 6]), "frozenset({4, 5, 6})") - - def test_exceptions(self): - # Test a RuntimeError - gdb_repr, gdb_output = self.get_gdb_repr(''' -try: - raise RuntimeError("I am an error") -except RuntimeError as e: - id(e) -''') 
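A stand-alone view of the encode-or-ascii fallback that check_repr() applies above (expected_repr is a name invented for this sketch, not from the patch): when the embedded interpreter's preferred encoding cannot represent the text, gdb is expected to print ascii(text) rather than repr(text):

    def expected_repr(text, encoding):
        # Same rule as check_repr(): unencodable text is compared
        # against ascii(text) instead of repr(text).
        try:
            text.encode(encoding)
        except UnicodeEncodeError:
            return ascii(text)
        return repr(text)

    print(expected_repr('\u2620', 'utf-8'))    # '☠'
    print(expected_repr('\u2620', 'latin-1'))  # '\u2620'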
- self.assertEqual(gdb_repr, - "RuntimeError('I am an error',)") - - - # Test division by zero: - gdb_repr, gdb_output = self.get_gdb_repr(''' -try: - a = 1 / 0 -except ZeroDivisionError as e: - id(e) -''') - self.assertEqual(gdb_repr, - "ZeroDivisionError('division by zero',)") - - def test_modern_class(self): - 'Verify the pretty-printing of new-style class instances' - gdb_repr, gdb_output = self.get_gdb_repr(''' -class Foo: - pass -foo = Foo() -foo.an_int = 42 -id(foo)''') - m = re.match(r'', gdb_repr) - self.assertTrue(m, - msg='Unexpected new-style class rendering %r' % gdb_repr) - - def test_subclassing_list(self): - 'Verify the pretty-printing of an instance of a list subclass' - gdb_repr, gdb_output = self.get_gdb_repr(''' -class Foo(list): - pass -foo = Foo() -foo += [1, 2, 3] -foo.an_int = 42 -id(foo)''') - m = re.match(r'', gdb_repr) - - self.assertTrue(m, - msg='Unexpected new-style class rendering %r' % gdb_repr) - - def test_subclassing_tuple(self): - 'Verify the pretty-printing of an instance of a tuple subclass' - # This should exercise the negative tp_dictoffset code in the - # new-style class support - gdb_repr, gdb_output = self.get_gdb_repr(''' -class Foo(tuple): - pass -foo = Foo((1, 2, 3)) -foo.an_int = 42 -id(foo)''') - m = re.match(r'', gdb_repr) - - self.assertTrue(m, - msg='Unexpected new-style class rendering %r' % gdb_repr) - - def assertSane(self, source, corruption, exprepr=None): - '''Run Python under gdb, corrupting variables in the inferior process - immediately before taking a backtrace. - - Verify that the variable's representation is the expected failsafe - representation''' - if corruption: - cmds_after_breakpoint=[corruption, 'backtrace'] - else: - cmds_after_breakpoint=['backtrace'] - - gdb_repr, gdb_output = \ - self.get_gdb_repr(source, - cmds_after_breakpoint=cmds_after_breakpoint) - if exprepr: - if gdb_repr == exprepr: - # gdb managed to print the value in spite of the corruption; - # this is good (see http://bugs.python.org/issue8330) - return - - # Match anything for the type name; 0xDEADBEEF could point to - # something arbitrary (see http://bugs.python.org/issue8330) - pattern = '<.* at remote 0x-?[0-9a-f]+>' - - m = re.match(pattern, gdb_repr) - if not m: - self.fail('Unexpected gdb representation: %r\n%s' % \ - (gdb_repr, gdb_output)) - - def test_NULL_ptr(self): - 'Ensure that a NULL PyObject* is handled gracefully' - gdb_repr, gdb_output = ( - self.get_gdb_repr('id(42)', - cmds_after_breakpoint=['set variable v=0', - 'backtrace']) - ) - - self.assertEqual(gdb_repr, '0x0') - - def test_NULL_ob_type(self): - 'Ensure that a PyObject* with NULL ob_type is handled gracefully' - self.assertSane('id(42)', - 'set v->ob_type=0') - - def test_corrupt_ob_type(self): - 'Ensure that a PyObject* with a corrupt ob_type is handled gracefully' - self.assertSane('id(42)', - 'set v->ob_type=0xDEADBEEF', - exprepr='42') - - def test_corrupt_tp_flags(self): - 'Ensure that a PyObject* with a type with corrupt tp_flags is handled' - self.assertSane('id(42)', - 'set v->ob_type->tp_flags=0x0', - exprepr='42') - - def test_corrupt_tp_name(self): - 'Ensure that a PyObject* with a type with corrupt tp_name is handled' - self.assertSane('id(42)', - 'set v->ob_type->tp_name=0xDEADBEEF', - exprepr='42') - - def test_builtins_help(self): - 'Ensure that the new-style class _Helper in site.py can be handled' - - if sys.flags.no_site: - self.skipTest("need site module, but -S option was used") - - # (this was the issue causing tracebacks in - # 
http://bugs.python.org/issue8032#msg100537 ) - gdb_repr, gdb_output = self.get_gdb_repr('id(__builtins__.help)', import_site=True) - - m = re.match(r'<_Helper\(\) at remote 0x-?[0-9a-f]+>', gdb_repr) - self.assertTrue(m, - msg='Unexpected rendering %r' % gdb_repr) - - def test_selfreferential_list(self): - '''Ensure that a reference loop involving a list doesn't lead proxyval - into an infinite loop:''' - gdb_repr, gdb_output = \ - self.get_gdb_repr("a = [3, 4, 5] ; a.append(a) ; id(a)") - self.assertEqual(gdb_repr, '[3, 4, 5, [...]]') - - gdb_repr, gdb_output = \ - self.get_gdb_repr("a = [3, 4, 5] ; b = [a] ; a.append(b) ; id(a)") - self.assertEqual(gdb_repr, '[3, 4, 5, [[...]]]') - - def test_selfreferential_dict(self): - '''Ensure that a reference loop involving a dict doesn't lead proxyval - into an infinite loop:''' - gdb_repr, gdb_output = \ - self.get_gdb_repr("a = {} ; b = {'bar':a} ; a['foo'] = b ; id(a)") - - self.assertEqual(gdb_repr, "{'foo': {'bar': {...}}}") - - def test_selfreferential_old_style_instance(self): - gdb_repr, gdb_output = \ - self.get_gdb_repr(''' -class Foo: - pass -foo = Foo() -foo.an_attr = foo -id(foo)''') - self.assertTrue(re.match(r'\) at remote 0x-?[0-9a-f]+>', - gdb_repr), - 'Unexpected gdb representation: %r\n%s' % \ - (gdb_repr, gdb_output)) - - def test_selfreferential_new_style_instance(self): - gdb_repr, gdb_output = \ - self.get_gdb_repr(''' -class Foo(object): - pass -foo = Foo() -foo.an_attr = foo -id(foo)''') - self.assertTrue(re.match(r'\) at remote 0x-?[0-9a-f]+>', - gdb_repr), - 'Unexpected gdb representation: %r\n%s' % \ - (gdb_repr, gdb_output)) - - gdb_repr, gdb_output = \ - self.get_gdb_repr(''' -class Foo(object): - pass -a = Foo() -b = Foo() -a.an_attr = b -b.an_attr = a -id(a)''') - self.assertTrue(re.match(r'\) at remote 0x-?[0-9a-f]+>\) at remote 0x-?[0-9a-f]+>', - gdb_repr), - 'Unexpected gdb representation: %r\n%s' % \ - (gdb_repr, gdb_output)) - - def test_truncation(self): - 'Verify that very long output is truncated' - gdb_repr, gdb_output = self.get_gdb_repr('id(list(range(1000)))') - self.assertEqual(gdb_repr, - "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, " - "14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, " - "27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, " - "40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, " - "53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, " - "66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, " - "79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, " - "92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, " - "104, 105, 106, 107, 108, 109, 110, 111, 112, 113, " - "114, 115, 116, 117, 118, 119, 120, 121, 122, 123, " - "124, 125, 126, 127, 128, 129, 130, 131, 132, 133, " - "134, 135, 136, 137, 138, 139, 140, 141, 142, 143, " - "144, 145, 146, 147, 148, 149, 150, 151, 152, 153, " - "154, 155, 156, 157, 158, 159, 160, 161, 162, 163, " - "164, 165, 166, 167, 168, 169, 170, 171, 172, 173, " - "174, 175, 176, 177, 178, 179, 180, 181, 182, 183, " - "184, 185, 186, 187, 188, 189, 190, 191, 192, 193, " - "194, 195, 196, 197, 198, 199, 200, 201, 202, 203, " - "204, 205, 206, 207, 208, 209, 210, 211, 212, 213, " - "214, 215, 216, 217, 218, 219, 220, 221, 222, 223, " - "224, 225, 226...(truncated)") - self.assertEqual(len(gdb_repr), - 1024 + len('...(truncated)')) - - def test_builtin_method(self): - gdb_repr, gdb_output = self.get_gdb_repr('import sys; id(sys.stdout.readlines)') - self.assertTrue(re.match(r'', - gdb_repr), - 'Unexpected gdb representation: %r\n%s' % \ - (gdb_repr, 
gdb_output)) - - def test_frames(self): - gdb_output = self.get_stack_trace(''' -import sys -def foo(a, b, c): - return sys._getframe(0) - -f = foo(3, 4, 5) -id(f)''', - breakpoint='builtin_id', - cmds_after_breakpoint=['print (PyFrameObject*)v'] - ) - self.assertTrue(re.match(r'.*\s+\$1 =\s+Frame 0x-?[0-9a-f]+, for file , line 4, in foo \(a=3.*', - gdb_output, - re.DOTALL), - 'Unexpected gdb representation: %r\n%s' % (gdb_output, gdb_output)) - -@unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") -class PyListTests(DebuggerTests): - def assertListing(self, expected, actual): - self.assertEndsWith(actual, expected) - - def test_basic_command(self): - 'Verify that the "py-list" command works' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-list']) - - self.assertListing(' 5 \n' - ' 6 def bar(a, b, c):\n' - ' 7 baz(a, b, c)\n' - ' 8 \n' - ' 9 def baz(*args):\n' - ' >10 id(42)\n' - ' 11 \n' - ' 12 foo(1, 2, 3)\n', - bt) - - def test_one_abs_arg(self): - 'Verify the "py-list" command with one absolute argument' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-list 9']) - - self.assertListing(' 9 def baz(*args):\n' - ' >10 id(42)\n' - ' 11 \n' - ' 12 foo(1, 2, 3)\n', - bt) - - def test_two_abs_args(self): - 'Verify the "py-list" command with two absolute arguments' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-list 1,3']) - - self.assertListing(' 1 # Sample script for use by test_gdb.py\n' - ' 2 \n' - ' 3 def foo(a, b, c):\n', - bt) - -SAMPLE_WITH_C_CALL = """ - -from _testcapi import pyobject_vectorcall - -def foo(a, b, c): - bar(a, b, c) - -def bar(a, b, c): - pyobject_vectorcall(baz, (a, b, c), None) - -def baz(*args): - id(42) - -foo(1, 2, 3) - -""" - - -class StackNavigationTests(DebuggerTests): - @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - def test_pyup_command(self): - 'Verify that the "py-up" command works' - bt = self.get_stack_trace(source=SAMPLE_WITH_C_CALL, - cmds_after_breakpoint=['py-up', 'py-up']) - self.assertMultilineMatches(bt, - r'''^.* -#[0-9]+ Frame 0x-?[0-9a-f]+, for file , line 12, in baz \(args=\(1, 2, 3\)\) -#[0-9]+ -$''') - - @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") - def test_down_at_bottom(self): - 'Verify handling of "py-down" at the bottom of the stack' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-down']) - self.assertEndsWith(bt, - 'Unable to find a newer python frame\n') - - @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") - def test_up_at_top(self): - 'Verify handling of "py-up" at the top of the stack' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-up'] * 5) - self.assertEndsWith(bt, - 'Unable to find an older python frame\n') - - @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - def test_up_then_down(self): - 'Verify "py-up" followed by "py-down"' - bt = self.get_stack_trace(source=SAMPLE_WITH_C_CALL, - cmds_after_breakpoint=['py-up', 'py-up', 'py-down']) - self.assertMultilineMatches(bt, - r'''^.* -#[0-9]+ Frame 0x-?[0-9a-f]+, for file , line 12, in baz \(args=\(1, 2, 3\)\) -#[0-9]+ -#[0-9]+ Frame 0x-?[0-9a-f]+, for 
file , line 12, in baz \(args=\(1, 2, 3\)\) -$''') - -class PyBtTests(DebuggerTests): - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - def test_bt(self): - 'Verify that the "py-bt" command works' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-bt']) - self.assertMultilineMatches(bt, - r'''^.* -Traceback \(most recent call first\): - - File ".*gdb_sample.py", line 10, in baz - id\(42\) - File ".*gdb_sample.py", line 7, in bar - baz\(a, b, c\) - File ".*gdb_sample.py", line 4, in foo - bar\(a=a, b=b, c=c\) - File ".*gdb_sample.py", line 12, in - foo\(1, 2, 3\) -''') - - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - def test_bt_full(self): - 'Verify that the "py-bt-full" command works' - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-bt-full']) - self.assertMultilineMatches(bt, - r'''^.* -#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\) - baz\(a, b, c\) -#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 4, in foo \(a=1, b=2, c=3\) - bar\(a=a, b=b, c=c\) -#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 12, in \(\) - foo\(1, 2, 3\) -''') - - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - @support.requires_resource('cpu') - def test_threads(self): - 'Verify that "py-bt" indicates threads that are waiting for the GIL' - cmd = ''' -from threading import Thread - -class TestThread(Thread): - # These threads would run forever, but we'll interrupt things with the - # debugger - def run(self): - i = 0 - while 1: - i += 1 - -t = {} -for i in range(4): - t[i] = TestThread() - t[i].start() - -# Trigger a breakpoint on the main thread -id(42) - -''' - # Verify with "py-bt": - gdb_output = self.get_stack_trace(cmd, - cmds_after_breakpoint=['thread apply all py-bt']) - self.assertIn('Waiting for the GIL', gdb_output) - - # Verify with "py-bt-full": - gdb_output = self.get_stack_trace(cmd, - cmds_after_breakpoint=['thread apply all py-bt-full']) - self.assertIn('Waiting for the GIL', gdb_output) - - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - # Some older versions of gdb will fail with - # "Cannot find new threads: generic error" - # unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround - def test_gc(self): - 'Verify that "py-bt" indicates if a thread is garbage-collecting' - cmd = ('from gc import collect\n' - 'id(42)\n' - 'def foo():\n' - ' collect()\n' - 'def bar():\n' - ' foo()\n' - 'bar()\n') - # Verify with "py-bt": - gdb_output = self.get_stack_trace(cmd, - cmds_after_breakpoint=['break update_refs', 'continue', 'py-bt'], - ) - self.assertIn('Garbage-collecting', gdb_output) - - # Verify with "py-bt-full": - gdb_output = self.get_stack_trace(cmd, - cmds_after_breakpoint=['break update_refs', 'continue', 'py-bt-full'], - ) - self.assertIn('Garbage-collecting', gdb_output) - - - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - @support.requires_resource('cpu') - # Some older versions of gdb will fail with - # "Cannot find new threads: generic error" - # unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround - # - # gdb will also generate many erroneous errors such as: - # Function "meth_varargs" not defined. - # This is because we are calling functions from an "external" module - # (_testcapimodule) rather than compiled-in functions. 
It seems difficult - # to suppress these. See also the comment in DebuggerTests.get_stack_trace - def test_pycfunction(self): - 'Verify that "py-bt" displays invocations of PyCFunction instances' - # bpo-46600: If the compiler inlines _null_to_none() in meth_varargs() - # (ex: clang -Og), _null_to_none() is the frame #1. Otherwise, - # meth_varargs() is the frame #1. - expected_frame = r'#(1|2)' - # Various optimizations multiply the code paths by which these are - # called, so test a variety of calling conventions. - for func_name, args in ( - ('meth_varargs', ''), - ('meth_varargs_keywords', ''), - ('meth_o', '[]'), - ('meth_noargs', ''), - ('meth_fastcall', ''), - ('meth_fastcall_keywords', ''), - ): - for obj in ( - '_testcapi', - '_testcapi.MethClass', - '_testcapi.MethClass()', - '_testcapi.MethStatic()', - - # XXX: bound methods don't yet give nice tracebacks - # '_testcapi.MethInstance()', - ): - with self.subTest(f'{obj}.{func_name}'): - cmd = textwrap.dedent(f''' - import _testcapi - def foo(): - {obj}.{func_name}({args}) - def bar(): - foo() - bar() - ''') - # Verify with "py-bt": - gdb_output = self.get_stack_trace( - cmd, - breakpoint=func_name, - cmds_after_breakpoint=['bt', 'py-bt'], - # bpo-45207: Ignore 'Function "meth_varargs" not - # defined.' message in stderr. - ignore_stderr=True, - ) - self.assertIn(f'\n.*") - -class PyLocalsTests(DebuggerTests): - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - def test_basic_command(self): - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-up', 'py-locals']) - self.assertMultilineMatches(bt, - r".*\nargs = \(1, 2, 3\)\n.*") - - @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") - @unittest.skipIf(python_is_optimized(), - "Python was compiled with optimizations") - def test_locals_after_up(self): - bt = self.get_stack_trace(script=self.get_sample_script(), - cmds_after_breakpoint=['py-up', 'py-up', 'py-locals']) - self.assertMultilineMatches(bt, - r'''^.* -Locals for foo -a = 1 -b = 2 -c = 3 -Locals for -.*$''') - - -def setUpModule(): - if support.verbose: - print("GDB version %s.%s:" % (gdb_major_version, gdb_minor_version)) - for line in gdb_version.splitlines(): - print(" " * 4 + line) - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_gdb/__init__.py b/Lib/test/test_gdb/__init__.py new file mode 100644 index 00000000000000..d74075e456792d --- /dev/null +++ b/Lib/test/test_gdb/__init__.py @@ -0,0 +1,30 @@ +# Verify that gdb can pretty-print the various PyObject* types +# +# The code for testing gdb was adapted from similar work in Unladen Swallow's +# Lib/test/test_jit_gdb.py + +import os +import sysconfig +import unittest +from test import support + + +MS_WINDOWS = (os.name == 'nt') +if MS_WINDOWS: + # On Windows, Python is usually built by MSVC. Passing /p:DebugSymbols=true + # option to MSBuild produces PDB debug symbols, but gdb doesn't support PDB + # debug symbol files. 
+ raise unittest.SkipTest("test_gdb doesn't work on Windows") + +if support.PGO: + raise unittest.SkipTest("test_gdb is not useful for PGO") + +if not sysconfig.is_python_build(): + raise unittest.SkipTest("test_gdb only works on source builds at the moment.") + +if support.check_cflags_pgo(): + raise unittest.SkipTest("test_gdb is not reliable on PGO builds") + + +def load_tests(*args): + return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/gdb_sample.py b/Lib/test/test_gdb/gdb_sample.py similarity index 75% rename from Lib/test/gdb_sample.py rename to Lib/test/test_gdb/gdb_sample.py index 4188f50136fb97..a7f23db73ea6e6 100644 --- a/Lib/test/gdb_sample.py +++ b/Lib/test/test_gdb/gdb_sample.py @@ -1,4 +1,4 @@ -# Sample script for use by test_gdb.py +# Sample script for use by test_gdb def foo(a, b, c): bar(a=a, b=b, c=c) diff --git a/Lib/test/test_gdb/test_backtrace.py b/Lib/test/test_gdb/test_backtrace.py new file mode 100644 index 00000000000000..c41e7cb7c210de --- /dev/null +++ b/Lib/test/test_gdb/test_backtrace.py @@ -0,0 +1,134 @@ +import textwrap +import unittest +from test import support +from test.support import python_is_optimized + +from .util import setup_module, DebuggerTests, CET_PROTECTION, SAMPLE_SCRIPT + + +def setUpModule(): + setup_module() + + +class PyBtTests(DebuggerTests): + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_bt(self): + 'Verify that the "py-bt" command works' + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-bt']) + self.assertMultilineMatches(bt, + r'''^.* +Traceback \(most recent call first\): + + File ".*gdb_sample.py", line 10, in baz + id\(42\) + File ".*gdb_sample.py", line 7, in bar + baz\(a, b, c\) + File ".*gdb_sample.py", line 4, in foo + bar\(a=a, b=b, c=c\) + File ".*gdb_sample.py", line 12, in + foo\(1, 2, 3\) +''') + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_bt_full(self): + 'Verify that the "py-bt-full" command works' + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-bt-full']) + self.assertMultilineMatches(bt, + r'''^.* +#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 7, in bar \(a=1, b=2, c=3\) + baz\(a, b, c\) +#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 4, in foo \(a=1, b=2, c=3\) + bar\(a=a, b=b, c=c\) +#[0-9]+ Frame 0x-?[0-9a-f]+, for file .*gdb_sample.py, line 12, in \(\) + foo\(1, 2, 3\) +''') + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + @support.requires_resource('cpu') + def test_threads(self): + 'Verify that "py-bt" indicates threads that are waiting for the GIL' + cmd = ''' +from threading import Thread + +class TestThread(Thread): + # These threads would run forever, but we'll interrupt things with the + # debugger + def run(self): + i = 0 + while 1: + i += 1 + +t = {} +for i in range(4): + t[i] = TestThread() + t[i].start() + +# Trigger a breakpoint on the main thread +id(42) + +''' + # Verify with "py-bt": + gdb_output = self.get_stack_trace(cmd, + cmds_after_breakpoint=['thread apply all py-bt']) + self.assertIn('Waiting for the GIL', gdb_output) + + # Verify with "py-bt-full": + gdb_output = self.get_stack_trace(cmd, + cmds_after_breakpoint=['thread apply all py-bt-full']) + self.assertIn('Waiting for the GIL', gdb_output) + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + # Some older versions of gdb will fail with + # 
"Cannot find new threads: generic error" + # unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround + def test_gc(self): + 'Verify that "py-bt" indicates if a thread is garbage-collecting' + cmd = ('from gc import collect\n' + 'id(42)\n' + 'def foo():\n' + ' collect()\n' + 'def bar():\n' + ' foo()\n' + 'bar()\n') + # Verify with "py-bt": + gdb_output = self.get_stack_trace(cmd, + cmds_after_breakpoint=['break update_refs', 'continue', 'py-bt'], + ) + self.assertIn('Garbage-collecting', gdb_output) + + # Verify with "py-bt-full": + gdb_output = self.get_stack_trace(cmd, + cmds_after_breakpoint=['break update_refs', 'continue', 'py-bt-full'], + ) + self.assertIn('Garbage-collecting', gdb_output) + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_wrapper_call(self): + cmd = textwrap.dedent(''' + class MyList(list): + def __init__(self): + super(*[]).__init__() # wrapper_call() + + id("first break point") + l = MyList() + ''') + cmds_after_breakpoint = ['break wrapper_call', 'continue'] + if CET_PROTECTION: + # bpo-32962: same case as in get_stack_trace(): + # we need an additional 'next' command in order to read + # arguments of the innermost function of the call stack. + cmds_after_breakpoint.append('next') + cmds_after_breakpoint.append('py-bt') + + # Verify with "py-bt": + gdb_output = self.get_stack_trace(cmd, + cmds_after_breakpoint=cmds_after_breakpoint) + self.assertRegex(gdb_output, + r"10 id(42)\n' + ' 11 \n' + ' 12 foo(1, 2, 3)\n', + bt) + + def test_one_abs_arg(self): + 'Verify the "py-list" command with one absolute argument' + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-list 9']) + + self.assertListing(' 9 def baz(*args):\n' + ' >10 id(42)\n' + ' 11 \n' + ' 12 foo(1, 2, 3)\n', + bt) + + def test_two_abs_args(self): + 'Verify the "py-list" command with two absolute arguments' + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-list 1,3']) + + self.assertListing(' 1 # Sample script for use by test_gdb\n' + ' 2 \n' + ' 3 def foo(a, b, c):\n', + bt) + +SAMPLE_WITH_C_CALL = """ + +from _testcapi import pyobject_vectorcall + +def foo(a, b, c): + bar(a, b, c) + +def bar(a, b, c): + pyobject_vectorcall(baz, (a, b, c), None) + +def baz(*args): + id(42) + +foo(1, 2, 3) + +""" + + +class StackNavigationTests(DebuggerTests): + @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_pyup_command(self): + 'Verify that the "py-up" command works' + bt = self.get_stack_trace(source=SAMPLE_WITH_C_CALL, + cmds_after_breakpoint=['py-up', 'py-up']) + self.assertMultilineMatches(bt, + r'''^.* +#[0-9]+ Frame 0x-?[0-9a-f]+, for file , line 12, in baz \(args=\(1, 2, 3\)\) +#[0-9]+ +$''') + + @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + def test_down_at_bottom(self): + 'Verify handling of "py-down" at the bottom of the stack' + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-down']) + self.assertEndsWith(bt, + 'Unable to find a newer python frame\n') + + @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + def test_up_at_top(self): + 'Verify handling of "py-up" at the top of the stack' + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-up'] * 5) + self.assertEndsWith(bt, + 'Unable to find an older python frame\n') + + @unittest.skipUnless(HAS_PYUP_PYDOWN, 
"test requires py-up/py-down commands") + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_up_then_down(self): + 'Verify "py-up" followed by "py-down"' + bt = self.get_stack_trace(source=SAMPLE_WITH_C_CALL, + cmds_after_breakpoint=['py-up', 'py-up', 'py-down']) + self.assertMultilineMatches(bt, + r'''^.* +#[0-9]+ Frame 0x-?[0-9a-f]+, for file , line 12, in baz \(args=\(1, 2, 3\)\) +#[0-9]+ +#[0-9]+ Frame 0x-?[0-9a-f]+, for file , line 12, in baz \(args=\(1, 2, 3\)\) +$''') + +class PyPrintTests(DebuggerTests): + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_basic_command(self): + 'Verify that the "py-print" command works' + bt = self.get_stack_trace(source=SAMPLE_WITH_C_CALL, + cmds_after_breakpoint=['py-up', 'py-print args']) + self.assertMultilineMatches(bt, + r".*\nlocal 'args' = \(1, 2, 3\)\n.*") + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + def test_print_after_up(self): + bt = self.get_stack_trace(source=SAMPLE_WITH_C_CALL, + cmds_after_breakpoint=['py-up', 'py-up', 'py-print c', 'py-print b', 'py-print a']) + self.assertMultilineMatches(bt, + r".*\nlocal 'c' = 3\nlocal 'b' = 2\nlocal 'a' = 1\n.*") + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_printing_global(self): + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-up', 'py-print __name__']) + self.assertMultilineMatches(bt, + r".*\nglobal '__name__' = '__main__'\n.*") + + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_printing_builtin(self): + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-up', 'py-print len']) + self.assertMultilineMatches(bt, + r".*\nbuiltin 'len' = \n.*") + +class PyLocalsTests(DebuggerTests): + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_basic_command(self): + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-up', 'py-locals']) + self.assertMultilineMatches(bt, + r".*\nargs = \(1, 2, 3\)\n.*") + + @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") + @unittest.skipIf(python_is_optimized(), + "Python was compiled with optimizations") + def test_locals_after_up(self): + bt = self.get_stack_trace(script=SAMPLE_SCRIPT, + cmds_after_breakpoint=['py-up', 'py-up', 'py-locals']) + self.assertMultilineMatches(bt, + r'''^.* +Locals for foo +a = 1 +b = 2 +c = 3 +Locals for +.*$''') diff --git a/Lib/test/test_gdb/test_pretty_print.py b/Lib/test/test_gdb/test_pretty_print.py new file mode 100644 index 00000000000000..dfc77d65ab16a4 --- /dev/null +++ b/Lib/test/test_gdb/test_pretty_print.py @@ -0,0 +1,438 @@ +import re +import sys +from test import support + +from .util import ( + BREAKPOINT_FN, GDB_VERSION, + run_gdb, setup_module, DebuggerTests) + + +def setUpModule(): + setup_module() + + +class PrettyPrintTests(DebuggerTests): + def get_gdb_repr(self, source, + cmds_after_breakpoint=None, + import_site=False): + # Given an input python source representation of data, + # run "python -c'id(DATA)'" under gdb with a breakpoint on + # builtin_id and scrape out gdb's representation of the "op" + # parameter, and verify that the gdb displays the same string + # + # Verify that the gdb displays the expected string + # + # For a nested structure, the first 
time we hit the breakpoint will + # give us the top-level structure + + # NOTE: avoid decoding too much of the traceback as some + # undecodable characters may lurk there in optimized mode + # (issue #19743). + cmds_after_breakpoint = cmds_after_breakpoint or ["backtrace 1"] + gdb_output = self.get_stack_trace(source, breakpoint=BREAKPOINT_FN, + cmds_after_breakpoint=cmds_after_breakpoint, + import_site=import_site) + # gdb can insert additional '\n' and space characters in various places + # in its output, depending on the width of the terminal it's connected + # to (using its "wrap_here" function) + m = re.search( + # Match '#0 builtin_id(self=..., v=...)' + r'#0\s+builtin_id\s+\(self\=.*,\s+v=\s*(.*?)?\)' + # Match ' at Python/bltinmodule.c'. + # bpo-38239: builtin_id() is defined in Python/bltinmodule.c, + # but accept any "Directory\file.c" to support Link Time + # Optimization (LTO). + r'\s+at\s+\S*[A-Za-z]+/[A-Za-z0-9_-]+\.c', + gdb_output, re.DOTALL) + if not m: + self.fail('Unexpected gdb output: %r\n%s' % (gdb_output, gdb_output)) + return m.group(1), gdb_output + + def test_getting_backtrace(self): + gdb_output = self.get_stack_trace('id(42)') + self.assertTrue(BREAKPOINT_FN in gdb_output) + + def assertGdbRepr(self, val, exp_repr=None): + # Ensure that gdb's rendering of the value in a debugged process + # matches repr(value) in this process: + gdb_repr, gdb_output = self.get_gdb_repr('id(' + ascii(val) + ')') + if not exp_repr: + exp_repr = repr(val) + self.assertEqual(gdb_repr, exp_repr, + ('%r did not equal expected %r; full output was:\n%s' + % (gdb_repr, exp_repr, gdb_output))) + + @support.requires_resource('cpu') + def test_int(self): + 'Verify the pretty-printing of various int values' + self.assertGdbRepr(42) + self.assertGdbRepr(0) + self.assertGdbRepr(-7) + self.assertGdbRepr(1000000000000) + self.assertGdbRepr(-1000000000000000) + + def test_singletons(self): + 'Verify the pretty-printing of True, False and None' + self.assertGdbRepr(True) + self.assertGdbRepr(False) + self.assertGdbRepr(None) + + def test_dicts(self): + 'Verify the pretty-printing of dictionaries' + self.assertGdbRepr({}) + self.assertGdbRepr({'foo': 'bar'}, "{'foo': 'bar'}") + # Python preserves insertion order since 3.6 + self.assertGdbRepr({'foo': 'bar', 'douglas': 42}, "{'foo': 'bar', 'douglas': 42}") + + def test_lists(self): + 'Verify the pretty-printing of lists' + self.assertGdbRepr([]) + self.assertGdbRepr(list(range(5))) + + @support.requires_resource('cpu') + def test_bytes(self): + 'Verify the pretty-printing of bytes' + self.assertGdbRepr(b'') + self.assertGdbRepr(b'And now for something hopefully the same') + self.assertGdbRepr(b'string with embedded NUL here \0 and then some more text') + self.assertGdbRepr(b'this is a tab:\t' + b' this is a slash-N:\n' + b' this is a slash-R:\r' + ) + + self.assertGdbRepr(b'this is byte 255:\xff and byte 128:\x80') + + self.assertGdbRepr(bytes([b for b in range(255)])) + + @support.requires_resource('cpu') + def test_strings(self): + 'Verify the pretty-printing of unicode strings' + # We cannot simply call locale.getpreferredencoding() here, + # as GDB might have been linked against a different version + # of Python with a different encoding and coercion policy + # with respect to PEP 538 and PEP 540. 
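Aside (the sample backtrace line below is fabricated for illustration): the frame-scraping pattern used by get_gdb_repr() above can be checked in isolation; re.DOTALL and the liberal \s+ gaps are what let it tolerate gdb's soft line wrapping:

    import re

    pattern = (r'#0\s+builtin_id\s+\(self\=.*,\s+v=\s*(.*?)?\)'
               r'\s+at\s+\S*[A-Za-z]+/[A-Za-z0-9_-]+\.c')
    line = ("#0  builtin_id (self=<module at remote 0xdead>, v=42) "
            "at Python/bltinmodule.c:1211")
    print(re.search(pattern, line, re.DOTALL).group(1))  # 42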
+ stdout, stderr = run_gdb( + '--eval-command', + 'python import locale; print(locale.getpreferredencoding())') + + encoding = stdout + if stderr or not encoding: + raise RuntimeError( + f'unable to determine the Python locale preferred encoding ' + f'of embedded Python in GDB\n' + f'stdout={stdout!r}\n' + f'stderr={stderr!r}') + + def check_repr(text): + try: + text.encode(encoding) + except UnicodeEncodeError: + self.assertGdbRepr(text, ascii(text)) + else: + self.assertGdbRepr(text) + + self.assertGdbRepr('') + self.assertGdbRepr('And now for something hopefully the same') + self.assertGdbRepr('string with embedded NUL here \0 and then some more text') + + # Test printing a single character: + # U+2620 SKULL AND CROSSBONES + check_repr('\u2620') + + # Test printing a Japanese unicode string + # (I believe this reads "mojibake", using 3 characters from the CJK + # Unified Ideographs area, followed by U+3051 HIRAGANA LETTER KE) + check_repr('\u6587\u5b57\u5316\u3051') + + # Test a character outside the BMP: + # U+1D121 MUSICAL SYMBOL C CLEF + # This is: + # UTF-8: 0xF0 0x9D 0x84 0xA1 + # UTF-16: 0xD834 0xDD21 + check_repr(chr(0x1D121)) + + def test_tuples(self): + 'Verify the pretty-printing of tuples' + self.assertGdbRepr(tuple(), '()') + self.assertGdbRepr((1,), '(1,)') + self.assertGdbRepr(('foo', 'bar', 'baz')) + + @support.requires_resource('cpu') + def test_sets(self): + 'Verify the pretty-printing of sets' + if GDB_VERSION < (7, 3): + self.skipTest("pretty-printing of sets needs gdb 7.3 or later") + self.assertGdbRepr(set(), "set()") + self.assertGdbRepr(set(['a']), "{'a'}") + # PYTHONHASHSEED is need to get the exact frozenset item order + if not sys.flags.ignore_environment: + self.assertGdbRepr(set(['a', 'b']), "{'a', 'b'}") + self.assertGdbRepr(set([4, 5, 6]), "{4, 5, 6}") + + # Ensure that we handle sets containing the "dummy" key value, + # which happens on deletion: + gdb_repr, gdb_output = self.get_gdb_repr('''s = set(['a','b']) +s.remove('a') +id(s)''') + self.assertEqual(gdb_repr, "{'b'}") + + @support.requires_resource('cpu') + def test_frozensets(self): + 'Verify the pretty-printing of frozensets' + if GDB_VERSION < (7, 3): + self.skipTest("pretty-printing of frozensets needs gdb 7.3 or later") + self.assertGdbRepr(frozenset(), "frozenset()") + self.assertGdbRepr(frozenset(['a']), "frozenset({'a'})") + # PYTHONHASHSEED is need to get the exact frozenset item order + if not sys.flags.ignore_environment: + self.assertGdbRepr(frozenset(['a', 'b']), "frozenset({'a', 'b'})") + self.assertGdbRepr(frozenset([4, 5, 6]), "frozenset({4, 5, 6})") + + def test_exceptions(self): + # Test a RuntimeError + gdb_repr, gdb_output = self.get_gdb_repr(''' +try: + raise RuntimeError("I am an error") +except RuntimeError as e: + id(e) +''') + self.assertEqual(gdb_repr, + "RuntimeError('I am an error',)") + + + # Test division by zero: + gdb_repr, gdb_output = self.get_gdb_repr(''' +try: + a = 1 / 0 +except ZeroDivisionError as e: + id(e) +''') + self.assertEqual(gdb_repr, + "ZeroDivisionError('division by zero',)") + + def test_modern_class(self): + 'Verify the pretty-printing of new-style class instances' + gdb_repr, gdb_output = self.get_gdb_repr(''' +class Foo: + pass +foo = Foo() +foo.an_int = 42 +id(foo)''') + m = re.match(r'', gdb_repr) + self.assertTrue(m, + msg='Unexpected new-style class rendering %r' % gdb_repr) + + def test_subclassing_list(self): + 'Verify the pretty-printing of an instance of a list subclass' + gdb_repr, gdb_output = self.get_gdb_repr(''' +class Foo(list): + 
pass +foo = Foo() +foo += [1, 2, 3] +foo.an_int = 42 +id(foo)''') + m = re.match(r'', gdb_repr) + + self.assertTrue(m, + msg='Unexpected new-style class rendering %r' % gdb_repr) + + def test_subclassing_tuple(self): + 'Verify the pretty-printing of an instance of a tuple subclass' + # This should exercise the negative tp_dictoffset code in the + # new-style class support + gdb_repr, gdb_output = self.get_gdb_repr(''' +class Foo(tuple): + pass +foo = Foo((1, 2, 3)) +foo.an_int = 42 +id(foo)''') + m = re.match(r'', gdb_repr) + + self.assertTrue(m, + msg='Unexpected new-style class rendering %r' % gdb_repr) + + def assertSane(self, source, corruption, exprepr=None): + '''Run Python under gdb, corrupting variables in the inferior process + immediately before taking a backtrace. + + Verify that the variable's representation is the expected failsafe + representation''' + if corruption: + cmds_after_breakpoint=[corruption, 'backtrace'] + else: + cmds_after_breakpoint=['backtrace'] + + gdb_repr, gdb_output = \ + self.get_gdb_repr(source, + cmds_after_breakpoint=cmds_after_breakpoint) + if exprepr: + if gdb_repr == exprepr: + # gdb managed to print the value in spite of the corruption; + # this is good (see http://bugs.python.org/issue8330) + return + + # Match anything for the type name; 0xDEADBEEF could point to + # something arbitrary (see http://bugs.python.org/issue8330) + pattern = '<.* at remote 0x-?[0-9a-f]+>' + + m = re.match(pattern, gdb_repr) + if not m: + self.fail('Unexpected gdb representation: %r\n%s' % \ + (gdb_repr, gdb_output)) + + def test_NULL_ptr(self): + 'Ensure that a NULL PyObject* is handled gracefully' + gdb_repr, gdb_output = ( + self.get_gdb_repr('id(42)', + cmds_after_breakpoint=['set variable v=0', + 'backtrace']) + ) + + self.assertEqual(gdb_repr, '0x0') + + def test_NULL_ob_type(self): + 'Ensure that a PyObject* with NULL ob_type is handled gracefully' + self.assertSane('id(42)', + 'set v->ob_type=0') + + def test_corrupt_ob_type(self): + 'Ensure that a PyObject* with a corrupt ob_type is handled gracefully' + self.assertSane('id(42)', + 'set v->ob_type=0xDEADBEEF', + exprepr='42') + + def test_corrupt_tp_flags(self): + 'Ensure that a PyObject* with a type with corrupt tp_flags is handled' + self.assertSane('id(42)', + 'set v->ob_type->tp_flags=0x0', + exprepr='42') + + def test_corrupt_tp_name(self): + 'Ensure that a PyObject* with a type with corrupt tp_name is handled' + self.assertSane('id(42)', + 'set v->ob_type->tp_name=0xDEADBEEF', + exprepr='42') + + def test_builtins_help(self): + 'Ensure that the new-style class _Helper in site.py can be handled' + + if sys.flags.no_site: + self.skipTest("need site module, but -S option was used") + + # (this was the issue causing tracebacks in + # http://bugs.python.org/issue8032#msg100537 ) + gdb_repr, gdb_output = self.get_gdb_repr('id(__builtins__.help)', import_site=True) + + m = re.match(r'<_Helper\(\) at remote 0x-?[0-9a-f]+>', gdb_repr) + self.assertTrue(m, + msg='Unexpected rendering %r' % gdb_repr) + + def test_selfreferential_list(self): + '''Ensure that a reference loop involving a list doesn't lead proxyval + into an infinite loop:''' + gdb_repr, gdb_output = \ + self.get_gdb_repr("a = [3, 4, 5] ; a.append(a) ; id(a)") + self.assertEqual(gdb_repr, '[3, 4, 5, [...]]') + + gdb_repr, gdb_output = \ + self.get_gdb_repr("a = [3, 4, 5] ; b = [a] ; a.append(b) ; id(a)") + self.assertEqual(gdb_repr, '[3, 4, 5, [[...]]]') + + def test_selfreferential_dict(self): + '''Ensure that a reference loop involving a dict 
doesn't lead proxyval + into an infinite loop:''' + gdb_repr, gdb_output = \ + self.get_gdb_repr("a = {} ; b = {'bar':a} ; a['foo'] = b ; id(a)") + + self.assertEqual(gdb_repr, "{'foo': {'bar': {...}}}") + + def test_selfreferential_old_style_instance(self): + gdb_repr, gdb_output = \ + self.get_gdb_repr(''' +class Foo: + pass +foo = Foo() +foo.an_attr = foo +id(foo)''') + self.assertTrue(re.match(r'\) at remote 0x-?[0-9a-f]+>', + gdb_repr), + 'Unexpected gdb representation: %r\n%s' % \ + (gdb_repr, gdb_output)) + + def test_selfreferential_new_style_instance(self): + gdb_repr, gdb_output = \ + self.get_gdb_repr(''' +class Foo(object): + pass +foo = Foo() +foo.an_attr = foo +id(foo)''') + self.assertTrue(re.match(r'\) at remote 0x-?[0-9a-f]+>', + gdb_repr), + 'Unexpected gdb representation: %r\n%s' % \ + (gdb_repr, gdb_output)) + + gdb_repr, gdb_output = \ + self.get_gdb_repr(''' +class Foo(object): + pass +a = Foo() +b = Foo() +a.an_attr = b +b.an_attr = a +id(a)''') + self.assertTrue(re.match(r'\) at remote 0x-?[0-9a-f]+>\) at remote 0x-?[0-9a-f]+>', + gdb_repr), + 'Unexpected gdb representation: %r\n%s' % \ + (gdb_repr, gdb_output)) + + def test_truncation(self): + 'Verify that very long output is truncated' + gdb_repr, gdb_output = self.get_gdb_repr('id(list(range(1000)))') + self.assertEqual(gdb_repr, + "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, " + "14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, " + "27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, " + "40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, " + "53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, " + "66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, " + "79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, " + "92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, " + "104, 105, 106, 107, 108, 109, 110, 111, 112, 113, " + "114, 115, 116, 117, 118, 119, 120, 121, 122, 123, " + "124, 125, 126, 127, 128, 129, 130, 131, 132, 133, " + "134, 135, 136, 137, 138, 139, 140, 141, 142, 143, " + "144, 145, 146, 147, 148, 149, 150, 151, 152, 153, " + "154, 155, 156, 157, 158, 159, 160, 161, 162, 163, " + "164, 165, 166, 167, 168, 169, 170, 171, 172, 173, " + "174, 175, 176, 177, 178, 179, 180, 181, 182, 183, " + "184, 185, 186, 187, 188, 189, 190, 191, 192, 193, " + "194, 195, 196, 197, 198, 199, 200, 201, 202, 203, " + "204, 205, 206, 207, 208, 209, 210, 211, 212, 213, " + "214, 215, 216, 217, 218, 219, 220, 221, 222, 223, " + "224, 225, 226...(truncated)") + self.assertEqual(len(gdb_repr), + 1024 + len('...(truncated)')) + + def test_builtin_method(self): + gdb_repr, gdb_output = self.get_gdb_repr('import sys; id(sys.stdout.readlines)') + self.assertTrue(re.match(r'', + gdb_repr), + 'Unexpected gdb representation: %r\n%s' % \ + (gdb_repr, gdb_output)) + + def test_frames(self): + gdb_output = self.get_stack_trace(''' +import sys +def foo(a, b, c): + return sys._getframe(0) + +f = foo(3, 4, 5) +id(f)''', + breakpoint='builtin_id', + cmds_after_breakpoint=['print (PyFrameObject*)v'] + ) + self.assertTrue(re.match(r'.*\s+\$1 =\s+Frame 0x-?[0-9a-f]+, for file , line 4, in foo \(a=3.*', + gdb_output, + re.DOTALL), + 'Unexpected gdb representation: %r\n%s' % (gdb_output, gdb_output)) diff --git a/Lib/test/test_gdb/util.py b/Lib/test/test_gdb/util.py new file mode 100644 index 00000000000000..7f4e3cba3534bd --- /dev/null +++ b/Lib/test/test_gdb/util.py @@ -0,0 +1,286 @@ +import os +import re +import shlex +import subprocess +import sys +import sysconfig +import unittest +from test import support + + +# Location of custom 
hooks file in a repository checkout. +CHECKOUT_HOOK_PATH = os.path.join(os.path.dirname(sys.executable), + 'python-gdb.py') + +SAMPLE_SCRIPT = os.path.join(os.path.dirname(__file__), 'gdb_sample.py') +BREAKPOINT_FN = 'builtin_id' + +PYTHONHASHSEED = '123' + + +def clean_environment(): + # Remove PYTHON* environment variables such as PYTHONHOME + return {name: value for name, value in os.environ.items() + if not name.startswith('PYTHON')} + + +# Temporary value until it's initialized by get_gdb_version() below +GDB_VERSION = (0, 0) + +def run_gdb(*args, exitcode=0, **env_vars): + """Runs gdb in --batch mode with the additional arguments given by *args. + + Returns its (stdout, stderr) decoded from utf-8 using the replace handler. + """ + env = clean_environment() + if env_vars: + env.update(env_vars) + + cmd = ['gdb', + # Batch mode: Exit after processing all the command files + # specified with -x/--command + '--batch', + # -nx: Do not execute commands from any .gdbinit initialization + # files (gh-66384) + '-nx'] + if GDB_VERSION >= (7, 4): + cmd.extend(('--init-eval-command', + f'add-auto-load-safe-path {CHECKOUT_HOOK_PATH}')) + cmd.extend(args) + + proc = subprocess.run( + cmd, + # Redirect stdin to prevent gdb from messing with the terminal settings + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf8", errors="backslashreplace", + env=env) + + stdout = proc.stdout + stderr = proc.stderr + if proc.returncode != exitcode: + cmd_text = shlex.join(cmd) + raise Exception(f"{cmd_text} failed with exit code {proc.returncode}, " + f"expected exit code {exitcode}:\n" + f"stdout={stdout!r}\n" + f"stderr={stderr!r}") + + return (stdout, stderr) + + +def get_gdb_version(): + try: + stdout, stderr = run_gdb('--version') + except OSError: + # This is what "no gdb" looks like. There may, however, be other + # errors that manifest this way too. + raise unittest.SkipTest("Couldn't find gdb program on the path") + + # Regex to parse: + # 'GNU gdb (GDB; SUSE Linux Enterprise 12) 7.7\n' -> 7.7 + # 'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n' -> 7.9 + # 'GNU gdb 6.1.1 [FreeBSD]\n' -> 6.1 + # 'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n' -> 7.5 + # 'HP gdb 6.7 for HP Itanium (32 or 64 bit) and target HP-UX 11iv2 and 11iv3.\n' -> 6.7 + match = re.search(r"^(?:GNU|HP) gdb.*?\b(\d+)\.(\d+)", stdout) + if match is None: + raise Exception("unable to parse gdb version: %r" % stdout) + version_text = stdout + major = int(match.group(1)) + minor = int(match.group(2)) + version = (major, minor) + return (version_text, version) + +GDB_VERSION_TEXT, GDB_VERSION = get_gdb_version() +if GDB_VERSION < (7, 0): + raise unittest.SkipTest( + f"gdb versions before 7.0 didn't support python embedding. " + f"Saw gdb version {GDB_VERSION[0]}.{GDB_VERSION[1]}:\n" + f"{GDB_VERSION_TEXT}") + + +def check_usable_gdb(): + # Verify that "gdb" was built with the embedded Python support enabled and + # verify that "gdb" can load our custom hooks, as OS security settings may + # disallow this without a customized .gdbinit. 
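Why PYTHONHASHSEED is pinned to '123' above: with a fixed seed, str hashing -- and therefore set iteration order -- is identical from run to run, which is what lets the pretty-printer tests assert exact renderings such as "{'a', 'b'}". A quick stand-alone check, not part of the patch:

    import os
    import subprocess
    import sys

    env = os.environ.copy()
    env['PYTHONHASHSEED'] = '123'
    cmd = [sys.executable, '-c', "print(set('abc'))"]
    # Three child runs with the same seed must print the same order.
    outputs = {subprocess.run(cmd, env=env, capture_output=True,
                              text=True).stdout for _ in range(3)}
    print(len(outputs))  # 1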
+ stdout, stderr = run_gdb( + '--eval-command=python import sys; print(sys.version_info)', + '--args', sys.executable) + + if "auto-loading has been declined" in stderr: + raise unittest.SkipTest( + f"gdb security settings prevent use of custom hooks; " + f"stderr: {stderr!r}") + + if not stdout: + raise unittest.SkipTest( + f"gdb not built with embedded python support; " + f"stderr: {stderr!r}") + + if "major=2" in stdout: + raise unittest.SkipTest("gdb built with Python 2") + +check_usable_gdb() + + +# Control-flow enforcement technology +def cet_protection(): + cflags = sysconfig.get_config_var('CFLAGS') + if not cflags: + return False + flags = cflags.split() + # True if "-mcet -fcf-protection" options are found, but false + # if "-fcf-protection=none" or "-fcf-protection=return" is found. + return (('-mcet' in flags) + and any((flag.startswith('-fcf-protection') + and not flag.endswith(("=none", "=return"))) + for flag in flags)) +CET_PROTECTION = cet_protection() + + +def setup_module(): + if support.verbose: + print(f"gdb version {GDB_VERSION[0]}.{GDB_VERSION[1]}:") + for line in GDB_VERSION_TEXT.splitlines(): + print(" " * 4 + line) + print() + + +class DebuggerTests(unittest.TestCase): + + """Test that the debugger can debug Python.""" + + def get_stack_trace(self, source=None, script=None, + breakpoint=BREAKPOINT_FN, + cmds_after_breakpoint=None, + import_site=False, + ignore_stderr=False): + ''' + Run 'python -c SOURCE' under gdb with a breakpoint. + + Support injecting commands after the breakpoint is reached + + Returns the stdout from gdb + + cmds_after_breakpoint: if provided, a list of strings: gdb commands + ''' + # We use "set breakpoint pending yes" to avoid blocking with a: + # Function "foo" not defined. + # Make breakpoint pending on future shared library load? (y or [n]) + # error, which typically happens python is dynamically linked (the + # breakpoints of interest are to be found in the shared library) + # When this happens, we still get: + # Function "textiowrapper_write" not defined. + # emitted to stderr each time, alas. + + # Initially I had "--eval-command=continue" here, but removed it to + # avoid repeated print breakpoints when traversing hierarchical data + # structures + + # Generate a list of commands in gdb's language: + commands = [ + 'set breakpoint pending yes', + 'break %s' % breakpoint, + + # The tests assume that the first frame of printed + # backtrace will not contain program counter, + # that is however not guaranteed by gdb + # therefore we need to use 'set print address off' to + # make sure the counter is not there. For example: + # #0 in PyObject_Print ... + # is assumed, but sometimes this can be e.g. + # #0 0x00003fffb7dd1798 in PyObject_Print ... + 'set print address off', + + 'run', + ] + + # GDB as of 7.4 onwards can distinguish between the + # value of a variable at entry vs current value: + # http://sourceware.org/gdb/onlinedocs/gdb/Variables.html + # which leads to the selftests failing with errors like this: + # AssertionError: 'v@entry=()' != '()' + # Disable this: + if GDB_VERSION >= (7, 4): + commands += ['set print entry-values no'] + + if cmds_after_breakpoint: + if CET_PROTECTION: + # bpo-32962: When Python is compiled with -mcet + # -fcf-protection, function arguments are unusable before + # running the first instruction of the function entry point. + # The 'next' command makes the required first step. 
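The CFLAGS scan in cet_protection() above, extracted for a quick self-check (has_cet is a name invented for this sketch): CET is only treated as active when -mcet appears together with an -fcf-protection variant other than =none or =return:

    def has_cet(cflags):
        flags = cflags.split()
        return ('-mcet' in flags
                and any(flag.startswith('-fcf-protection')
                        and not flag.endswith(('=none', '=return'))
                        for flag in flags))

    print(has_cet('-O2 -mcet -fcf-protection'))         # True
    print(has_cet('-O2 -mcet -fcf-protection=return'))  # False
    print(has_cet('-O2 -fcf-protection=full'))          # False (no -mcet)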
+ commands += ['next'] + commands += cmds_after_breakpoint + else: + commands += ['backtrace'] + + # Use "commands" to generate the arguments with which to invoke "gdb": + args = ['--eval-command=%s' % cmd for cmd in commands] + args += ["--args", + sys.executable] + args.extend(subprocess._args_from_interpreter_flags()) + + if not import_site: + # -S suppresses the default 'import site' + args += ["-S"] + + if source: + args += ["-c", source] + elif script: + args += [script] + + # Use "args" to invoke gdb, capturing stdout, stderr: + out, err = run_gdb(*args, PYTHONHASHSEED=PYTHONHASHSEED) + + if not ignore_stderr: + for line in err.splitlines(): + print(line, file=sys.stderr) + + # bpo-34007: Sometimes some versions of the shared libraries that + # are part of the traceback are compiled in optimised mode and the + # Program Counter (PC) is not present, not allowing gdb to walk the + # frames back. When this happens, the Python bindings of gdb raise + # an exception, making it impossible for the test to succeed. + if "PC not saved" in err: + raise unittest.SkipTest("gdb cannot walk the frame object" + " because the Program Counter is" + " not present") + + # bpo-40019: Skip the test if gdb failed to read debug information + # because the Python binary is optimized. + for pattern in ( + '(frame information optimized out)', + 'Unable to read information on python frame', + + # gh-91960: On Python built with "clang -Og", gdb gets + # "frame=<optimized out>" for _PyEval_EvalFrameDefault() parameter + '(unable to read python frame information)', + + # gh-104736: On Python built with "clang -Og" on ppc64le, + # "py-bt" displays a truncated or missing traceback, but "where" + # logs this error message: + 'Backtrace stopped: frame did not save the PC', + + # gh-104736: When "bt" command displays something like: + # "#1 0x0000000000000000 in ?? ()", the traceback is likely + # truncated or wrong. + ' ?? 
()', + ): + if pattern in out: + raise unittest.SkipTest(f"{pattern!r} found in gdb output") + + return out + + def assertEndsWith(self, actual, exp_end): + '''Ensure that the given "actual" string ends with "exp_end"''' + self.assertTrue(actual.endswith(exp_end), + msg='%r did not end with %r' % (actual, exp_end)) + + def assertMultilineMatches(self, actual, pattern): + m = re.match(pattern, actual, re.DOTALL) + if not m: + self.fail(msg='%r did not match %r' % (actual, pattern)) diff --git a/Lib/test/test_importlib/test_locks.py b/Lib/test/test_importlib/test_locks.py index 7091c36aaaf761..befac5d62b0abf 100644 --- a/Lib/test/test_importlib/test_locks.py +++ b/Lib/test/test_importlib/test_locks.py @@ -93,7 +93,8 @@ def f(): b.release() if ra: a.release() - lock_tests.Bunch(f, NTHREADS).wait_for_finished() + with lock_tests.Bunch(f, NTHREADS): + pass self.assertEqual(len(results), NTHREADS) return results diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py index 90932c0f66f38f..9c0dac7d6c61fb 100644 --- a/Lib/test/test_interpreters.py +++ b/Lib/test/test_interpreters.py @@ -1,5 +1,6 @@ import contextlib import os +import sys import threading from textwrap import dedent import unittest @@ -487,6 +488,26 @@ def task(): pass +class FinalizationTests(TestBase): + + def test_gh_109793(self): + import subprocess + argv = [sys.executable, '-c', '''if True: + import _xxsubinterpreters as _interpreters + interpid = _interpreters.create() + raise Exception + '''] + proc = subprocess.run(argv, capture_output=True, text=True) + self.assertIn('Traceback', proc.stderr) + if proc.returncode == 0 and support.verbose: + print() + print("--- cmd unexpectedly succeeded ---") + print(f"stdout:\n{proc.stdout}") + print(f"stderr:\n{proc.stderr}") + print("------") + self.assertEqual(proc.returncode, 1) + + class TestIsShareable(TestBase): def test_default_shareables(self): diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 375f65f9d16182..cca02a010b80f4 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -90,8 +90,7 @@ def setUp(self): self._threading_key = threading_helper.threading_setup() logger_dict = logging.getLogger().manager.loggerDict - logging._acquireLock() - try: + with logging._lock: self.saved_handlers = logging._handlers.copy() self.saved_handler_list = logging._handlerList[:] self.saved_loggers = saved_loggers = logger_dict.copy() @@ -101,8 +100,6 @@ def setUp(self): for name in saved_loggers: logger_states[name] = getattr(saved_loggers[name], 'disabled', None) - finally: - logging._releaseLock() # Set two unused loggers self.logger1 = logging.getLogger("\xab\xd7\xbb") @@ -136,8 +133,7 @@ def tearDown(self): self.root_logger.removeHandler(h) h.close() self.root_logger.setLevel(self.original_logging_level) - logging._acquireLock() - try: + with logging._lock: logging._levelToName.clear() logging._levelToName.update(self.saved_level_to_name) logging._nameToLevel.clear() @@ -154,8 +150,6 @@ def tearDown(self): for name in self.logger_states: if logger_states[name] is not None: self.saved_loggers[name].disabled = logger_states[name] - finally: - logging._releaseLock() self.doCleanups() threading_helper.threading_cleanup(*self._threading_key) @@ -739,11 +733,8 @@ def __init__(self): stream=open('/dev/null', 'wt', encoding='utf-8')) def emit(self, record): - self.sub_handler.acquire() - try: + with self.sub_handler.lock: self.sub_handler.emit(record) - finally: - self.sub_handler.release() self.assertEqual(len(logging._handlers), 0) refed_h
= _OurHandler() @@ -759,29 +750,22 @@ def emit(self, record): fork_happened__release_locks_and_end_thread = threading.Event() def lock_holder_thread_fn(): - logging._acquireLock() - try: - refed_h.acquire() - try: - # Tell the main thread to do the fork. - locks_held__ready_to_fork.set() - - # If the deadlock bug exists, the fork will happen - # without dealing with the locks we hold, deadlocking - # the child. - - # Wait for a successful fork or an unreasonable amount of - # time before releasing our locks. To avoid a timing based - # test we'd need communication from os.fork() as to when it - # has actually happened. Given this is a regression test - # for a fixed issue, potentially less reliably detecting - # regression via timing is acceptable for simplicity. - # The test will always take at least this long. :( - fork_happened__release_locks_and_end_thread.wait(0.5) - finally: - refed_h.release() - finally: - logging._releaseLock() + with logging._lock, refed_h.lock: + # Tell the main thread to do the fork. + locks_held__ready_to_fork.set() + + # If the deadlock bug exists, the fork will happen + # without dealing with the locks we hold, deadlocking + # the child. + + # Wait for a successful fork or an unreasonable amount of + # time before releasing our locks. To avoid a timing based + # test we'd need communication from os.fork() as to when it + # has actually happened. Given this is a regression test + # for a fixed issue, potentially less reliably detecting + # regression via timing is acceptable for simplicity. + # The test will always take at least this long. :( + fork_happened__release_locks_and_end_thread.wait(0.5) lock_holder_thread = threading.Thread( target=lock_holder_thread_fn, diff --git a/Lib/test/test_mmap.py b/Lib/test/test_mmap.py index 92c99d645b25cc..dfcf3039422af5 100644 --- a/Lib/test/test_mmap.py +++ b/Lib/test/test_mmap.py @@ -258,7 +258,7 @@ def test_access_parameter(self): try: m = mmap.mmap(f.fileno(), mapsize, prot=prot) except PermissionError: - # on macOS 14, PROT_READ | PROT_WRITE is not allowed + # on macOS 14, PROT_READ | PROT_EXEC is not allowed pass else: self.assertRaises(TypeError, m.write, b"abcdef") diff --git a/Lib/test/test_pathlib.py b/Lib/test/test_pathlib.py index 484a5e6c3bd64d..319148e9065a65 100644 --- a/Lib/test/test_pathlib.py +++ b/Lib/test/test_pathlib.py @@ -1582,14 +1582,172 @@ def test_group(self): # -# Tests for the concrete classes. +# Tests for the virtual classes. 
# -class PathTest(unittest.TestCase): - """Tests for the FS-accessing functionalities of the Path classes.""" +class PathBaseTest(PurePathTest): + cls = pathlib._PathBase - cls = pathlib.Path - can_symlink = os_helper.can_symlink() + def test_unsupported_operation(self): + P = self.cls + p = self.cls() + e = pathlib.UnsupportedOperation + self.assertRaises(e, p.stat) + self.assertRaises(e, p.lstat) + self.assertRaises(e, p.exists) + self.assertRaises(e, p.samefile, 'foo') + self.assertRaises(e, p.is_dir) + self.assertRaises(e, p.is_file) + self.assertRaises(e, p.is_mount) + self.assertRaises(e, p.is_symlink) + self.assertRaises(e, p.is_block_device) + self.assertRaises(e, p.is_char_device) + self.assertRaises(e, p.is_fifo) + self.assertRaises(e, p.is_socket) + self.assertRaises(e, p.open) + self.assertRaises(e, p.read_bytes) + self.assertRaises(e, p.read_text) + self.assertRaises(e, p.write_bytes, b'foo') + self.assertRaises(e, p.write_text, 'foo') + self.assertRaises(e, p.iterdir) + self.assertRaises(e, p.glob, '*') + self.assertRaises(e, p.rglob, '*') + self.assertRaises(e, lambda: list(p.walk())) + self.assertRaises(e, p.absolute) + self.assertRaises(e, P.cwd) + self.assertRaises(e, p.expanduser) + self.assertRaises(e, p.home) + self.assertRaises(e, p.readlink) + self.assertRaises(e, p.symlink_to, 'foo') + self.assertRaises(e, p.hardlink_to, 'foo') + self.assertRaises(e, p.mkdir) + self.assertRaises(e, p.touch) + self.assertRaises(e, p.rename, 'foo') + self.assertRaises(e, p.replace, 'foo') + self.assertRaises(e, p.chmod, 0o755) + self.assertRaises(e, p.lchmod, 0o755) + self.assertRaises(e, p.unlink) + self.assertRaises(e, p.rmdir) + self.assertRaises(e, p.owner) + self.assertRaises(e, p.group) + self.assertRaises(e, p.as_uri) + + def test_as_uri_common(self): + e = pathlib.UnsupportedOperation + self.assertRaises(e, self.cls().as_uri) + + def test_fspath_common(self): + self.assertRaises(TypeError, os.fspath, self.cls()) + + def test_as_bytes_common(self): + self.assertRaises(TypeError, bytes, self.cls()) + + def test_matches_path_api(self): + our_names = {name for name in dir(self.cls) if name[0] != '_'} + path_names = {name for name in dir(pathlib.Path) if name[0] != '_'} + self.assertEqual(our_names, path_names) + for attr_name in our_names: + our_attr = getattr(self.cls, attr_name) + path_attr = getattr(pathlib.Path, attr_name) + self.assertEqual(our_attr.__doc__, path_attr.__doc__) + + +class DummyPathIO(io.BytesIO): + """ + Used by DummyPath to implement `open('w')` + """ + + def __init__(self, files, path): + super().__init__() + self.files = files + self.path = path + + def close(self): + self.files[self.path] = self.getvalue() + super().close() + + +class DummyPath(pathlib._PathBase): + """ + Simple implementation of PathBase that keeps files and directories in + memory. 
+ """ + _files = {} + _directories = {} + _symlinks = {} + + def stat(self, *, follow_symlinks=True): + if follow_symlinks: + path = str(self.resolve()) + else: + path = str(self.parent.resolve() / self.name) + if path in self._files: + st_mode = stat.S_IFREG + elif path in self._directories: + st_mode = stat.S_IFDIR + elif path in self._symlinks: + st_mode = stat.S_IFLNK + else: + raise FileNotFoundError(errno.ENOENT, "Not found", str(self)) + return os.stat_result((st_mode, hash(str(self)), 0, 0, 0, 0, 0, 0, 0, 0)) + + def open(self, mode='r', buffering=-1, encoding=None, + errors=None, newline=None): + if buffering != -1: + raise NotImplementedError + path_obj = self.resolve() + path = str(path_obj) + name = path_obj.name + parent = str(path_obj.parent) + if path in self._directories: + raise IsADirectoryError(errno.EISDIR, "Is a directory", path) + + text = 'b' not in mode + mode = ''.join(c for c in mode if c not in 'btU') + if mode == 'r': + if path not in self._files: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + stream = io.BytesIO(self._files[path]) + elif mode == 'w': + if parent not in self._directories: + raise FileNotFoundError(errno.ENOENT, "File not found", parent) + stream = DummyPathIO(self._files, path) + self._files[path] = b'' + self._directories[parent].add(name) + else: + raise NotImplementedError + if text: + stream = io.TextIOWrapper(stream, encoding=encoding, errors=errors, newline=newline) + return stream + + def iterdir(self): + path = str(self.resolve()) + if path in self._files: + raise NotADirectoryError(errno.ENOTDIR, "Not a directory", path) + elif path in self._directories: + return (self / name for name in self._directories[path]) + else: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + + def mkdir(self, mode=0o777, parents=False, exist_ok=False): + try: + self._directories[str(self.parent)].add(self.name) + self._directories[str(self)] = set() + except KeyError: + if not parents or self.parent == self: + raise FileNotFoundError(errno.ENOENT, "File not found", str(self.parent)) from None + self.parent.mkdir(parents=True, exist_ok=True) + self.mkdir(mode, parents=False, exist_ok=exist_ok) + except FileExistsError: + if not exist_ok: + raise + + +class DummyPathTest(unittest.TestCase): + """Tests for PathBase methods that use stat(), open() and iterdir().""" + + cls = DummyPath + can_symlink = False # (BASE) # | @@ -1612,37 +1770,38 @@ class PathTest(unittest.TestCase): # def setUp(self): - def cleanup(): - os.chmod(join('dirE'), 0o777) - os_helper.rmtree(BASE) - self.addCleanup(cleanup) - os.mkdir(BASE) - os.mkdir(join('dirA')) - os.mkdir(join('dirB')) - os.mkdir(join('dirC')) - os.mkdir(join('dirC', 'dirD')) - os.mkdir(join('dirE')) - with open(join('fileA'), 'wb') as f: - f.write(b"this is file A\n") - with open(join('dirB', 'fileB'), 'wb') as f: - f.write(b"this is file B\n") - with open(join('dirC', 'fileC'), 'wb') as f: - f.write(b"this is file C\n") - with open(join('dirC', 'novel.txt'), 'wb') as f: - f.write(b"this is a novel\n") - with open(join('dirC', 'dirD', 'fileD'), 'wb') as f: - f.write(b"this is file D\n") - os.chmod(join('dirE'), 0) - if self.can_symlink: - # Relative symlinks. - os.symlink('fileA', join('linkA')) - os.symlink('non-existing', join('brokenLink')) - os.symlink('dirB', join('linkB'), target_is_directory=True) - os.symlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'), target_is_directory=True) - # This one goes upwards, creating a loop. 
- os.symlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'), target_is_directory=True) - # Broken symlink (pointing to itself). - os.symlink('brokenLinkLoop', join('brokenLinkLoop')) + # note: this must be kept in sync with `PathTest.setUp()` + cls = self.cls + cls._files.clear() + cls._directories.clear() + cls._symlinks.clear() + join = cls.pathmod.join + cls._files.update({ + join(BASE, 'fileA'): b'this is file A\n', + join(BASE, 'dirB', 'fileB'): b'this is file B\n', + join(BASE, 'dirC', 'fileC'): b'this is file C\n', + join(BASE, 'dirC', 'dirD', 'fileD'): b'this is file D\n', + join(BASE, 'dirC', 'novel.txt'): b'this is a novel\n', + }) + cls._directories.update({ + BASE: {'dirA', 'dirB', 'dirC', 'dirE', 'fileA'}, + join(BASE, 'dirA'): set(), + join(BASE, 'dirB'): {'fileB'}, + join(BASE, 'dirC'): {'dirD', 'fileC', 'novel.txt'}, + join(BASE, 'dirC', 'dirD'): {'fileD'}, + join(BASE, 'dirE'): {}, + }) + dirname = BASE + while True: + dirname, basename = cls.pathmod.split(dirname) + if not basename: + break + cls._directories[dirname] = {basename} + + def tempdir(self): + path = self.cls(BASE).with_name('tmp-dirD') + path.mkdir() + return path def assertFileNotFound(self, func, *args, **kwargs): with self.assertRaises(FileNotFoundError) as cm: @@ -1991,9 +2150,11 @@ def test_rglob_symlink_loop(self): def test_glob_many_open_files(self): depth = 30 P = self.cls - base = P(BASE) / 'deep' - p = P(base, *(['d']*depth)) - p.mkdir(parents=True) + p = base = P(BASE) / 'deep' + p.mkdir() + for _ in range(depth): + p /= 'd' + p.mkdir() pattern = '/'.join(['*'] * depth) iters = [base.glob(pattern) for j in range(100)] for it in iters: @@ -2080,6 +2241,7 @@ def test_readlink(self): self.assertEqual((P / 'brokenLink').readlink(), self.cls('non-existing')) self.assertEqual((P / 'linkB').readlink(), self.cls('dirB')) + self.assertEqual((P / 'linkB' / 'linkD').readlink(), self.cls('../dirB')) with self.assertRaises(OSError): (P / 'fileA').readlink() @@ -2128,7 +2290,7 @@ def test_resolve_common(self): self._check_resolve_relative(p, P(BASE, 'dirB', 'fileB', 'foo', 'in', 'spam'), False) p = P(BASE, 'dirA', 'linkC', '..', 'foo', 'in', 'spam') - if os.name == 'nt': + if os.name == 'nt' and isinstance(p, pathlib.Path): # In Windows, if linkY points to dirB, 'dirA\linkY\..' # resolves to 'dirA' without resolving linkY first. self._check_resolve_relative(p, P(BASE, 'dirA', 'foo', 'in', @@ -2138,9 +2300,7 @@ def test_resolve_common(self): # resolves to 'dirB/..' first before resolving to parent of dirB. self._check_resolve_relative(p, P(BASE, 'foo', 'in', 'spam'), False) # Now create absolute symlinks. - d = os_helper._longpath(tempfile.mkdtemp(suffix='-dirD', - dir=os.getcwd())) - self.addCleanup(os_helper.rmtree, d) + d = self.tempdir() P(BASE, 'dirA', 'linkX').symlink_to(d) P(BASE, str(d), 'linkY').symlink_to(join('dirB')) p = P(BASE, 'dirA', 'linkX', 'linkY', 'fileB') @@ -2150,7 +2310,7 @@ def test_resolve_common(self): self._check_resolve_relative(p, P(BASE, 'dirB', 'foo', 'in', 'spam'), False) p = P(BASE, 'dirA', 'linkX', 'linkY', '..', 'foo', 'in', 'spam') - if os.name == 'nt': + if os.name == 'nt' and isinstance(p, pathlib.Path): # In Windows, if linkY points to dirB, 'dirA\linkY\..' # resolves to 'dirA' without resolving linkY first. 
self._check_resolve_relative(p, P(d, 'foo', 'in', 'spam'), False) @@ -2174,6 +2334,38 @@ def test_resolve_dot(self): # Non-strict self.assertEqual(r.resolve(strict=False), p / '3' / '4') + def _check_symlink_loop(self, *args): + path = self.cls(*args) + with self.assertRaises(OSError) as cm: + path.resolve(strict=True) + self.assertEqual(cm.exception.errno, errno.ELOOP) + + def test_resolve_loop(self): + if not self.can_symlink: + self.skipTest("symlinks required") + if os.name == 'nt' and issubclass(self.cls, pathlib.Path): + self.skipTest("symlink loops work differently with concrete Windows paths") + # Loops with relative symlinks. + self.cls(BASE, 'linkX').symlink_to('linkX/inside') + self._check_symlink_loop(BASE, 'linkX') + self.cls(BASE, 'linkY').symlink_to('linkY') + self._check_symlink_loop(BASE, 'linkY') + self.cls(BASE, 'linkZ').symlink_to('linkZ/../linkZ') + self._check_symlink_loop(BASE, 'linkZ') + # Non-strict + p = self.cls(BASE, 'linkZ', 'foo') + self.assertEqual(p.resolve(strict=False), p) + # Loops with absolute symlinks. + self.cls(BASE, 'linkU').symlink_to(join('linkU/inside')) + self._check_symlink_loop(BASE, 'linkU') + self.cls(BASE, 'linkV').symlink_to(join('linkV')) + self._check_symlink_loop(BASE, 'linkV') + self.cls(BASE, 'linkW').symlink_to(join('linkW/../linkW')) + self._check_symlink_loop(BASE, 'linkW') + # Non-strict + q = self.cls(BASE, 'linkW', 'foo') + self.assertEqual(q.resolve(strict=False), q) + def test_stat(self): statA = self.cls(BASE).joinpath('fileA').stat() statB = self.cls(BASE).joinpath('dirB', 'fileB').stat() @@ -2382,6 +2574,10 @@ def _check_complex_symlinks(self, link0_target): self.assertEqualNormCase(str(p), BASE) # Resolve relative paths. + try: + self.cls().absolute() + except pathlib.UnsupportedOperation: + return old_path = os.getcwd() os.chdir(BASE) try: @@ -2409,6 +2605,92 @@ def test_complex_symlinks_relative(self): def test_complex_symlinks_relative_dot_dot(self): self._check_complex_symlinks(os.path.join('dirA', '..')) + +class DummyPathWithSymlinks(DummyPath): + def readlink(self): + path = str(self.parent.resolve() / self.name) + if path in self._symlinks: + return self.with_segments(self._symlinks[path]) + elif path in self._files or path in self._directories: + raise OSError(errno.EINVAL, "Not a symlink", path) + else: + raise FileNotFoundError(errno.ENOENT, "File not found", path) + + def symlink_to(self, target, target_is_directory=False): + self._directories[str(self.parent)].add(self.name) + self._symlinks[str(self)] = str(target) + + +class DummyPathWithSymlinksTest(DummyPathTest): + cls = DummyPathWithSymlinks + can_symlink = True + + def setUp(self): + super().setUp() + cls = self.cls + join = cls.pathmod.join + cls._symlinks.update({ + join(BASE, 'linkA'): 'fileA', + join(BASE, 'linkB'): 'dirB', + join(BASE, 'dirA', 'linkC'): join('..', 'dirB'), + join(BASE, 'dirB', 'linkD'): join('..', 'dirB'), + join(BASE, 'brokenLink'): 'non-existing', + join(BASE, 'brokenLinkLoop'): 'brokenLinkLoop', + }) + cls._directories[BASE].update({'linkA', 'linkB', 'brokenLink', 'brokenLinkLoop'}) + cls._directories[join(BASE, 'dirA')].add('linkC') + cls._directories[join(BASE, 'dirB')].add('linkD') + + +# +# Tests for the concrete classes. 
+# + +class PathTest(DummyPathTest): + """Tests for the FS-accessing functionalities of the Path classes.""" + cls = pathlib.Path + can_symlink = os_helper.can_symlink() + + def setUp(self): + # note: this must be kept in sync with `DummyPathTest.setUp()` + def cleanup(): + os.chmod(join('dirE'), 0o777) + os_helper.rmtree(BASE) + self.addCleanup(cleanup) + os.mkdir(BASE) + os.mkdir(join('dirA')) + os.mkdir(join('dirB')) + os.mkdir(join('dirC')) + os.mkdir(join('dirC', 'dirD')) + os.mkdir(join('dirE')) + with open(join('fileA'), 'wb') as f: + f.write(b"this is file A\n") + with open(join('dirB', 'fileB'), 'wb') as f: + f.write(b"this is file B\n") + with open(join('dirC', 'fileC'), 'wb') as f: + f.write(b"this is file C\n") + with open(join('dirC', 'novel.txt'), 'wb') as f: + f.write(b"this is a novel\n") + with open(join('dirC', 'dirD', 'fileD'), 'wb') as f: + f.write(b"this is file D\n") + os.chmod(join('dirE'), 0) + if self.can_symlink: + # Relative symlinks. + os.symlink('fileA', join('linkA')) + os.symlink('non-existing', join('brokenLink')) + os.symlink('dirB', join('linkB'), target_is_directory=True) + os.symlink(os.path.join('..', 'dirB'), join('dirA', 'linkC'), target_is_directory=True) + # This one goes upwards, creating a loop. + os.symlink(os.path.join('..', 'dirB'), join('dirB', 'linkD'), target_is_directory=True) + # Broken symlink (pointing to itself). + os.symlink('brokenLinkLoop', join('brokenLinkLoop')) + + def tempdir(self): + d = os_helper._longpath(tempfile.mkdtemp(suffix='-dirD', + dir=os.getcwd())) + self.addCleanup(os_helper.rmtree, d) + return d + def test_concrete_class(self): if self.cls is pathlib.Path: expected = pathlib.WindowsPath if os.name == 'nt' else pathlib.PosixPath @@ -3178,12 +3460,6 @@ def test_absolute(self): self.assertEqual(str(P('//a').absolute()), '//a') self.assertEqual(str(P('//a/b').absolute()), '//a/b') - def _check_symlink_loop(self, *args): - path = self.cls(*args) - with self.assertRaises(OSError) as cm: - path.resolve(strict=True) - self.assertEqual(cm.exception.errno, errno.ELOOP) - @unittest.skipIf( is_emscripten or is_wasi, "umask is not implemented on Emscripten/WASI." @@ -3230,30 +3506,6 @@ def test_touch_mode(self): st = os.stat(join('masked_new_file')) self.assertEqual(stat.S_IMODE(st.st_mode), 0o750) - def test_resolve_loop(self): - if not self.can_symlink: - self.skipTest("symlinks required") - # Loops with relative symlinks. - os.symlink('linkX/inside', join('linkX')) - self._check_symlink_loop(BASE, 'linkX') - os.symlink('linkY', join('linkY')) - self._check_symlink_loop(BASE, 'linkY') - os.symlink('linkZ/../linkZ', join('linkZ')) - self._check_symlink_loop(BASE, 'linkZ') - # Non-strict - p = self.cls(BASE, 'linkZ', 'foo') - self.assertEqual(p.resolve(strict=False), p) - # Loops with absolute symlinks. - os.symlink(join('linkU/inside'), join('linkU')) - self._check_symlink_loop(BASE, 'linkU') - os.symlink(join('linkV'), join('linkV')) - self._check_symlink_loop(BASE, 'linkV') - os.symlink(join('linkW/../linkW'), join('linkW')) - self._check_symlink_loop(BASE, 'linkW') - # Non-strict - q = self.cls(BASE, 'linkW', 'foo') - self.assertEqual(q.resolve(strict=False), q) - def test_glob(self): P = self.cls p = P(BASE) diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py index c9c2b42861c6f4..a971f6b0250efb 100644 --- a/Lib/test/test_pty.py +++ b/Lib/test/test_pty.py @@ -80,17 +80,9 @@ def expectedFailureIfStdinIsTTY(fun): # because pty code is not too portable. 
class PtyTest(unittest.TestCase): def setUp(self): - old_alarm = signal.signal(signal.SIGALRM, self.handle_sig) - self.addCleanup(signal.signal, signal.SIGALRM, old_alarm) - old_sighup = signal.signal(signal.SIGHUP, self.handle_sighup) self.addCleanup(signal.signal, signal.SIGHUP, old_sighup) - # isatty() and close() can hang on some platforms. Set an alarm - # before running the test to make sure we don't hang forever. - self.addCleanup(signal.alarm, 0) - signal.alarm(10) - # Save original stdin window size. self.stdin_dim = None if _HAVE_WINSZ: @@ -101,9 +93,6 @@ def setUp(self): except tty.error: pass - def handle_sig(self, sig, frame): - self.fail("isatty hung") - @staticmethod def handle_sighup(signum, frame): pass diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 3ece31be9af3c3..c98b05abcea98c 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -374,16 +374,14 @@ def test_unknown_option(self): self.checkError(['--unknown-option'], 'unrecognized arguments: --unknown-option') - def check_ci_mode(self, args, use_resources): + def check_ci_mode(self, args, use_resources, rerun=True): ns = cmdline._parse_args(args) - if utils.MS_WINDOWS: - self.assertTrue(ns.nowindows) # Check Regrtest attributes which are more reliable than Namespace # which has an unclear API regrtest = main.Regrtest(ns) self.assertEqual(regrtest.num_workers, -1) - self.assertTrue(regrtest.want_rerun) + self.assertEqual(regrtest.want_rerun, rerun) self.assertTrue(regrtest.randomize) self.assertIsNone(regrtest.random_seed) self.assertTrue(regrtest.fail_env_changed) @@ -400,6 +398,14 @@ def test_fast_ci(self): regrtest = self.check_ci_mode(args, use_resources) self.assertEqual(regrtest.timeout, 10 * 60) + def test_fast_ci_python_cmd(self): + args = ['--fast-ci', '--python', 'python -X dev'] + use_resources = sorted(cmdline.ALL_RESOURCES) + use_resources.remove('cpu') + regrtest = self.check_ci_mode(args, use_resources, rerun=False) + self.assertEqual(regrtest.timeout, 10 * 60) + self.assertEqual(regrtest.python_cmd, ('python', '-X', 'dev')) + def test_fast_ci_resource(self): # it should be possible to override resources args = ['--fast-ci', '-u', 'network'] @@ -1965,16 +1971,20 @@ def test_dev_mode(self): self.check_executed_tests(output, tests, stats=len(tests), parallel=True) - def check_reexec(self, option): + def check_add_python_opts(self, option): # --fast-ci and --slow-ci add "-u -W default -bb -E" options to Python code = textwrap.dedent(r""" import sys import unittest + from test import support try: from _testinternalcapi import get_config except ImportError: get_config = None + # WASI/WASM buildbots don't use -E option + use_environment = (support.is_emscripten or support.is_wasi) + class WorkerTests(unittest.TestCase): @unittest.skipUnless(get_config is None, 'need get_config()') def test_config(self): @@ -1986,7 +1996,7 @@ def test_config(self): # -bb option self.assertTrue(config['bytes_warning'], 2) # -E option - self.assertTrue(config['use_environment'], 0) + self.assertTrue(config['use_environment'], use_environment) def test_python_opts(self): # -u option @@ -2000,7 +2010,8 @@ def test_python_opts(self): self.assertEqual(sys.flags.bytes_warning, 2) # -E option - self.assertTrue(sys.flags.ignore_environment) + self.assertEqual(not sys.flags.ignore_environment, + use_environment) """) testname = self.create_test(code=code) @@ -2018,7 +2029,7 @@ def test_python_opts(self): def test_add_python_opts(self): for opt in ("--fast-ci", "--slow-ci"): with 
self.subTest(opt=opt): - self.check_reexec(opt) + self.check_add_python_opts(opt) class TestUtils(unittest.TestCase): diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py index 502287b620d066..3e93b561c143d8 100644 --- a/Lib/test/test_reprlib.py +++ b/Lib/test/test_reprlib.py @@ -774,5 +774,16 @@ def __repr__(self): self.assertIs(X.f, X.__repr__.__wrapped__) + def test__type_params__(self): + class My: + @recursive_repr() + def __repr__[T: str](self, default: T = '') -> str: + return default + + type_params = My().__repr__.__type_params__ + self.assertEqual(len(type_params), 1) + self.assertEqual(type_params[0].__name__, 'T') + self.assertEqual(type_params[0].__bound__, str) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_selectors.py b/Lib/test/test_selectors.py index 33417ca6a11af0..677349c2bfca93 100644 --- a/Lib/test/test_selectors.py +++ b/Lib/test/test_selectors.py @@ -285,6 +285,35 @@ def test_select(self): self.assertEqual([(wr_key, selectors.EVENT_WRITE)], result) + def test_select_read_write(self): + # gh-110038: when a file descriptor is registered for both read and + # write, the two events must be seen on a single call to select(). + s = self.SELECTOR() + self.addCleanup(s.close) + + sock1, sock2 = self.make_socketpair() + sock2.send(b"foo") + my_key = s.register(sock1, selectors.EVENT_READ | selectors.EVENT_WRITE) + + seen_read, seen_write = False, False + result = s.select() + # We get the read and write either in the same result entry or in two + # distinct entries with the same key. + self.assertLessEqual(len(result), 2) + for key, events in result: + self.assertTrue(isinstance(key, selectors.SelectorKey)) + self.assertEqual(key, my_key) + self.assertFalse(events & ~(selectors.EVENT_READ | + selectors.EVENT_WRITE)) + if events & selectors.EVENT_READ: + self.assertFalse(seen_read) + seen_read = True + if events & selectors.EVENT_WRITE: + self.assertFalse(seen_write) + seen_write = True + self.assertTrue(seen_read) + self.assertTrue(seen_write) + def test_context_manager(self): s = self.SELECTOR() self.addCleanup(s.close) diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index 5b57c5fd54a68d..902bec78451307 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -7,6 +7,7 @@ import stat import subprocess import sys +import sysconfig import tempfile import textwrap import unittest @@ -800,6 +801,42 @@ def test_set_memlimit(self): support.max_memuse = old_max_memuse support.real_max_memuse = old_real_max_memuse + def test_copy_python_src_ignore(self): + # Get source directory + src_dir = sysconfig.get_config_var('abs_srcdir') + if not src_dir: + src_dir = sysconfig.get_config_var('srcdir') + src_dir = os.path.abspath(src_dir) + + # Check that the source code is available + if not os.path.exists(src_dir): + self.skipTest(f"cannot access Python source code directory:" + f" {src_dir!r}") + # Check that the landmark copy_python_src_ignore() expects is available + # (Previously we looked for 'Lib\os.py', which is always present on Windows.) 
+ landmark = os.path.join(src_dir, 'Modules') + if not os.path.exists(landmark): + self.skipTest(f"cannot access Python source code directory:" + f" {landmark!r} landmark is missing") + + # Test support.copy_python_src_ignore() + + # Source code directory + ignored = {'.git', '__pycache__'} + names = os.listdir(src_dir) + self.assertEqual(support.copy_python_src_ignore(src_dir, names), + ignored | {'build'}) + + # Doc/ directory + path = os.path.join(src_dir, 'Doc') + self.assertEqual(support.copy_python_src_ignore(path, os.listdir(path)), + ignored | {'build', 'venv'}) + + # Another directory + path = os.path.join(src_dir, 'Objects') + self.assertEqual(support.copy_python_src_ignore(path, os.listdir(path)), + ignored) + # XXX -follows a list of untested API # make_legacy_pyc # is_resource_enabled diff --git a/Lib/test/test_symtable.py b/Lib/test/test_symtable.py index 36cb7b3f242e4c..82c1d7c856a1e5 100644 --- a/Lib/test/test_symtable.py +++ b/Lib/test/test_symtable.py @@ -40,6 +40,15 @@ def foo(): def namespace_test(): pass def namespace_test(): pass + +type Alias = int +type GenericAlias[T] = list[T] + +def generic_spam[T](a): + pass + +class GenericMine[T: int]: + pass """ @@ -59,6 +68,14 @@ class SymtableTest(unittest.TestCase): internal = find_block(spam, "internal") other_internal = find_block(spam, "other_internal") foo = find_block(top, "foo") + Alias = find_block(top, "Alias") + GenericAlias = find_block(top, "GenericAlias") + GenericAlias_inner = find_block(GenericAlias, "GenericAlias") + generic_spam = find_block(top, "generic_spam") + generic_spam_inner = find_block(generic_spam, "generic_spam") + GenericMine = find_block(top, "GenericMine") + GenericMine_inner = find_block(GenericMine, "GenericMine") + T = find_block(GenericMine, "T") def test_type(self): self.assertEqual(self.top.get_type(), "module") @@ -66,6 +83,15 @@ def test_type(self): self.assertEqual(self.a_method.get_type(), "function") self.assertEqual(self.spam.get_type(), "function") self.assertEqual(self.internal.get_type(), "function") + self.assertEqual(self.foo.get_type(), "function") + self.assertEqual(self.Alias.get_type(), "type alias") + self.assertEqual(self.GenericAlias.get_type(), "type parameter") + self.assertEqual(self.GenericAlias_inner.get_type(), "type alias") + self.assertEqual(self.generic_spam.get_type(), "type parameter") + self.assertEqual(self.generic_spam_inner.get_type(), "function") + self.assertEqual(self.GenericMine.get_type(), "type parameter") + self.assertEqual(self.GenericMine_inner.get_type(), "class") + self.assertEqual(self.T.get_type(), "TypeVar bound") def test_id(self): self.assertGreater(self.top.get_id(), 0) @@ -73,6 +99,11 @@ def test_id(self): self.assertGreater(self.a_method.get_id(), 0) self.assertGreater(self.spam.get_id(), 0) self.assertGreater(self.internal.get_id(), 0) + self.assertGreater(self.foo.get_id(), 0) + self.assertGreater(self.Alias.get_id(), 0) + self.assertGreater(self.GenericAlias.get_id(), 0) + self.assertGreater(self.generic_spam.get_id(), 0) + self.assertGreater(self.GenericMine.get_id(), 0) def test_optimized(self): self.assertFalse(self.top.is_optimized()) diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index c616a27364b494..16050171ad139d 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -1210,6 +1210,13 @@ def test_pystats(self): sys._stats_clear() sys._stats_dump() + @test.support.cpython_only + @unittest.skipUnless(hasattr(sys, 'abiflags'), 'need sys.abiflags') + def test_disable_gil_abi(self): + abi_threaded = 't' in
sys.abiflags + py_nogil = (sysconfig.get_config_var('Py_NOGIL') == 1) + self.assertEqual(py_nogil, abi_threaded) + @test.support.cpython_only class UnraisableHookTest(unittest.TestCase): diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 71fcad268b8036..13bfacbac83f13 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -35,6 +35,23 @@ platforms_to_skip = ('netbsd5', 'hp-ux11') +# gh-89363: Skip fork() test if Python is built with Address Sanitizer (ASAN) +# to work around a libasan race condition, dead lock in pthread_create(). +skip_if_asan_fork = support.skip_if_sanitizer( + "libasan has a pthread_create() dead lock", + address=True) + + +def skip_unless_reliable_fork(test): + if not support.has_fork_support: + return unittest.skip("requires working os.fork()")(test) + if sys.platform in platforms_to_skip: + return unittest.skip("due to known OS bug related to thread+fork")(test) + if support.check_sanitizer(address=True): + return unittest.skip("libasan has a pthread_create() dead lock related to thread+fork")(test) + return test + + def restore_default_excepthook(testcase): testcase.addCleanup(setattr, threading, 'excepthook', threading.excepthook) threading.excepthook = threading.__excepthook__ @@ -539,7 +556,7 @@ def test_daemon_param(self): t = threading.Thread(daemon=True) self.assertTrue(t.daemon) - @support.requires_fork() + @skip_unless_reliable_fork def test_dummy_thread_after_fork(self): # Issue #14308: a dummy thread in the active list doesn't mess up # the after-fork mechanism. @@ -571,11 +588,7 @@ def background_thread(evt): self.assertEqual(out, b'') self.assertEqual(err, b'') - @support.requires_fork() - # gh-89363: Skip multiprocessing tests if Python is built with ASAN to - # work around a libasan race condition: dead lock in pthread_create(). - @support.skip_if_sanitizer("libasan has a pthread_create() dead lock", - address=True) + @skip_unless_reliable_fork def test_is_alive_after_fork(self): # Try hard to trigger #18418: is_alive() could sometimes be True on # threads that vanished after a fork. @@ -611,7 +624,7 @@ def f(): th.start() th.join() - @support.requires_fork() + @skip_unless_reliable_fork @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()") def test_main_thread_after_fork(self): code = """if 1: @@ -632,8 +645,7 @@ def test_main_thread_after_fork(self): self.assertEqual(err, b"") self.assertEqual(data, "MainThread\nTrue\nTrue\n") - @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") - @support.requires_fork() + @skip_unless_reliable_fork @unittest.skipUnless(hasattr(os, 'waitpid'), "test needs os.waitpid()") def test_main_thread_after_fork_from_nonmain_thread(self): code = """if 1: @@ -1080,8 +1092,7 @@ def test_1_join_on_shutdown(self): """ self._run_and_join(script) - @support.requires_fork() - @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") + @skip_unless_reliable_fork def test_2_join_in_forked_process(self): # Like the test above, but from a forked interpreter script = """if 1: @@ -1101,8 +1112,7 @@ def test_2_join_in_forked_process(self): """ self._run_and_join(script) - @support.requires_fork() - @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") + @skip_unless_reliable_fork def test_3_join_in_forked_from_thread(self): # Like the test above, but fork() was called from a worker thread # In the forked process, the main Thread object must be marked as stopped. 
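# The skip_unless_reliable_fork() helper above composes the standard unittest
# skips by calling unittest.skip(...) at decoration time. A minimal sketch of
# the same pattern (illustrative only; skip_unless_posix is hypothetical):
#
#     import os
#     import unittest
#
#     def skip_unless_posix(test):
#         # Either return the test unchanged or wrap it in unittest.skip(),
#         # exactly as skip_unless_reliable_fork does.
#         if os.name != 'posix':
#             return unittest.skip("requires a POSIX platform")(test)
#         return test
#
#     class Example(unittest.TestCase):
#         @skip_unless_posix
#         def test_fork_behaviour(self):
#             self.assertTrue(hasattr(os, 'fork'))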
@@ -1172,8 +1182,7 @@ def main(): rc, out, err = assert_python_ok('-c', script) self.assertFalse(err) - @support.requires_fork() - @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug") + @skip_unless_reliable_fork def test_reinit_tls_after_fork(self): # Issue #13817: fork() would deadlock in a multithreaded program with # the ad-hoc TLS implementation. @@ -1199,7 +1208,7 @@ def do_fork_and_wait(): for t in threads: t.join() - @support.requires_fork() + @skip_unless_reliable_fork def test_clear_threads_states_after_fork(self): # Issue #17094: check that threads states are cleared after fork() diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index eb83aa39425515..0ffe3e1d0cc498 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -21,7 +21,7 @@ skip_if_broken_multiprocessing_synchronize, verbose, requires_subprocess, is_emscripten, is_wasi, requires_venv_with_pip, TEST_HOME_DIR, - requires_resource) + requires_resource, copy_python_src_ignore) from test.support.os_helper import (can_symlink, EnvironmentVarGuard, rmtree) import unittest import venv @@ -560,12 +560,6 @@ def test_zippath_from_non_installed_posix(self): stdlib_zip) additional_pythonpath_for_non_installed = [] - # gh-109748: Don't copy __pycache__/ sub-directories, because they can - # be modified by other Python tests running in parallel. - ignored_names = {'__pycache__'} - def ignore_pycache(src, names): - return ignored_names - # Copy stdlib files to the non-installed python so venv can # correctly calculate the prefix. for eachpath in sys.path: @@ -583,7 +577,7 @@ def ignore_pycache(src, names): shutil.copy(fn, libdir) elif os.path.isdir(fn): shutil.copytree(fn, os.path.join(libdir, name), - ignore=ignore_pycache) + ignore=copy_python_src_ignore) else: additional_pythonpath_for_non_installed.append( eachpath) diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index c108ee21a7a2a3..c54de880c20360 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -246,9 +246,9 @@ def library_recipes(): result.extend([ dict( - name="OpenSSL 3.0.10", - url="https://www.openssl.org/source/openssl-3.0.10.tar.gz", - checksum='1761d4f5b13a1028b9b6f3d4b8e17feb0cedc9370f6afe61d7193d2cdce83323', + name="OpenSSL 3.0.11", + url="https://www.openssl.org/source/openssl-3.0.11.tar.gz", + checksum='b3425d3bb4a2218d0697eb41f7fc0cdede016ed19ca49d168b78e8d947887f55', buildrecipe=build_universal_openssl, configure=None, install=None, diff --git a/Makefile.pre.in b/Makefile.pre.in index ccbacfc8571851..fa5b9e6654c26c 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -492,9 +492,6 @@ OBJECT_OBJS= \ Objects/weakrefobject.o \ @PERF_TRAMPOLINE_OBJ@ -DEEPFREEZE_C = Python/deepfreeze/deepfreeze.c -DEEPFREEZE_OBJS = Python/deepfreeze/deepfreeze.o - ########################################################################## # objects that get linked into the Python library LIBRARY_OBJS_OMIT_FROZEN= \ @@ -1252,9 +1249,7 @@ regen-frozen: Tools/build/freeze_modules.py $(FROZEN_FILES_IN) ############################################################################ # Deepfreeze targets -.PHONY: regen-deepfreeze -regen-deepfreeze: $(DEEPFREEZE_C) - +DEEPFREEZE_C = Python/deepfreeze/deepfreeze.c DEEPFREEZE_DEPS=$(srcdir)/Tools/build/deepfreeze.py Include/internal/pycore_global_strings.h $(FREEZE_MODULE_DEPS) $(FROZEN_FILES_OUT) # BEGIN: deepfreeze modules @@ -1294,10 +1289,9 @@ regen-importlib: regen-frozen # Global objects # Dependencies which can add and/or 
remove _Py_ID() identifiers: -# - deepfreeze.c # - "make clinic" .PHONY: regen-global-objects -regen-global-objects: $(srcdir)/Tools/build/generate_global_objects.py $(DEEPFREEZE_C) clinic +regen-global-objects: $(srcdir)/Tools/build/generate_global_objects.py clinic $(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/generate_global_objects.py ############################################################################ @@ -1859,21 +1853,6 @@ cleantest: all test: all $(TESTRUNNER) --fast-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) -# Run the full test suite twice - once without .pyc files, and once with. -# In the past, we've had problems where bugs in the marshalling or -# elsewhere caused bytecode read from .pyc files to behave differently -# than bytecode generated directly from a .py source file. Sometimes -# the bytecode read from a .pyc file had the bug, sometimes the directly -# generated bytecode. This is sometimes a very shy bug needing a lot of -# sample data. -.PHONY: testall -testall: all - -find $(srcdir)/Lib -name '*.py[co]' -exec rm -f {} ';' || true - $(TESTPYTHON) -E $(srcdir)/Lib/compileall.py - -find $(srcdir)/Lib -name '*.py[co]' -exec rm -f {} ';' || true - $(TESTRUNNER) --slow-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) - $(TESTRUNNER) --slow-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) - # Run the test suite for both architectures in a Universal build on OSX. # Must be run on an Intel box. .PHONY: testuniversal testuniversal: all $(RUNSHARED) /usr/libexec/oah/translate \ ./$(BUILDPYTHON) -E -m test -j 0 -u all $(TESTOPTS) -# Like testall, but with only one pass and without multiple processes. -# Run an optional script to include information about the build environment. +# Like test, but using --slow-ci which enables all test resources and uses a +# longer timeout. Run an optional pybuildbot.identify script to include +# information about the build environment. .PHONY: buildbottest buildbottest: all -@if which pybuildbot.identify >/dev/null 2>&1; then \ @@ -1895,7 +1875,7 @@ buildbottest: all fi $(TESTRUNNER) --slow-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) -# Like testall, but run Python tests with HOSTRUNNER directly. +# Like buildbottest, but run Python tests with HOSTRUNNER directly. .PHONY: hostrunnertest hostrunnertest: all $(RUNSHARED) $(HOSTRUNNER) ./$(BUILDPYTHON) -m test --slow-ci --timeout=$(TESTTIMEOUT) $(TESTOPTS) @@ -2169,6 +2149,7 @@ TESTSUBDIRS= idlelib/idle_test \ test/test_email \ test/test_email/data \ test/test_future_stmt \ + test/test_gdb \ test/test_import \ test/test_import/data \ test/test_import/data/circular_imports \ diff --git a/Misc/ACKS b/Misc/ACKS index aaa178fc3b5d08..ccdfae66832f0e 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -349,6 +349,7 @@ Robbie Clemons Steve Clift Hervé Coatanhay Riccardo Coccioli +Jacob Coffee Nick Coghlan Josh Cogliati Noam Cohen diff --git a/Misc/NEWS.d/3.12.0a4.rst b/Misc/NEWS.d/3.12.0a4.rst index b3b39024056ccc..75246f3f13503e 100644 --- a/Misc/NEWS.d/3.12.0a4.rst +++ b/Misc/NEWS.d/3.12.0a4.rst @@ -23,10 +23,10 @@ Remove :opcode:`UNARY_POSITIVE`, :opcode:`ASYNC_GEN_WRAP` and .. nonce: D7H6j4 .. section: Core and Builtins -Add new :opcode:`CALL_INSTRINSIC_1` instruction. Remove +Add new :opcode:`CALL_INTRINSIC_1` instruction. Remove :opcode:`IMPORT_STAR`, :opcode:`PRINT_EXPR` and :opcode:`STOPITERATION_ERROR`, replacing them with the -:opcode:`CALL_INSTRINSIC_1` instruction. +:opcode:`CALL_INTRINSIC_1` instruction. ..
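Following the Makefile changes above, both the ``test`` and ``buildbottest`` targets now funnel into the same regrtest entry point, differing only in ``--fast-ci`` versus ``--slow-ci``. A rough equivalent invocation from Python (a sketch; the real command line and timeout come from the TESTRUNNER and TESTTIMEOUT Makefile variables, and 1200 is an assumed value here):

    import subprocess
    import sys

    # Roughly what "make buildbottest" runs: slow CI mode enables all test
    # resources and a longer timeout.
    subprocess.run([sys.executable, '-m', 'test', '--slow-ci', '--timeout=1200'],
                   check=True)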
diff --git a/Misc/NEWS.d/3.8.0a1.rst b/Misc/NEWS.d/3.8.0a1.rst index 57f72e95b029fc..0dc6e945719ec7 100644 --- a/Misc/NEWS.d/3.8.0a1.rst +++ b/Misc/NEWS.d/3.8.0a1.rst @@ -380,7 +380,7 @@ Implement :pep:`572` (assignment expressions). Patch by Emily Morehouse. .. nonce: voIdcp .. section: Core and Builtins -Speed up :class:`namedtuple` attribute access by 1.6x using a C fast-path +Speed up :func:`namedtuple` attribute access by 1.6x using a C fast-path for the name descriptors. Patch by Pablo Galindo. .. @@ -8253,7 +8253,7 @@ Explain how IDLE's Shell displays output. Improve the doc about IDLE running user code. The section is renamed from "IDLE -- console differences" is renamed "Running user code". It mostly -covers the implications of using custom :samp:sys.std{xxx}` objects. +covers the implications of using custom :samp:`sys.std{xxx}` objects. .. diff --git a/Misc/NEWS.d/next/Build/2023-09-26-16-00-50.gh-issue-109740.wboWdQ.rst b/Misc/NEWS.d/next/Build/2023-09-26-16-00-50.gh-issue-109740.wboWdQ.rst new file mode 100644 index 00000000000000..f59f462aecd1fc --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-09-26-16-00-50.gh-issue-109740.wboWdQ.rst @@ -0,0 +1,2 @@ +The experimental ``--disable-gil`` configure flag now includes "t" (for "threaded") in +extension ABI tags. diff --git a/Misc/NEWS.d/next/Build/2023-09-29-21-01-48.gh-issue-109566._enldb.rst b/Misc/NEWS.d/next/Build/2023-09-29-21-01-48.gh-issue-109566._enldb.rst new file mode 100644 index 00000000000000..1141a4738b3151 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2023-09-29-21-01-48.gh-issue-109566._enldb.rst @@ -0,0 +1,2 @@ +Remove ``make testall`` target: use ``make buildbottest`` instead. Patch by +Victor Stinner. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-09-25-09-24-10.gh-issue-109793.zFQBkv.rst b/Misc/NEWS.d/next/Core and Builtins/2023-09-25-09-24-10.gh-issue-109793.zFQBkv.rst new file mode 100644 index 00000000000000..d2dc4c830a9031 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-09-25-09-24-10.gh-issue-109793.zFQBkv.rst @@ -0,0 +1,4 @@ +The main thread no longer exits prematurely when a subinterpreter +is cleaned up during runtime finalization. The bug was a problem +particularly because, when triggered, the Python process would +always return with a 0 exitcode, even if it failed. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-09-26-21-26-54.gh-issue-109923.WO3CHi.rst b/Misc/NEWS.d/next/Core and Builtins/2023-09-26-21-26-54.gh-issue-109923.WO3CHi.rst new file mode 100644 index 00000000000000..f2184592af0051 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-09-26-21-26-54.gh-issue-109923.WO3CHi.rst @@ -0,0 +1 @@ +Set line number on the ``POP_TOP`` that follows a ``RETURN_GENERATOR``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2023-09-27-21-35-49.gh-issue-109889.t5hIRT.rst b/Misc/NEWS.d/next/Core and Builtins/2023-09-27-21-35-49.gh-issue-109889.t5hIRT.rst new file mode 100644 index 00000000000000..8be373f0f6b6cd --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2023-09-27-21-35-49.gh-issue-109889.t5hIRT.rst @@ -0,0 +1,2 @@ +Fix the compiler's redundant NOP detection algorithm to skip over NOPs with +no line number when looking for the next instruction's lineno. 
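The gh-109740 entry above ("t" in extension ABI tags) can be probed at runtime; a minimal sketch mirroring the new ``test_disable_gil_abi`` check (assumes a build where ``sys.abiflags`` exists, i.e. not Windows):

    import sys
    import sysconfig

    # In a --disable-gil build, Py_NOGIL is 1 and "t" appears in the ABI flags.
    nogil = sysconfig.get_config_var('Py_NOGIL') == 1
    print('free-threaded build:', nogil, '| abiflags:', sys.abiflags)
    assert ('t' in sys.abiflags) == nogil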
diff --git a/Misc/NEWS.d/next/Library/2023-07-03-20-23-56.gh-issue-89812.cFkDOE.rst b/Misc/NEWS.d/next/Library/2023-07-03-20-23-56.gh-issue-89812.cFkDOE.rst new file mode 100644 index 00000000000000..a4221fc4ca900b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-07-03-20-23-56.gh-issue-89812.cFkDOE.rst @@ -0,0 +1,2 @@ +Add private ``pathlib._PathBase`` class, which provides experimental support +for virtual filesystems, and may be made public in a future version of Python. diff --git a/Misc/NEWS.d/next/Library/2023-09-10-20-23-20.gh-issue-66143.71xvgL.rst b/Misc/NEWS.d/next/Library/2023-09-10-20-23-20.gh-issue-66143.71xvgL.rst new file mode 100644 index 00000000000000..2769c05d937353 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-10-20-23-20.gh-issue-66143.71xvgL.rst @@ -0,0 +1,2 @@ +The :class:`codecs.CodecInfo` object has been made copyable and pickleable. +Patch by Robert Lehmann and Furkan Onder. diff --git a/Misc/NEWS.d/next/Library/2023-09-15-17-12-53.gh-issue-109461.VNFPTK.rst b/Misc/NEWS.d/next/Library/2023-09-15-17-12-53.gh-issue-109461.VNFPTK.rst new file mode 100644 index 00000000000000..28f0c16e620146 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-15-17-12-53.gh-issue-109461.VNFPTK.rst @@ -0,0 +1 @@ +:mod:`logging`: Use a context manager for lock acquisition. diff --git a/Misc/NEWS.d/next/Library/2023-09-16-15-44-16.gh-issue-109495.m2H5Bk.rst b/Misc/NEWS.d/next/Library/2023-09-16-15-44-16.gh-issue-109495.m2H5Bk.rst new file mode 100644 index 00000000000000..a7e1b3a64fa785 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-16-15-44-16.gh-issue-109495.m2H5Bk.rst @@ -0,0 +1 @@ +Remove unnecessary extra ``__slots__`` in :py:class:`datetime`\'s pure Python implementation to reduce memory size, as they are defined in the superclass. Patch by James Hilton-Balfe. diff --git a/Misc/NEWS.d/next/Library/2023-09-24-16-43-33.gh-issue-109782.gMC_7z.rst b/Misc/NEWS.d/next/Library/2023-09-24-16-43-33.gh-issue-109782.gMC_7z.rst new file mode 100644 index 00000000000000..7612e59dc45412 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-24-16-43-33.gh-issue-109782.gMC_7z.rst @@ -0,0 +1,2 @@ +Ensure the signature of :func:`os.path.isdir` is identical on all platforms. +Patch by Amin Alaee. diff --git a/Misc/NEWS.d/next/Library/2023-09-25-02-11-14.gh-issue-109047.b1TrqG.rst b/Misc/NEWS.d/next/Library/2023-09-25-02-11-14.gh-issue-109047.b1TrqG.rst new file mode 100644 index 00000000000000..71cb5a80847d0a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-25-02-11-14.gh-issue-109047.b1TrqG.rst @@ -0,0 +1,4 @@ +:mod:`concurrent.futures`: The *executor manager thread* now catches exceptions +when adding an item to the *call queue*. During Python finalization, creating a +new thread can now raise :exc:`RuntimeError`. Catch the exception and call +``terminate_broken()`` in this case. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2023-09-25-09-59-59.gh-issue-109818.dLRtT-.rst b/Misc/NEWS.d/next/Library/2023-09-25-09-59-59.gh-issue-109818.dLRtT-.rst new file mode 100644 index 00000000000000..184086af2585ea --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-25-09-59-59.gh-issue-109818.dLRtT-.rst @@ -0,0 +1,2 @@ +Fix :func:`reprlib.recursive_repr` not copying ``__type_params__`` from +the decorated function.
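The gh-109818 fix above is easy to observe; a small sketch mirroring the new test_reprlib case (requires Python 3.12+ for the PEP 695 syntax):

    from reprlib import recursive_repr

    class My:
        @recursive_repr()
        def __repr__[T: str](self, default: T = '') -> str:
            return default

    # Before the fix the wrapper reported no type parameters; now the
    # TypeVar from the decorated function is preserved.
    print(My().__repr__.__type_params__)  # (T,)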
diff --git a/Misc/NEWS.d/next/Library/2023-09-28-18-08-02.gh-issue-110045.0YIGKv.rst b/Misc/NEWS.d/next/Library/2023-09-28-18-08-02.gh-issue-110045.0YIGKv.rst new file mode 100644 index 00000000000000..44a6df1083762f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-28-18-08-02.gh-issue-110045.0YIGKv.rst @@ -0,0 +1,2 @@ +Update the :mod:`symtable` module to support the new scopes introduced by +:pep:`695`. diff --git a/Misc/NEWS.d/next/Library/2023-09-28-18-50-33.gh-issue-110038.nx_gCu.rst b/Misc/NEWS.d/next/Library/2023-09-28-18-50-33.gh-issue-110038.nx_gCu.rst new file mode 100644 index 00000000000000..6b2abd802fccdc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-28-18-50-33.gh-issue-110038.nx_gCu.rst @@ -0,0 +1,3 @@ +Fixed an issue that caused :meth:`KqueueSelector.select` to not return all +the ready events in some cases when a file descriptor is registered for both +read and write. diff --git a/Misc/NEWS.d/next/Library/2023-09-28-18-53-11.gh-issue-110036.fECxTj.rst b/Misc/NEWS.d/next/Library/2023-09-28-18-53-11.gh-issue-110036.fECxTj.rst new file mode 100644 index 00000000000000..ddb11b5c3546a1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-09-28-18-53-11.gh-issue-110036.fECxTj.rst @@ -0,0 +1,5 @@ +On Windows, multiprocessing ``Popen.terminate()`` now catches +:exc:`PermissionError` and gets the process exit code. If the process is +still running, the :exc:`PermissionError` is raised again. Otherwise, the +process terminated as expected: its exit code is stored. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-09-28-12-25-19.gh-issue-109972.GYnwIP.rst b/Misc/NEWS.d/next/Tests/2023-09-28-12-25-19.gh-issue-109972.GYnwIP.rst new file mode 100644 index 00000000000000..7b6007678388b1 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-28-12-25-19.gh-issue-109972.GYnwIP.rst @@ -0,0 +1,2 @@ +Split test_gdb.py file into a test_gdb package made of multiple tests, so tests +can now be run in parallel. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-09-28-14-47-14.gh-issue-109594.DB5KPP.rst b/Misc/NEWS.d/next/Tests/2023-09-28-14-47-14.gh-issue-109594.DB5KPP.rst new file mode 100644 index 00000000000000..5a4ae2b0837df6 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-28-14-47-14.gh-issue-109594.DB5KPP.rst @@ -0,0 +1,4 @@ +Fix test_timeout() of test_concurrent_futures.test_wait. Remove the future +which may or may not complete depending on whether it takes longer than the +timeout or not. Keep the second future which does not complete before the wait() +timeout. Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-09-28-18-14-52.gh-issue-110033.2yHMx0.rst b/Misc/NEWS.d/next/Tests/2023-09-28-18-14-52.gh-issue-110033.2yHMx0.rst new file mode 100644 index 00000000000000..fb6089377083bf --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-28-18-14-52.gh-issue-110033.2yHMx0.rst @@ -0,0 +1,5 @@ +Fix ``test_interprocess_signal()`` of ``test_signal``. Make sure that the +``subprocess.Popen`` object is deleted before the test raises an exception +in a signal handler. Otherwise, ``Popen.__del__()`` can get the exception +which is logged as ``Exception ignored in: ...`` and the test fails. Patch by +Victor Stinner.
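The gh-110038 fix above is simplest to exercise with a socket pair registered for both events; a minimal sketch (mirrors the new ``test_select_read_write`` case):

    import selectors
    import socket

    sock1, sock2 = socket.socketpair()
    sock2.send(b'ping')     # sock1 becomes readable; it is always writable
    sel = selectors.DefaultSelector()
    sel.register(sock1, selectors.EVENT_READ | selectors.EVENT_WRITE)

    # After the fix, one select() call reports both events (possibly as two
    # entries sharing the same key on kqueue platforms).
    for key, events in sel.select(timeout=1):
        print(bool(events & selectors.EVENT_READ),
              bool(events & selectors.EVENT_WRITE))

    sel.close()
    sock1.close()
    sock2.close()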
diff --git a/Misc/NEWS.d/next/Tests/2023-09-29-00-19-21.gh-issue-109974.Sh_g-r.rst b/Misc/NEWS.d/next/Tests/2023-09-29-00-19-21.gh-issue-109974.Sh_g-r.rst new file mode 100644 index 00000000000000..a130cf690a57cb --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-29-00-19-21.gh-issue-109974.Sh_g-r.rst @@ -0,0 +1,3 @@ +Fix race conditions in test_threading lock tests. Wait until a condition is met +rather than using :func:`time.sleep` with a hardcoded number of seconds. Patch +by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-09-29-12-48-42.gh-issue-110088.qUhRga.rst b/Misc/NEWS.d/next/Tests/2023-09-29-12-48-42.gh-issue-110088.qUhRga.rst new file mode 100644 index 00000000000000..cf44a123c2c925 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-29-12-48-42.gh-issue-110088.qUhRga.rst @@ -0,0 +1,4 @@ +Fix test_asyncio timeouts: don't measure the maximum duration; a test should +not measure CI performance. Only measure the minimum duration when a task has +a timeout or delay. Add ``CLOCK_RES`` to ``test_asyncio.utils``. Patch by +Victor Stinner. diff --git a/Misc/NEWS.d/next/Tests/2023-09-29-14-11-30.gh-issue-110031.fQnFnc.rst b/Misc/NEWS.d/next/Tests/2023-09-29-14-11-30.gh-issue-110031.fQnFnc.rst new file mode 100644 index 00000000000000..a8a163c567d2b3 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2023-09-29-14-11-30.gh-issue-110031.fQnFnc.rst @@ -0,0 +1,2 @@ +Skip test_threading tests using thread+fork if Python is built with Address +Sanitizer (ASAN). Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-08-12-13-18-15.gh-issue-107565.Tv22Ne.rst b/Misc/NEWS.d/next/Tools-Demos/2023-08-12-13-18-15.gh-issue-107565.Tv22Ne.rst deleted file mode 100644 index c43ee680e8158e..00000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2023-08-12-13-18-15.gh-issue-107565.Tv22Ne.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update multissltests and GitHub CI workflows to use OpenSSL 1.1.1v, 3.0.10, -and 3.1.2. diff --git a/Misc/NEWS.d/next/Tools-Demos/2023-09-27-23-31-54.gh-issue-109991.sUUYY8.rst b/Misc/NEWS.d/next/Tools-Demos/2023-09-27-23-31-54.gh-issue-109991.sUUYY8.rst new file mode 100644 index 00000000000000..13c1163ab53443 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2023-09-27-23-31-54.gh-issue-109991.sUUYY8.rst @@ -0,0 +1,2 @@ +Update GitHub CI workflows to use OpenSSL 3.0.11 and multissltests to use +1.1.1w, 3.0.11, and 3.1.3. diff --git a/Misc/NEWS.d/next/Windows/2023-09-05-10-08-47.gh-issue-107565.CIMftz.rst b/Misc/NEWS.d/next/Windows/2023-09-05-10-08-47.gh-issue-107565.CIMftz.rst deleted file mode 100644 index 024a58299caed9..00000000000000 --- a/Misc/NEWS.d/next/Windows/2023-09-05-10-08-47.gh-issue-107565.CIMftz.rst +++ /dev/null @@ -1 +0,0 @@ -Update Windows build to use OpenSSL 3.0.10. diff --git a/Misc/NEWS.d/next/Windows/2023-09-28-17-09-23.gh-issue-109991.CIMftz.rst b/Misc/NEWS.d/next/Windows/2023-09-28-17-09-23.gh-issue-109991.CIMftz.rst new file mode 100644 index 00000000000000..ee988f90863426 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2023-09-28-17-09-23.gh-issue-109991.CIMftz.rst @@ -0,0 +1 @@ +Update Windows build to use OpenSSL 3.0.11. diff --git a/Misc/NEWS.d/next/macOS/2023-08-12-13-33-57.gh-issue-107565.SJwqf4.rst b/Misc/NEWS.d/next/macOS/2023-08-12-13-33-57.gh-issue-107565.SJwqf4.rst deleted file mode 100644 index c238c4760239e1..00000000000000 --- a/Misc/NEWS.d/next/macOS/2023-08-12-13-33-57.gh-issue-107565.SJwqf4.rst +++ /dev/null @@ -1 +0,0 @@ -Update macOS installer to use OpenSSL 3.0.10.
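Several entries above and below track the same OpenSSL bump; the version actually linked into a build is easy to confirm at runtime. A one-liner sketch:

    import ssl

    # Expect something like 'OpenSSL 3.0.11 ...' on builds from this change.
    print(ssl.OPENSSL_VERSION, ssl.OPENSSL_VERSION_INFO)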
diff --git a/Misc/NEWS.d/next/macOS/2023-09-27-22-35-22.gh-issue-109991.-xJzaF.rst b/Misc/NEWS.d/next/macOS/2023-09-27-22-35-22.gh-issue-109991.-xJzaF.rst new file mode 100644 index 00000000000000..8d369988274f28 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2023-09-27-22-35-22.gh-issue-109991.-xJzaF.rst @@ -0,0 +1 @@ +Update macOS installer to use OpenSSL 3.0.11. diff --git a/Modules/_scproxy.c b/Modules/_scproxy.c index 0df0324df55f7d..6cc09088bdc869 100644 --- a/Modules/_scproxy.c +++ b/Modules/_scproxy.c @@ -249,16 +249,8 @@ static struct PyModuleDef _scproxy_module = { .m_slots = _scproxy_slots, }; -#ifdef __cplusplus -extern "C" { -#endif - PyMODINIT_FUNC PyInit__scproxy(void) { return PyModuleDef_Init(&_scproxy_module); } - -#ifdef __cplusplus -} -#endif diff --git a/Modules/_stat.c b/Modules/_stat.c index 6cea26175dee5e..3fd951b6fc1022 100644 --- a/Modules/_stat.c +++ b/Modules/_stat.c @@ -13,10 +13,6 @@ #include "Python.h" -#ifdef __cplusplus -extern "C" { -#endif - #ifdef HAVE_SYS_TYPES_H #include #endif /* HAVE_SYS_TYPES_H */ @@ -631,7 +627,3 @@ PyInit__stat(void) { return PyModuleDef_Init(&statmodule); } - -#ifdef __cplusplus -} -#endif diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index c3e7f86b3e33f1..e77a31b947f45e 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -1977,7 +1977,7 @@ os__path_splitroot(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py #if defined(MS_WINDOWS) PyDoc_STRVAR(os__path_isdir__doc__, -"_path_isdir($module, /, path)\n" +"_path_isdir($module, /, s)\n" "--\n" "\n" "Return true if the pathname refers to an existing directory."); @@ -1986,7 +1986,7 @@ PyDoc_STRVAR(os__path_isdir__doc__, {"_path_isdir", _PyCFunction_CAST(os__path_isdir), METH_FASTCALL|METH_KEYWORDS, os__path_isdir__doc__}, static PyObject * -os__path_isdir_impl(PyObject *module, PyObject *path); +os__path_isdir_impl(PyObject *module, PyObject *s); static PyObject * os__path_isdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) @@ -2001,7 +2001,7 @@ os__path_isdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje PyObject *ob_item[NUM_KEYWORDS]; } _kwtuple = { .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) - .ob_item = { &_Py_ID(path), }, + .ob_item = { &_Py_ID(s), }, }; #undef NUM_KEYWORDS #define KWTUPLE (&_kwtuple.ob_base.ob_base) @@ -2010,7 +2010,7 @@ os__path_isdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje # define KWTUPLE NULL #endif // !Py_BUILD_CORE - static const char * const _keywords[] = {"path", NULL}; + static const char * const _keywords[] = {"s", NULL}; static _PyArg_Parser _parser = { .keywords = _keywords, .fname = "_path_isdir", @@ -2018,14 +2018,14 @@ os__path_isdir(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje }; #undef KWTUPLE PyObject *argsbuf[1]; - PyObject *path; + PyObject *s; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); if (!args) { goto exit; } - path = args[0]; - return_value = os__path_isdir_impl(module, path); + s = args[0]; + return_value = os__path_isdir_impl(module, s); exit: return return_value; @@ -11988,4 +11988,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=1dd5aa7495cd6e3a input=a9049054013a1b77]*/ +/*[clinic end 
generated code: output=51aa26bc6a41e1da input=a9049054013a1b77]*/ diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index b051c71b3ade9b..4b6bf68be07202 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -174,7 +174,6 @@ faulthandler_dump_traceback(int fd, int all_threads, PyInterpreterState *interp) { static volatile int reentrant = 0; - PyThreadState *tstate; if (reentrant) return; @@ -189,7 +188,7 @@ faulthandler_dump_traceback(int fd, int all_threads, fault if the thread released the GIL, and so this function cannot be used. Read the thread specific storage (TSS) instead: call PyGILState_GetThisThreadState(). */ - tstate = PyGILState_GetThisThreadState(); + PyThreadState *tstate = PyGILState_GetThisThreadState(); if (all_threads) { (void)_Py_DumpTracebackThreads(fd, NULL, tstate); diff --git a/Modules/main.c b/Modules/main.c index 7f88c97207475b..05bedff050699f 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -26,10 +26,6 @@ "Type \"help\", \"copyright\", \"credits\" or \"license\" " \ "for more information." -#ifdef __cplusplus -extern "C" { -#endif - /* --- pymain_init() ---------------------------------------------- */ static PyStatus @@ -742,7 +738,3 @@ Py_BytesMain(int argc, char **argv) .wchar_argv = NULL}; return pymain_main(&args); } - -#ifdef __cplusplus -} -#endif diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 096aa043514c85..d7d3e365d2c553 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -222,10 +222,6 @@ #endif -#ifdef __cplusplus -extern "C" { -#endif - PyDoc_STRVAR(posix__doc__, "This module provides access to operating system functionality that is\n\ standardized by the C Standard and the POSIX standard (a thinly\n\ @@ -4912,25 +4908,25 @@ os__path_splitroot_impl(PyObject *module, path_t *path) /*[clinic input] os._path_isdir - path: 'O' + s: 'O' Return true if the pathname refers to an existing directory. 
[clinic start generated code]*/ static PyObject * -os__path_isdir_impl(PyObject *module, PyObject *path) -/*[clinic end generated code: output=00faea0af309669d input=b1d2571cf7291aaf]*/ +os__path_isdir_impl(PyObject *module, PyObject *s) +/*[clinic end generated code: output=9d87ab3c8b8a4e61 input=c17f7ef21d22d64e]*/ { HANDLE hfile; BOOL close_file = TRUE; FILE_BASIC_INFO info; - path_t _path = PATH_T_INITIALIZE("isdir", "path", 0, 1); + path_t _path = PATH_T_INITIALIZE("isdir", "s", 0, 1); int result; BOOL slow_path = TRUE; FILE_STAT_BASIC_INFORMATION statInfo; - if (!path_converter(path, &_path)) { + if (!path_converter(s, &_path)) { path_cleanup(&_path); if (PyErr_ExceptionMatches(PyExc_ValueError)) { PyErr_Clear(); @@ -17002,7 +16998,3 @@ INITFUNC(void) { return PyModuleDef_Init(&posixmodule); } - -#ifdef __cplusplus -} -#endif diff --git a/Modules/readline.c b/Modules/readline.c index aeae654162f13f..4b473023c6e524 100644 --- a/Modules/readline.c +++ b/Modules/readline.c @@ -1018,6 +1018,8 @@ on_hook(PyObject *func) static int #if defined(_RL_FUNCTION_TYPEDEF) on_startup_hook(void) +#elif defined(WITH_APPLE_EDITLINE) +on_startup_hook(const char *Py_UNUSED(text), int Py_UNUSED(state)) #else on_startup_hook(void) #endif @@ -1033,6 +1035,8 @@ on_startup_hook(void) static int #if defined(_RL_FUNCTION_TYPEDEF) on_pre_input_hook(void) +#elif defined(WITH_APPLE_EDITLINE) +on_pre_input_hook(const char *Py_UNUSED(text), int Py_UNUSED(state)) #else on_pre_input_hook(void) #endif diff --git a/Modules/symtablemodule.c b/Modules/symtablemodule.c index dba80034d310af..ddc9ac3324356d 100644 --- a/Modules/symtablemodule.c +++ b/Modules/symtablemodule.c @@ -86,6 +86,14 @@ symtable_init_constants(PyObject *m) if (PyModule_AddIntConstant(m, "TYPE_CLASS", ClassBlock) < 0) return -1; if (PyModule_AddIntConstant(m, "TYPE_MODULE", ModuleBlock) < 0) return -1; + if (PyModule_AddIntConstant(m, "TYPE_ANNOTATION", AnnotationBlock) < 0) + return -1; + if (PyModule_AddIntConstant(m, "TYPE_TYPE_VAR_BOUND", TypeVarBoundBlock) < 0) + return -1; + if (PyModule_AddIntConstant(m, "TYPE_TYPE_ALIAS", TypeAliasBlock) < 0) + return -1; + if (PyModule_AddIntConstant(m, "TYPE_TYPE_PARAM", TypeParamBlock) < 0) + return -1; if (PyModule_AddIntMacro(m, LOCAL) < 0) return -1; if (PyModule_AddIntMacro(m, GLOBAL_EXPLICIT) < 0) return -1; diff --git a/Objects/fileobject.c b/Objects/fileobject.c index 0cf2b47c3b3ae7..066172baf9f027 100644 --- a/Objects/fileobject.c +++ b/Objects/fileobject.c @@ -21,10 +21,6 @@ #define NEWLINE_LF 2 /* \n newline seen */ #define NEWLINE_CRLF 4 /* \r\n newline seen */ -#ifdef __cplusplus -extern "C" { -#endif - /* External C interface */ PyObject * @@ -539,8 +535,3 @@ _PyFile_Flush(PyObject *file) Py_DECREF(tmp); return 0; } - - -#ifdef __cplusplus -} -#endif diff --git a/Objects/object.c b/Objects/object.c index 15c2bf65de6acf..3ed272afdced7c 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -28,10 +28,6 @@ # error "Py_LIMITED_API macro must not be defined" #endif -#ifdef __cplusplus -extern "C" { -#endif - /* Defined in tracemalloc.c */ extern void _PyMem_DumpTraceback(int fd, const void *ptr); @@ -2808,7 +2804,3 @@ int Py_IsFalse(PyObject *x) { return Py_Is(x, Py_False); } - -#ifdef __cplusplus -} -#endif diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index aca28e4842d645..49981a1f881c21 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -100,11 +100,6 @@ NOTE: In the interpreter's initialization phase, some globals are currently */ - -#ifdef __cplusplus 
-extern "C" { -#endif - // Maximum code point of Unicode 6.0: 0x10ffff (1,114,111). // The value must be the same in fileutils.c. #define MAX_UNICODE 0x10ffff @@ -15397,8 +15392,3 @@ PyInit__string(void) { return PyModuleDef_Init(&_string_module); } - - -#ifdef __cplusplus -} -#endif diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index 1d3abcc3def1fa..fc43ce2b1835d0 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -53,7 +53,7 @@ echo.Fetching external libraries... set libraries= set libraries=%libraries% bzip2-1.0.8 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.4.4 -if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.10 +if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-3.0.11 set libraries=%libraries% sqlite-3.42.0.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.13.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.13.0 @@ -76,7 +76,7 @@ echo.Fetching external binaries... set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.4.4 -if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.10 +if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-3.0.11 if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.13.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 diff --git a/PCbuild/python.props b/PCbuild/python.props index 94faa8221eac5a..35c6e92be45dc9 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -74,8 +74,8 @@ $(ExternalsDir)libffi-3.4.4\ $(libffiDir)$(ArchName)\ $(libffiOutDir)include - $(ExternalsDir)openssl-3.0.10\ - $(ExternalsDir)openssl-bin-3.0.10\$(ArchName)\ + $(ExternalsDir)openssl-3.0.11\ + $(ExternalsDir)openssl-bin-3.0.11\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ $(ExternalsDir)\zlib-1.2.13\ diff --git a/PCbuild/rt.bat b/PCbuild/rt.bat index 7ae7141bfc4eaa..332ba5edcf4082 100644 --- a/PCbuild/rt.bat +++ b/PCbuild/rt.bat @@ -32,7 +32,7 @@ set pcbuild=%~dp0 set suffix= set qmode= set dashO= -set regrtestargs=--fail-env-changed --fail-rerun +set regrtestargs=--fast-ci set exe= :CheckOpts diff --git a/Python/abstract_interp_cases.c.h b/Python/abstract_interp_cases.c.h index 5a3848cd726245..61b1db9e5a1543 100644 --- a/Python/abstract_interp_cases.c.h +++ b/Python/abstract_interp_cases.c.h @@ -474,6 +474,31 @@ break; } + case _LOAD_ATTR_SLOT: { + STACK_GROW(((oparg & 1) ? 1 : 0)); + PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1 - (oparg & 1 ? 1 : 0))), true); + PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-(oparg & 1 ? 
1 : 0))), true); + break; + } + + case _GUARD_DORV_VALUES: { + break; + } + + case _STORE_ATTR_INSTANCE_VALUE: { + STACK_SHRINK(2); + break; + } + + case _GUARD_TYPE_VERSION_STORE: { + break; + } + + case _STORE_ATTR_SLOT: { + STACK_SHRINK(2); + break; + } + case COMPARE_OP: { STACK_SHRINK(1); PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); @@ -627,6 +652,28 @@ break; } + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: { + break; + } + + case _GUARD_KEYS_VERSION: { + break; + } + + case _LOAD_ATTR_METHOD_WITH_VALUES: { + STACK_GROW(1); + PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); + PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); + break; + } + + case _LOAD_ATTR_METHOD_NO_DICT: { + STACK_GROW(1); + PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-2)), true); + PARTITIONNODE_OVERWRITE((_Py_PARTITIONNODE_t *)PARTITIONNODE_NULLROOT, PEEK(-(-1)), true); + break; + } + case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: { break; } diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 402b27101dbdb6..f7681bd234a43f 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1939,10 +1939,7 @@ dummy_func( DECREF_INPUTS(); } - inst(LOAD_ATTR_SLOT, (unused/1, type_version/2, index/1, unused/5, owner -- attr, null if (oparg & 1))) { - PyTypeObject *tp = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + op(_LOAD_ATTR_SLOT, (index/1, owner -- attr, null if (oparg & 1))) { char *addr = (char *)owner + index; attr = *(PyObject **)addr; DEOPT_IF(attr == NULL, LOAD_ATTR); @@ -1952,6 +1949,12 @@ dummy_func( DECREF_INPUTS(); } + macro(LOAD_ATTR_SLOT) = + unused/1 + + _GUARD_TYPE_VERSION + + _LOAD_ATTR_SLOT + // NOTE: This action may also deopt + unused/5; + inst(LOAD_ATTR_CLASS, (unused/1, type_version/2, unused/2, descr/4, owner -- attr, null if (oparg & 1))) { DEOPT_IF(!PyType_Check(owner), LOAD_ATTR); @@ -2019,13 +2022,14 @@ dummy_func( DISPATCH_INLINED(new_frame); } - inst(STORE_ATTR_INSTANCE_VALUE, (unused/1, type_version/2, index/1, value, owner --)) { - PyTypeObject *tp = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); + op(_GUARD_DORV_VALUES, (owner -- owner)) { + assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + } + + op(_STORE_ATTR_INSTANCE_VALUE, (index/1, value, owner --)) { + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); STAT_INC(STORE_ATTR, hit); PyDictValues *values = _PyDictOrValues_GetValues(dorv); PyObject *old_value = values->values[index]; @@ -2039,6 +2043,12 @@ dummy_func( Py_DECREF(owner); } + macro(STORE_ATTR_INSTANCE_VALUE) = + unused/1 + + _GUARD_TYPE_VERSION_STORE + + _GUARD_DORV_VALUES + + _STORE_ATTR_INSTANCE_VALUE; + inst(STORE_ATTR_WITH_HINT, (unused/1, type_version/2, hint/1, value, owner --)) { PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); @@ -2080,10 +2090,13 @@ dummy_func( Py_DECREF(owner); } - inst(STORE_ATTR_SLOT, (unused/1, type_version/2, index/1, value, owner --)) { + op(_GUARD_TYPE_VERSION_STORE, (type_version/2, owner -- owner)) { PyTypeObject *tp = Py_TYPE(owner); assert(type_version != 0); DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + } + + op(_STORE_ATTR_SLOT, (index/1, value, 
owner --)) { char *addr = (char *)owner + index; STAT_INC(STORE_ATTR, hit); PyObject *old_value = *(PyObject **)addr; @@ -2092,6 +2105,11 @@ dummy_func( Py_DECREF(owner); } + macro(STORE_ATTR_SLOT) = + unused/1 + + _GUARD_TYPE_VERSION_STORE + + _STORE_ATTR_SLOT; + family(COMPARE_OP, INLINE_CACHE_ENTRIES_COMPARE_OP) = { COMPARE_OP_FLOAT, COMPARE_OP_INT, @@ -2769,20 +2787,24 @@ dummy_func( exc_info->exc_value = Py_NewRef(new_exc); } - inst(LOAD_ATTR_METHOD_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, owner -- attr, self if (1))) { - assert(oparg & 1); - /* Cached method object */ - PyTypeObject *owner_cls = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); + op(_GUARD_DORV_VALUES_INST_ATTR_FROM_DICT, (owner -- owner)) { + assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), LOAD_ATTR); + } + + op(_GUARD_KEYS_VERSION, (keys_version/2, owner -- owner)) { + PyTypeObject *owner_cls = Py_TYPE(owner); PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != keys_version, LOAD_ATTR); + } + + op(_LOAD_ATTR_METHOD_WITH_VALUES, (descr/4, owner -- attr, self if (1))) { + assert(oparg & 1); + /* Cached method object */ STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); attr = Py_NewRef(descr); @@ -2790,11 +2812,16 @@ dummy_func( self = owner; } - inst(LOAD_ATTR_METHOD_NO_DICT, (unused/1, type_version/2, unused/2, descr/4, owner -- attr, self if (1))) { + macro(LOAD_ATTR_METHOD_WITH_VALUES) = + unused/1 + + _GUARD_TYPE_VERSION + + _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT + + _GUARD_KEYS_VERSION + + _LOAD_ATTR_METHOD_WITH_VALUES; + + op(_LOAD_ATTR_METHOD_NO_DICT, (descr/4, owner -- attr, self if (1))) { assert(oparg & 1); - PyTypeObject *owner_cls = Py_TYPE(owner); - DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(owner_cls->tp_dictoffset == 0); + assert(Py_TYPE(owner)->tp_dictoffset == 0); STAT_INC(LOAD_ATTR, hit); assert(descr != NULL); assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); @@ -2802,6 +2829,12 @@ dummy_func( self = owner; } + macro(LOAD_ATTR_METHOD_NO_DICT) = + unused/1 + + _GUARD_TYPE_VERSION + + unused/2 + + _LOAD_ATTR_METHOD_NO_DICT; + inst(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES, (unused/1, type_version/2, keys_version/2, descr/4, owner -- attr, unused if (0))) { assert((oparg & 1) == 0); PyTypeObject *owner_cls = Py_TYPE(owner); diff --git a/Python/dtoa.c b/Python/dtoa.c index c5e343b82f74c5..5dfc0e179cbc34 100644 --- a/Python/dtoa.c +++ b/Python/dtoa.c @@ -172,10 +172,6 @@ typedef uint64_t ULLong; #define Bug(x) {fprintf(stderr, "%s\n", x); exit(1);} #endif -#ifdef __cplusplus -extern "C" { -#endif - typedef union { double d; ULong L[2]; } U; #ifdef IEEE_8087 @@ -2813,8 +2809,5 @@ _Py_dg_dtoa(double dd, int mode, int ndigits, _Py_dg_freedtoa(s0); return NULL; } -#ifdef __cplusplus -} -#endif #endif // _PY_SHORT_FLOAT_REPR == 1 diff --git a/Python/dynload_win.c b/Python/dynload_win.c index f69995b8f9e3a1..fcb3cb744047ce 100644 --- a/Python/dynload_win.c +++ b/Python/dynload_win.c @@ -15,10 +15,16 @@ #define PYD_DEBUG_SUFFIX "" #endif +#ifdef Py_NOGIL +# define PYD_THREADING_TAG "t" +#else +# define PYD_THREADING_TAG "" +#endif + #ifdef PYD_PLATFORM_TAG -#define PYD_TAGGED_SUFFIX 
PYD_DEBUG_SUFFIX ".cp" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) "-" PYD_PLATFORM_TAG ".pyd" +#define PYD_TAGGED_SUFFIX PYD_DEBUG_SUFFIX ".cp" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) PYD_THREADING_TAG "-" PYD_PLATFORM_TAG ".pyd" #else -#define PYD_TAGGED_SUFFIX PYD_DEBUG_SUFFIX ".cp" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) ".pyd" +#define PYD_TAGGED_SUFFIX PYD_DEBUG_SUFFIX ".cp" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) PYD_THREADING_TAG ".pyd" #endif #define PYD_UNTAGGED_SUFFIX PYD_DEBUG_SUFFIX ".pyd" diff --git a/Python/errors.c b/Python/errors.c index b05b3ef1dda8fe..15af39b10dc07e 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -16,11 +16,6 @@ # include // _sys_nerr #endif - -#ifdef __cplusplus -extern "C" { -#endif - /* Forward declarations */ static PyObject * _PyErr_FormatV(PyThreadState *tstate, PyObject *exception, @@ -1918,7 +1913,3 @@ PyErr_ProgramTextObject(PyObject *filename, int lineno) { return _PyErr_ProgramDecodedTextObject(filename, lineno, NULL); } - -#ifdef __cplusplus -} -#endif diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index befb972f1e90f5..55a03c9a23a572 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -1714,6 +1714,82 @@ break; } + case _LOAD_ATTR_SLOT: { + PyObject *owner; + PyObject *attr; + PyObject *null = NULL; + owner = stack_pointer[-1]; + uint16_t index = (uint16_t)operand; + char *addr = (char *)owner + index; + attr = *(PyObject **)addr; + DEOPT_IF(attr == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(attr); + null = NULL; + Py_DECREF(owner); + STACK_GROW(((oparg & 1) ? 1 : 0)); + stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; + if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = null; } + break; + } + + case _GUARD_DORV_VALUES: { + PyObject *owner; + owner = stack_pointer[-1]; + assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + break; + } + + case _STORE_ATTR_INSTANCE_VALUE: { + PyObject *owner; + PyObject *value; + owner = stack_pointer[-1]; + value = stack_pointer[-2]; + uint16_t index = (uint16_t)operand; + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + STAT_INC(STORE_ATTR, hit); + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyObject *old_value = values->values[index]; + values->values[index] = value; + if (old_value == NULL) { + _PyDictValues_AddToInsertionOrder(values, index); + } + else { + Py_DECREF(old_value); + } + Py_DECREF(owner); + STACK_SHRINK(2); + break; + } + + case _GUARD_TYPE_VERSION_STORE: { + PyObject *owner; + owner = stack_pointer[-1]; + uint32_t type_version = (uint32_t)operand; + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + break; + } + + case _STORE_ATTR_SLOT: { + PyObject *owner; + PyObject *value; + owner = stack_pointer[-1]; + value = stack_pointer[-2]; + uint16_t index = (uint16_t)operand; + char *addr = (char *)owner + index; + STAT_INC(STORE_ATTR, hit); + PyObject *old_value = *(PyObject **)addr; + *(PyObject **)addr = value; + Py_XDECREF(old_value); + Py_DECREF(owner); + STACK_SHRINK(2); + break; + } + case COMPARE_OP: { PyObject *right; PyObject *left; @@ -2219,6 +2295,66 @@ break; } + case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: { + PyObject *owner; + owner = stack_pointer[-1]; + assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); + break; + } + + case _GUARD_KEYS_VERSION: { + PyObject *owner; + owner = stack_pointer[-1]; + uint32_t keys_version = (uint32_t)operand; + PyTypeObject *owner_cls = Py_TYPE(owner); + PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; + DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != + keys_version, LOAD_ATTR); + break; + } + + case _LOAD_ATTR_METHOD_WITH_VALUES: { + PyObject *owner; + PyObject *attr; + PyObject *self; + owner = stack_pointer[-1]; + PyObject *descr = (PyObject *)operand; + assert(oparg & 1); + /* Cached method object */ + STAT_INC(LOAD_ATTR, hit); + assert(descr != NULL); + attr = Py_NewRef(descr); + assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR)); + self = owner; + STACK_GROW(1); + stack_pointer[-2] = attr; + stack_pointer[-1] = self; + break; + } + + case _LOAD_ATTR_METHOD_NO_DICT: { + PyObject *owner; + PyObject *attr; + PyObject *self; + owner = stack_pointer[-1]; + PyObject *descr = (PyObject *)operand; + assert(oparg & 1); + assert(Py_TYPE(owner)->tp_dictoffset == 0); + STAT_INC(LOAD_ATTR, hit); + assert(descr != NULL); + assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); + attr = Py_NewRef(descr); + self = owner; + STACK_GROW(1); + stack_pointer[-2] = attr; + stack_pointer[-1] = self; + break; + } + case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: { PyObject *null; PyObject *callable; diff --git a/Python/flowgraph.c b/Python/flowgraph.c index 9c24264cfbb459..e89ad39b35719b 100644 --- a/Python/flowgraph.c +++ b/Python/flowgraph.c @@ -1017,7 +1017,17 @@ remove_redundant_nops(basicblock 
*bb) { } /* or if last instruction in BB and next BB has same line number */ if (next) { - if (lineno == next->b_instr[0].i_loc.lineno) { + location next_loc = NO_LOCATION; + for (int next_i=0; next_i < next->b_iused; next_i++) { + cfg_instr *instr = &next->b_instr[next_i]; + if (instr->i_opcode == NOP && instr->i_loc.lineno == NO_LOCATION.lineno) { + /* Skip over NOPs without location, they will be removed */ + continue; + } + next_loc = instr->i_loc; + break; + } + if (lineno == next_loc.lineno) { continue; } } @@ -2468,17 +2478,19 @@ insert_prefix_instructions(_PyCompile_CodeUnitMetadata *umd, basicblock *entrybl * of 0. This is because RETURN_GENERATOR pushes an element * with _PyFrame_StackPush before switching stacks. */ + + location loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1); cfg_instr make_gen = { .i_opcode = RETURN_GENERATOR, .i_oparg = 0, - .i_loc = LOCATION(umd->u_firstlineno, umd->u_firstlineno, -1, -1), + .i_loc = loc, .i_target = NULL, }; RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 0, &make_gen)); cfg_instr pop_top = { .i_opcode = POP_TOP, .i_oparg = 0, - .i_loc = NO_LOCATION, + .i_loc = loc, .i_target = NULL, }; RETURN_IF_ERROR(basicblock_insert_instruction(entryblock, 1, &pop_top)); diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index ebb87a86de432e..2701d416648a20 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -2504,19 +2504,25 @@ PyObject *owner; PyObject *attr; PyObject *null = NULL; + // _GUARD_TYPE_VERSION owner = stack_pointer[-1]; - uint32_t type_version = read_u32(&next_instr[1].cache); - uint16_t index = read_u16(&next_instr[3].cache); - PyTypeObject *tp = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); - char *addr = (char *)owner + index; - attr = *(PyObject **)addr; - DEOPT_IF(attr == NULL, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - Py_INCREF(attr); - null = NULL; - Py_DECREF(owner); + { + uint32_t type_version = read_u32(&next_instr[1].cache); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + } + // _LOAD_ATTR_SLOT + { + uint16_t index = read_u16(&next_instr[3].cache); + char *addr = (char *)owner + index; + attr = *(PyObject **)addr; + DEOPT_IF(attr == NULL, LOAD_ATTR); + STAT_INC(LOAD_ATTR, hit); + Py_INCREF(attr); + null = NULL; + Py_DECREF(owner); + } STACK_GROW(((oparg & 1) ? 1 : 0)); stack_pointer[-1 - (oparg & 1 ? 1 : 0)] = attr; if (oparg & 1) { stack_pointer[-(oparg & 1 ? 
1 : 0)] = null; } @@ -2615,27 +2621,37 @@ TARGET(STORE_ATTR_INSTANCE_VALUE) { PyObject *owner; PyObject *value; + // _GUARD_TYPE_VERSION_STORE owner = stack_pointer[-1]; - value = stack_pointer[-2]; - uint32_t type_version = read_u32(&next_instr[1].cache); - uint16_t index = read_u16(&next_instr[3].cache); - PyTypeObject *tp = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - assert(tp->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); - DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); - STAT_INC(STORE_ATTR, hit); - PyDictValues *values = _PyDictOrValues_GetValues(dorv); - PyObject *old_value = values->values[index]; - values->values[index] = value; - if (old_value == NULL) { - _PyDictValues_AddToInsertionOrder(values, index); + { + uint32_t type_version = read_u32(&next_instr[1].cache); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); } - else { - Py_DECREF(old_value); + // _GUARD_DORV_VALUES + { + assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(dorv), STORE_ATTR); + } + // _STORE_ATTR_INSTANCE_VALUE + value = stack_pointer[-2]; + { + uint16_t index = read_u16(&next_instr[3].cache); + PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner); + STAT_INC(STORE_ATTR, hit); + PyDictValues *values = _PyDictOrValues_GetValues(dorv); + PyObject *old_value = values->values[index]; + values->values[index] = value; + if (old_value == NULL) { + _PyDictValues_AddToInsertionOrder(values, index); + } + else { + Py_DECREF(old_value); + } + Py_DECREF(owner); } - Py_DECREF(owner); STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -2694,19 +2710,25 @@ TARGET(STORE_ATTR_SLOT) { PyObject *owner; PyObject *value; + // _GUARD_TYPE_VERSION_STORE owner = stack_pointer[-1]; + { + uint32_t type_version = read_u32(&next_instr[1].cache); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); + } + // _STORE_ATTR_SLOT value = stack_pointer[-2]; - uint32_t type_version = read_u32(&next_instr[1].cache); - uint16_t index = read_u16(&next_instr[3].cache); - PyTypeObject *tp = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(tp->tp_version_tag != type_version, STORE_ATTR); - char *addr = (char *)owner + index; - STAT_INC(STORE_ATTR, hit); - PyObject *old_value = *(PyObject **)addr; - *(PyObject **)addr = value; - Py_XDECREF(old_value); - Py_DECREF(owner); + { + uint16_t index = read_u16(&next_instr[3].cache); + char *addr = (char *)owner + index; + STAT_INC(STORE_ATTR, hit); + PyObject *old_value = *(PyObject **)addr; + *(PyObject **)addr = value; + Py_XDECREF(old_value); + Py_DECREF(owner); + } STACK_SHRINK(2); next_instr += 4; DISPATCH(); @@ -3557,28 +3579,41 @@ PyObject *owner; PyObject *attr; PyObject *self; + // _GUARD_TYPE_VERSION owner = stack_pointer[-1]; - uint32_t type_version = read_u32(&next_instr[1].cache); - uint32_t keys_version = read_u32(&next_instr[3].cache); - PyObject *descr = read_obj(&next_instr[5].cache); - assert(oparg & 1); - /* Cached method object */ - PyTypeObject *owner_cls = Py_TYPE(owner); - assert(type_version != 0); - DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(owner_cls->tp_flags & Py_TPFLAGS_MANAGED_DICT); - PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); - 
DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && - !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), - LOAD_ATTR); - PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; - DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != - keys_version, LOAD_ATTR); - STAT_INC(LOAD_ATTR, hit); - assert(descr != NULL); - attr = Py_NewRef(descr); - assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR)); - self = owner; + { + uint32_t type_version = read_u32(&next_instr[1].cache); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + } + // _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT + { + assert(Py_TYPE(owner)->tp_flags & Py_TPFLAGS_MANAGED_DICT); + PyDictOrValues *dorv = _PyObject_DictOrValuesPointer(owner); + DEOPT_IF(!_PyDictOrValues_IsValues(*dorv) && + !_PyObject_MakeInstanceAttributesFromDict(owner, dorv), + LOAD_ATTR); + } + // _GUARD_KEYS_VERSION + { + uint32_t keys_version = read_u32(&next_instr[3].cache); + PyTypeObject *owner_cls = Py_TYPE(owner); + PyHeapTypeObject *owner_heap_type = (PyHeapTypeObject *)owner_cls; + DEOPT_IF(owner_heap_type->ht_cached_keys->dk_version != + keys_version, LOAD_ATTR); + } + // _LOAD_ATTR_METHOD_WITH_VALUES + { + PyObject *descr = read_obj(&next_instr[5].cache); + assert(oparg & 1); + /* Cached method object */ + STAT_INC(LOAD_ATTR, hit); + assert(descr != NULL); + attr = Py_NewRef(descr); + assert(_PyType_HasFeature(Py_TYPE(attr), Py_TPFLAGS_METHOD_DESCRIPTOR)); + self = owner; + } STACK_GROW(1); stack_pointer[-2] = attr; stack_pointer[-1] = self; @@ -3590,18 +3625,25 @@ PyObject *owner; PyObject *attr; PyObject *self; + // _GUARD_TYPE_VERSION owner = stack_pointer[-1]; - uint32_t type_version = read_u32(&next_instr[1].cache); - PyObject *descr = read_obj(&next_instr[5].cache); - assert(oparg & 1); - PyTypeObject *owner_cls = Py_TYPE(owner); - DEOPT_IF(owner_cls->tp_version_tag != type_version, LOAD_ATTR); - assert(owner_cls->tp_dictoffset == 0); - STAT_INC(LOAD_ATTR, hit); - assert(descr != NULL); - assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); - attr = Py_NewRef(descr); - self = owner; + { + uint32_t type_version = read_u32(&next_instr[1].cache); + PyTypeObject *tp = Py_TYPE(owner); + assert(type_version != 0); + DEOPT_IF(tp->tp_version_tag != type_version, LOAD_ATTR); + } + // _LOAD_ATTR_METHOD_NO_DICT + { + PyObject *descr = read_obj(&next_instr[5].cache); + assert(oparg & 1); + assert(Py_TYPE(owner)->tp_dictoffset == 0); + STAT_INC(LOAD_ATTR, hit); + assert(descr != NULL); + assert(_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)); + attr = Py_NewRef(descr); + self = owner; + } STACK_GROW(1); stack_pointer[-2] = attr; stack_pointer[-1] = self; diff --git a/Python/getargs.c b/Python/getargs.c index cbfe561111176c..d590e2e153389e 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -7,10 +7,6 @@ #include "pycore_pylifecycle.h" // _PyArg_Fini #include "pycore_tuple.h" // _PyTuple_ITEMS() -#ifdef __cplusplus -extern "C" { -#endif - /* Export Stable ABIs (abi only) */ PyAPI_FUNC(int) _PyArg_Parse_SizeT(PyObject *, const char *, ...); PyAPI_FUNC(int) _PyArg_ParseTuple_SizeT(PyObject *, const char *, ...); @@ -2867,7 +2863,3 @@ _PyArg_Fini(void) } _PyRuntime.getargs.static_parsers = NULL; } - -#ifdef __cplusplus -}; -#endif diff --git a/Python/getopt.c b/Python/getopt.c index 4135bf1446ecfc..f64c89fa22734a 100644 --- a/Python/getopt.c +++ b/Python/getopt.c @@ -29,10 +29,6 @@ #include #include "pycore_getopt.h" 
-#ifdef __cplusplus -extern "C" { -#endif - int _PyOS_opterr = 1; /* generate error messages */ Py_ssize_t _PyOS_optind = 1; /* index into argv array */ const wchar_t *_PyOS_optarg = NULL; /* optional argument */ @@ -172,8 +168,3 @@ int _PyOS_GetOpt(Py_ssize_t argc, wchar_t * const *argv, int *longindex) return option; } - -#ifdef __cplusplus -} -#endif - diff --git a/Python/import.c b/Python/import.c index 5a06cb367e828b..5636968ed9e63b 100644 --- a/Python/import.c +++ b/Python/import.c @@ -24,9 +24,6 @@ #ifdef HAVE_FCNTL_H #include <fcntl.h> #endif -#ifdef __cplusplus -extern "C" { -#endif /*[clinic input] @@ -3887,8 +3884,3 @@ PyInit__imp(void) { return PyModuleDef_Init(&imp_module); } - - -#ifdef __cplusplus -} -#endif diff --git a/Python/pathconfig.c b/Python/pathconfig.c index 0ac64ec8110259..50c60093cd4e32 100644 --- a/Python/pathconfig.c +++ b/Python/pathconfig.c @@ -16,10 +16,6 @@ # include #endif -#ifdef __cplusplus -extern "C" { -#endif - /* External interface */ @@ -500,8 +496,3 @@ _PyPathConfig_ComputeSysPath0(const PyWideStringList *argv, PyObject **path0_p) *path0_p = path0_obj; return 1; } - - -#ifdef __cplusplus -} -#endif diff --git a/Python/pyhash.c b/Python/pyhash.c index b2bdab5099d86a..f9060b8003a0a7 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -14,10 +14,6 @@ # include #endif -#ifdef __cplusplus -extern "C" { -#endif - _Py_HashSecret_t _Py_HashSecret = {{0}}; #if Py_HASH_ALGORITHM == Py_HASH_EXTERNAL @@ -503,7 +499,3 @@ pysiphash(const void *src, Py_ssize_t src_sz) { static PyHash_FuncDef PyHash_Func = {pysiphash, "siphash24", 64, 128}; #endif - -#ifdef __cplusplus -} -#endif diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index aec8da10249d21..23f66ec3601df6 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -61,11 +61,6 @@ #define PUTS(fd, str) (void)_Py_write_noraise(fd, str, (int)strlen(str)) -#ifdef __cplusplus -extern "C" { -#endif - - /* Forward declarations */ static PyStatus add_main_module(PyInterpreterState *interp); static PyStatus init_import_site(void); @@ -3139,7 +3134,3 @@ PyOS_setsig(int sig, PyOS_sighandler_t handler) return oldhandler; #endif } - -#ifdef __cplusplus -} -#endif diff --git a/Python/pystate.c b/Python/pystate.c index dcc6c112215b30..01aa2552e56f0d 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -29,16 +29,12 @@ to avoid the expense of doing their own locking). -------------------------------------------------------------------------- */ #ifdef HAVE_DLOPEN -#ifdef HAVE_DLFCN_H -#include <dlfcn.h> -#endif -#if !HAVE_DECL_RTLD_LAZY -#define RTLD_LAZY 1 -#endif -#endif - -#ifdef __cplusplus -extern "C" { +# ifdef HAVE_DLFCN_H +# include <dlfcn.h> +# endif +# if !HAVE_DECL_RTLD_LAZY +# define RTLD_LAZY 1 +# endif #endif @@ -2964,14 +2960,24 @@ _PyThreadState_MustExit(PyThreadState *tstate) tstate->interp->runtime to support calls from Python daemon threads. After Py_Finalize() has been called, tstate can be a dangling pointer: point to PyThreadState freed memory. */ + unsigned long finalizing_id = _PyRuntimeState_GetFinalizingID(&_PyRuntime); PyThreadState *finalizing = _PyRuntimeState_GetFinalizing(&_PyRuntime); if (finalizing == NULL) { + // XXX This isn't completely safe from daemon threads, + // since tstate might be a dangling pointer.
finalizing = _PyInterpreterState_GetFinalizing(tstate->interp); + finalizing_id = _PyInterpreterState_GetFinalizingID(tstate->interp); } - return (finalizing != NULL && finalizing != tstate); -} - - -#ifdef __cplusplus + // XXX else check &_PyRuntime._main_interpreter._initial_thread + if (finalizing == NULL) { + return 0; + } + else if (finalizing == tstate) { + return 0; + } + else if (finalizing_id == PyThread_get_thread_ident()) { + /* gh-109793: we must have switched interpreters. */ + return 0; + } + return 1; } -#endif diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 81ab78e95ab68c..1b282aa870bfd2 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -37,11 +37,6 @@ # include "windows.h" #endif - -#ifdef __cplusplus -extern "C" { -#endif - /* Forward */ static void flush_io(void); static PyObject *run_mod(mod_ty, PyObject *, PyObject *, PyObject *, @@ -2017,7 +2012,3 @@ PyRun_InteractiveLoop(FILE *f, const char *p) { return PyRun_InteractiveLoopFlags(f, p, NULL); } - -#ifdef __cplusplus -} -#endif diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 9c1ee0215d7cf6..7ba7be10aacb92 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -2039,11 +2039,6 @@ sys_call_tracing_impl(PyObject *module, PyObject *func, PyObject *funcargs) return _PyEval_CallTracing(func, funcargs); } - -#ifdef __cplusplus -extern "C" { -#endif - /*[clinic input] sys._debugmallocstats @@ -2072,10 +2067,6 @@ sys__debugmallocstats_impl(PyObject *module) extern PyObject *_Py_GetObjects(PyObject *, PyObject *); #endif -#ifdef __cplusplus -} -#endif - /*[clinic input] sys._clear_type_cache @@ -2297,11 +2288,6 @@ sys__getframemodulename_impl(PyObject *module, int depth) return Py_NewRef(r); } - -#ifdef __cplusplus -extern "C" { -#endif - static PerfMapState perf_map_state; PyAPI_FUNC(int) PyUnstable_PerfMapState_Init(void) { @@ -2370,10 +2356,6 @@ PyAPI_FUNC(void) PyUnstable_PerfMapState_Fini(void) { #endif } -#ifdef __cplusplus -} -#endif - static PyMethodDef sys_methods[] = { /* Might as well keep this in alphabetic order */ diff --git a/Python/traceback.c b/Python/traceback.c index a75b7833af4e05..7e791d0a59bd82 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -1215,23 +1215,45 @@ dump_frame(int fd, _PyInterpreterFrame *frame) PUTS(fd, "\n"); } +static int +tstate_is_freed(PyThreadState *tstate) +{ + if (_PyMem_IsPtrFreed(tstate)) { + return 1; + } + if (_PyMem_IsPtrFreed(tstate->interp)) { + return 1; + } + return 0; +} + + +static int +interp_is_freed(PyInterpreterState *interp) +{ + return _PyMem_IsPtrFreed(interp); +} + + static void dump_traceback(int fd, PyThreadState *tstate, int write_header) { - _PyInterpreterFrame *frame; - unsigned int depth; - if (write_header) { PUTS(fd, "Stack (most recent call first):\n"); } - frame = tstate->current_frame; + if (tstate_is_freed(tstate)) { + PUTS(fd, " \n"); + return; + } + + _PyInterpreterFrame *frame = tstate->current_frame; if (frame == NULL) { PUTS(fd, " \n"); return; } - depth = 0; + unsigned int depth = 0; while (1) { if (MAX_FRAME_DEPTH <= depth) { PUTS(fd, " ...\n"); @@ -1295,9 +1317,6 @@ const char* _Py_DumpTracebackThreads(int fd, PyInterpreterState *interp, PyThreadState *current_tstate) { - PyThreadState *tstate; - unsigned int nthreads; - if (current_tstate == NULL) { /* _Py_DumpTracebackThreads() is called from signal handlers by faulthandler. 
@@ -1313,6 +1332,10 @@ _Py_DumpTracebackThreads(int fd, PyInterpreterState *interp, current_tstate = PyGILState_GetThisThreadState(); } + if (current_tstate != NULL && tstate_is_freed(current_tstate)) { + return "tstate is freed"; + } + if (interp == NULL) { if (current_tstate == NULL) { interp = _PyGILState_GetInterpreterStateUnsafe(); @@ -1327,14 +1350,18 @@ _Py_DumpTracebackThreads(int fd, PyInterpreterState *interp, } assert(interp != NULL); + if (interp_is_freed(interp)) { + return "interp is freed"; + } + /* Get the current interpreter from the current thread */ - tstate = PyInterpreterState_ThreadHead(interp); + PyThreadState *tstate = PyInterpreterState_ThreadHead(interp); if (tstate == NULL) return "unable to get the thread head state"; /* Dump the traceback of each thread */ tstate = PyInterpreterState_ThreadHead(interp); - nthreads = 0; + unsigned int nthreads = 0; _Py_BEGIN_SUPPRESS_IPH do { diff --git a/README.rst b/README.rst index 208bf8cec444a3..921da30a920168 100644 --- a/README.rst +++ b/README.rst @@ -177,7 +177,7 @@ is printed about a failed test or a traceback or core dump is produced, something is wrong. By default, tests are prevented from overusing resources like disk space and -memory. To enable these tests, run ``make testall``. +memory. To enable these tests, run ``make buildbottest``. If any tests fail, you can re-run the failing test(s) in verbose mode. For example, if ``test_os`` and ``test_gdb`` failed, you can run:: diff --git a/Tools/cases_generator/analysis.py b/Tools/cases_generator/analysis.py index b920c0aa8c1c8a..91dcba8ceee13d 100644 --- a/Tools/cases_generator/analysis.py +++ b/Tools/cases_generator/analysis.py @@ -414,3 +414,61 @@ def check_macro_components( case _: assert_never(uop) return components + + def report_non_viable_uops(self, jsonfile: str) -> None: + print("The following ops are not viable uops:") + skips = { + "CACHE", + "RESERVED", + "INTERPRETER_EXIT", + "JUMP_BACKWARD", + "LOAD_FAST_LOAD_FAST", + "LOAD_CONST_LOAD_FAST", + "STORE_FAST_STORE_FAST", + "_BINARY_OP_INPLACE_ADD_UNICODE", + "POP_JUMP_IF_TRUE", + "POP_JUMP_IF_FALSE", + "_ITER_JUMP_LIST", + "_ITER_JUMP_TUPLE", + "_ITER_JUMP_RANGE", + } + try: + # Secret feature: if bmraw.json exists, print and sort by execution count + counts = load_execution_counts(jsonfile) + except FileNotFoundError as err: + counts = {} + non_viable = [ + instr + for instr in self.instrs.values() + if instr.name not in skips + and not instr.name.startswith("INSTRUMENTED_") + and not instr.is_viable_uop() + ] + non_viable.sort(key=lambda instr: (-counts.get(instr.name, 0), instr.name)) + for instr in non_viable: + if instr.name in counts: + scount = f"{counts[instr.name]:,}" + else: + scount = "" + print(f" {scount:>15} {instr.name:<35}", end="") + if instr.name in self.families: + print(" (unspecialized)", end="") + elif instr.family is not None: + print(f" (specialization of {instr.family.name})", end="") + print() + + +def load_execution_counts(jsonfile: str) -> dict[str, int]: + import json + + with open(jsonfile) as f: + jsondata = json.load(f) + + # Look for keys like "opcode[LOAD_FAST].execution_count" + prefix = "opcode[" + suffix = "].execution_count" + res: dict[str, int] = {} + for key, value in jsondata.items(): + if key.startswith(prefix) and key.endswith(suffix): + res[key[len(prefix) : -len(suffix)]] = value + return res diff --git a/Tools/cases_generator/generate_cases.py b/Tools/cases_generator/generate_cases.py index 898736248a98f9..9192d1038ab7d6 100644 --- 
a/Tools/cases_generator/generate_cases.py +++ b/Tools/cases_generator/generate_cases.py @@ -92,6 +92,13 @@ description="Generate the code for the interpreter switch.", formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) + +arg_parser.add_argument( + "-v", + "--verbose", + help="Print list of non-viable uops and exit", + action="store_true", +) arg_parser.add_argument( "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT ) @@ -865,6 +872,10 @@ def main() -> None: a.analyze() # Prints messages and sets a.errors on failure if a.errors: sys.exit(f"Found {a.errors} errors") + if args.verbose: + # Load execution counts from bmraw.json, if it exists + a.report_non_viable_uops("bmraw.json") + return # These raise OSError if output can't be written a.write_instructions(args.output, args.emit_line_directives) diff --git a/Tools/freeze/test/freeze.py b/Tools/freeze/test/freeze.py index 92e97cb261719c..cdf77c57bbb6ae 100644 --- a/Tools/freeze/test/freeze.py +++ b/Tools/freeze/test/freeze.py @@ -1,14 +1,22 @@ import os import os.path -import re import shlex import shutil import subprocess +import sysconfig +from test import support + + +def get_python_source_dir(): + src_dir = sysconfig.get_config_var('abs_srcdir') + if not src_dir: + src_dir = sysconfig.get_config_var('srcdir') + return os.path.abspath(src_dir) TESTS_DIR = os.path.dirname(__file__) TOOL_ROOT = os.path.dirname(TESTS_DIR) -SRCDIR = os.path.dirname(os.path.dirname(TOOL_ROOT)) +SRCDIR = get_python_source_dir() MAKE = shutil.which('make') FREEZE = os.path.join(TOOL_ROOT, 'freeze.py') @@ -75,56 +83,17 @@ def ensure_opt(args, name, value): def copy_source_tree(newroot, oldroot): - print(f'copying the source tree into {newroot}...') + print(f'copying the source tree from {oldroot} to {newroot}...') if os.path.exists(newroot): if newroot == SRCDIR: raise Exception('this probably isn\'t what you wanted') shutil.rmtree(newroot) - def ignore_non_src(src, names): - """Turns what could be a 1000M copy into a 100M copy.""" - # Don't copy the ~600M+ of needless git repo metadata. - # source only, ignore cached .pyc files. - subdirs_to_skip = {'.git', '__pycache__'} - if os.path.basename(src) == 'Doc': - # Another potential ~250M+ of non test related data. - subdirs_to_skip.add('build') - subdirs_to_skip.add('venv') - return subdirs_to_skip - - shutil.copytree(oldroot, newroot, ignore=ignore_non_src) + shutil.copytree(oldroot, newroot, ignore=support.copy_python_src_ignore) if os.path.exists(os.path.join(newroot, 'Makefile')): _run_quiet([MAKE, 'clean'], newroot) -def get_makefile_var(builddir, name): - regex = re.compile(rf'^{name} *=\s*(.*?)\s*$') - filename = os.path.join(builddir, 'Makefile') - try: - infile = open(filename, encoding='utf-8') - except FileNotFoundError: - return None - with infile: - for line in infile: - m = regex.match(line) - if m: - value, = m.groups() - return value or '' - return None - - -def get_config_var(builddir, name): - python = os.path.join(builddir, 'python') - if os.path.isfile(python): - cmd = [python, '-c', - f'import sysconfig; print(sysconfig.get_config_var("{name}"))'] - try: - return _run_stdout(cmd) - except subprocess.CalledProcessError: - pass - return get_makefile_var(builddir, name) - - ################################## # freezing @@ -151,10 +120,8 @@ def prepare(script=None, outdir=None): # Run configure. 
print(f'configuring python in {builddir}...') - cmd = [ - os.path.join(srcdir, 'configure'), - *shlex.split(get_config_var(SRCDIR, 'CONFIG_ARGS') or ''), - ] + config_args = shlex.split(sysconfig.get_config_var('CONFIG_ARGS') or '') + cmd = [os.path.join(srcdir, 'configure'), *config_args] ensure_opt(cmd, 'cache-file', os.path.join(outdir, 'python-config.cache')) prefix = os.path.join(outdir, 'python-installation') ensure_opt(cmd, 'prefix', prefix) diff --git a/Tools/patchcheck/patchcheck.py b/Tools/patchcheck/patchcheck.py index fa3a43af6e6048..e3959ce428c7c5 100755 --- a/Tools/patchcheck/patchcheck.py +++ b/Tools/patchcheck/patchcheck.py @@ -11,6 +11,13 @@ import untabify +def get_python_source_dir(): + src_dir = sysconfig.get_config_var('abs_srcdir') + if not src_dir: + src_dir = sysconfig.get_config_var('srcdir') + return os.path.abspath(src_dir) + + # Excluded directories which are copies of external libraries: # don't check their coding style EXCLUDE_DIRS = [ @@ -18,7 +25,7 @@ os.path.join('Modules', 'expat'), os.path.join('Modules', 'zlib'), ] -SRCDIR = sysconfig.get_config_var('srcdir') +SRCDIR = get_python_source_dir() def n_files_str(count): diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index fc261c770d6944..f066fb52cfd496 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -46,9 +46,9 @@ ] OPENSSL_RECENT_VERSIONS = [ - "1.1.1v", - "3.0.10", - "3.1.2", + "1.1.1w", + "3.0.11", + "3.1.3", ] LIBRESSL_OLD_VERSIONS = [ diff --git a/configure b/configure index abae542b528a23..0e5f3f64c680b2 100755 --- a/configure +++ b/configure @@ -1065,6 +1065,7 @@ with_suffix enable_shared with_static_libpython enable_profiling +enable_gil with_pydebug with_trace_refs enable_pystats @@ -1105,7 +1106,6 @@ with_openssl_rpath with_ssl_default_suites with_builtin_hashlib_hashes enable_test_modules -enable_gil ' ac_precious_vars='build_alias host_alias @@ -1792,6 +1792,8 @@ Optional Features: no) --enable-profiling enable C-level code profiling with gprof (default is no) + --disable-gil enable experimental support for running without the + GIL (default is no) --enable-pystats enable internal statistics gathering (default is no) --enable-optimizations enable expensive, stable optimizations (PGO, etc.) (default is no) @@ -1806,8 +1808,6 @@ Optional Features: use big digits (30 or 15 bits) for Python longs (default is 30)] --disable-test-modules don't build nor install test modules - --disable-gil enable experimental support for running without the - GIL (default is no) Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] @@ -7845,6 +7845,36 @@ fi ABIFLAGS="" +# Check for --disable-gil +# --disable-gil +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --disable-gil" >&5 +printf %s "checking for --disable-gil... " >&6; } +# Check whether --enable-gil was given. +if test ${enable_gil+y} +then : + enableval=$enable_gil; if test "x$enable_gil" = xyes +then : + disable_gil=no +else $as_nop + disable_gil=yes +fi +else $as_nop + disable_gil=no + +fi + +{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $disable_gil" >&5 +printf "%s\n" "$disable_gil" >&6; } + +if test "$disable_gil" = "yes" +then + +printf "%s\n" "#define Py_NOGIL 1" >>confdefs.h + + # Add "t" for "threaded" + ABIFLAGS="${ABIFLAGS}t" +fi + # Check for --with-pydebug { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-pydebug" >&5 printf %s "checking for --with-pydebug... 
" >&6; } @@ -23546,6 +23576,7 @@ printf "%s\n" "#define AC_APPLE_UNIVERSAL_BUILD 1" >>confdefs.h # # * The Python implementation (always 'cpython-' for us) # * The major and minor version numbers +# * --disable-gil (adds a 't') # * --with-pydebug (adds a 'd') # # Thus for example, Python 3.2 built with wide unicode, pydebug, and pymalloc, @@ -23750,6 +23781,7 @@ fi + # Check whether --with-readline was given. if test ${with_readline+y} then : @@ -23772,6 +23804,22 @@ else $as_nop fi +# gh-105323: Need to handle the macOS editline as an alias of readline. +case $ac_sys_system/$ac_sys_release in #( + Darwin/*) : + ac_fn_c_check_type "$LINENO" "Function" "ac_cv_type_Function" "#include +" +if test "x$ac_cv_type_Function" = xyes +then : + printf "%s\n" "#define WITH_APPLE_EDITLINE 1" >>confdefs.h + +fi + ;; #( + *) : + + ;; +esac + if test "x$with_readline" = xreadline then : @@ -27724,34 +27772,6 @@ fi printf "%s\n" "$TEST_MODULES" >&6; } -# Check for --disable-gil -# --disable-gil -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --disable-gil" >&5 -printf %s "checking for --disable-gil... " >&6; } -# Check whether --enable-gil was given. -if test ${enable_gil+y} -then : - enableval=$enable_gil; if test "x$enable_gil" = xyes -then : - disable_gil=no -else $as_nop - disable_gil=yes -fi -else $as_nop - disable_gil=no - -fi - -{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $disable_gil" >&5 -printf "%s\n" "$disable_gil" >&6; } - -if test "$disable_gil" = "yes" -then - -printf "%s\n" "#define Py_NOGIL 1" >>confdefs.h - -fi - # gh-109054: Check if -latomic is needed to get atomic functions. # On Linux aarch64, GCC may require programs and libraries to be linked # explicitly to libatomic. Call _Py_atomic_or_uint64() which may require diff --git a/configure.ac b/configure.ac index 205a98a992279c..493868130414ee 100644 --- a/configure.ac +++ b/configure.ac @@ -1489,6 +1489,23 @@ fi AC_SUBST([ABIFLAGS]) ABIFLAGS="" +# Check for --disable-gil +# --disable-gil +AC_MSG_CHECKING([for --disable-gil]) +AC_ARG_ENABLE([gil], + [AS_HELP_STRING([--disable-gil], [enable experimental support for running without the GIL (default is no)])], + [AS_VAR_IF([enable_gil], [yes], [disable_gil=no], [disable_gil=yes])], [disable_gil=no] +) +AC_MSG_RESULT([$disable_gil]) + +if test "$disable_gil" = "yes" +then + AC_DEFINE([Py_NOGIL], [1], + [Define if you want to disable the GIL]) + # Add "t" for "threaded" + ABIFLAGS="${ABIFLAGS}t" +fi + # Check for --with-pydebug AC_MSG_CHECKING([for --with-pydebug]) AC_ARG_WITH([pydebug], @@ -5669,6 +5686,7 @@ AC_C_BIGENDIAN # # * The Python implementation (always 'cpython-' for us) # * The major and minor version numbers +# * --disable-gil (adds a 't') # * --with-pydebug (adds a 'd') # # Thus for example, Python 3.2 built with wide unicode, pydebug, and pymalloc, @@ -5814,6 +5832,7 @@ dnl library (tinfo ncursesw ncurses termcap). We now assume that libreadline dnl or readline.pc provide correct linker information. AH_TEMPLATE([WITH_EDITLINE], [Define to build the readline module against libedit.]) +AH_TEMPLATE([WITH_APPLE_EDITLINE], [Define to build the readline module against Apple BSD editline.]) AC_ARG_WITH( [readline], @@ -5830,6 +5849,15 @@ AC_ARG_WITH( [with_readline=readline] ) +# gh-105323: Need to handle the macOS editline as an alias of readline. 
+AS_CASE([$ac_sys_system/$ac_sys_release], + [Darwin/*], [AC_CHECK_TYPE([Function], + [AC_DEFINE([WITH_APPLE_EDITLINE])], + [], + [@%:@include ])], + [] +) + AS_VAR_IF([with_readline], [readline], [ PKG_CHECK_MODULES([LIBREADLINE], [readline], [ LIBREADLINE=readline @@ -6947,21 +6975,6 @@ AC_ARG_ENABLE([test-modules], AC_MSG_RESULT([$TEST_MODULES]) AC_SUBST([TEST_MODULES]) -# Check for --disable-gil -# --disable-gil -AC_MSG_CHECKING([for --disable-gil]) -AC_ARG_ENABLE([gil], - [AS_HELP_STRING([--disable-gil], [enable experimental support for running without the GIL (default is no)])], - [AS_VAR_IF([enable_gil], [yes], [disable_gil=no], [disable_gil=yes])], [disable_gil=no] -) -AC_MSG_RESULT([$disable_gil]) - -if test "$disable_gil" = "yes" -then - AC_DEFINE([Py_NOGIL], [1], - [Define if you want to disable the GIL]) -fi - # gh-109054: Check if -latomic is needed to get atomic functions. # On Linux aarch64, GCC may require programs and libraries to be linked # explicitly to libatomic. Call _Py_atomic_or_uint64() which may require diff --git a/pyconfig.h.in b/pyconfig.h.in index 86c72cc6b4e62a..c2c75c96dcaad1 100644 --- a/pyconfig.h.in +++ b/pyconfig.h.in @@ -1788,6 +1788,9 @@ /* Define if WINDOW in curses.h offers a field _flags. */ #undef WINDOW_HAS_FLAGS +/* Define to build the readline module against Apple BSD editline. */ +#undef WITH_APPLE_EDITLINE + /* Define if you want build the _decimal module using a coroutine-local rather than a thread-local context */ #undef WITH_DECIMAL_CONTEXTVAR
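With the ``--disable-gil`` check relocated in configure.ac, free-threaded builds now append a ``t`` to ``ABIFLAGS``, and the new ``PYD_THREADING_TAG`` in ``Modules/dynload_win.c`` carries the same tag into the tagged Windows extension suffix. A quick way to inspect the resulting ABI tag on a built interpreter; the printed values are examples, not guaranteed output::

    import sys
    import sysconfig

    # sys.abiflags exists only on POSIX builds; it would read e.g. "t"
    # on a --disable-gil build (or "td" combined with --with-pydebug).
    print(getattr(sys, "abiflags", "<not available on Windows>"))

    # On POSIX the ABI tag is embedded in the extension-module suffix,
    # e.g. ".cpython-313t-x86_64-linux-gnu.so" for a free-threaded build.
    print(sysconfig.get_config_var("EXT_SUFFIX"))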