diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9162f9c7bb1576b..669844854b2fe5d 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -16,6 +16,9 @@ configure* @erlend-aasland @corona10 Makefile.pre.in @erlend-aasland Modules/Setup* @erlend-aasland +# argparse +**/*argparse* @savannahostrowski + # asyncio **/*asyncio* @1st1 @asvetlov @kumaraditya303 @willingc @@ -23,7 +26,7 @@ Modules/Setup* @erlend-aasland **/*context* @1st1 **/*genobject* @markshannon **/*hamt* @1st1 -**/*jit* @brandtbucher +**/*jit* @brandtbucher @savannahostrowski Objects/set* @rhettinger Objects/dict* @methane @markshannon Objects/typevarobject.c @JelleZijlstra diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f63c4606220494f..c854c13e12f9226 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -46,7 +46,7 @@ jobs: # reproducible: to get the same tools versions (autoconf, aclocal, ...) runs-on: ubuntu-24.04 container: - image: ghcr.io/python/autoconf:2024.10.16.11360930377 + image: ghcr.io/python/autoconf:2024.11.11.11786316759 timeout-minutes: 60 needs: check_source if: needs.check_source.outputs.run_tests == 'true' @@ -76,7 +76,7 @@ jobs: # Check for changes in regenerated files if test -n "$changes"; then echo "Generated files not up to date." - echo "Perhaps you forgot to run make regen-all or build.bat --regen. ;)" + echo "Perhaps you forgot to run make regen-configure ;)" echo "configure files must be regenerated with a specific version of autoconf." echo "$changes" echo "" diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 9ff3e5265004a10..32bb451b08d4138 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -582,6 +582,39 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. .. versionadded:: 3.14 +.. c:function:: int PyLong_IsPositive(PyObject *obj) + + Check if the integer object *obj* is positive (``obj > 0``). + + If *obj* is an instance of :c:type:`PyLongObject` or its subtype, + return ``1`` when it's positive and ``0`` otherwise. Else set an + exception and return ``-1``. + + .. versionadded:: next + + +.. c:function:: int PyLong_IsNegative(PyObject *obj) + + Check if the integer object *obj* is negative (``obj < 0``). + + If *obj* is an instance of :c:type:`PyLongObject` or its subtype, + return ``1`` when it's negative and ``0`` otherwise. Else set an + exception and return ``-1``. + + .. versionadded:: next + + +.. c:function:: int PyLong_IsZero(PyObject *obj) + + Check if the integer object *obj* is zero. + + If *obj* is an instance of :c:type:`PyLongObject` or its subtype, + return ``1`` when it's zero and ``0`` otherwise. Else set an + exception and return ``-1``. + + .. versionadded:: next + + .. c:function:: PyObject* PyLong_GetInfo(void) On success, return a read only :term:`named tuple`, that holds diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst index 630114a4339110f..1e1cf6e6bfd7e9b 100644 --- a/Doc/c-api/object.rst +++ b/Doc/c-api/object.rst @@ -575,3 +575,27 @@ Object Protocol has the :c:macro:`Py_TPFLAGS_MANAGED_DICT` flag set. .. versionadded:: 3.13 + +.. c:function:: int PyUnstable_Object_EnableDeferredRefcount(PyObject *obj) + + Enable `deferred reference counting `_ on *obj*, + if supported by the runtime. In the :term:`free-threaded ` build, + this allows the interpreter to avoid reference count adjustments to *obj*, + which may improve multi-threaded performance. The tradeoff is + that *obj* will only be deallocated by the tracing garbage collector. 
+ + This function returns ``1`` if deferred reference counting is enabled on *obj* + (including when it was enabled before the call), + and ``0`` if deferred reference counting is not supported or if the hint was + ignored by the runtime. This function is thread-safe, and cannot fail. + + This function does nothing on builds with the :term:`GIL` enabled, which do + not support deferred reference counting. This also does nothing if *obj* is not + an object tracked by the garbage collector (see :func:`gc.is_tracked` and + :c:func:`PyObject_GC_IsTracked`). + + This function is intended to be used soon after *obj* is created, + by the code that creates it. + + .. versionadded:: next + diff --git a/Doc/conf.py b/Doc/conf.py index 73d7d5db26ff7b9..738c9901eef06fd 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -67,10 +67,7 @@ # General substitutions. project = 'Python' -if sphinx.version_info[:2] >= (8, 1): - copyright = "2001-%Y, Python Software Foundation" -else: - copyright = f"2001-{time.strftime('%Y')}, Python Software Foundation" +copyright = "2001 Python Software Foundation" # We look for the Include/patchlevel.h file in the current Python source tree # and replace the values accordingly. diff --git a/Doc/copyright.rst b/Doc/copyright.rst index 8629ed1fc38009f..9210d5f50ed8415 100644 --- a/Doc/copyright.rst +++ b/Doc/copyright.rst @@ -4,7 +4,7 @@ Copyright Python and this documentation is: -Copyright © 2001-2024 Python Software Foundation. All rights reserved. +Copyright © 2001 Python Software Foundation. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved. diff --git a/Doc/library/aifc.rst b/Doc/library/aifc.rst new file mode 100644 index 000000000000000..a756d679036ecbf --- /dev/null +++ b/Doc/library/aifc.rst @@ -0,0 +1,15 @@ +:mod:`!aifc` --- Read and write AIFF and AIFC files +=================================================== + +.. module:: aifc + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!aifc` module was +`Python 3.12 `_. diff --git a/Doc/library/asynchat.rst b/Doc/library/asynchat.rst new file mode 100644 index 000000000000000..5e5c3a99fe66f1b --- /dev/null +++ b/Doc/library/asynchat.rst @@ -0,0 +1,17 @@ +:mod:`!asynchat` --- Asynchronous socket command/response handler +================================================================= + +.. module:: asynchat + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.6 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.6. The removal was decided in :pep:`594`. + +Applications should use the :mod:`asyncio` module instead. + +The last version of Python that provided the :mod:`!asynchat` module was +`Python 3.11 `_. diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 3ace6eda4d7f291..9f1aec148f8750b 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -1797,7 +1797,7 @@ By default asyncio is configured to use :class:`EventLoop`. .. seealso:: `MSDN documentation on I/O Completion Ports - `_. + `_. .. 
class:: EventLoop diff --git a/Doc/library/asyncore.rst b/Doc/library/asyncore.rst new file mode 100644 index 000000000000000..22c9881c3cca362 --- /dev/null +++ b/Doc/library/asyncore.rst @@ -0,0 +1,17 @@ +:mod:`!asyncore` --- Asynchronous socket handler +================================================ + +.. module:: asyncore + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.6 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.6. The removal was decided in :pep:`594`. + +Applications should use the :mod:`asyncio` module instead. + +The last version of Python that provided the :mod:`!asyncore` module was +`Python 3.11 `_. diff --git a/Doc/library/audioop.rst b/Doc/library/audioop.rst new file mode 100644 index 000000000000000..3bc580b0bd34332 --- /dev/null +++ b/Doc/library/audioop.rst @@ -0,0 +1,15 @@ +:mod:`!audioop` --- Manipulate raw audio data +============================================= + +.. module:: audioop + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!audioop` module was +`Python 3.12 `_. diff --git a/Doc/library/cgi.rst b/Doc/library/cgi.rst new file mode 100644 index 000000000000000..f9108fa954a906f --- /dev/null +++ b/Doc/library/cgi.rst @@ -0,0 +1,19 @@ +:mod:`!cgi` --- Common Gateway Interface support +================================================ + +.. module:: cgi + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +A fork of the module on PyPI can be used instead: :pypi:`legacy-cgi`. +This is a copy of the cgi module, no longer maintained or supported by the core +Python team. + +The last version of Python that provided the :mod:`!cgi` module was +`Python 3.12 `_. diff --git a/Doc/library/cgitb.rst b/Doc/library/cgitb.rst new file mode 100644 index 000000000000000..fc646aa4c48acd0 --- /dev/null +++ b/Doc/library/cgitb.rst @@ -0,0 +1,19 @@ +:mod:`!cgitb` --- Traceback manager for CGI scripts +=================================================== + +.. module:: cgitb + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +A fork of the module on PyPI can now be used instead: :pypi:`legacy-cgi`. +This is a copy of the cgi module, no longer maintained or supported by the core +Python team. + +The last version of Python that provided the :mod:`!cgitb` module was +`Python 3.12 `_. diff --git a/Doc/library/chunk.rst b/Doc/library/chunk.rst new file mode 100644 index 000000000000000..9950a0ea70649a8 --- /dev/null +++ b/Doc/library/chunk.rst @@ -0,0 +1,15 @@ +:mod:`!chunk` --- Read IFF chunked data +======================================= + +.. module:: chunk + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. 
+It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!chunk` module was +`Python 3.12 `_. diff --git a/Doc/library/crypt.rst b/Doc/library/crypt.rst new file mode 100644 index 000000000000000..9ff37196ccf69ff --- /dev/null +++ b/Doc/library/crypt.rst @@ -0,0 +1,20 @@ +:mod:`!crypt` --- Function to check Unix passwords +================================================== + +.. module:: crypt + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Applications can use the :mod:`hashlib` module from the standard library. +Other possible replacements are third-party libraries from PyPI: +:pypi:`legacycrypt`, :pypi:`bcrypt`, :pypi:`argon2-cffi`, or :pypi:`passlib`. +These are not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!crypt` module was +`Python 3.12 `_. diff --git a/Doc/library/distutils.rst b/Doc/library/distutils.rst new file mode 100644 index 000000000000000..af63e035bf3c4ac --- /dev/null +++ b/Doc/library/distutils.rst @@ -0,0 +1,17 @@ +:mod:`!distutils` --- Building and installing Python modules +============================================================ + +.. module:: distutils + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.10 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.10. The removal was decided in :pep:`632`, +which has `migration advice +`_. + +The last version of Python that provided the :mod:`!distutils` module was +`Python 3.11 `_. diff --git a/Doc/library/functools.rst b/Doc/library/functools.rst index e26a2226aa947a7..a9aceee41700048 100644 --- a/Doc/library/functools.rst +++ b/Doc/library/functools.rst @@ -453,7 +453,7 @@ The :mod:`functools` module defines the following functions: .. versionadded:: 3.4 -.. function:: reduce(function, iterable[, initial], /) +.. function:: reduce(function, iterable, /[, initial]) Apply *function* of two arguments cumulatively to the items of *iterable*, from left to right, so as to reduce the iterable to a single value. For example, @@ -468,7 +468,7 @@ The :mod:`functools` module defines the following functions: initial_missing = object() - def reduce(function, iterable, initial=initial_missing, /): + def reduce(function, iterable, /, initial=initial_missing): it = iter(iterable) if initial is initial_missing: value = next(it) @@ -481,6 +481,9 @@ The :mod:`functools` module defines the following functions: See :func:`itertools.accumulate` for an iterator that yields all intermediate values. + .. versionchanged:: next + *initial* is now supported as a keyword argument. + .. decorator:: singledispatch Transform a function into a :term:`single-dispatch >> args ['a1', 'a2'] +Optional arguments should be specified explicitly: + +.. doctest:: + + >>> s = '-Con -C --color=off --color a1 a2' + >>> args = s.split() + >>> args + ['-Con', '-C', '--color=off', '--color', 'a1', 'a2'] + >>> optlist, args = getopt.getopt(args, 'C::', ['color=?']) + >>> optlist + [('-C', 'on'), ('-C', ''), ('--color', 'off'), ('--color', '')] + >>> args + ['a1', 'a2'] + +The order of options and non-option arguments can be preserved: + +.. 
doctest:: + + >>> s = 'a1 -x a2 a3 a4 --long a5 a6' + >>> args = s.split() + >>> args + ['a1', '-x', 'a2', 'a3', 'a4', '--long', 'a5', 'a6'] + >>> optlist, args = getopt.gnu_getopt(args, '-x:', ['long=']) + >>> optlist + [(None, ['a1']), ('-x', 'a2'), (None, ['a3', 'a4']), ('--long', 'a5')] + >>> args + ['a6'] + In a script, typical usage is something like this: .. testcode:: diff --git a/Doc/library/imghdr.rst b/Doc/library/imghdr.rst new file mode 100644 index 000000000000000..56f26355f42558c --- /dev/null +++ b/Doc/library/imghdr.rst @@ -0,0 +1,19 @@ +:mod:`!imghdr` --- Determine the type of an image +================================================= + +.. module:: imghdr + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Possible replacements are third-party libraries from PyPI: +:pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic`. +These are not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!imghdr` module was +`Python 3.12 `_. diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst new file mode 100644 index 000000000000000..3dc4c568b1ae2f1 --- /dev/null +++ b/Doc/library/imp.rst @@ -0,0 +1,18 @@ +:mod:`!imp` --- Access the import internals +=========================================== + +.. module:: imp + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.4 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.4. + +The :ref:`removal notice ` includes guidance for +migrating code from :mod:`!imp` to :mod:`importlib`. + +The last version of Python that provided the :mod:`!imp` module was +`Python 3.11 `_. diff --git a/Doc/library/index.rst b/Doc/library/index.rst index 0b348ae6f5c8c0a..951fbcf13fbb139 100644 --- a/Doc/library/index.rst +++ b/Doc/library/index.rst @@ -75,4 +75,5 @@ the `Python Package Index `_. unix.rst cmdline.rst superseded.rst + removed.rst security_warnings.rst diff --git a/Doc/library/mailcap.rst b/Doc/library/mailcap.rst new file mode 100644 index 000000000000000..4467da146a5a05a --- /dev/null +++ b/Doc/library/mailcap.rst @@ -0,0 +1,15 @@ +:mod:`!mailcap` --- Mailcap file handling +========================================= + +.. module:: mailcap + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!mailcap` module was +`Python 3.12 `_. diff --git a/Doc/library/msilib.rst b/Doc/library/msilib.rst new file mode 100644 index 000000000000000..eb1ac551ded456b --- /dev/null +++ b/Doc/library/msilib.rst @@ -0,0 +1,15 @@ +:mod:`!msilib` --- Read and write Microsoft Installer files +=========================================================== + +.. module:: msilib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!msilib` module was +`Python 3.12 `_. 
diff --git a/Doc/library/nis.rst b/Doc/library/nis.rst new file mode 100644 index 000000000000000..dcc36dd43fc313a --- /dev/null +++ b/Doc/library/nis.rst @@ -0,0 +1,15 @@ +:mod:`!nis` --- Interface to Sun’s NIS (Yellow Pages) +===================================================== + +.. module:: nis + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!nis` module was +`Python 3.12 `_. diff --git a/Doc/library/nntplib.rst b/Doc/library/nntplib.rst new file mode 100644 index 000000000000000..8053fe8cb8b9e12 --- /dev/null +++ b/Doc/library/nntplib.rst @@ -0,0 +1,15 @@ +:mod:`!nntplib` --- NNTP protocol client +======================================== + +.. module:: nntplib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!nntplib` module was +`Python 3.12 `_. diff --git a/Doc/library/ossaudiodev.rst b/Doc/library/ossaudiodev.rst new file mode 100644 index 000000000000000..320adbeff825399 --- /dev/null +++ b/Doc/library/ossaudiodev.rst @@ -0,0 +1,15 @@ +:mod:`!ossaudiodev` --- Access to OSS-compatible audio devices +============================================================== + +.. module:: ossaudiodev + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!ossaudiodev` module was +`Python 3.12 `_. diff --git a/Doc/library/pipes.rst b/Doc/library/pipes.rst new file mode 100644 index 000000000000000..d9bcc3a5d99c9b9 --- /dev/null +++ b/Doc/library/pipes.rst @@ -0,0 +1,17 @@ +:mod:`!pipes` --- Interface to shell pipelines +============================================== + +.. module:: pipes + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Applications should use the :mod:`subprocess` module instead. + +The last version of Python that provided the :mod:`!pipes` module was +`Python 3.12 `_. diff --git a/Doc/library/pprint.rst b/Doc/library/pprint.rst index 1b3498e51f766dd..2985f31bacb47a5 100644 --- a/Doc/library/pprint.rst +++ b/Doc/library/pprint.rst @@ -267,7 +267,7 @@ let's fetch information about a project from `PyPI `_:: >>> import json >>> import pprint >>> from urllib.request import urlopen - >>> with urlopen('https://pypi.org/pypi/sampleproject/json') as resp: + >>> with urlopen('https://pypi.org/pypi/sampleproject/1.2.0/json') as resp: ... project_info = json.load(resp)['info'] In its basic form, :func:`~pprint.pp` shows the whole object:: diff --git a/Doc/library/removed.rst b/Doc/library/removed.rst new file mode 100644 index 000000000000000..4d75842eca1a03f --- /dev/null +++ b/Doc/library/removed.rst @@ -0,0 +1,39 @@ +:tocdepth: 1 + +.. 
_removed: + +*************** +Removed Modules +*************** + +The modules described in this chapter have been removed from the Python +standard library. They are documented here to help people find replacements. + + +.. toctree:: + :maxdepth: 1 + + aifc.rst + asynchat.rst + asyncore.rst + audioop.rst + cgi.rst + cgitb.rst + chunk.rst + crypt.rst + distutils.rst + imghdr.rst + imp.rst + mailcap.rst + msilib.rst + nis.rst + nntplib.rst + ossaudiodev.rst + pipes.rst + smtpd.rst + sndhdr.rst + spwd.rst + sunau.rst + telnetlib.rst + uu.rst + xdrlib.rst diff --git a/Doc/library/smtpd.rst b/Doc/library/smtpd.rst new file mode 100644 index 000000000000000..c704f4a241b4692 --- /dev/null +++ b/Doc/library/smtpd.rst @@ -0,0 +1,18 @@ +:mod:`!smtpd` --- SMTP Server +============================= + +.. module:: smtpd + :synopsis: Removed in 3.12. + :deprecated: + +.. deprecated-removed:: 3.6 3.12 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.12 ` after +being deprecated in Python 3.6. The removal was decided in :pep:`594`. + +A possible replacement is the third-party :pypi:`aiosmtpd` library. This +library is not maintained or supported by the Python core team. + +The last version of Python that provided the :mod:`!smtpd` module was +`Python 3.11 `_. diff --git a/Doc/library/sndhdr.rst b/Doc/library/sndhdr.rst new file mode 100644 index 000000000000000..6b71db4f6338a8f --- /dev/null +++ b/Doc/library/sndhdr.rst @@ -0,0 +1,19 @@ +:mod:`!sndhdr` --- Determine type of sound file +=============================================== + +.. module:: sndhdr + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Possible replacements are third-party modules from PyPI: +:pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic`. +These are not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!sndhdr` module was +`Python 3.12 `_. diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst index 0c7b9328648f668..6358d140484c780 100644 --- a/Doc/library/socket.rst +++ b/Doc/library/socket.rst @@ -928,7 +928,9 @@ The :mod:`socket` module also offers various network-related services: .. versionadded:: 3.7 -.. function:: getaddrinfo(host, port, family=0, type=0, proto=0, flags=0) +.. function:: getaddrinfo(host, port, family=AF_UNSPEC, type=0, proto=0, flags=0) + + This function wraps the C function ``getaddrinfo`` of the underlying system. Translate the *host*/*port* argument into a sequence of 5-tuples that contain all the necessary arguments for creating a socket connected to that service. @@ -938,8 +940,10 @@ The :mod:`socket` module also offers various network-related services: and *port*, you can pass ``NULL`` to the underlying C API. The *family*, *type* and *proto* arguments can be optionally specified - in order to narrow the list of addresses returned. Passing zero as a - value for each of these arguments selects the full range of results. + in order to provide options and limit the list of addresses returned. + Pass their default values (:data:`AF_UNSPEC`, 0, and 0, respectively) + to not limit the results. See the note below for details. + The *flags* argument can be one or several of the ``AI_*`` constants, and will influence how results are computed and returned. 
For example, :const:`AI_NUMERICHOST` will disable domain name resolution @@ -959,6 +963,29 @@ The :mod:`socket` module also offers various network-related services: :const:`AF_INET6`), and is meant to be passed to the :meth:`socket.connect` method. + .. note:: + + If you intend to use results from :func:`!getaddrinfo` to create a socket + (rather than, for example, retrieve *canonname*), + consider limiting the results by *type* (e.g. :data:`SOCK_STREAM` or + :data:`SOCK_DGRAM`) and/or *proto* (e.g. :data:`IPPROTO_TCP` or + :data:`IPPROTO_UDP`) that your application can handle. + + The behavior with default values of *family*, *type*, *proto* + and *flags* is system-specific. + + Many systems (for example, most Linux configurations) will return a sorted + list of all matching addresses. + These addresses should generally be tried in order until a connection succeeds + (possibly tried in parallel, for example, using a `Happy Eyeballs`_ algorithm). + In these cases, limiting the *type* and/or *proto* can help eliminate + unsuccessful or unusable connection attempts. + + Some systems will, however, only return a single address. + (For example, this was reported on Solaris and AIX configurations.) + On these systems, limiting the *type* and/or *proto* helps ensure that + this address is usable. + .. audit-event:: socket.getaddrinfo host,port,family,type,protocol socket.getaddrinfo The following example fetches address information for a hypothetical TCP @@ -978,6 +1005,8 @@ for IPv6 multicast addresses, string representing an address will not contain ``%scope_id`` part. +.. _Happy Eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs + .. function:: getfqdn([name]) Return a fully qualified domain name for *name*. If *name* is omitted or empty, diff --git a/Doc/library/spwd.rst b/Doc/library/spwd.rst new file mode 100644 index 000000000000000..c16854bb380e521 --- /dev/null +++ b/Doc/library/spwd.rst @@ -0,0 +1,18 @@ +:mod:`!spwd` --- The shadow password database +============================================= + +.. module:: spwd + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +A possible replacement is the third-party library :pypi:`python-pam`. +This library is not supported or maintained by the Python core team. + +The last version of Python that provided the :mod:`!spwd` module was +`Python 3.12 `_. diff --git a/Doc/library/sunau.rst b/Doc/library/sunau.rst new file mode 100644 index 000000000000000..feb7768f8bdd68c --- /dev/null +++ b/Doc/library/sunau.rst @@ -0,0 +1,15 @@ +:mod:`!sunau` --- Read and write Sun AU files +============================================= + +.. module:: sunau + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!sunau` module was +`Python 3.12 `_. diff --git a/Doc/library/telnetlib.rst b/Doc/library/telnetlib.rst new file mode 100644 index 000000000000000..6971ad33ff9751c --- /dev/null +++ b/Doc/library/telnetlib.rst @@ -0,0 +1,19 @@ +:mod:`!telnetlib` --- Telnet client +=================================== + ..
module:: telnetlib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +Possible replacements are third-party libraries from PyPI: :pypi:`telnetlib3` +or :pypi:`Exscript`. These are not supported or maintained by the Python core +team. + +The last version of Python that provided the :mod:`!telnetlib` module was +`Python 3.12 `_. diff --git a/Doc/library/time.rst b/Doc/library/time.rst index 9cd5db768e9853c..6265c2214eaa0dd 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -390,7 +390,7 @@ Functions threads ready to run, the function returns immediately, and the thread continues execution. On Windows 8.1 and newer the implementation uses a `high-resolution timer - `_ + `_ which provides resolution of 100 nanoseconds. If *secs* is zero, ``Sleep(0)`` is used. Unix implementation: diff --git a/Doc/library/tomllib.rst b/Doc/library/tomllib.rst index 521a7a17fb3e8bc..4b88b2e29e78220 100644 --- a/Doc/library/tomllib.rst +++ b/Doc/library/tomllib.rst @@ -60,9 +60,36 @@ This module defines the following functions: The following exceptions are available: -.. exception:: TOMLDecodeError +.. exception:: TOMLDecodeError(msg, doc, pos) - Subclass of :exc:`ValueError`. + Subclass of :exc:`ValueError` with the following additional attributes: + + .. attribute:: msg + + The unformatted error message. + + .. attribute:: doc + + The TOML document being parsed. + + .. attribute:: pos + + The index of *doc* where parsing failed. + + .. attribute:: lineno + + The line corresponding to *pos*. + + .. attribute:: colno + + The column corresponding to *pos*. + + .. versionchanged:: next + Added the *msg*, *doc* and *pos* parameters. + Added the :attr:`msg`, :attr:`doc`, :attr:`pos`, :attr:`lineno` and :attr:`colno` attributes. + + .. deprecated:: next + Passing free-form positional arguments is deprecated. Examples diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst index fb5353e1895bf9d..0501dc8733b2cdf 100644 --- a/Doc/library/urllib.parse.rst +++ b/Doc/library/urllib.parse.rst @@ -239,6 +239,10 @@ or on combining URL components into a URL string. query parameter separator. This has been changed to allow only a single separator key, with ``&`` as the default separator. + .. deprecated:: 3.14 + Accepting objects with false values (like ``0`` and ``[]``) except empty + strings and byte-like objects and ``None`` is now deprecated. + .. function:: parse_qsl(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&') @@ -745,6 +749,10 @@ task isn't already covered by the URL parsing functions above. .. versionchanged:: 3.5 Added the *quote_via* parameter. + .. deprecated:: 3.14 + Accepting objects with false values (like ``0`` and ``[]``) except empty + strings and byte-like objects and ``None`` is now deprecated. + .. seealso:: diff --git a/Doc/library/uu.rst b/Doc/library/uu.rst new file mode 100644 index 000000000000000..0636d180294d474 --- /dev/null +++ b/Doc/library/uu.rst @@ -0,0 +1,15 @@ +:mod:`!uu` --- Encode and decode uuencode files +=============================================== + +.. module:: uu + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. 
+It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!uu` module was +`Python 3.12 `_. diff --git a/Doc/library/uuid.rst b/Doc/library/uuid.rst index 0f2d7820cb25c82..6166c22caedf812 100644 --- a/Doc/library/uuid.rst +++ b/Doc/library/uuid.rst @@ -1,8 +1,8 @@ -:mod:`!uuid` --- UUID objects according to :rfc:`4122` +:mod:`!uuid` --- UUID objects according to :rfc:`9562` ====================================================== .. module:: uuid - :synopsis: UUID objects (universally unique identifiers) according to RFC 4122 + :synopsis: UUID objects (universally unique identifiers) according to RFC 9562 .. moduleauthor:: Ka-Ping Yee .. sectionauthor:: George Yoshida @@ -12,7 +12,8 @@ This module provides immutable :class:`UUID` objects (the :class:`UUID` class) and the functions :func:`uuid1`, :func:`uuid3`, :func:`uuid4`, :func:`uuid5` for -generating version 1, 3, 4, and 5 UUIDs as specified in :rfc:`4122`. +generating version 1, 3, 4, 5, and 8 UUIDs as specified in :rfc:`9562` (which +supersedes :rfc:`4122`). If all you want is a unique ID, you should probably call :func:`uuid1` or :func:`uuid4`. Note that :func:`uuid1` may compromise privacy since it creates @@ -65,7 +66,7 @@ which relays any information about the UUID's safety, using this enumeration: Exactly one of *hex*, *bytes*, *bytes_le*, *fields*, or *int* must be given. The *version* argument is optional; if given, the resulting UUID will have its - variant and version number set according to :rfc:`4122`, overriding bits in the + variant and version number set according to :rfc:`9562`, overriding bits in the given *hex*, *bytes*, *bytes_le*, *fields*, or *int*. Comparison of UUID objects are made by way of comparing their @@ -137,7 +138,7 @@ which relays any information about the UUID's safety, using this enumeration: .. attribute:: UUID.urn - The UUID as a URN as specified in :rfc:`4122`. + The UUID as a URN as specified in :rfc:`9562`. .. attribute:: UUID.variant @@ -149,9 +150,13 @@ which relays any information about the UUID's safety, using this enumeration: .. attribute:: UUID.version - The UUID version number (1 through 5, meaningful only when the variant is + The UUID version number (1 through 8, meaningful only when the variant is :const:`RFC_4122`). + .. versionchanged:: next + Added UUID version 8. + + .. attribute:: UUID.is_safe An enumeration of :class:`SafeUUID` which indicates whether the platform @@ -216,6 +221,23 @@ The :mod:`uuid` module defines the following functions: .. index:: single: uuid5 + +.. function:: uuid8(a=None, b=None, c=None) + + Generate a pseudo-random UUID according to + :rfc:`RFC 9562, §5.8 <9562#section-5.8>`. + + When specified, the parameters *a*, *b* and *c* are expected to be + positive integers of 48, 12 and 62 bits respectively. If they exceed + their expected bit count, only their least significant bits are kept; + non-specified arguments are substituted for a pseudo-random integer of + appropriate size. + + .. versionadded:: next + +.. index:: single: uuid8 + + The :mod:`uuid` module defines the following namespace identifiers for use with :func:`uuid3` or :func:`uuid5`. @@ -252,7 +274,9 @@ of the :attr:`~UUID.variant` attribute: .. data:: RFC_4122 - Specifies the UUID layout given in :rfc:`4122`. + Specifies the UUID layout given in :rfc:`4122`. This constant is kept + for backward compatibility even though :rfc:`4122` has been superseded + by :rfc:`9562`. .. 
data:: RESERVED_MICROSOFT @@ -267,7 +291,7 @@ of the :attr:`~UUID.variant` attribute: .. seealso:: - :rfc:`4122` - A Universally Unique IDentifier (UUID) URN Namespace + :rfc:`9562` - A Universally Unique IDentifier (UUID) URN Namespace This specification defines a Uniform Resource Name namespace for UUIDs, the internal format of UUIDs, and methods of generating UUIDs. @@ -283,7 +307,7 @@ The :mod:`uuid` module can be executed as a script from the command line. .. code-block:: sh - python -m uuid [-h] [-u {uuid1,uuid3,uuid4,uuid5}] [-n NAMESPACE] [-N NAME] + python -m uuid [-h] [-u {uuid1,uuid3,uuid4,uuid5,uuid8}] [-n NAMESPACE] [-N NAME] The following options are accepted: diff --git a/Doc/library/xdrlib.rst b/Doc/library/xdrlib.rst new file mode 100644 index 000000000000000..59b801c8e4072e8 --- /dev/null +++ b/Doc/library/xdrlib.rst @@ -0,0 +1,15 @@ +:mod:`!xdrlib` --- Encode and decode XDR data +============================================= + +.. module:: xdrlib + :synopsis: Removed in 3.13. + :deprecated: + +.. deprecated-removed:: 3.11 3.13 + +This module is no longer part of the Python standard library. +It was :ref:`removed in Python 3.13 ` after +being deprecated in Python 3.11. The removal was decided in :pep:`594`. + +The last version of Python that provided the :mod:`!xdrlib` module was +`Python 3.12 `_. diff --git a/Doc/license.rst b/Doc/license.rst index 674ac5f56e6f971..428dc22b817ebe6 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -100,7 +100,7 @@ PSF LICENSE AGREEMENT FOR PYTHON |release| analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python |release| alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of - copyright, i.e., "Copyright © 2001-2024 Python Software Foundation; All Rights + copyright, i.e., "Copyright © 2001 Python Software Foundation; All Rights Reserved" are retained in Python |release| alone or in any derivative version prepared by Licensee. diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index 1b1e9f479cbe086..e73ce44270b0821 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -534,18 +534,15 @@ is semantically equivalent to:: enter = type(manager).__enter__ exit = type(manager).__exit__ value = enter(manager) - hit_except = False try: TARGET = value SUITE except: - hit_except = True if not exit(manager, *sys.exc_info()): raise - finally: - if not hit_except: - exit(manager, None, None, None) + else: + exit(manager, None, None, None) With more than one item, the context managers are processed as if multiple :keyword:`with` statements were nested:: diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 63b8dd73127984a..7c95b207b1aed22 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -1155,7 +1155,8 @@ a user-defined function: first thing the code block will do is bind the formal parameters to the arguments; this is described in section :ref:`function`. When the code block executes a :keyword:`return` statement, this specifies the return value of the - function call. + function call. If execution reaches the end of the code block without + executing a :keyword:`return` statement, the return value is ``None``. a built-in function or method: .. 
index:: diff --git a/Doc/reference/simple_stmts.rst b/Doc/reference/simple_stmts.rst index 24df4a6ba7b6784..a005395bfc402eb 100644 --- a/Doc/reference/simple_stmts.rst +++ b/Doc/reference/simple_stmts.rst @@ -966,25 +966,14 @@ The :keyword:`!global` statement .. productionlist:: python-grammar global_stmt: "global" `identifier` ("," `identifier`)* -The :keyword:`global` statement is a declaration which holds for the entire -current code block. It means that the listed identifiers are to be interpreted -as globals. It would be impossible to assign to a global variable without +The :keyword:`global` causes the listed identifiers to be interpreted +as globals. It would be impossible to assign to a global variable without :keyword:`!global`, although free variables may refer to globals without being declared global. -Names listed in a :keyword:`global` statement must not be used in the same code -block textually preceding that :keyword:`!global` statement. - -Names listed in a :keyword:`global` statement must not be defined as formal -parameters, or as targets in :keyword:`with` statements or :keyword:`except` clauses, or in a :keyword:`for` target list, :keyword:`class` -definition, function definition, :keyword:`import` statement, or -:term:`variable annotations `. - -.. impl-detail:: - - The current implementation does not enforce some of these restrictions, but - programs should not abuse this freedom, as future implementations may enforce - them or silently change the meaning of the program. +The global statement applies to the entire scope of a function or +class body. A :exc:`SyntaxError` is raised if a variable is used or +assigned to prior to its global declaration in the scope. .. index:: pair: built-in function; exec diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index daaf8822af1161d..1a6322d72341ff8 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -435,7 +435,7 @@ When writing to the Windows Registry, the following behaviors exist: For more detail on the technical basis for these limitations, please consult Microsoft's documentation on packaged full-trust apps, currently available at `docs.microsoft.com/en-us/windows/msix/desktop/desktop-to-uwp-behind-the-scenes -`_ +`_ .. _windows-nuget: @@ -536,7 +536,7 @@ dependents, such as Idle), pip and the Python documentation are not included. .. note:: The embedded distribution does not include the `Microsoft C Runtime - `_ and it is + `_ and it is the responsibility of the application installer to provide this. The runtime may have already been installed on a user's system previously or automatically via Windows Update, and can be detected by finding @@ -679,13 +679,13 @@ System variables, you need non-restricted access to your machine .. seealso:: - https://docs.microsoft.com/en-us/windows/win32/procthread/environment-variables + https://learn.microsoft.com/windows/win32/procthread/environment-variables Overview of environment variables on Windows - https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/set_1 + https://learn.microsoft.com/windows-server/administration/windows-commands/set_1 The ``set`` command, for temporarily modifying environment variables - https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/setx + https://learn.microsoft.com/windows-server/administration/windows-commands/setx The ``setx`` command, for permanently modifying environment variables @@ -1291,13 +1291,13 @@ is a collection of modules for advanced Windows-specific support. 
This includes utilities for: * `Component Object Model - `_ + `_ (COM) * Win32 API calls * Registry * Event log * `Microsoft Foundation Classes - `_ + `_ (MFC) user interfaces `PythonWin ` + for *list* and *dict* types gain previously overlooked missing methods: + + * :meth:`!clear` and :meth:`!copy` for proxies of :class:`list`. + * :meth:`~dict.fromkeys`, ``reversed(d)``, ``d | {}``, ``{} | d``, + ``d |= {'b': 2}`` for proxies of :class:`dict`. + + (Contributed by Roy Hyunjin Han for :gh:`103134`) + + operator -------- @@ -458,6 +520,14 @@ unittest (Contributed by Jacob Walls in :gh:`80958`.) +uuid +---- + +* Add support for UUID version 8 via :func:`uuid.uuid8` as specified + in :rfc:`9562`. + (Contributed by Bénédikt Tran in :gh:`89083`.) + + .. Add improved modules above alphabetically, not here at the end. Optimizations @@ -506,14 +576,6 @@ Deprecated as a single positional argument. (Contributed by Serhiy Storchaka in :gh:`109218`.) -* :mod:`multiprocessing` and :mod:`concurrent.futures`: - The default start method (see :ref:`multiprocessing-start-methods`) changed - away from *fork* to *forkserver* on platforms where it was not already - *spawn* (Windows & macOS). If you require the threading incompatible *fork* - start method you must explicitly specify it when using :mod:`multiprocessing` - or :mod:`concurrent.futures` APIs. - (Contributed by Gregory P. Smith in :gh:`84559`.) - * :mod:`os`: :term:`Soft deprecate ` :func:`os.popen` and :func:`os.spawn* ` functions. They should no longer be used to @@ -524,6 +586,13 @@ Deprecated Deprecate :meth:`symtable.Class.get_methods` due to the lack of interest. (Contributed by Bénédikt Tran in :gh:`119698`.) +* :mod:`urllib.parse`: + Accepting objects with false values (like ``0`` and ``[]``) except empty + strings, byte-like objects and ``None`` in :mod:`urllib.parse` functions + :func:`~urllib.parse.parse_qsl` and :func:`~urllib.parse.parse_qs` is now + deprecated. + (Contributed by Serhiy Storchaka in :gh:`116897`.) + .. Add deprecations above alphabetically, not here at the end. .. include:: ../deprecations/pending-removal-in-3.15.rst @@ -756,6 +825,11 @@ New features an interned string and deallocate it during module shutdown. (Contributed by Eddie Elizondo in :gh:`113601`.) +* Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` + and :c:func:`PyLong_IsZero` for checking if :c:type:`PyLongObject` + is positive, negative, or zero, respectively. + (Contribued by James Roy and Sergey B Kirpichev in :gh:`126061`.) + * Add new functions to convert C ```` numbers from/to Python :class:`int`: @@ -819,6 +893,9 @@ New features * Add :c:func:`PyType_Freeze` function to make a type immutable. (Contributed by Victor Stinner in :gh:`121654`.) +* Add :c:func:`PyUnstable_Object_EnableDeferredRefcount` for enabling + deferred reference counting, as outlined in :pep:`703`. + Porting to Python 3.14 ---------------------- diff --git a/Doc/whatsnew/3.8.rst b/Doc/whatsnew/3.8.rst index fc9f49e65af8473..bdc4ca5cab52450 100644 --- a/Doc/whatsnew/3.8.rst +++ b/Doc/whatsnew/3.8.rst @@ -936,7 +936,7 @@ Add option ``--json-lines`` to parse every input line as a separate JSON object. logging ------- -Added a *force* keyword argument to :func:`logging.basicConfig` +Added a *force* keyword argument to :func:`logging.basicConfig`. When set to true, any existing handlers attached to the root logger are removed and closed before carrying out the configuration specified by the other arguments. 
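The What's New entry above and the ``Include/cpython/longobject.h`` hunk below document three new sign-check helpers that take a ``PyObject *`` and return ``1``/``0`` on success or ``-1`` with an exception set. A minimal sketch of how an extension might use them, assuming a CPython build that includes this patch; the helper name ``sign_of`` is illustrative only and not part of the patch:

```c
#include <Python.h>

/* Sketch: classify the sign of an int using the new helpers.
   Per the documented semantics, each helper returns 1 or 0 on
   success, or -1 with an exception set if obj is not an int. */
static const char *
sign_of(PyObject *obj)
{
    int r = PyLong_IsZero(obj);
    if (r < 0) {
        return NULL;            /* not an int: exception already set */
    }
    if (r) {
        return "zero";
    }
    r = PyLong_IsPositive(obj);
    if (r < 0) {
        return NULL;
    }
    return r ? "positive" : "negative";
}
```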
diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index c1214d5e3714ead..4d6e618f831ad8a 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -61,6 +61,24 @@ PyAPI_FUNC(PyObject*) PyLong_FromUnsignedNativeBytes(const void* buffer, PyAPI_FUNC(int) PyUnstable_Long_IsCompact(const PyLongObject* op); PyAPI_FUNC(Py_ssize_t) PyUnstable_Long_CompactValue(const PyLongObject* op); +/* PyLong_IsPositive. Check if the integer object is positive. + + - On success, return 1 if *obj is positive, and 0 otherwise. + - On failure, set an exception, and return -1. */ +PyAPI_FUNC(int) PyLong_IsPositive(PyObject *obj); + +/* PyLong_IsNegative. Check if the integer object is negative. + + - On success, return 1 if *obj is negative, and 0 otherwise. + - On failure, set an exception, and return -1. */ +PyAPI_FUNC(int) PyLong_IsNegative(PyObject *obj); + +/* PyLong_IsZero. Check if the integer object is zero. + + - On success, return 1 if *obj is zero, and 0 if it is non-zero. + - On failure, set an exception, and return -1. */ +PyAPI_FUNC(int) PyLong_IsZero(PyObject *obj); + /* PyLong_GetSign. Get the sign of an integer object: 0, -1 or +1 for zero, negative or positive integer, respectively. diff --git a/Include/cpython/object.h b/Include/cpython/object.h index f0f61796cd3ec80..e4797029da431e5 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -527,3 +527,10 @@ typedef enum { typedef int (*PyRefTracer)(PyObject *, PyRefTracerEvent event, void *); PyAPI_FUNC(int) PyRefTracer_SetTracer(PyRefTracer tracer, void *data); PyAPI_FUNC(PyRefTracer) PyRefTracer_GetTracer(void**); + +/* Enable PEP-703 deferred reference counting on the object. + * + * Returns 1 if deferred reference counting was successfully enabled, and + * 0 if the runtime ignored it. This function cannot fail. + */ +PyAPI_FUNC(int) PyUnstable_Object_EnableDeferredRefcount(PyObject *); diff --git a/Include/internal/pycore_crossinterp.h b/Include/internal/pycore_crossinterp.h index e91e911feb38cca..69a60d73e05c269 100644 --- a/Include/internal/pycore_crossinterp.h +++ b/Include/internal/pycore_crossinterp.h @@ -11,6 +11,7 @@ extern "C" { #include "pycore_lock.h" // PyMutex #include "pycore_pyerrors.h" + /**************/ /* exceptions */ /**************/ @@ -38,14 +39,14 @@ extern int _Py_CallInInterpreterAndRawFree( /* cross-interpreter data */ /**************************/ -typedef struct _xid _PyXIData_t; -typedef PyObject *(*xid_newobjectfunc)(_PyXIData_t *); +typedef struct _xidata _PyXIData_t; +typedef PyObject *(*xid_newobjfunc)(_PyXIData_t *); typedef void (*xid_freefunc)(void *); // _PyXIData_t is similar to Py_buffer as an effectively // opaque struct that holds data outside the object machinery. This // is necessary to pass safely between interpreters in the same process. -struct _xid { +struct _xidata { // data is the cross-interpreter-safe derivation of a Python object // (see _PyObject_GetXIData). It will be NULL if the // new_object func (below) encodes the data. @@ -71,7 +72,7 @@ struct _xid { // interpreter given the data. The resulting object (a new // reference) will be equivalent to the original object. This field // is required. - xid_newobjectfunc new_object; + xid_newobjfunc new_object; // free is called when the data is released. If it is NULL then // nothing will be done to free the data. For some types this is // okay (e.g. 
bytes) and for those types this field should be set @@ -99,9 +100,26 @@ typedef int (*xidatafunc)(PyThreadState *tstate, PyObject *, _PyXIData_t *); typedef struct _xid_lookup_state _PyXIData_lookup_t; -PyAPI_FUNC(xidatafunc) _PyXIData_Lookup(PyObject *); -PyAPI_FUNC(int) _PyObject_CheckXIData(PyObject *); -PyAPI_FUNC(int) _PyObject_GetXIData(PyObject *, _PyXIData_t *); +typedef struct { + _PyXIData_lookup_t *global; + _PyXIData_lookup_t *local; + PyObject *PyExc_NotShareableError; +} _PyXIData_lookup_context_t; + +PyAPI_FUNC(int) _PyXIData_GetLookupContext( + PyInterpreterState *, + _PyXIData_lookup_context_t *); + +PyAPI_FUNC(xidatafunc) _PyXIData_Lookup( + _PyXIData_lookup_context_t *, + PyObject *); +PyAPI_FUNC(int) _PyObject_CheckXIData( + _PyXIData_lookup_context_t *, + PyObject *); +PyAPI_FUNC(int) _PyObject_GetXIData( + _PyXIData_lookup_context_t *, + PyObject *, + _PyXIData_t *); /* using cross-interpreter data */ @@ -116,11 +134,11 @@ PyAPI_FUNC(int) _PyXIData_ReleaseAndRawFree(_PyXIData_t *); PyAPI_FUNC(void) _PyXIData_Init( _PyXIData_t *data, PyInterpreterState *interp, void *shared, PyObject *obj, - xid_newobjectfunc new_object); + xid_newobjfunc new_object); PyAPI_FUNC(int) _PyXIData_InitWithSize( _PyXIData_t *, PyInterpreterState *interp, const size_t, PyObject *, - xid_newobjectfunc); + xid_newobjfunc); PyAPI_FUNC(void) _PyXIData_Clear( PyInterpreterState *, _PyXIData_t *); // Normally the Init* functions are sufficient. The only time @@ -154,25 +172,38 @@ PyAPI_FUNC(void) _PyXIData_Clear( PyInterpreterState *, _PyXIData_t *); /* runtime state & lifecycle */ /*****************************/ -struct _xi_runtime_state { +typedef struct { // builtin types _PyXIData_lookup_t data_lookup; -}; +} _PyXI_global_state_t; -struct _xi_state { +typedef struct { // heap types _PyXIData_lookup_t data_lookup; - // heap types - PyObject *PyExc_NotShareableError; -}; + struct xi_exceptions { + // static types + PyObject *PyExc_InterpreterError; + PyObject *PyExc_InterpreterNotFoundError; + // heap types + PyObject *PyExc_NotShareableError; + } exceptions; +} _PyXI_state_t; + +#define _PyXI_GET_GLOBAL_STATE(interp) (&(interp)->runtime->xi) +#define _PyXI_GET_STATE(interp) (&(interp)->xi) +#ifndef Py_BUILD_CORE_MODULE extern PyStatus _PyXI_Init(PyInterpreterState *interp); extern void _PyXI_Fini(PyInterpreterState *interp); extern PyStatus _PyXI_InitTypes(PyInterpreterState *interp); extern void _PyXI_FiniTypes(PyInterpreterState *interp); +#endif // Py_BUILD_CORE_MODULE -#define _PyInterpreterState_GetXIState(interp) (&(interp)->xi) +int _Py_xi_global_state_init(_PyXI_global_state_t *); +void _Py_xi_global_state_fini(_PyXI_global_state_t *); +int _Py_xi_state_init(_PyXI_state_t *, PyInterpreterState *); +void _Py_xi_state_fini(_PyXI_state_t *, PyInterpreterState *); /***************************/ diff --git a/Include/internal/pycore_crossinterp_data_registry.h b/Include/internal/pycore_crossinterp_data_registry.h index 2990c6af62e952a..bbad4de770857f1 100644 --- a/Include/internal/pycore_crossinterp_data_registry.h +++ b/Include/internal/pycore_crossinterp_data_registry.h @@ -7,30 +7,35 @@ // alternative would be to add a tp_* slot for a class's // xidatafunc. It would be simpler and more efficient. -struct _xidregitem; +struct _xid_regitem; -struct _xidregitem { - struct _xidregitem *prev; - struct _xidregitem *next; +typedef struct _xid_regitem { + struct _xid_regitem *prev; + struct _xid_regitem *next; /* This can be a dangling pointer, but only if weakref is set. 
*/ PyTypeObject *cls; /* This is NULL for builtin types. */ PyObject *weakref; size_t refcount; xidatafunc getdata; -}; +} _PyXIData_regitem_t; -struct _xidregistry { +typedef struct { int global; /* builtin types or heap types */ int initialized; PyMutex mutex; - struct _xidregitem *head; -}; + _PyXIData_regitem_t *head; +} _PyXIData_registry_t; -PyAPI_FUNC(int) _PyXIData_RegisterClass(PyTypeObject *, xidatafunc); -PyAPI_FUNC(int) _PyXIData_UnregisterClass(PyTypeObject *); +PyAPI_FUNC(int) _PyXIData_RegisterClass( + _PyXIData_lookup_context_t *, + PyTypeObject *, + xidatafunc); +PyAPI_FUNC(int) _PyXIData_UnregisterClass( + _PyXIData_lookup_context_t *, + PyTypeObject *); struct _xid_lookup_state { // XXX Remove this field once we have a tp_* slot. - struct _xidregistry registry; + _PyXIData_registry_t registry; }; diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index 290ba95e1a0ad7c..318c712bdfa1749 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -21,6 +21,7 @@ extern int _PyImport_SetModuleString(const char *name, PyObject* module); extern void _PyImport_AcquireLock(PyInterpreterState *interp); extern void _PyImport_ReleaseLock(PyInterpreterState *interp); +extern void _PyImport_ReInitLock(PyInterpreterState *interp); // This is used exclusively for the sys and builtins modules: extern int _PyImport_FixupBuiltin( diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 9e3b4299693bbc6..824b865eda60df8 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -16,7 +16,7 @@ extern "C" { #include "pycore_code.h" // struct callable_cache #include "pycore_codecs.h" // struct codecs_state #include "pycore_context.h" // struct _Py_context_state -#include "pycore_crossinterp.h" // struct _xidregistry +#include "pycore_crossinterp.h" // _PyXI_state_t #include "pycore_dict_state.h" // struct _Py_dict_state #include "pycore_dtoa.h" // struct _dtoa_state #include "pycore_exceptions.h" // struct _Py_exc_state @@ -205,7 +205,7 @@ struct _is { freefunc co_extra_freefuncs[MAX_CO_EXTRA_USERS]; /* cross-interpreter data and utils */ - struct _xi_state xi; + _PyXI_state_t xi; #ifdef HAVE_FORK PyObject *before_forkers; diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index fade55945b7dbf3..edcd75a55b686b7 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -141,6 +141,12 @@ _PyThreadState_GET(void) #endif } +static inline int +_PyThreadState_IsAttached(PyThreadState *tstate) +{ + return (_Py_atomic_load_int_relaxed(&tstate->state) == _Py_THREAD_ATTACHED); +} + // Attaches the current thread to the interpreter. 
// // This may block while acquiring the GIL (if the GIL is enabled) or while diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index 7f592aa6cf9f057..2f2cec22cf1589c 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -11,7 +11,7 @@ extern "C" { #include "pycore_atexit.h" // struct _atexit_runtime_state #include "pycore_audit.h" // _Py_AuditHookEntry #include "pycore_ceval_state.h" // struct _ceval_runtime_state -#include "pycore_crossinterp.h" // struct _xidregistry +#include "pycore_crossinterp.h" // _PyXI_global_state_t #include "pycore_debug_offsets.h" // _Py_DebugOffsets #include "pycore_faulthandler.h" // struct _faulthandler_runtime_state #include "pycore_floatobject.h" // struct _Py_float_runtime_state @@ -106,7 +106,7 @@ typedef struct pyruntimestate { tools. */ /* cross-interpreter data and utils */ - struct _xi_runtime_state xi; + _PyXI_global_state_t xi; struct _pymem_allocators allocators; struct _obmalloc_global_state obmalloc; diff --git a/InternalDocs/garbage_collector.md b/InternalDocs/garbage_collector.md index d624cf4befd31a8..377a846428ae0c1 100644 --- a/InternalDocs/garbage_collector.md +++ b/InternalDocs/garbage_collector.md @@ -108,7 +108,7 @@ As is explained later in the [Optimization: reusing fields to save memory](#optimization-reusing-fields-to-save-memory) section, these two extra fields are normally used to keep doubly linked lists of all the objects tracked by the garbage collector (these lists are the GC generations, more on -that in the [Optimization: generations](#Optimization-generations) section), but +that in the [Optimization: incremental collection](#Optimization-incremental-collection) section), but they are also reused to fulfill other purposes when the full doubly linked list structure is not needed as a memory optimization. @@ -351,38 +351,90 @@ follows these steps in order: the reference counts fall to 0, triggering the destruction of all unreachable objects. -Optimization: generations -========================= +Optimization: incremental collection +==================================== -In order to limit the time each garbage collection takes, the GC -implementation for the default build uses a popular optimization: -generations. The main idea behind this concept is the assumption that most -objects have a very short lifespan and can thus be collected soon after their -creation. This has proven to be very close to the reality of many Python +In order to bound the length of each garbage collection pause, the GC implementation +for the default build uses incremental collection with two generations. + +Generational garbage collection takes advantage of what is known as the weak +generational hypothesis: Most objects die young. +This has proven to be very close to the reality of many Python programs as many temporary objects are created and destroyed very quickly. To take advantage of this fact, all container objects are segregated into -three spaces/generations. Every new -object starts in the first generation (generation 0). The previous algorithm is -executed only over the objects of a particular generation and if an object -survives a collection of its generation it will be moved to the next one -(generation 1), where it will be surveyed for collection less often. If -the same object survives another GC round in this new generation (generation 1) -it will be moved to the last generation (generation 2) where it will be -surveyed the least often. 
- -The GC implementation for the free-threaded build does not use multiple -generations. Every collection operates on the entire heap. +two generations: young and old. Every new object starts in the young generation. +Each garbage collection scans the entire young generation and part of the old generation. + +The time taken to scan the young generation can be controlled by controlling its +size, but the size of the old generation cannot be controlled. +In order to keep pause times down, scanning of the old generation of the heap +occurs in increments. + +To keep track of what has been scanned, the old generation contains two lists: + +* Those objects that have not yet been scanned, referred to as the `pending` list. +* Those objects that have been scanned, referred to as the `visited` list. + +To detect and collect all unreachable objects in the heap, the garbage collector +must scan the whole heap. This whole heap scan is called a full scavenge. + +Increments +---------- + +Each full scavenge is performed in a series of increments. +For each full scavenge, the combined increments will cover the whole heap. + +Each increment is made up of: + +* The young generation +* The old generation's least recently scanned objects +* All objects reachable from those objects that have not yet been scanned this full scavenge + +The surviving objects (those that are not collected) are moved to the back of the +`visited` list in the old generation. + +When a full scavenge starts, no objects in the heap are considered to have been scanned, +so all objects in the old generation must be in the `pending` space. +When all objects in the heap have been scanned a cycle ends, and all objects are moved +to the `pending` list again. To avoid having to traverse the entire list, which list is +`pending` and which is `visited` is determined by a field in the `GCState` struct. +The `visited` and `pending` lists can be swapped by toggling this bit. + +Correctness +----------- + +The [algorithm for identifying cycles](#Identifying-reference-cycles) will find all +unreachable cycles in a list of objects, but will not find any cycles that are +even partly outside of that list. +Therefore, to be guaranteed that a full scavenge will find all unreachable cycles, +each cycle must be fully contained within a single increment. + +To make sure that no partial cycles are included in the increment we perform a +[transitive closure](https://en.wikipedia.org/wiki/Transitive_closure) +over reachable, unscanned objects from the initial increment. +Since the transitive closure of objects reachable from an object must be a (non-strict) +superset of any unreachable cycle including that object, we are guaranteed that a +transitive closure cannot contain any partial cycles. +We can exclude scanned objects, as they must have been reachable when scanned. +If a scanned object becomes part of an unreachable cycle after being scanned, it will +not be collected this at this time, but it will be collected in the next full scavenge. + +> [!NOTE] +> The GC implementation for the free-threaded build does not use incremental collection. +> Every collection operates on the entire heap. In order to decide when to run, the collector keeps track of the number of object allocations and deallocations since the last collection. When the number of -allocations minus the number of deallocations exceeds `threshold_0`, -collection starts. Initially only generation 0 is examined. 
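Purely to illustrate the `pending`/`visited` bookkeeping described above, here is a toy, pure-Python sketch. The class and method names are invented for this example; CPython's real collector is written in C and also performs the actual cycle detection and freeing, which this model omits.

```python
from collections import deque

class ToyIncrementalGC:
    """Simplified model: objects only move between lists; nothing is
    actually scanned or freed."""

    def __init__(self):
        self.young = deque()     # every new object starts here
        self.pending = deque()   # old objects not yet scanned this scavenge
        self.visited = deque()   # old objects already scanned this scavenge

    def new_object(self, obj):
        self.young.append(obj)

    def increment(self, old_slice=2):
        # One increment: the whole young generation plus the least
        # recently scanned objects of the old generation.  (The real
        # collector also pulls in the transitive closure of unscanned
        # reachable objects and frees the unreachable ones.)
        work = list(self.young)
        self.young.clear()
        for _ in range(min(old_slice, len(self.pending))):
            work.append(self.pending.popleft())
        # Survivors are appended to the back of the visited list.
        self.visited.extend(work)
        # Once nothing is pending, the full scavenge is complete and the
        # roles of the two lists are swapped (CPython toggles a bit in
        # the GCState struct instead of moving objects).
        if not self.pending:
            self.pending, self.visited = self.visited, deque()


gc_model = ToyIncrementalGC()
for i in range(4):
    gc_model.new_object(i)
gc_model.increment()          # scans the four young objects
print(len(gc_model.pending))  # 4 -- scavenge finished, lists swapped
```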
If generation 0 has -been examined more than `threshold_1` times since generation 1 has been -examined, then generation 1 is examined as well. With generation 2, -things are a bit more complicated; see -[Collecting the oldest generation](#Collecting-the-oldest-generation) for -more information. These thresholds can be examined using the +allocations minus the number of deallocations exceeds `threshold0`, +collection starts. `threshold1` determines the fraction of the old +collection that is included in the increment. +The fraction is inversely proportional to `threshold1`, +as historically a larger `threshold1` meant that old generation +collections were performed less frequently. +`threshold2` is ignored. + +These thresholds can be examined using the [`gc.get_threshold()`](https://docs.python.org/3/library/gc.html#gc.get_threshold) function: @@ -402,8 +454,8 @@ specifically in a generation by calling `gc.collect(generation=NUM)`. ... pass ... - # Move everything to the last generation so it's easier to inspect - # the younger generations. + # Move everything to the old generation so it's easier to inspect + # the young generation. >>> gc.collect() 0 @@ -413,40 +465,24 @@ specifically in a generation by calling `gc.collect(generation=NUM)`. >>> x = MyObj() >>> x.self = x - # Initially the object is in the youngest generation. + # Initially the object is in the young generation. >>> gc.get_objects(generation=0) [..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] # After a collection of the youngest generation the object - # moves to the next generation. + # moves to the old generation. >>> gc.collect(generation=0) 0 >>> gc.get_objects(generation=0) [] >>> gc.get_objects(generation=1) + [] + >>> gc.get_objects(generation=2) [..., <__main__.MyObj object at 0x7fbcc12a3400>, ...] ``` -Collecting the oldest generation --------------------------------- - -In addition to the various configurable thresholds, the GC only triggers a full -collection of the oldest generation if the ratio `long_lived_pending / long_lived_total` -is above a given value (hardwired to 25%). The reason is that, while "non-full" -collections (that is, collections of the young and middle generations) will always -examine roughly the same number of objects (determined by the aforementioned -thresholds) the cost of a full collection is proportional to the total -number of long-lived objects, which is virtually unbounded. Indeed, it has -been remarked that doing a full collection every of object -creations entails a dramatic performance degradation in workloads which consist -of creating and storing lots of long-lived objects (for example, building a large list -of GC-tracked objects would show quadratic performance, instead of linear as -expected). Using the above ratio, instead, yields amortized linear performance -in the total number of objects (the effect of which can be summarized thusly: -"each full garbage collection is more and more costly as the number of objects -grows, but we do fewer and fewer of them"). Optimization: reusing fields to save memory =========================================== @@ -588,9 +624,9 @@ heap. be more difficult. 
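The threshold semantics described here can be observed and tuned from Python via the `gc` module. A minimal sketch follows; the default values printed depend on the interpreter version.

```python
import gc

# (threshold0, threshold1, threshold2) -- with the incremental collector
# described above, threshold0 still controls how many net allocations
# trigger a collection, threshold1 (inversely) controls how large a
# slice of the old generation each increment scans, and threshold2 is
# ignored.
print(gc.get_threshold())

# Make collections less frequent and each increment's old-generation
# slice smaller; threshold2 is left unchanged.
gc.set_threshold(1400, 20)
print(gc.get_threshold())

# gc.collect() still accepts a generation number: 0 collects only the
# young generation, 2 forces a full collection.
gc.collect(0)
gc.collect(2)
```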
-> [!NOTE] +> [!NOTE] > **Document history** -> +> > Pablo Galindo Salgado - Original author -> +> > Irit Katriel - Convert to Markdown diff --git a/LICENSE b/LICENSE index 14603b95c2e23b5..20cf39097c68baa 100644 --- a/LICENSE +++ b/LICENSE @@ -83,7 +83,7 @@ grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved" +i.e., "Copyright (c) 2001 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on diff --git a/Lib/_pyrepl/_minimal_curses.py b/Lib/_pyrepl/_minimal_curses.py index 849617bf7585e46..d884f880f50ac75 100644 --- a/Lib/_pyrepl/_minimal_curses.py +++ b/Lib/_pyrepl/_minimal_curses.py @@ -34,7 +34,7 @@ def _find_clib() -> str: clib.setupterm.restype = ctypes.c_int clib.tigetstr.argtypes = [ctypes.c_char_p] -clib.tigetstr.restype = ctypes.POINTER(ctypes.c_char) +clib.tigetstr.restype = ctypes.c_ssize_t clib.tparm.argtypes = [ctypes.c_char_p] + 9 * [ctypes.c_int] # type: ignore[operator] clib.tparm.restype = ctypes.c_char_p @@ -56,7 +56,7 @@ def tigetstr(cap): if not isinstance(cap, bytes): cap = cap.encode("ascii") result = clib.tigetstr(cap) - if ctypes.cast(result, ctypes.c_void_p).value == ERR: + if result == ERR: return None return ctypes.cast(result, ctypes.c_char_p).value diff --git a/Lib/argparse.py b/Lib/argparse.py index 072cd5e7dc0d06e..5ecfdca17175e38 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -863,6 +863,9 @@ def __init__(self, _option_strings.append(option_string) if option_string.startswith('--'): + if option_string.startswith('--no-'): + raise ValueError(f'invalid option name {option_string!r} ' + f'for BooleanOptionalAction') option_string = '--no-' + option_string[2:] _option_strings.append(option_string) diff --git a/Lib/email/__init__.py b/Lib/email/__init__.py index 9fa477830041859..6d597006e5eefe1 100644 --- a/Lib/email/__init__.py +++ b/Lib/email/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py index 36625e35ffb6a7e..84917038874ba1f 100644 --- a/Lib/email/_parseaddr.py +++ b/Lib/email/_parseaddr.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2007 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Contact: email-sig@python.org """Email address parsing code. 
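As a quick illustration of the guard added to `argparse.BooleanOptionalAction` above (the option names here are invented for the example, and the `ValueError` is new behaviour introduced by this change):

```python
import argparse

parser = argparse.ArgumentParser()
# A regular flag still gets an automatic "--no-" counterpart.
parser.add_argument('--feature', action=argparse.BooleanOptionalAction)

print(parser.parse_args(['--feature']))      # Namespace(feature=True)
print(parser.parse_args(['--no-feature']))   # Namespace(feature=False)

# Names that already start with "--no-" are now rejected rather than
# silently generating a confusing "--no-no-..." alias.
try:
    parser.add_argument('--no-cache', action=argparse.BooleanOptionalAction)
except ValueError as exc:
    print(exc)   # invalid option name '--no-cache' for BooleanOptionalAction
```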
diff --git a/Lib/email/base64mime.py b/Lib/email/base64mime.py index d440de95255bf13..a5a3f737a97b519 100644 --- a/Lib/email/base64mime.py +++ b/Lib/email/base64mime.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2007 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Ben Gertzfield # Contact: email-sig@python.org diff --git a/Lib/email/charset.py b/Lib/email/charset.py index cfd5a0c456e497c..5036c3f58a5633c 100644 --- a/Lib/email/charset.py +++ b/Lib/email/charset.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Ben Gertzfield, Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/encoders.py b/Lib/email/encoders.py index 17bd1ab7b19f325..55741a22a07b204 100644 --- a/Lib/email/encoders.py +++ b/Lib/email/encoders.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/errors.py b/Lib/email/errors.py index 02aa5eced6ae461..6bc744bd59c5bb4 100644 --- a/Lib/email/errors.py +++ b/Lib/email/errors.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/feedparser.py b/Lib/email/feedparser.py index 06d6b4a3afcd070..b2bc4afc1cc26f1 100644 --- a/Lib/email/feedparser.py +++ b/Lib/email/feedparser.py @@ -1,4 +1,4 @@ -# Copyright (C) 2004-2006 Python Software Foundation +# Copyright (C) 2004 Python Software Foundation # Authors: Baxter, Wouters and Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/generator.py b/Lib/email/generator.py index 205caf0fe9e81db..ab5bd0653e440c4 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2010 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/header.py b/Lib/email/header.py index 66a1d46db50c459..113a81f41314ec1 100644 --- a/Lib/email/header.py +++ b/Lib/email/header.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2007 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Ben Gertzfield, Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/iterators.py b/Lib/email/iterators.py index 2f436aefc2300b6..08ede3ec679613a 100644 --- a/Lib/email/iterators.py +++ b/Lib/email/iterators.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/message.py b/Lib/email/message.py index 08192c50a8ff5cb..a58afc5fe5f68e5 100644 --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/application.py b/Lib/email/mime/application.py index f67cbad3f034076..9a9d213d2a940d1 100644 --- a/Lib/email/mime/application.py +++ b/Lib/email/mime/application.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Keith Dart # Contact: email-sig@python.org diff --git a/Lib/email/mime/audio.py b/Lib/email/mime/audio.py index aa0c4905cbb2b4d..85f4a955238c520 100644 --- 
a/Lib/email/mime/audio.py +++ b/Lib/email/mime/audio.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Anthony Baxter # Contact: email-sig@python.org diff --git a/Lib/email/mime/base.py b/Lib/email/mime/base.py index f601f621cec3933..da4c6e591a5cb85 100644 --- a/Lib/email/mime/base.py +++ b/Lib/email/mime/base.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/image.py b/Lib/email/mime/image.py index 4b7f2f9cbad4252..dab9685848172b3 100644 --- a/Lib/email/mime/image.py +++ b/Lib/email/mime/image.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/message.py b/Lib/email/mime/message.py index 61836b5a7861fca..13d9ff599f86dbb 100644 --- a/Lib/email/mime/message.py +++ b/Lib/email/mime/message.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/multipart.py b/Lib/email/mime/multipart.py index 47fc218e1ae032c..1abb84d5fed0bb2 100644 --- a/Lib/email/mime/multipart.py +++ b/Lib/email/mime/multipart.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2006 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/nonmultipart.py b/Lib/email/mime/nonmultipart.py index a41386eb148c0c8..5beab3a441e2bc6 100644 --- a/Lib/email/mime/nonmultipart.py +++ b/Lib/email/mime/nonmultipart.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2006 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/mime/text.py b/Lib/email/mime/text.py index 7672b7891386009..aa4da7f8217e433 100644 --- a/Lib/email/mime/text.py +++ b/Lib/email/mime/text.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/email/parser.py b/Lib/email/parser.py index 475aa2b1a66680f..039f03cba74fa0c 100644 --- a/Lib/email/parser.py +++ b/Lib/email/parser.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2007 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw, Thomas Wouters, Anthony Baxter # Contact: email-sig@python.org diff --git a/Lib/email/quoprimime.py b/Lib/email/quoprimime.py index 500bbc5151769d3..27c7ea55c7871fd 100644 --- a/Lib/email/quoprimime.py +++ b/Lib/email/quoprimime.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2006 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Ben Gertzfield # Contact: email-sig@python.org diff --git a/Lib/email/utils.py b/Lib/email/utils.py index f276303197396b1..7eab74dc0db9df6 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2010 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Author: Barry Warsaw # Contact: email-sig@python.org diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 585afc85836c065..645ad998129348e 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -10,7 +10,7 
@@ __all__ = ["version", "bootstrap"] -_PIP_VERSION = "24.2" +_PIP_VERSION = "24.3.1" # Directory of system wheel packages. Some Linux distribution packaging # policies recommend against bundling dependencies. For example, Fedora diff --git a/Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl b/Lib/ensurepip/_bundled/pip-24.3.1-py3-none-any.whl similarity index 84% rename from Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl rename to Lib/ensurepip/_bundled/pip-24.3.1-py3-none-any.whl index 542cdd1e7284ae5..5f1d35be6dd56b0 100644 Binary files a/Lib/ensurepip/_bundled/pip-24.2-py3-none-any.whl and b/Lib/ensurepip/_bundled/pip-24.3.1-py3-none-any.whl differ diff --git a/Lib/functools.py b/Lib/functools.py index 27abd622a8cff1e..eff6540c7f606e0 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -6,7 +6,7 @@ # Written by Nick Coghlan , # Raymond Hettinger , # and Łukasz Langa . -# Copyright (C) 2006-2024 Python Software Foundation. +# Copyright (C) 2006 Python Software Foundation. # See C source code for _functools credits/copyright __all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES', @@ -236,7 +236,7 @@ def __ge__(self, other): def reduce(function, sequence, initial=_initial_missing): """ - reduce(function, iterable[, initial], /) -> value + reduce(function, iterable, /[, initial]) -> value Apply a function of two arguments cumulatively to the items of an iterable, from left to right. diff --git a/Lib/getopt.py b/Lib/getopt.py index 1df5b96472a45cf..a9c452a601ee816 100644 --- a/Lib/getopt.py +++ b/Lib/getopt.py @@ -24,10 +24,6 @@ # TODO for gnu_getopt(): # # - GNU getopt_long_only mechanism -# - allow the caller to specify ordering -# - RETURN_IN_ORDER option -# - GNU extension with '-' as first character of option string -# - optional arguments, specified by double colons # - an option string with a W followed by semicolon should # treat "-W foo" as "--foo" @@ -58,12 +54,14 @@ def getopt(args, shortopts, longopts = []): running program. Typically, this means "sys.argv[1:]". shortopts is the string of option letters that the script wants to recognize, with options that require an argument followed by a - colon (i.e., the same format that Unix getopt() uses). If + colon and options that accept an optional argument followed by + two colons (i.e., the same format that Unix getopt() uses). If specified, longopts is a list of strings with the names of the long options which should be supported. The leading '--' characters should not be included in the option name. Options which require an argument should be followed by an equal sign - ('='). + ('='). Options which accept an optional argument should be + followed by an equal sign and question mark ('=?'). The return value consists of two elements: the first is a list of (option, value) pairs; the second is the list of program arguments @@ -115,8 +113,13 @@ def gnu_getopt(args, shortopts, longopts = []): else: longopts = list(longopts) + return_in_order = False + if shortopts.startswith('-'): + shortopts = shortopts[1:] + all_options_first = False + return_in_order = True # Allow options after non-option arguments? 
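The updated `reduce` docstring above reflects that `initial` may now also be supplied as a keyword argument (previously it was positional-only). A small illustration, assuming the patched `functools`:

```python
import functools
import operator

# Equivalent calls: positionally or by keyword.
print(functools.reduce(operator.add, [1, 2, 3], 10))          # 16
print(functools.reduce(operator.add, [1, 2, 3], initial=10))  # 16

# With an empty iterable the initial value is returned unchanged and
# the function is never called.
print(functools.reduce(operator.add, [], initial=0))          # 0
```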
- if shortopts.startswith('+'): + elif shortopts.startswith('+'): shortopts = shortopts[1:] all_options_first = True elif os.environ.get("POSIXLY_CORRECT"): @@ -130,8 +133,14 @@ def gnu_getopt(args, shortopts, longopts = []): break if args[0][:2] == '--': + if return_in_order and prog_args: + opts.append((None, prog_args)) + prog_args = [] opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) elif args[0][:1] == '-' and args[0] != '-': + if return_in_order and prog_args: + opts.append((None, prog_args)) + prog_args = [] opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) else: if all_options_first: @@ -153,7 +162,7 @@ def do_longs(opts, opt, longopts, args): has_arg, opt = long_has_args(opt, longopts) if has_arg: - if optarg is None: + if optarg is None and has_arg != '?': if not args: raise GetoptError(_('option --%s requires argument') % opt, opt) optarg, args = args[0], args[1:] @@ -174,6 +183,8 @@ def long_has_args(opt, longopts): return False, opt elif opt + '=' in possibilities: return True, opt + elif opt + '=?' in possibilities: + return '?', opt # No exact match, so better be unique. if len(possibilities) > 1: # XXX since possibilities contains all valid continuations, might be @@ -181,6 +192,8 @@ def long_has_args(opt, longopts): raise GetoptError(_('option --%s not a unique prefix') % opt, opt) assert len(possibilities) == 1 unique_match = possibilities[0] + if unique_match.endswith('=?'): + return '?', unique_match[:-2] has_arg = unique_match.endswith('=') if has_arg: unique_match = unique_match[:-1] @@ -189,8 +202,9 @@ def long_has_args(opt, longopts): def do_shorts(opts, optstring, shortopts, args): while optstring != '': opt, optstring = optstring[0], optstring[1:] - if short_has_arg(opt, shortopts): - if optstring == '': + has_arg = short_has_arg(opt, shortopts) + if has_arg: + if optstring == '' and has_arg != '?': if not args: raise GetoptError(_('option -%s requires argument') % opt, opt) @@ -204,7 +218,11 @@ def do_shorts(opts, optstring, shortopts, args): def short_has_arg(opt, shortopts): for i in range(len(shortopts)): if opt == shortopts[i] != ':': - return shortopts.startswith(':', i+1) + if not shortopts.startswith(':', i+1): + return False + if shortopts.startswith('::', i+1): + return '?' + return True raise GetoptError(_('option -%s not recognized') % opt, opt) if __name__ == '__main__': diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 1b76328429f63a2..fa36159711846fc 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -209,7 +209,11 @@ def _write_atomic(path, data, mode=0o666): # We first write data to a temporary file, and then use os.replace() to # perform an atomic rename. with _io.FileIO(fd, 'wb') as file: - file.write(data) + bytes_written = file.write(data) + if bytes_written != len(data): + # Raise an OSError so the 'except' below cleans up the partially + # written file. 
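To summarise the two `getopt` features added above, here is a sketch assuming the patched `getopt` module. The first example mirrors the inputs used by the new tests later in this patch, so the printed results are exactly what those tests assert; the second is a shorter variant of the return-in-order test.

```python
import getopt

# Optional arguments: '::' after a short option and '=?' after a long
# option.  An optional argument must be attached to the option itself
# ('-a1', '--alpha=2'); a bare '-a' or '--alpha' yields ''.
argv = ['-a1', '--alpha=2', '--alpha=', '-a', '--alpha', 'arg1', 'arg2']
opts, args = getopt.getopt(argv, 'a::', ['alpha=?'])
print(opts)
# [('-a', '1'), ('--alpha', '2'), ('--alpha', ''), ('-a', ''), ('--alpha', '')]
print(args)
# ['arg1', 'arg2']

# Return-in-order mode: a leading '-' in the short-option string makes
# gnu_getopt() report positional arguments where they occur, as
# (None, [args]) pairs, instead of collecting them all at the end.
argv = ['-a', 'arg1', '-b', '1', 'arg2']
opts, args = getopt.gnu_getopt(argv, '-ab:')
print(opts)   # [('-a', ''), (None, ['arg1']), ('-b', '1')]
print(args)   # ['arg2']
```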
+ raise OSError("os.write() didn't write the full pyc file") _os.replace(path_tmp, path) except OSError: try: diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index d7c4e8444f8decf..210d2264757d084 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -479,6 +479,7 @@ def _default_mime_types(): '.m3u8' : 'application/vnd.apple.mpegurl', '.xls' : 'application/vnd.ms-excel', '.xlb' : 'application/vnd.ms-excel', + '.eot' : 'application/vnd.ms-fontobject', '.ppt' : 'application/vnd.ms-powerpoint', '.pot' : 'application/vnd.ms-powerpoint', '.ppa' : 'application/vnd.ms-powerpoint', @@ -534,6 +535,7 @@ def _default_mime_types(): '.ass' : 'audio/aac', '.au' : 'audio/basic', '.snd' : 'audio/basic', + '.mka' : 'audio/matroska', '.mp3' : 'audio/mpeg', '.mp2' : 'audio/mpeg', '.opus' : 'audio/opus', @@ -542,6 +544,10 @@ def _default_mime_types(): '.aiff' : 'audio/x-aiff', '.ra' : 'audio/x-pn-realaudio', '.wav' : 'audio/x-wav', + '.otf' : 'font/otf', + '.ttf' : 'font/ttf', + '.woff' : 'font/woff', + '.woff2' : 'font/woff2', '.avif' : 'image/avif', '.bmp' : 'image/bmp', '.gif' : 'image/gif', @@ -595,6 +601,8 @@ def _default_mime_types(): '.sgml' : 'text/x-sgml', '.vcf' : 'text/x-vcard', '.xml' : 'text/xml', + '.mkv' : 'video/matroska', + '.mk3d' : 'video/matroska-3d', '.mp4' : 'video/mp4', '.mpeg' : 'video/mpeg', '.m1v' : 'video/mpeg', diff --git a/Lib/multiprocessing/managers.py b/Lib/multiprocessing/managers.py index a5d2f53613952e1..040f4674d735c04 100644 --- a/Lib/multiprocessing/managers.py +++ b/Lib/multiprocessing/managers.py @@ -759,22 +759,29 @@ class BaseProxy(object): _address_to_local = {} _mutex = util.ForkAwareThreadLock() + # Each instance gets a `_serial` number. Unlike `id(...)`, this number + # is never reused. + _next_serial = 1 + def __init__(self, token, serializer, manager=None, authkey=None, exposed=None, incref=True, manager_owned=False): with BaseProxy._mutex: - tls_idset = BaseProxy._address_to_local.get(token.address, None) - if tls_idset is None: - tls_idset = util.ForkAwareLocal(), ProcessLocalSet() - BaseProxy._address_to_local[token.address] = tls_idset + tls_serials = BaseProxy._address_to_local.get(token.address, None) + if tls_serials is None: + tls_serials = util.ForkAwareLocal(), ProcessLocalSet() + BaseProxy._address_to_local[token.address] = tls_serials + + self._serial = BaseProxy._next_serial + BaseProxy._next_serial += 1 # self._tls is used to record the connection used by this # thread to communicate with the manager at token.address - self._tls = tls_idset[0] + self._tls = tls_serials[0] - # self._idset is used to record the identities of all shared - # objects for which the current process owns references and + # self._all_serials is a set used to record the identities of all + # shared objects for which the current process owns references and # which are in the manager at token.address - self._idset = tls_idset[1] + self._all_serials = tls_serials[1] self._token = token self._id = self._token.id @@ -857,20 +864,20 @@ def _incref(self): dispatch(conn, None, 'incref', (self._id,)) util.debug('INCREF %r', self._token.id) - self._idset.add(self._id) + self._all_serials.add(self._serial) state = self._manager and self._manager._state self._close = util.Finalize( self, BaseProxy._decref, - args=(self._token, self._authkey, state, - self._tls, self._idset, self._Client), + args=(self._token, self._serial, self._authkey, state, + self._tls, self._all_serials, self._Client), exitpriority=10 ) @staticmethod - def _decref(token, authkey, state, tls, idset, _Client): - 
idset.discard(token.id) + def _decref(token, serial, authkey, state, tls, idset, _Client): + idset.discard(serial) # check whether manager is still alive if state is None or state.value == State.STARTED: diff --git a/Lib/ntpath.py b/Lib/ntpath.py index 1b1873f08b608bf..5481bb8888ef591 100644 --- a/Lib/ntpath.py +++ b/Lib/ntpath.py @@ -553,28 +553,21 @@ def normpath(path): return prefix + sep.join(comps) -def _abspath_fallback(path): - """Return the absolute version of a path as a fallback function in case - `nt._getfullpathname` is not available or raises OSError. See bpo-31047 for - more. - - """ - - path = os.fspath(path) - if not isabs(path): - if isinstance(path, bytes): - cwd = os.getcwdb() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - # Return an absolute path. try: from nt import _getfullpathname except ImportError: # not running on Windows - mock up something sensible - abspath = _abspath_fallback + def abspath(path): + """Return the absolute version of a path.""" + path = os.fspath(path) + if not isabs(path): + if isinstance(path, bytes): + cwd = os.getcwdb() + else: + cwd = os.getcwd() + path = join(cwd, path) + return normpath(path) else: # use native Windows method on Windows def abspath(path): @@ -582,7 +575,27 @@ def abspath(path): try: return _getfullpathname(normpath(path)) except (OSError, ValueError): - return _abspath_fallback(path) + # See gh-75230, handle outside for cleaner traceback + pass + path = os.fspath(path) + if not isabs(path): + if isinstance(path, bytes): + sep = b'\\' + getcwd = os.getcwdb + else: + sep = '\\' + getcwd = os.getcwd + drive, root, path = splitroot(path) + # Either drive or root can be nonempty, but not both. + if drive or root: + try: + path = join(_getfullpathname(drive + root), path) + except (OSError, ValueError): + # Drive "\0:" cannot exist; use the root directory. + path = drive + sep + path + else: + path = join(getcwd(), path) + return normpath(path) try: from nt import _findfirstfile, _getfinalpathname, readlink as _nt_readlink diff --git a/Lib/nturl2path.py b/Lib/nturl2path.py index 2f9fec7893afd1b..9ecabff21c33e14 100644 --- a/Lib/nturl2path.py +++ b/Lib/nturl2path.py @@ -44,20 +44,21 @@ def pathname2url(p): import urllib.parse # First, clean up some special forms. We are going to sacrifice # the additional information anyway - if p[:4] == '\\\\?\\': + p = p.replace('\\', '/') + if p[:4] == '//?/': p = p[4:] - if p[:4].upper() == 'UNC\\': - p = '\\\\' + p[4:] + if p[:4].upper() == 'UNC/': + p = '//' + p[4:] elif p[1:2] != ':': raise OSError('Bad path: ' + p) if not ':' in p: - # No drive specifier, just convert slashes and quote the name - return urllib.parse.quote(p.replace('\\', '/')) + # No DOS drive specified, just quote the pathname + return urllib.parse.quote(p) comp = p.split(':', maxsplit=2) if len(comp) != 2 or len(comp[0]) > 1: error = 'Bad path: ' + p raise OSError(error) drive = urllib.parse.quote(comp[0].upper()) - tail = urllib.parse.quote(comp[1].replace('\\', '/')) + tail = urllib.parse.quote(comp[1]) return '///' + drive + ':' + tail diff --git a/Lib/optparse.py b/Lib/optparse.py index 04112eca37c801a..cbe3451ced8bc37 100644 --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -43,7 +43,7 @@ __copyright__ = """ Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved. -Copyright (c) 2002-2006 Python Software Foundation. All rights reserved. +Copyright (c) 2002 Python Software Foundation. All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are diff --git a/Lib/posixpath.py b/Lib/posixpath.py index fccca4e066b76f1..db72ded88260565 100644 --- a/Lib/posixpath.py +++ b/Lib/posixpath.py @@ -412,6 +412,10 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, # very fast way of spelling list(reversed(...)). rest = filename.split(sep)[::-1] + # Number of unprocessed parts in 'rest'. This can differ from len(rest) + # later, because 'rest' might contain markers for unresolved symlinks. + part_count = len(rest) + # The resolved path, which is absolute throughout this function. # Note: getcwd() returns a normalized and symlink-free path. path = sep if filename.startswith(sep) else getcwd() @@ -426,12 +430,13 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, # by *maxlinks*, this is used instead of *seen* to detect symlink loops. link_count = 0 - while rest: + while part_count: name = rest.pop() if name is None: # resolved symlink target seen[rest.pop()] = path continue + part_count -= 1 if not name or name == curdir: # current dir continue @@ -444,8 +449,11 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, else: newpath = path + sep + name try: - st = lstat(newpath) - if not stat.S_ISLNK(st.st_mode): + st_mode = lstat(newpath).st_mode + if not stat.S_ISLNK(st_mode): + if strict and part_count and not stat.S_ISDIR(st_mode): + raise OSError(errno.ENOTDIR, os.strerror(errno.ENOTDIR), + newpath) path = newpath continue elif maxlinks is not None: @@ -487,7 +495,9 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir, rest.append(newpath) rest.append(None) # Push the unresolved symlink target parts onto the stack. - rest.extend(target.split(sep)[::-1]) + target_parts = target.split(sep)[::-1] + rest.extend(target_parts) + part_count += len(target_parts) return path diff --git a/Lib/re/_compiler.py b/Lib/re/_compiler.py index 29109f8812ee7be..20dd561d1c1520f 100644 --- a/Lib/re/_compiler.py +++ b/Lib/re/_compiler.py @@ -255,11 +255,11 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): while True: try: if op is LITERAL: - if fixup: - lo = fixup(av) - charmap[lo] = 1 - if fixes and lo in fixes: - for k in fixes[lo]: + if fixup: # IGNORECASE and not LOCALE + av = fixup(av) + charmap[av] = 1 + if fixes and av in fixes: + for k in fixes[av]: charmap[k] = 1 if not hascased and iscased(av): hascased = True @@ -267,7 +267,7 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): charmap[av] = 1 elif op is RANGE: r = range(av[0], av[1]+1) - if fixup: + if fixup: # IGNORECASE and not LOCALE if fixes: for i in map(fixup, r): charmap[i] = 1 @@ -298,8 +298,7 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): # Character set contains non-BMP character codes. # For range, all BMP characters in the range are already # proceeded. - if fixup: - hascased = True + if fixup: # IGNORECASE and not LOCALE # For now, IN_UNI_IGNORE+LITERAL and # IN_UNI_IGNORE+RANGE_UNI_IGNORE work for all non-BMP # characters, because two characters (at least one of @@ -310,7 +309,13 @@ def _optimize_charset(charset, iscased=None, fixup=None, fixes=None): # Also, both c.lower() and c.lower().upper() are single # characters for every non-BMP character. 
if op is RANGE: - op = RANGE_UNI_IGNORE + if fixes: # not ASCII + op = RANGE_UNI_IGNORE + hascased = True + else: + assert op is LITERAL + if not hascased and iscased(av): + hascased = True tail.append((op, av)) break diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index bcb024d8386fd14..8329a848a900883 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -2464,6 +2464,19 @@ def test_list_isinstance(self): a = self.list() self.assertIsInstance(a, collections.abc.MutableSequence) + # MutableSequence also has __iter__, but we can iterate over + # ListProxy using __getitem__ instead. Adding __iter__ to ListProxy + # would change the behavior of a list modified during iteration. + mutable_sequence_methods = ( + '__contains__', '__delitem__', '__getitem__', '__iadd__', + '__len__', '__reversed__', '__setitem__', 'append', + 'clear', 'count', 'extend', 'index', 'insert', 'pop', 'remove', + 'reverse', + ) + for name in mutable_sequence_methods: + with self.subTest(name=name): + self.assertTrue(callable(getattr(a, name))) + def test_list_iter(self): a = self.list(list(range(10))) it = iter(a) @@ -2508,6 +2521,15 @@ def test_dict_isinstance(self): a = self.dict() self.assertIsInstance(a, collections.abc.MutableMapping) + mutable_mapping_methods = ( + '__contains__', '__delitem__', '__eq__', '__getitem__', '__iter__', + '__len__', '__ne__', '__setitem__', 'clear', 'get', 'items', + 'keys', 'pop', 'popitem', 'setdefault', 'update', 'values', + ) + for name in mutable_mapping_methods: + with self.subTest(name=name): + self.assertTrue(callable(getattr(a, name))) + def test_dict_iter(self): d = self.dict() indices = list(range(65, 70)) diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index 8bef04cba811388..0c94fcc19070711 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -148,7 +148,7 @@ def __init__(self, **kwargs) -> None: self.randomize = False self.fromfile = None self.fail_env_changed = False - self.use_resources = None + self.use_resources: list[str] = [] self.trace = False self.coverdir = 'coverage' self.runleaks = False @@ -403,8 +403,6 @@ def _parse_args(args, **kwargs): raise TypeError('%r is an invalid keyword argument ' 'for this function' % k) setattr(ns, k, v) - if ns.use_resources is None: - ns.use_resources = [] parser = _create_parser() # Issue #14191: argparse doesn't support "intermixed" positional and diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 133eba8ffe8e698..49209b0cec756ec 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -123,7 +123,7 @@ def __init__(self, ns: Namespace, _add_python_opts: bool = False): self.python_cmd = None self.coverage: bool = ns.trace self.coverage_dir: StrPath | None = ns.coverdir - self.tmp_dir: StrPath | None = ns.tempdir + self._tmp_dir: StrPath | None = ns.tempdir # Randomize self.randomize: bool = ns.randomize @@ -159,6 +159,8 @@ def log(self, line: str = '') -> None: self.logger.log(line) def find_tests(self, tests: TestList | None = None) -> tuple[TestTuple, TestList | None]: + if tests is None: + tests = [] if self.single_test_run: self.next_single_filename = os.path.join(self.tmp_dir, 'pynexttest') try: @@ -454,6 +456,11 @@ def finalize_tests(self, coverage: trace.CoverageResults | None) -> None: self.results.write_junit(self.junit_filename) def display_summary(self) -> None: + if self.first_runtests is None: + raise ValueError( + "Should never 
call `display_summary()` before calling `_run_test()`" + ) + duration = time.perf_counter() - self.logger.start_time filtered = bool(self.match_tests) @@ -708,7 +715,15 @@ def _init(self): strip_py_suffix(self.cmdline_args) - self.tmp_dir = get_temp_dir(self.tmp_dir) + self._tmp_dir = get_temp_dir(self._tmp_dir) + + @property + def tmp_dir(self) -> StrPath: + if self._tmp_dir is None: + raise ValueError( + "Should never use `.tmp_dir` before calling `.main()`" + ) + return self._tmp_dir def main(self, tests: TestList | None = None) -> NoReturn: if self.want_add_python_opts: diff --git a/Lib/test/libregrtest/mypy.ini b/Lib/test/libregrtest/mypy.ini index 22c7c7a9acef148..da75a27158a6003 100644 --- a/Lib/test/libregrtest/mypy.ini +++ b/Lib/test/libregrtest/mypy.ini @@ -22,10 +22,8 @@ disallow_untyped_defs = False check_untyped_defs = False warn_return_any = False -disable_error_code = return - # Enable --strict-optional for these ASAP: -[mypy-Lib.test.libregrtest.main.*,Lib.test.libregrtest.run_workers.*] +[mypy-Lib.test.libregrtest.run_workers.*] strict_optional = False # Various internal modules that typeshed deliberately doesn't have stubs for: diff --git a/Lib/test/libregrtest/run_workers.py b/Lib/test/libregrtest/run_workers.py index 387ddf9614cf793..dcc817ae9aceb62 100644 --- a/Lib/test/libregrtest/run_workers.py +++ b/Lib/test/libregrtest/run_workers.py @@ -211,6 +211,7 @@ def _run_process(self, runtests: WorkerRunTests, output_fd: int, # on reading closed stdout raise ExitThread raise + return None except: self._kill() raise @@ -544,6 +545,7 @@ def _get_result(self) -> QueueOutput | None: running = get_running(self.workers) if running: self.log(running) + return None def display_result(self, mp_result: MultiprocessResult) -> None: result = mp_result.result diff --git a/Lib/test/support/i18n_helper.py b/Lib/test/support/i18n_helper.py new file mode 100644 index 000000000000000..2e304f29e8ba7fd --- /dev/null +++ b/Lib/test/support/i18n_helper.py @@ -0,0 +1,63 @@ +import re +import subprocess +import sys +import unittest +from pathlib import Path +from test.support import REPO_ROOT, TEST_HOME_DIR, requires_subprocess +from test.test_tools import skip_if_missing + + +pygettext = Path(REPO_ROOT) / 'Tools' / 'i18n' / 'pygettext.py' + +msgid_pattern = re.compile(r'msgid(.*?)(?:msgid_plural|msgctxt|msgstr)', + re.DOTALL) +msgid_string_pattern = re.compile(r'"((?:\\"|[^"])*)"') + + +def _generate_po_file(path, *, stdout_only=True): + res = subprocess.run([sys.executable, pygettext, + '--no-location', '-o', '-', path], + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + if stdout_only: + return res.stdout + return res + + +def _extract_msgids(po): + msgids = [] + for msgid in msgid_pattern.findall(po): + msgid_string = ''.join(msgid_string_pattern.findall(msgid)) + msgid_string = msgid_string.replace(r'\"', '"') + if msgid_string: + msgids.append(msgid_string) + return sorted(msgids) + + +def _get_snapshot_path(module_name): + return Path(TEST_HOME_DIR) / 'translationdata' / module_name / 'msgids.txt' + + +@requires_subprocess() +class TestTranslationsBase(unittest.TestCase): + + def assertMsgidsEqual(self, module): + '''Assert that msgids extracted from a given module match a + snapshot. 
+ + ''' + skip_if_missing('i18n') + res = _generate_po_file(module.__file__, stdout_only=False) + self.assertEqual(res.returncode, 0) + self.assertEqual(res.stderr, '') + msgids = _extract_msgids(res.stdout) + snapshot_path = _get_snapshot_path(module.__name__) + snapshot = snapshot_path.read_text().splitlines() + self.assertListEqual(msgids, snapshot) + + +def update_translation_snapshots(module): + contents = _generate_po_file(module.__file__) + msgids = _extract_msgids(contents) + snapshot_path = _get_snapshot_path(module.__name__) + snapshot_path.write_text('\n'.join(msgids)) diff --git a/Lib/test/test__interpreters.py b/Lib/test/test__interpreters.py index 14cd50bd30502ce..bf3165e2341949f 100644 --- a/Lib/test/test__interpreters.py +++ b/Lib/test/test__interpreters.py @@ -551,6 +551,24 @@ def test_still_running(self): self.assertTrue(_interpreters.is_running(interp)) +class CommonTests(TestBase): + def setUp(self): + super().setUp() + self.id = _interpreters.create() + + def test_signatures(self): + # for method in ['exec', 'run_string', 'run_func']: + msg = "expected 'shared' to be a dict" + with self.assertRaisesRegex(TypeError, msg): + _interpreters.exec(self.id, 'a', 1) + with self.assertRaisesRegex(TypeError, msg): + _interpreters.exec(self.id, 'a', shared=1) + with self.assertRaisesRegex(TypeError, msg): + _interpreters.run_string(self.id, 'a', shared=1) + with self.assertRaisesRegex(TypeError, msg): + _interpreters.run_func(self.id, lambda: None, shared=1) + + class RunStringTests(TestBase): def setUp(self): diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index ba9876570385d35..358cfb1c56aae49 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -7,10 +7,8 @@ import operator import os import py_compile -import re import shutil import stat -import subprocess import sys import textwrap import tempfile @@ -19,15 +17,11 @@ import warnings from enum import StrEnum -from pathlib import Path -from test.support import REPO_ROOT -from test.support import TEST_HOME_DIR from test.support import captured_stderr from test.support import import_helper from test.support import os_helper -from test.support import requires_subprocess from test.support import script_helper -from test.test_tools import skip_if_missing +from test.support.i18n_helper import TestTranslationsBase, update_translation_snapshots from unittest import mock @@ -789,6 +783,13 @@ def test_const(self): self.assertIn("got an unexpected keyword argument 'const'", str(cm.exception)) + def test_invalid_name(self): + parser = argparse.ArgumentParser() + with self.assertRaises(ValueError) as cm: + parser.add_argument('--no-foo', action=argparse.BooleanOptionalAction) + self.assertEqual(str(cm.exception), + "invalid option name '--no-foo' for BooleanOptionalAction") + class TestBooleanOptionalActionRequired(ParserTestCase): """Tests BooleanOptionalAction required""" @@ -7049,50 +7050,10 @@ def test_directory_in_zipfile_compiled(self): # Translation tests # ================= -pygettext = Path(REPO_ROOT) / 'Tools' / 'i18n' / 'pygettext.py' -snapshot_path = Path(TEST_HOME_DIR) / 'translationdata' / 'argparse' / 'msgids.txt' - -msgid_pattern = re.compile(r'msgid(.*?)(?:msgid_plural|msgctxt|msgstr)', re.DOTALL) -msgid_string_pattern = re.compile(r'"((?:\\"|[^"])*)"') - - -@requires_subprocess() -class TestTranslations(unittest.TestCase): +class TestTranslations(TestTranslationsBase): def test_translations(self): - # Test messages extracted from the argparse module against a snapshot - 
skip_if_missing('i18n') - res = generate_po_file(stdout_only=False) - self.assertEqual(res.returncode, 0) - self.assertEqual(res.stderr, '') - msgids = extract_msgids(res.stdout) - snapshot = snapshot_path.read_text().splitlines() - self.assertListEqual(msgids, snapshot) - - -def generate_po_file(*, stdout_only=True): - res = subprocess.run([sys.executable, pygettext, - '--no-location', '-o', '-', argparse.__file__], - stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) - if stdout_only: - return res.stdout - return res - - -def extract_msgids(po): - msgids = [] - for msgid in msgid_pattern.findall(po): - msgid_string = ''.join(msgid_string_pattern.findall(msgid)) - msgid_string = msgid_string.replace(r'\"', '"') - if msgid_string: - msgids.append(msgid_string) - return sorted(msgids) - - -def update_translation_snapshots(): - contents = generate_po_file() - msgids = extract_msgids(contents) - snapshot_path.write_text('\n'.join(msgids)) + self.assertMsgidsEqual(argparse) def tearDownModule(): @@ -7104,6 +7065,6 @@ def tearDownModule(): if __name__ == '__main__': # To regenerate translation snapshots if len(sys.argv) > 1 and sys.argv[1] == '--snapshot-update': - update_translation_snapshots() + update_translation_snapshots(argparse) sys.exit(0) unittest.main() diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py index ae938d12c9401b1..cb38a69e390f3ab 100644 --- a/Lib/test/test_buffer.py +++ b/Lib/test/test_buffer.py @@ -3910,6 +3910,8 @@ def test_memoryview_check_released(self): self.assertRaises(ValueError, memoryview, m) # memoryview.cast() self.assertRaises(ValueError, m.cast, 'c') + # memoryview.__iter__() + self.assertRaises(ValueError, m.__iter__) # getbuffer() self.assertRaises(ValueError, ndarray, m) # memoryview.tolist() diff --git a/Lib/test/test_capi/test_long.py b/Lib/test/test_capi/test_long.py index 925fccd660bde38..a77094588a0edf7 100644 --- a/Lib/test/test_capi/test_long.py +++ b/Lib/test/test_capi/test_long.py @@ -643,6 +643,51 @@ def test_long_getsign(self): # CRASHES getsign(NULL) + def test_long_ispositive(self): + # Test PyLong_IsPositive() + ispositive = _testcapi.pylong_ispositive + self.assertEqual(ispositive(1), 1) + self.assertEqual(ispositive(123), 1) + self.assertEqual(ispositive(-1), 0) + self.assertEqual(ispositive(0), 0) + self.assertEqual(ispositive(True), 1) + self.assertEqual(ispositive(False), 0) + self.assertEqual(ispositive(IntSubclass(-1)), 0) + self.assertRaises(TypeError, ispositive, 1.0) + self.assertRaises(TypeError, ispositive, Index(123)) + + # CRASHES ispositive(NULL) + + def test_long_isnegative(self): + # Test PyLong_IsNegative() + isnegative = _testcapi.pylong_isnegative + self.assertEqual(isnegative(1), 0) + self.assertEqual(isnegative(123), 0) + self.assertEqual(isnegative(-1), 1) + self.assertEqual(isnegative(0), 0) + self.assertEqual(isnegative(True), 0) + self.assertEqual(isnegative(False), 0) + self.assertEqual(isnegative(IntSubclass(-1)), 1) + self.assertRaises(TypeError, isnegative, 1.0) + self.assertRaises(TypeError, isnegative, Index(123)) + + # CRASHES isnegative(NULL) + + def test_long_iszero(self): + # Test PyLong_IsZero() + iszero = _testcapi.pylong_iszero + self.assertEqual(iszero(1), 0) + self.assertEqual(iszero(-1), 0) + self.assertEqual(iszero(0), 1) + self.assertEqual(iszero(True), 0) + self.assertEqual(iszero(False), 1) + self.assertEqual(iszero(IntSubclass(-1)), 0) + self.assertEqual(iszero(IntSubclass(0)), 1) + self.assertRaises(TypeError, iszero, 1.0) + self.assertRaises(TypeError, iszero, Index(123)) + 
+ # CRASHES iszero(NULL) + def test_long_asint32(self): # Test PyLong_AsInt32() and PyLong_FromInt32() to_int32 = _testlimitedcapi.pylong_asint32 diff --git a/Lib/test/test_capi/test_object.py b/Lib/test/test_capi/test_object.py index cc9c9b688f00e21..a38b203ed12fa27 100644 --- a/Lib/test/test_capi/test_object.py +++ b/Lib/test/test_capi/test_object.py @@ -1,10 +1,13 @@ import enum import unittest +from test import support from test.support import import_helper from test.support import os_helper +from test.support import threading_helper _testlimitedcapi = import_helper.import_module('_testlimitedcapi') _testcapi = import_helper.import_module('_testcapi') +_testinternalcapi = import_helper.import_module('_testinternalcapi') class Constant(enum.IntEnum): @@ -131,5 +134,48 @@ def test_ClearWeakRefsNoCallbacks_no_weakref_support(self): _testcapi.pyobject_clear_weakrefs_no_callbacks(obj) +class EnableDeferredRefcountingTest(unittest.TestCase): + """Test PyUnstable_Object_EnableDeferredRefcount""" + @support.requires_resource("cpu") + def test_enable_deferred_refcount(self): + from threading import Thread + + self.assertEqual(_testcapi.pyobject_enable_deferred_refcount("not tracked"), 0) + foo = [] + self.assertEqual(_testcapi.pyobject_enable_deferred_refcount(foo), int(support.Py_GIL_DISABLED)) + + # Make sure reference counting works on foo now + self.assertEqual(foo, []) + if support.Py_GIL_DISABLED: + self.assertTrue(_testinternalcapi.has_deferred_refcount(foo)) + + # Make sure that PyUnstable_Object_EnableDeferredRefcount is thread safe + def silly_func(obj): + self.assertIn( + _testcapi.pyobject_enable_deferred_refcount(obj), + (0, 1) + ) + + silly_list = [1, 2, 3] + threads = [ + Thread(target=silly_func, args=(silly_list,)) for _ in range(5) + ] + + with threading_helper.catch_threading_exception() as cm: + for t in threads: + t.start() + + for i in range(10): + silly_list.append(i) + + for t in threads: + t.join() + + self.assertIsNone(cm.exc_value) + + if support.Py_GIL_DISABLED: + self.assertTrue(_testinternalcapi.has_deferred_refcount(silly_list)) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index ce5c03659f19795..4af8f7f480e759c 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001,2002 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # csv package unit tests import copy diff --git a/Lib/test/test_email/test_asian_codecs.py b/Lib/test/test_email/test_asian_codecs.py index 1e0caeeaed0810a..ca44f54c69b39bc 100644 --- a/Lib/test/test_email/test_asian_codecs.py +++ b/Lib/test/test_email/test_asian_codecs.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2006 Python Software Foundation +# Copyright (C) 2002 Python Software Foundation # Contact: email-sig@python.org # email package unit tests for (optional) Asian codecs diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py index 65ddbabcaa19978..abe9ef2e94409f8 100644 --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -1,4 +1,4 @@ -# Copyright (C) 2001-2010 Python Software Foundation +# Copyright (C) 2001 Python Software Foundation # Contact: email-sig@python.org # email package unit tests diff --git a/Lib/test/test_email/torture_test.py b/Lib/test/test_email/torture_test.py index 9cf9362c9b77e02..d15948a38b25dd3 100644 --- a/Lib/test/test_email/torture_test.py +++ b/Lib/test/test_email/torture_test.py @@ -1,4 +1,4 @@ -# Copyright (C) 2002-2004 Python 
Software Foundation +# Copyright (C) 2002 Python Software Foundation # # A torture test of the email package. This should not be run as part of the # standard Python test suite since it requires several meg of email messages diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index d590af090abc6ef..6d60f6941c4c5d5 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -1005,6 +1005,29 @@ def __getitem__(self, i): d = {"one": 1, "two": 2, "three": 3} self.assertEqual(self.reduce(add, d), "".join(d.keys())) + # test correctness of keyword usage of `initial` in `reduce` + def test_initial_keyword(self): + def add(x, y): + return x + y + self.assertEqual( + self.reduce(add, ['a', 'b', 'c'], ''), + self.reduce(add, ['a', 'b', 'c'], initial=''), + ) + self.assertEqual( + self.reduce(add, [['a', 'c'], [], ['d', 'w']], []), + self.reduce(add, [['a', 'c'], [], ['d', 'w']], initial=[]), + ) + self.assertEqual( + self.reduce(lambda x, y: x*y, range(2,8), 1), + self.reduce(lambda x, y: x*y, range(2,8), initial=1), + ) + self.assertEqual( + self.reduce(lambda x, y: x*y, range(2,21), 1), + self.reduce(lambda x, y: x*y, range(2,21), initial=1), + ) + self.assertRaises(TypeError, self.reduce, add, [0, 1], initial="") + self.assertEqual(self.reduce(42, "", initial="1"), "1") # func is never called with one item + @unittest.skipUnless(c_functools, 'requires the C _functools module') class TestReduceC(TestReduce, unittest.TestCase): diff --git a/Lib/test/test_getopt.py b/Lib/test/test_getopt.py index c8b3442de4aa77b..ed967ad27619ae3 100644 --- a/Lib/test/test_getopt.py +++ b/Lib/test/test_getopt.py @@ -1,11 +1,12 @@ # test_getopt.py # David Goodger 2000-08-19 -from test.support.os_helper import EnvironmentVarGuard import doctest -import unittest - import getopt +import sys +import unittest +from test.support.i18n_helper import TestTranslationsBase, update_translation_snapshots +from test.support.os_helper import EnvironmentVarGuard sentinel = object() @@ -19,21 +20,34 @@ def assertError(self, *args, **kwargs): self.assertRaises(getopt.GetoptError, *args, **kwargs) def test_short_has_arg(self): - self.assertTrue(getopt.short_has_arg('a', 'a:')) - self.assertFalse(getopt.short_has_arg('a', 'a')) + self.assertIs(getopt.short_has_arg('a', 'a:'), True) + self.assertIs(getopt.short_has_arg('a', 'a'), False) + self.assertEqual(getopt.short_has_arg('a', 'a::'), '?') self.assertError(getopt.short_has_arg, 'a', 'b') def test_long_has_args(self): has_arg, option = getopt.long_has_args('abc', ['abc=']) - self.assertTrue(has_arg) + self.assertIs(has_arg, True) self.assertEqual(option, 'abc') has_arg, option = getopt.long_has_args('abc', ['abc']) - self.assertFalse(has_arg) + self.assertIs(has_arg, False) self.assertEqual(option, 'abc') + has_arg, option = getopt.long_has_args('abc', ['abc=?']) + self.assertEqual(has_arg, '?') + self.assertEqual(option, 'abc') + + has_arg, option = getopt.long_has_args('abc', ['abcd=']) + self.assertIs(has_arg, True) + self.assertEqual(option, 'abcd') + has_arg, option = getopt.long_has_args('abc', ['abcd']) - self.assertFalse(has_arg) + self.assertIs(has_arg, False) + self.assertEqual(option, 'abcd') + + has_arg, option = getopt.long_has_args('abc', ['abcd=?']) + self.assertEqual(has_arg, '?') self.assertEqual(option, 'abcd') self.assertError(getopt.long_has_args, 'abc', ['def']) @@ -49,9 +63,9 @@ def test_do_shorts(self): self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, []) - #opts, args = getopt.do_shorts([], 'a=1', 'a:', []) - 
#self.assertEqual(opts, [('-a', '1')]) - #self.assertEqual(args, []) + opts, args = getopt.do_shorts([], 'a=1', 'a:', []) + self.assertEqual(opts, [('-a', '=1')]) + self.assertEqual(args, []) opts, args = getopt.do_shorts([], 'a', 'a:', ['1']) self.assertEqual(opts, [('-a', '1')]) @@ -61,6 +75,14 @@ def test_do_shorts(self): self.assertEqual(opts, [('-a', '1')]) self.assertEqual(args, ['2']) + opts, args = getopt.do_shorts([], 'a', 'a::', ['1']) + self.assertEqual(opts, [('-a', '')]) + self.assertEqual(args, ['1']) + + opts, args = getopt.do_shorts([], 'a1', 'a::', []) + self.assertEqual(opts, [('-a', '1')]) + self.assertEqual(args, []) + self.assertError(getopt.do_shorts, [], 'a1', 'a', []) self.assertError(getopt.do_shorts, [], 'a', 'a:', []) @@ -77,6 +99,22 @@ def test_do_longs(self): self.assertEqual(opts, [('--abcd', '1')]) self.assertEqual(args, []) + opts, args = getopt.do_longs([], 'abc', ['abc=?'], ['1']) + self.assertEqual(opts, [('--abc', '')]) + self.assertEqual(args, ['1']) + + opts, args = getopt.do_longs([], 'abc', ['abcd=?'], ['1']) + self.assertEqual(opts, [('--abcd', '')]) + self.assertEqual(args, ['1']) + + opts, args = getopt.do_longs([], 'abc=1', ['abc=?'], []) + self.assertEqual(opts, [('--abc', '1')]) + self.assertEqual(args, []) + + opts, args = getopt.do_longs([], 'abc=1', ['abcd=?'], []) + self.assertEqual(opts, [('--abcd', '1')]) + self.assertEqual(args, []) + opts, args = getopt.do_longs([], 'abc', ['ab', 'abc', 'abcd'], []) self.assertEqual(opts, [('--abc', '')]) self.assertEqual(args, []) @@ -95,7 +133,7 @@ def test_getopt(self): # note: the empty string between '-a' and '--beta' is significant: # it simulates an empty string option argument ('-a ""') on the # command line. - cmdline = ['-a', '1', '-b', '--alpha=2', '--beta', '-a', '3', '-a', + cmdline = ['-a1', '-b', '--alpha=2', '--beta', '-a', '3', '-a', '', '--beta', 'arg1', 'arg2'] opts, args = getopt.getopt(cmdline, 'a:b', ['alpha=', 'beta']) @@ -106,33 +144,53 @@ def test_getopt(self): # accounted for in the code that calls getopt(). self.assertEqual(args, ['arg1', 'arg2']) + cmdline = ['-a1', '--alpha=2', '--alpha=', '-a', '--alpha', 'arg1', 'arg2'] + opts, args = getopt.getopt(cmdline, 'a::', ['alpha=?']) + self.assertEqual(opts, [('-a', '1'), ('--alpha', '2'), ('--alpha', ''), + ('-a', ''), ('--alpha', '')]) + self.assertEqual(args, ['arg1', 'arg2']) + self.assertError(getopt.getopt, cmdline, 'a:b', ['alpha', 'beta']) def test_gnu_getopt(self): # Test handling of GNU style scanning mode. - cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2'] + cmdline = ['-a', 'arg1', '-b', '1', '--alpha', '--beta=2', '--beta', + '3', 'arg2'] # GNU style opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) - self.assertEqual(args, ['arg1']) - self.assertEqual(opts, [('-a', ''), ('-b', '1'), - ('--alpha', ''), ('--beta', '2')]) + self.assertEqual(args, ['arg1', 'arg2']) + self.assertEqual(opts, [('-a', ''), ('-b', '1'), ('--alpha', ''), + ('--beta', '2'), ('--beta', '3')]) + + opts, args = getopt.gnu_getopt(cmdline, 'ab::', ['alpha', 'beta=?']) + self.assertEqual(args, ['arg1', '1', '3', 'arg2']) + self.assertEqual(opts, [('-a', ''), ('-b', ''), ('--alpha', ''), + ('--beta', '2'), ('--beta', '')]) # recognize "-" as an argument opts, args = getopt.gnu_getopt(['-a', '-', '-b', '-'], 'ab:', []) self.assertEqual(args, ['-']) self.assertEqual(opts, [('-a', ''), ('-b', '-')]) + # Return positional arguments intermixed with options. 
+ opts, args = getopt.gnu_getopt(cmdline, '-ab:', ['alpha', 'beta=']) + self.assertEqual(args, ['arg2']) + self.assertEqual(opts, [('-a', ''), (None, ['arg1']), ('-b', '1'), ('--alpha', ''), + ('--beta', '2'), ('--beta', '3')]) + # Posix style via + opts, args = getopt.gnu_getopt(cmdline, '+ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) - self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) + self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2', + '--beta', '3', 'arg2']) # Posix style via POSIXLY_CORRECT self.env["POSIXLY_CORRECT"] = "1" opts, args = getopt.gnu_getopt(cmdline, 'ab:', ['alpha', 'beta=']) self.assertEqual(opts, [('-a', '')]) - self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2']) + self.assertEqual(args, ['arg1', '-b', '1', '--alpha', '--beta=2', + '--beta', '3', 'arg2']) def test_issue4629(self): longopts, shortopts = getopt.getopt(['--help='], '', ['help=']) @@ -173,10 +231,20 @@ def test_libref_examples(): ['a1', 'a2'] """ + +class TestTranslations(TestTranslationsBase): + def test_translations(self): + self.assertMsgidsEqual(getopt) + + def load_tests(loader, tests, pattern): tests.addTest(doctest.DocTestSuite()) return tests -if __name__ == "__main__": +if __name__ == '__main__': + # To regenerate translation snapshots + if len(sys.argv) > 1 and sys.argv[1] == '--snapshot-update': + update_translation_snapshots(getopt) + sys.exit(0) unittest.main() diff --git a/Lib/test/test_global.py b/Lib/test/test_global.py index f5b38c25ea07288..11d0bd54e8b69b9 100644 --- a/Lib/test/test_global.py +++ b/Lib/test/test_global.py @@ -1,7 +1,19 @@ -"""Verify that warnings are issued for global statements following use.""" +"""This module includes tests for syntax errors that occur when a name +declared as `global` is used in ways that violate the language +specification, such as after assignment, usage, or annotation. The tests +verify that syntax errors are correctly raised for improper `global` +statements following variable use or assignment within functions. +Additionally, it tests various name-binding scenarios for global +variables to ensure correct behavior. +See `test_scope.py` for additional related behavioral tests covering +variable scoping and usage in different contexts. 
+""" + +import contextlib from test.support import check_syntax_error from test.support.warnings_helper import check_warnings +from types import SimpleNamespace import unittest import warnings @@ -12,40 +24,185 @@ def setUp(self): self.enterContext(check_warnings()) warnings.filterwarnings("error", module="") - def test1(self): - prog_text_1 = """\ -def wrong1(): - a = 1 - b = 2 - global a - global b + ###################################################### + ### Syntax error cases as covered in Python/symtable.c + ###################################################### + + def test_name_param(self): + prog_text = """\ +def fn(name_param): + global name_param """ - check_syntax_error(self, prog_text_1, lineno=4, offset=5) + check_syntax_error(self, prog_text, lineno=2, offset=5) - def test2(self): - prog_text_2 = """\ -def wrong2(): - print(x) - global x + def test_name_after_assign(self): + prog_text = """\ +def fn(): + name_assign = 1 + global name_assign """ - check_syntax_error(self, prog_text_2, lineno=3, offset=5) + check_syntax_error(self, prog_text, lineno=3, offset=5) - def test3(self): - prog_text_3 = """\ -def wrong3(): - print(x) - x = 2 - global x + def test_name_after_use(self): + prog_text = """\ +def fn(): + print(name_use) + global name_use """ - check_syntax_error(self, prog_text_3, lineno=4, offset=5) + check_syntax_error(self, prog_text, lineno=3, offset=5) - def test4(self): - prog_text_4 = """\ -global x -x = 2 + def test_name_annot(self): + prog_text_3 = """\ +def fn(): + name_annot: int + global name_annot """ - # this should work - compile(prog_text_4, "", "exec") + check_syntax_error(self, prog_text_3, lineno=3, offset=5) + + ############################################################# + ### Tests for global variables across all name binding cases, + ### as described in executionmodel.rst + ############################################################# + + def test_assignment_statement(self): + global name_assignment_statement + value = object() + name_assignment_statement = value + self.assertIs(globals()["name_assignment_statement"], value) + del name_assignment_statement + + def test_unpacking_assignment(self): + global name_unpacking_assignment + value = object() + _, name_unpacking_assignment = [None, value] + self.assertIs(globals()["name_unpacking_assignment"], value) + del name_unpacking_assignment + + def test_assignment_expression(self): + global name_assignment_expression + value = object() + if name_assignment_expression := value: + pass + self.assertIs(globals()["name_assignment_expression"], value) + del name_assignment_expression + + def test_iteration_variable(self): + global name_iteration_variable + value = object() + for name_iteration_variable in [value]: + pass + self.assertIs(globals()["name_iteration_variable"], value) + del name_iteration_variable + + def test_func_def(self): + global name_func_def + + def name_func_def(): + pass + + value = name_func_def + self.assertIs(globals()["name_func_def"], value) + del name_func_def + + def test_class_def(self): + global name_class_def + + class name_class_def: + pass + + value = name_class_def + self.assertIs(globals()["name_class_def"], value) + del name_class_def + + def test_type_alias(self): + global name_type_alias + type name_type_alias = tuple[int, int] + value = name_type_alias + self.assertIs(globals()["name_type_alias"], value) + del name_type_alias + + def test_caught_exception(self): + global name_caught_exc + + try: + 1 / 0 + except ZeroDivisionError as name_caught_exc: + value = 
name_caught_exc + # `name_caught_exc` is cleared automatically after the except block + self.assertIs(globals()["name_caught_exc"], value) + + def test_caught_exception_group(self): + global name_caught_exc_group + try: + try: + 1 / 0 + except ZeroDivisionError as exc: + raise ExceptionGroup("eg", [exc]) + except* ZeroDivisionError as name_caught_exc_group: + value = name_caught_exc_group + # `name_caught_exc` is cleared automatically after the except block + self.assertIs(globals()["name_caught_exc_group"], value) + + def test_enter_result(self): + global name_enter_result + value = object() + with contextlib.nullcontext(value) as name_enter_result: + pass + self.assertIs(globals()["name_enter_result"], value) + del name_enter_result + + def test_import_result(self): + global name_import_result + value = contextlib + import contextlib as name_import_result + + self.assertIs(globals()["name_import_result"], value) + del name_import_result + + def test_match(self): + global name_match + value = object() + match value: + case name_match: + pass + self.assertIs(globals()["name_match"], value) + del name_match + + def test_match_as(self): + global name_match_as + value = object() + match value: + case _ as name_match_as: + pass + self.assertIs(globals()["name_match_as"], value) + del name_match_as + + def test_match_seq(self): + global name_match_seq + value = object() + match (None, value): + case (_, name_match_seq): + pass + self.assertIs(globals()["name_match_seq"], value) + del name_match_seq + + def test_match_map(self): + global name_match_map + value = object() + match {"key": value}: + case {"key": name_match_map}: + pass + self.assertIs(globals()["name_match_map"], value) + del name_match_map + + def test_match_attr(self): + global name_match_attr + value = object() + match SimpleNamespace(key=value): + case SimpleNamespace(key=name_match_attr): + pass + self.assertIs(globals()["name_match_attr"], value) + del name_match_attr def setUpModule(): diff --git a/Lib/test/test_importlib/test_util.py b/Lib/test/test_importlib/test_util.py index 668042782bdc5f9..0bdd1b4b82e5447 100644 --- a/Lib/test/test_importlib/test_util.py +++ b/Lib/test/test_importlib/test_util.py @@ -6,12 +6,14 @@ importlib_util = util.import_importlib('importlib.util') import importlib.util +from importlib import _bootstrap_external import os import pathlib import re import string import sys from test import support +from test.support import os_helper import textwrap import types import unittest @@ -775,5 +777,35 @@ def test_complete_multi_phase_init_module(self): self.run_with_own_gil(script) +class MiscTests(unittest.TestCase): + def test_atomic_write_should_notice_incomplete_writes(self): + import _pyio + + oldwrite = os.write + seen_write = False + + truncate_at_length = 100 + + # Emulate an os.write that only writes partial data. + def write(fd, data): + nonlocal seen_write + seen_write = True + return oldwrite(fd, data[:truncate_at_length]) + + # Need to patch _io to be _pyio, so that io.FileIO is affected by the + # os.write patch. + with (support.swap_attr(_bootstrap_external, '_io', _pyio), + support.swap_attr(os, 'write', write)): + with self.assertRaises(OSError): + # Make sure we write something longer than the point where we + # truncate. + content = b'x' * (truncate_at_length * 2) + _bootstrap_external._write_atomic(os_helper.TESTFN, content) + assert seen_write + + with self.assertRaises(OSError): + os.stat(support.os_helper.TESTFN) # Check that the file did not get written. 
+ + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_itertools.py b/Lib/test/test_itertools.py index a52e1d3fa142d98..b94d688738f9e82 100644 --- a/Lib/test/test_itertools.py +++ b/Lib/test/test_itertools.py @@ -494,6 +494,8 @@ def test_count(self): self.assertEqual(take(2, zip('abc',count(-3))), [('a', -3), ('b', -2)]) self.assertRaises(TypeError, count, 2, 3, 4) self.assertRaises(TypeError, count, 'a') + self.assertEqual(take(3, count(maxsize)), + [maxsize, maxsize + 1, maxsize + 2]) self.assertEqual(take(10, count(maxsize-5)), list(range(maxsize-5, maxsize+5))) self.assertEqual(take(10, count(-maxsize-5)), @@ -540,6 +542,12 @@ def test_count_with_step(self): self.assertEqual(take(20, count(-maxsize-15, 3)), take(20, range(-maxsize-15,-maxsize+100, 3))) self.assertEqual(take(3, count(10, maxsize+5)), list(range(10, 10+3*(maxsize+5), maxsize+5))) + self.assertEqual(take(3, count(maxsize, 2)), + [maxsize, maxsize + 2, maxsize + 4]) + self.assertEqual(take(3, count(maxsize, maxsize)), + [maxsize, 2 * maxsize, 3 * maxsize]) + self.assertEqual(take(3, count(-maxsize, maxsize)), + [-maxsize, 0, maxsize]) self.assertEqual(take(3, count(2, 1.25)), [2, 3.25, 4.5]) self.assertEqual(take(3, count(2, 3.25-4j)), [2, 5.25-4j, 8.5-8j]) self.assertEqual(take(3, count(Decimal('1.1'), Decimal('.1'))), diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 58f6a4dfae08ba2..c4bb8dfb1a74229 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -223,26 +223,38 @@ def test_guess_known_extensions(self): def test_preferred_extension(self): def check_extensions(): - self.assertEqual(mimetypes.guess_extension('application/octet-stream'), '.bin') - self.assertEqual(mimetypes.guess_extension('application/postscript'), '.ps') - self.assertEqual(mimetypes.guess_extension('application/vnd.apple.mpegurl'), '.m3u') - self.assertEqual(mimetypes.guess_extension('application/vnd.ms-excel'), '.xls') - self.assertEqual(mimetypes.guess_extension('application/vnd.ms-powerpoint'), '.ppt') - self.assertEqual(mimetypes.guess_extension('application/x-texinfo'), '.texi') - self.assertEqual(mimetypes.guess_extension('application/x-troff'), '.roff') - self.assertEqual(mimetypes.guess_extension('application/xml'), '.xsl') - self.assertEqual(mimetypes.guess_extension('audio/mpeg'), '.mp3') - self.assertEqual(mimetypes.guess_extension('image/avif'), '.avif') - self.assertEqual(mimetypes.guess_extension('image/webp'), '.webp') - self.assertEqual(mimetypes.guess_extension('image/jpeg'), '.jpg') - self.assertEqual(mimetypes.guess_extension('image/tiff'), '.tiff') - self.assertEqual(mimetypes.guess_extension('message/rfc822'), '.eml') - self.assertEqual(mimetypes.guess_extension('text/html'), '.html') - self.assertEqual(mimetypes.guess_extension('text/plain'), '.txt') - self.assertEqual(mimetypes.guess_extension('text/rtf'), '.rtf') - self.assertEqual(mimetypes.guess_extension('text/x-rst'), '.rst') - self.assertEqual(mimetypes.guess_extension('video/mpeg'), '.mpeg') - self.assertEqual(mimetypes.guess_extension('video/quicktime'), '.mov') + for mime_type, ext in ( + ("application/octet-stream", ".bin"), + ("application/postscript", ".ps"), + ("application/vnd.apple.mpegurl", ".m3u"), + ("application/vnd.ms-excel", ".xls"), + ("application/vnd.ms-fontobject", ".eot"), + ("application/vnd.ms-powerpoint", ".ppt"), + ("application/x-texinfo", ".texi"), + ("application/x-troff", ".roff"), + ("application/xml", ".xsl"), + ("audio/matroska", ".mka"), + ("audio/mpeg", ".mp3"), + 
("font/otf", ".otf"), + ("font/ttf", ".ttf"), + ("font/woff", ".woff"), + ("font/woff2", ".woff2"), + ("image/avif", ".avif"), + ("image/webp", ".webp"), + ("image/jpeg", ".jpg"), + ("image/tiff", ".tiff"), + ("message/rfc822", ".eml"), + ("text/html", ".html"), + ("text/plain", ".txt"), + ("text/rtf", ".rtf"), + ("text/x-rst", ".rst"), + ("video/matroska", ".mkv"), + ("video/matroska-3d", ".mk3d"), + ("video/mpeg", ".mpeg"), + ("video/quicktime", ".mov"), + ): + with self.subTest(mime_type=mime_type, ext=ext): + self.assertEqual(mimetypes.guess_extension(mime_type), ext) check_extensions() mimetypes.init() diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index 64cbfaaaaa06904..4f59184dfcfdc70 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -806,6 +806,9 @@ def test_abspath(self): tester('ntpath.abspath("C:\\spam. . .")', "C:\\spam") tester('ntpath.abspath("C:/nul")', "\\\\.\\nul") tester('ntpath.abspath("C:\\nul")', "\\\\.\\nul") + self.assertTrue(ntpath.isabs(ntpath.abspath("C:spam"))) + self.assertEqual(ntpath.abspath("C:\x00"), ntpath.join(ntpath.abspath("C:"), "\x00")) + self.assertEqual(ntpath.abspath("\x00:spam"), "\x00:\\spam") tester('ntpath.abspath("//..")', "\\\\") tester('ntpath.abspath("//../")', "\\\\..\\") tester('ntpath.abspath("//../..")', "\\\\..\\") diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py index 28b274462388eda..8655a0537a5e56a 100644 --- a/Lib/test/test_optparse.py +++ b/Lib/test/test_optparse.py @@ -15,7 +15,7 @@ from io import StringIO from test import support from test.support import os_helper - +from test.support.i18n_helper import TestTranslationsBase, update_translation_snapshots import optparse from optparse import make_option, Option, \ @@ -1656,5 +1656,14 @@ def test__all__(self): support.check__all__(self, optparse, not_exported=not_exported) +class TestTranslations(TestTranslationsBase): + def test_translations(self): + self.assertMsgidsEqual(optparse) + + if __name__ == '__main__': + # To regenerate translation snapshots + if len(sys.argv) > 1 and sys.argv[1] == '--snapshot-update': + update_translation_snapshots(optparse) + sys.exit(0) unittest.main() diff --git a/Lib/test/test_plistlib.py b/Lib/test/test_plistlib.py index b231b05f864ab9e..a0c76e5dec5ebe0 100644 --- a/Lib/test/test_plistlib.py +++ b/Lib/test/test_plistlib.py @@ -1,4 +1,4 @@ -# Copyright (C) 2003-2013 Python Software Foundation +# Copyright (C) 2003 Python Software Foundation import copy import operator import pickle diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index ca5cf42f8fcd710..b39255ebc79ac19 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -695,6 +695,65 @@ def test_realpath_unreadable_symlink(self): os.chmod(ABSTFN, 0o755, follow_symlinks=False) os.unlink(ABSTFN) + @skip_if_ABSTFN_contains_backslash + def test_realpath_nonterminal_file(self): + try: + with open(ABSTFN, 'w') as f: + f.write('test_posixpath wuz ere') + self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN) + self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN) + self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True) + self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True) + self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN)) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True) + 
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "/subdir") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True) + finally: + os_helper.unlink(ABSTFN) + + @os_helper.skip_unless_symlink + @skip_if_ABSTFN_contains_backslash + def test_realpath_nonterminal_symlink_to_file(self): + try: + with open(ABSTFN + "1", 'w') as f: + f.write('test_posixpath wuz ere') + os.symlink(ABSTFN + "1", ABSTFN) + self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "1") + self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "1") + self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "1") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True) + self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "1") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True) + self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN)) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True) + self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "1/subdir") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True) + finally: + os_helper.unlink(ABSTFN) + + @os_helper.skip_unless_symlink + @skip_if_ABSTFN_contains_backslash + def test_realpath_nonterminal_symlink_to_symlinks_to_file(self): + try: + with open(ABSTFN + "2", 'w') as f: + f.write('test_posixpath wuz ere') + os.symlink(ABSTFN + "2", ABSTFN + "1") + os.symlink(ABSTFN + "1", ABSTFN) + self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "2") + self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "2") + self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "2") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True) + self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "2") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True) + self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN)) + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True) + self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "2/subdir") + self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True) + finally: + os_helper.unlink(ABSTFN) + def test_relpath(self): (real_getcwd, os.getcwd) = (os.getcwd, lambda: r"/home/user/bar") try: diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py index ff95f54026e1720..7bc702ec89a4a7b 100644 --- a/Lib/test/test_re.py +++ b/Lib/test/test_re.py @@ -1136,6 +1136,39 @@ def test_ignore_case_set(self): self.assertTrue(re.match(br'[19a]', b'a', re.I)) self.assertTrue(re.match(br'[19a]', b'A', re.I)) self.assertTrue(re.match(br'[19A]', b'a', re.I)) + self.assertTrue(re.match(r'[19\xc7]', '\xc7', re.I)) + self.assertTrue(re.match(r'[19\xc7]', '\xe7', re.I)) + self.assertTrue(re.match(r'[19\xe7]', '\xc7', re.I)) + self.assertTrue(re.match(r'[19\xe7]', '\xe7', re.I)) + self.assertTrue(re.match(r'[19\u0400]', '\u0400', re.I)) + self.assertTrue(re.match(r'[19\u0400]', '\u0450', re.I)) + self.assertTrue(re.match(r'[19\u0450]', '\u0400', re.I)) + self.assertTrue(re.match(r'[19\u0450]', '\u0450', re.I)) + self.assertTrue(re.match(r'[19\U00010400]', '\U00010400', re.I)) + self.assertTrue(re.match(r'[19\U00010400]', '\U00010428', re.I)) + self.assertTrue(re.match(r'[19\U00010428]', '\U00010400', re.I)) + self.assertTrue(re.match(r'[19\U00010428]', '\U00010428', re.I)) + + self.assertTrue(re.match(br'[19A]', b'A', re.I)) + 
self.assertTrue(re.match(br'[19a]', b'a', re.I)) + self.assertTrue(re.match(br'[19a]', b'A', re.I)) + self.assertTrue(re.match(br'[19A]', b'a', re.I)) + self.assertTrue(re.match(r'[19A]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[19a]', 'a', re.I|re.A)) + self.assertTrue(re.match(r'[19a]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[19A]', 'a', re.I|re.A)) + self.assertTrue(re.match(r'[19\xc7]', '\xc7', re.I|re.A)) + self.assertIsNone(re.match(r'[19\xc7]', '\xe7', re.I|re.A)) + self.assertIsNone(re.match(r'[19\xe7]', '\xc7', re.I|re.A)) + self.assertTrue(re.match(r'[19\xe7]', '\xe7', re.I|re.A)) + self.assertTrue(re.match(r'[19\u0400]', '\u0400', re.I|re.A)) + self.assertIsNone(re.match(r'[19\u0400]', '\u0450', re.I|re.A)) + self.assertIsNone(re.match(r'[19\u0450]', '\u0400', re.I|re.A)) + self.assertTrue(re.match(r'[19\u0450]', '\u0450', re.I|re.A)) + self.assertTrue(re.match(r'[19\U00010400]', '\U00010400', re.I|re.A)) + self.assertIsNone(re.match(r'[19\U00010400]', '\U00010428', re.I|re.A)) + self.assertIsNone(re.match(r'[19\U00010428]', '\U00010400', re.I|re.A)) + self.assertTrue(re.match(r'[19\U00010428]', '\U00010428', re.I|re.A)) # Two different characters have the same lowercase. assert 'K'.lower() == '\u212a'.lower() == 'k' # 'K' @@ -1172,8 +1205,10 @@ def test_ignore_case_range(self): self.assertTrue(re.match(br'[9-a]', b'_', re.I)) self.assertIsNone(re.match(br'[9-A]', b'_', re.I)) self.assertTrue(re.match(r'[\xc0-\xde]', '\xd7', re.I)) + self.assertTrue(re.match(r'[\xc0-\xde]', '\xe7', re.I)) self.assertIsNone(re.match(r'[\xc0-\xde]', '\xf7', re.I)) self.assertTrue(re.match(r'[\xe0-\xfe]', '\xf7', re.I)) + self.assertTrue(re.match(r'[\xe0-\xfe]', '\xc7', re.I)) self.assertIsNone(re.match(r'[\xe0-\xfe]', '\xd7', re.I)) self.assertTrue(re.match(r'[\u0430-\u045f]', '\u0450', re.I)) self.assertTrue(re.match(r'[\u0430-\u045f]', '\u0400', re.I)) @@ -1184,6 +1219,26 @@ def test_ignore_case_range(self): self.assertTrue(re.match(r'[\U00010400-\U00010427]', '\U00010428', re.I)) self.assertTrue(re.match(r'[\U00010400-\U00010427]', '\U00010400', re.I)) + self.assertTrue(re.match(r'[\xc0-\xde]', '\xd7', re.I|re.A)) + self.assertIsNone(re.match(r'[\xc0-\xde]', '\xe7', re.I|re.A)) + self.assertTrue(re.match(r'[\xe0-\xfe]', '\xf7', re.I|re.A)) + self.assertIsNone(re.match(r'[\xe0-\xfe]', '\xc7', re.I|re.A)) + self.assertTrue(re.match(r'[\u0430-\u045f]', '\u0450', re.I|re.A)) + self.assertIsNone(re.match(r'[\u0430-\u045f]', '\u0400', re.I|re.A)) + self.assertIsNone(re.match(r'[\u0400-\u042f]', '\u0450', re.I|re.A)) + self.assertTrue(re.match(r'[\u0400-\u042f]', '\u0400', re.I|re.A)) + self.assertTrue(re.match(r'[\U00010428-\U0001044f]', '\U00010428', re.I|re.A)) + self.assertIsNone(re.match(r'[\U00010428-\U0001044f]', '\U00010400', re.I|re.A)) + self.assertIsNone(re.match(r'[\U00010400-\U00010427]', '\U00010428', re.I|re.A)) + self.assertTrue(re.match(r'[\U00010400-\U00010427]', '\U00010400', re.I|re.A)) + + self.assertTrue(re.match(r'[N-\x7f]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[n-\x7f]', 'Z', re.I|re.A)) + self.assertTrue(re.match(r'[N-\uffff]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[n-\uffff]', 'Z', re.I|re.A)) + self.assertTrue(re.match(r'[N-\U00010000]', 'A', re.I|re.A)) + self.assertTrue(re.match(r'[n-\U00010000]', 'Z', re.I|re.A)) + # Two different characters have the same lowercase. 
assert 'K'.lower() == '\u212a'.lower() == 'k' # 'K' self.assertTrue(re.match(r'[J-M]', '\u212a', re.I)) diff --git a/Lib/test/test_tomllib/test_error.py b/Lib/test/test_tomllib/test_error.py index d2ef59a29ca350c..3a8587492859ca6 100644 --- a/Lib/test/test_tomllib/test_error.py +++ b/Lib/test/test_tomllib/test_error.py @@ -49,7 +49,9 @@ def test_type_error(self): self.assertEqual(str(exc_info.exception), "Expected str object, not 'bool'") def test_module_name(self): - self.assertEqual(tomllib.TOMLDecodeError().__module__, tomllib.__name__) + self.assertEqual( + tomllib.TOMLDecodeError("", "", 0).__module__, tomllib.__name__ + ) def test_invalid_parse_float(self): def dict_returner(s: str) -> dict: @@ -64,3 +66,33 @@ def list_returner(s: str) -> list: self.assertEqual( str(exc_info.exception), "parse_float must not return dicts or lists" ) + + def test_deprecated_tomldecodeerror(self): + for args in [ + (), + ("err msg",), + (None,), + (None, "doc"), + ("err msg", None), + (None, "doc", None), + ("err msg", "doc", None), + ("one", "two", "three", "four"), + ("one", "two", 3, "four", "five"), + ]: + with self.assertWarns(DeprecationWarning): + e = tomllib.TOMLDecodeError(*args) # type: ignore[arg-type] + self.assertEqual(e.args, args) + + def test_tomldecodeerror(self): + msg = "error parsing" + doc = "v=1\n[table]\nv='val'" + pos = 13 + formatted_msg = "error parsing (at line 3, column 2)" + e = tomllib.TOMLDecodeError(msg, doc, pos) + self.assertEqual(e.args, (formatted_msg,)) + self.assertEqual(str(e), formatted_msg) + self.assertEqual(e.msg, msg) + self.assertEqual(e.doc, doc) + self.assertEqual(e.pos, pos) + self.assertEqual(e.lineno, 3) + self.assertEqual(e.colno, 2) diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 2f1f9e86a0bce4e..aa42beca5f9256b 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -8,6 +8,7 @@ import inspect import itertools import operator +import os import pickle import re import sys @@ -4252,9 +4253,15 @@ def test_builtin_protocol_allowlist(self): class CustomProtocol(TestCase, Protocol): pass + class CustomPathLikeProtocol(os.PathLike, Protocol): + pass + class CustomContextManager(typing.ContextManager, Protocol): pass + class CustomAsyncIterator(typing.AsyncIterator, Protocol): + pass + def test_non_runtime_protocol_isinstance_check(self): class P(Protocol): x: int diff --git a/Lib/test/test_unittest/testmock/testpatch.py b/Lib/test/test_unittest/testmock/testpatch.py index f26e74ce0bc1ba7..037c021e6eafcfa 100644 --- a/Lib/test/test_unittest/testmock/testpatch.py +++ b/Lib/test/test_unittest/testmock/testpatch.py @@ -745,6 +745,54 @@ def test_stop_idempotent(self): self.assertIsNone(patcher.stop()) + def test_exit_idempotent(self): + patcher = patch(foo_name, 'bar', 3) + with patcher: + patcher.stop() + + + def test_second_start_failure(self): + patcher = patch(foo_name, 'bar', 3) + patcher.start() + try: + self.assertRaises(RuntimeError, patcher.start) + finally: + patcher.stop() + + + def test_second_enter_failure(self): + patcher = patch(foo_name, 'bar', 3) + with patcher: + self.assertRaises(RuntimeError, patcher.start) + + + def test_second_start_after_stop(self): + patcher = patch(foo_name, 'bar', 3) + patcher.start() + patcher.stop() + patcher.start() + patcher.stop() + + + def test_property_setters(self): + mock_object = Mock() + mock_bar = mock_object.bar + patcher = patch.object(mock_object, 'bar', 'x') + with patcher: + self.assertEqual(patcher.is_local, False) + self.assertIs(patcher.target, mock_object) + 
self.assertEqual(patcher.temp_original, mock_bar) + patcher.is_local = True + patcher.target = mock_bar + patcher.temp_original = mock_object + self.assertEqual(patcher.is_local, True) + self.assertIs(patcher.target, mock_bar) + self.assertEqual(patcher.temp_original, mock_object) + # if changes are left intact, they may lead to disruption as shown below (it might be what someone needs though) + self.assertEqual(mock_bar.bar, mock_object) + self.assertEqual(mock_object.bar, 'x') + + def test_patchobject_start_stop(self): original = something patcher = patch.object(PTModule, 'something', 'foo') @@ -1098,7 +1146,7 @@ def test_new_callable_patch(self): self.assertIsNot(m1, m2) for mock in m1, m2: - self.assertNotCallable(m1) + self.assertNotCallable(mock) def test_new_callable_patch_object(self): @@ -1111,7 +1159,7 @@ def test_new_callable_patch_object(self): self.assertIsNot(m1, m2) for mock in m1, m2: - self.assertNotCallable(m1) + self.assertNotCallable(mock) def test_new_callable_keyword_arguments(self): diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 28369b21db06d45..66e948fc3a06bed 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -1542,6 +1542,11 @@ def test_pathname2url_win(self): self.assertEqual(fn('\\\\some\\share\\'), '//some/share/') self.assertEqual(fn('\\\\some\\share\\a\\b.c'), '//some/share/a/b.c') self.assertEqual(fn('\\\\some\\share\\a\\b%#c\xe9'), '//some/share/a/b%25%23c%C3%A9') + # Alternate path separator + self.assertEqual(fn('C:/a/b.c'), '///C:/a/b.c') + self.assertEqual(fn('//some/share/a/b.c'), '//some/share/a/b.c') + self.assertEqual(fn('//?/C:/dir'), '///C:/dir') + self.assertEqual(fn('//?/unc/server/share/dir'), '//server/share/dir') # Round-tripping urls = ['///C:', '///folder/test/', diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index 297fb4831c16bfc..4516bdea6adb198 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -1314,9 +1314,17 @@ def test_parse_qsl_bytes(self): def test_parse_qsl_false_value(self): kwargs = dict(keep_blank_values=True, strict_parsing=True) - for x in '', b'', None, 0, 0.0, [], {}, memoryview(b''): + for x in '', b'', None, memoryview(b''): self.assertEqual(urllib.parse.parse_qsl(x, **kwargs), []) self.assertRaises(ValueError, urllib.parse.parse_qsl, x, separator=1) + for x in 0, 0.0, [], {}: + with self.assertWarns(DeprecationWarning) as cm: + self.assertEqual(urllib.parse.parse_qsl(x, **kwargs), []) + self.assertEqual(cm.filename, __file__) + with self.assertWarns(DeprecationWarning) as cm: + self.assertEqual(urllib.parse.parse_qs(x, **kwargs), {}) + self.assertEqual(cm.filename, __file__) + self.assertRaises(ValueError, urllib.parse.parse_qsl, x, separator=1) def test_parse_qsl_errors(self): self.assertRaises(TypeError, urllib.parse.parse_qsl, list(b'a=b')) diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py index e177464c00f7a65..7bd26a8ca34b628 100755 --- a/Lib/test/test_uuid.py +++ b/Lib/test/test_uuid.py @@ -8,8 +8,10 @@ import io import os import pickle +import random import sys import weakref +from itertools import product from unittest import mock py_uuid = import_helper.import_fresh_module('uuid', blocked=['_uuid']) @@ -267,7 +269,7 @@ def test_exceptions(self): # Version number out of range. badvalue(lambda: self.uuid.UUID('00'*16, version=0)) - badvalue(lambda: self.uuid.UUID('00'*16, version=6)) + badvalue(lambda: self.uuid.UUID('00'*16, version=42)) # Integer value out of range. 
badvalue(lambda: self.uuid.UUID(int=-1)) @@ -681,6 +683,37 @@ def test_uuid5(self): equal(u, self.uuid.UUID(v)) equal(str(u), v) + def test_uuid8(self): + equal = self.assertEqual + u = self.uuid.uuid8() + + equal(u.variant, self.uuid.RFC_4122) + equal(u.version, 8) + + for (_, hi, mid, lo) in product( + range(10), # repeat 10 times + [None, 0, random.getrandbits(48)], + [None, 0, random.getrandbits(12)], + [None, 0, random.getrandbits(62)], + ): + u = self.uuid.uuid8(hi, mid, lo) + equal(u.variant, self.uuid.RFC_4122) + equal(u.version, 8) + if hi is not None: + equal((u.int >> 80) & 0xffffffffffff, hi) + if mid is not None: + equal((u.int >> 64) & 0xfff, mid) + if lo is not None: + equal(u.int & 0x3fffffffffffffff, lo) + + def test_uuid8_uniqueness(self): + # Test that UUIDv8-generated values are unique + # (up to a negligible probability of failure). + u1 = self.uuid.uuid8() + u2 = self.uuid.uuid8() + self.assertNotEqual(u1.int, u2.int) + self.assertEqual(u1.version, u2.version) + @support.requires_fork() def testIssue8621(self): # On at least some versions of OSX self.uuid.uuid4 generates diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 8b59630717e790d..4e3c877896f295e 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -533,6 +533,18 @@ def test_skip_file_prefixes(self): warning_tests.package("prefix02", stacklevel=3) self.assertIn("unittest", w[-1].filename) + def test_skip_file_prefixes_file_path(self): + # see: gh-126209 + with warnings_state(self.module): + skipped = warning_tests.__file__ + with original_warnings.catch_warnings( + record=True, module=self.module, + ) as w: + warning_tests.outer("msg", skip_file_prefixes=(skipped,)) + + self.assertEqual(len(w), 1) + self.assertNotEqual(w[-1].filename, skipped) + def test_skip_file_prefixes_type_errors(self): with warnings_state(self.module): warn = warning_tests.warnings.warn diff --git a/Lib/test/test_warnings/data/stacklevel.py b/Lib/test/test_warnings/data/stacklevel.py index c6dd24733b3b747..fe36242d3d20c22 100644 --- a/Lib/test/test_warnings/data/stacklevel.py +++ b/Lib/test/test_warnings/data/stacklevel.py @@ -4,11 +4,13 @@ import warnings from test.test_warnings.data import package_helper -def outer(message, stacklevel=1): - inner(message, stacklevel) -def inner(message, stacklevel=1): - warnings.warn(message, stacklevel=stacklevel) +def outer(message, stacklevel=1, skip_file_prefixes=()): + inner(message, stacklevel, skip_file_prefixes) + +def inner(message, stacklevel=1, skip_file_prefixes=()): + warnings.warn(message, stacklevel=stacklevel, + skip_file_prefixes=skip_file_prefixes) def package(message, *, stacklevel): package_helper.inner_api(message, stacklevel=stacklevel, diff --git a/Lib/test/translationdata/getopt/msgids.txt b/Lib/test/translationdata/getopt/msgids.txt new file mode 100644 index 000000000000000..1ffab1f31abad5e --- /dev/null +++ b/Lib/test/translationdata/getopt/msgids.txt @@ -0,0 +1,6 @@ +option -%s not recognized +option -%s requires argument +option --%s must not have an argument +option --%s not a unique prefix +option --%s not recognized +option --%s requires argument \ No newline at end of file diff --git a/Lib/test/translationdata/optparse/msgids.txt b/Lib/test/translationdata/optparse/msgids.txt new file mode 100644 index 000000000000000..ac5317c736af8ce --- /dev/null +++ b/Lib/test/translationdata/optparse/msgids.txt @@ -0,0 +1,14 @@ +%prog [options] +%s option does not take a value +Options +Usage +Usage: %s\n 
+ambiguous option: %s (%s?) +complex +floating-point +integer +no such option: %s +option %s: invalid %s value: %r +option %s: invalid choice: %r (choose from %s) +show program's version number and exit +show this help message and exit \ No newline at end of file diff --git a/Lib/textwrap.py b/Lib/textwrap.py index 7ca393d1c371aad..1bf07aa46cad99f 100644 --- a/Lib/textwrap.py +++ b/Lib/textwrap.py @@ -2,7 +2,7 @@ """ # Copyright (C) 1999-2001 Gregory P. Ward. -# Copyright (C) 2002, 2003 Python Software Foundation. +# Copyright (C) 2002 Python Software Foundation. # Written by Greg Ward import re diff --git a/Lib/tomllib/_parser.py b/Lib/tomllib/_parser.py index 5671326646ca5a6..4d208bcfb4a9a67 100644 --- a/Lib/tomllib/_parser.py +++ b/Lib/tomllib/_parser.py @@ -8,6 +8,7 @@ import string from types import MappingProxyType from typing import Any, BinaryIO, NamedTuple +import warnings from ._re import ( RE_DATETIME, @@ -50,8 +51,68 @@ ) +class DEPRECATED_DEFAULT: + """Sentinel to be used as default arg during deprecation + period of TOMLDecodeError's free-form arguments.""" + + class TOMLDecodeError(ValueError): - """An error raised if a document is not valid TOML.""" + """An error raised if a document is not valid TOML. + + Adds the following attributes to ValueError: + msg: The unformatted error message + doc: The TOML document being parsed + pos: The index of doc where parsing failed + lineno: The line corresponding to pos + colno: The column corresponding to pos + """ + + def __init__( + self, + msg: str = DEPRECATED_DEFAULT, # type: ignore[assignment] + doc: str = DEPRECATED_DEFAULT, # type: ignore[assignment] + pos: Pos = DEPRECATED_DEFAULT, # type: ignore[assignment] + *args: Any, + ): + if ( + args + or not isinstance(msg, str) + or not isinstance(doc, str) + or not isinstance(pos, int) + ): + warnings.warn( + "Free-form arguments for TOMLDecodeError are deprecated. " + "Please set 'msg' (str), 'doc' (str) and 'pos' (int) arguments only.", + DeprecationWarning, + stacklevel=2, + ) + if pos is not DEPRECATED_DEFAULT: # type: ignore[comparison-overlap] + args = pos, *args + if doc is not DEPRECATED_DEFAULT: # type: ignore[comparison-overlap] + args = doc, *args + if msg is not DEPRECATED_DEFAULT: # type: ignore[comparison-overlap] + args = msg, *args + ValueError.__init__(self, *args) + return + + lineno = doc.count("\n", 0, pos) + 1 + if lineno == 1: + colno = pos + 1 + else: + colno = pos - doc.rindex("\n", 0, pos) + + if pos >= len(doc): + coord_repr = "end of document" + else: + coord_repr = f"line {lineno}, column {colno}" + errmsg = f"{msg} (at {coord_repr})" + ValueError.__init__(self, errmsg) + + self.msg = msg + self.doc = doc + self.pos = pos + self.lineno = lineno + self.colno = colno def load(fp: BinaryIO, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: @@ -118,7 +179,7 @@ def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # n pos, header = create_dict_rule(src, pos, out) pos = skip_chars(src, pos, TOML_WS) elif char != "#": - raise suffixed_err(src, pos, "Invalid statement") + raise TOMLDecodeError("Invalid statement", src, pos) # 3. 
Skip comment pos = skip_comment(src, pos) @@ -129,8 +190,8 @@ def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # n except IndexError: break if char != "\n": - raise suffixed_err( - src, pos, "Expected newline or end of document after a statement" + raise TOMLDecodeError( + "Expected newline or end of document after a statement", src, pos ) pos += 1 @@ -256,12 +317,12 @@ def skip_until( except ValueError: new_pos = len(src) if error_on_eof: - raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None + raise TOMLDecodeError(f"Expected {expect!r}", src, new_pos) from None if not error_on.isdisjoint(src[pos:new_pos]): while src[pos] not in error_on: pos += 1 - raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") + raise TOMLDecodeError(f"Found invalid character {src[pos]!r}", src, pos) return new_pos @@ -292,15 +353,17 @@ def create_dict_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: pos, key = parse_key(src, pos) if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Cannot declare {key} twice") + raise TOMLDecodeError(f"Cannot declare {key} twice", src, pos) out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) try: out.data.get_or_create_nest(key) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if not src.startswith("]", pos): - raise suffixed_err(src, pos, "Expected ']' at the end of a table declaration") + raise TOMLDecodeError( + "Expected ']' at the end of a table declaration", src, pos + ) return pos + 1, key @@ -310,7 +373,7 @@ def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: pos, key = parse_key(src, pos) if out.flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + raise TOMLDecodeError(f"Cannot mutate immutable namespace {key}", src, pos) # Free the namespace now that it points to another empty list item... out.flags.unset_all(key) # ...but this key precisely is still prohibited from table declaration @@ -318,10 +381,12 @@ def create_list_rule(src: str, pos: Pos, out: Output) -> tuple[Pos, Key]: try: out.data.append_nest_to_list(key) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if not src.startswith("]]", pos): - raise suffixed_err(src, pos, "Expected ']]' at the end of an array declaration") + raise TOMLDecodeError( + "Expected ']]' at the end of an array declaration", src, pos + ) return pos + 2, key @@ -336,22 +401,22 @@ def key_value_rule( for cont_key in relative_path_cont_keys: # Check that dotted key syntax does not redefine an existing table if out.flags.is_(cont_key, Flags.EXPLICIT_NEST): - raise suffixed_err(src, pos, f"Cannot redefine namespace {cont_key}") + raise TOMLDecodeError(f"Cannot redefine namespace {cont_key}", src, pos) # Containers in the relative path can't be opened with the table syntax or # dotted key/value syntax in following table sections. 
out.flags.add_pending(cont_key, Flags.EXPLICIT_NEST) if out.flags.is_(abs_key_parent, Flags.FROZEN): - raise suffixed_err( - src, pos, f"Cannot mutate immutable namespace {abs_key_parent}" + raise TOMLDecodeError( + f"Cannot mutate immutable namespace {abs_key_parent}", src, pos ) try: nest = out.data.get_or_create_nest(abs_key_parent) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if key_stem in nest: - raise suffixed_err(src, pos, "Cannot overwrite a value") + raise TOMLDecodeError("Cannot overwrite a value", src, pos) # Mark inline table and array namespaces recursively immutable if isinstance(value, (dict, list)): out.flags.set(header + key, Flags.FROZEN, recursive=True) @@ -368,7 +433,7 @@ def parse_key_value_pair( except IndexError: char = None if char != "=": - raise suffixed_err(src, pos, "Expected '=' after a key in a key/value pair") + raise TOMLDecodeError("Expected '=' after a key in a key/value pair", src, pos) pos += 1 pos = skip_chars(src, pos, TOML_WS) pos, value = parse_value(src, pos, parse_float) @@ -406,7 +471,7 @@ def parse_key_part(src: str, pos: Pos) -> tuple[Pos, str]: return parse_literal_str(src, pos) if char == '"': return parse_one_line_basic_str(src, pos) - raise suffixed_err(src, pos, "Invalid initial character for a key part") + raise TOMLDecodeError("Invalid initial character for a key part", src, pos) def parse_one_line_basic_str(src: str, pos: Pos) -> tuple[Pos, str]: @@ -430,7 +495,7 @@ def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos, list] if c == "]": return pos + 1, array if c != ",": - raise suffixed_err(src, pos, "Unclosed array") + raise TOMLDecodeError("Unclosed array", src, pos) pos += 1 pos = skip_comments_and_array_ws(src, pos) @@ -450,20 +515,20 @@ def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> tuple[Pos pos, key, value = parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] if flags.is_(key, Flags.FROZEN): - raise suffixed_err(src, pos, f"Cannot mutate immutable namespace {key}") + raise TOMLDecodeError(f"Cannot mutate immutable namespace {key}", src, pos) try: nest = nested_dict.get_or_create_nest(key_parent, access_lists=False) except KeyError: - raise suffixed_err(src, pos, "Cannot overwrite a value") from None + raise TOMLDecodeError("Cannot overwrite a value", src, pos) from None if key_stem in nest: - raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") + raise TOMLDecodeError(f"Duplicate inline table key {key_stem!r}", src, pos) nest[key_stem] = value pos = skip_chars(src, pos, TOML_WS) c = src[pos : pos + 1] if c == "}": return pos + 1, nested_dict.dict if c != ",": - raise suffixed_err(src, pos, "Unclosed inline table") + raise TOMLDecodeError("Unclosed inline table", src, pos) if isinstance(value, (dict, list)): flags.set(key, Flags.FROZEN, recursive=True) pos += 1 @@ -485,7 +550,7 @@ def parse_basic_str_escape( except IndexError: return pos, "" if char != "\n": - raise suffixed_err(src, pos, "Unescaped '\\' in a string") + raise TOMLDecodeError("Unescaped '\\' in a string", src, pos) pos += 1 pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) return pos, "" @@ -496,7 +561,7 @@ def parse_basic_str_escape( try: return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] except KeyError: - raise suffixed_err(src, pos, "Unescaped '\\' in a string") from None + raise TOMLDecodeError("Unescaped '\\' in a string", src, pos) from None def 
parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: @@ -506,11 +571,13 @@ def parse_basic_str_escape_multiline(src: str, pos: Pos) -> tuple[Pos, str]: def parse_hex_char(src: str, pos: Pos, hex_len: int) -> tuple[Pos, str]: hex_str = src[pos : pos + hex_len] if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): - raise suffixed_err(src, pos, "Invalid hex value") + raise TOMLDecodeError("Invalid hex value", src, pos) pos += hex_len hex_int = int(hex_str, 16) if not is_unicode_scalar_value(hex_int): - raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") + raise TOMLDecodeError( + "Escaped character is not a Unicode scalar value", src, pos + ) return pos, chr(hex_int) @@ -567,7 +634,7 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: try: char = src[pos] except IndexError: - raise suffixed_err(src, pos, "Unterminated string") from None + raise TOMLDecodeError("Unterminated string", src, pos) from None if char == '"': if not multiline: return pos + 1, result + src[start_pos:pos] @@ -582,7 +649,7 @@ def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> tuple[Pos, str]: start_pos = pos continue if char in error_on: - raise suffixed_err(src, pos, f"Illegal character {char!r}") + raise TOMLDecodeError(f"Illegal character {char!r}", src, pos) pos += 1 @@ -630,7 +697,7 @@ def parse_value( # noqa: C901 try: datetime_obj = match_to_datetime(datetime_match) except ValueError as e: - raise suffixed_err(src, pos, "Invalid date or datetime") from e + raise TOMLDecodeError("Invalid date or datetime", src, pos) from e return datetime_match.end(), datetime_obj localtime_match = RE_LOCALTIME.match(src, pos) if localtime_match: @@ -651,24 +718,7 @@ def parse_value( # noqa: C901 if first_four in {"-inf", "+inf", "-nan", "+nan"}: return pos + 4, parse_float(first_four) - raise suffixed_err(src, pos, "Invalid value") - - -def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: - """Return a `TOMLDecodeError` where error message is suffixed with - coordinates in source.""" - - def coord_repr(src: str, pos: Pos) -> str: - if pos >= len(src): - return "end of document" - line = src.count("\n", 0, pos) + 1 - if line == 1: - column = pos + 1 - else: - column = pos - src.rindex("\n", 0, pos) - return f"line {line}, column {column}" - - return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") + raise TOMLDecodeError("Invalid value", src, pos) def is_unicode_scalar_value(codepoint: int) -> bool: diff --git a/Lib/typing.py b/Lib/typing.py index c924c7670425529..938e52922aee034 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -1940,9 +1940,11 @@ def _allow_reckless_class_checks(depth=2): _PROTO_ALLOWLIST = { 'collections.abc': [ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', + 'AsyncIterator', 'Hashable', 'Sized', 'Container', 'Collection', + 'Reversible', 'Buffer', ], 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], + 'os': ['PathLike'], } diff --git a/Lib/unittest/__init__.py b/Lib/unittest/__init__.py index 324e5d038aef03a..78ff6bb4fdcce5d 100644 --- a/Lib/unittest/__init__.py +++ b/Lib/unittest/__init__.py @@ -27,7 +27,7 @@ def testMultiply(self): http://docs.python.org/library/unittest.html Copyright (c) 1999-2003 Steve Purcell -Copyright (c) 2003-2010 Python Software Foundation +Copyright (c) 2003 Python Software Foundation This module is free software, and you may redistribute it 
and/or modify it under the same terms as Python itself, so long as this copyright message and disclaimer are retained in their original form. diff --git a/Lib/unittest/async_case.py b/Lib/unittest/async_case.py index bd06eb3207697a7..6000af1cef0a78c 100644 --- a/Lib/unittest/async_case.py +++ b/Lib/unittest/async_case.py @@ -5,6 +5,7 @@ from .case import TestCase +__unittest = True class IsolatedAsyncioTestCase(TestCase): # Names intentionally have a long prefix diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 21ca061a77c26f5..55cb4b1f6aff901 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -1360,6 +1360,7 @@ def __init__( self.autospec = autospec self.kwargs = kwargs self.additional_patchers = [] + self.is_started = False def copy(self): @@ -1472,6 +1473,9 @@ def get_original(self): def __enter__(self): """Perform the patch.""" + if self.is_started: + raise RuntimeError("Patch is already started") + new, spec, spec_set = self.new, self.spec, self.spec_set autospec, kwargs = self.autospec, self.kwargs new_callable = self.new_callable @@ -1603,6 +1607,7 @@ def __enter__(self): self.temp_original = original self.is_local = local self._exit_stack = contextlib.ExitStack() + self.is_started = True try: setattr(self.target, self.attribute, new_attr) if self.attribute_name is not None: @@ -1622,6 +1627,9 @@ def __enter__(self): def __exit__(self, *exc_info): """Undo the patch.""" + if not self.is_started: + return + if self.is_local and self.temp_original is not DEFAULT: setattr(self.target, self.attribute, self.temp_original) else: @@ -1638,6 +1646,7 @@ def __exit__(self, *exc_info): del self.target exit_stack = self._exit_stack del self._exit_stack + self.is_started = False return exit_stack.__exit__(*exc_info) diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py index a721d777c82f828..8d7631d5693ece9 100644 --- a/Lib/urllib/parse.py +++ b/Lib/urllib/parse.py @@ -753,7 +753,8 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False, parsed_result = {} pairs = parse_qsl(qs, keep_blank_values, strict_parsing, encoding=encoding, errors=errors, - max_num_fields=max_num_fields, separator=separator) + max_num_fields=max_num_fields, separator=separator, + _stacklevel=2) for name, value in pairs: if name in parsed_result: parsed_result[name].append(value) @@ -763,7 +764,7 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False, def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, - encoding='utf-8', errors='replace', max_num_fields=None, separator='&'): + encoding='utf-8', errors='replace', max_num_fields=None, separator='&', *, _stacklevel=1): """Parse a query given as a string argument. Arguments: @@ -791,7 +792,6 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, Returns a list, as G-d intended. """ - if not separator or not isinstance(separator, (str, bytes)): raise ValueError("Separator must be of type string or bytes.") if isinstance(qs, str): @@ -800,12 +800,21 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False, eq = '=' def _unquote(s): return unquote_plus(s, encoding=encoding, errors=errors) + elif qs is None: + return [] else: - if not qs: - return [] - # Use memoryview() to reject integers and iterables, - # acceptable by the bytes constructor. - qs = bytes(memoryview(qs)) + try: + # Use memoryview() to reject integers and iterables, + # acceptable by the bytes constructor. 
+ qs = bytes(memoryview(qs)) + except TypeError: + if not qs: + warnings.warn(f"Accepting {type(qs).__name__} objects with " + f"false value in urllib.parse.parse_qsl() is " + f"deprecated as of 3.14", + DeprecationWarning, stacklevel=_stacklevel + 1) + return [] + raise if isinstance(separator, str): separator = bytes(separator, 'ascii') eq = b'=' diff --git a/Lib/uuid.py b/Lib/uuid.py index 4d4f06cfc9ebbe3..9c6ad9643cf6d5d 100644 --- a/Lib/uuid.py +++ b/Lib/uuid.py @@ -1,8 +1,8 @@ -r"""UUID objects (universally unique identifiers) according to RFC 4122. +r"""UUID objects (universally unique identifiers) according to RFC 4122/9562. This module provides immutable UUID objects (class UUID) and the functions -uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5 -UUIDs as specified in RFC 4122. +uuid1(), uuid3(), uuid4(), uuid5(), and uuid8() for generating version 1, 3, +4, 5, and 8 UUIDs as specified in RFC 4122/9562. If all you want is a unique ID, you should probably call uuid1() or uuid4(). Note that uuid1() may compromise privacy since it creates a UUID containing @@ -124,12 +124,12 @@ class UUID: int the UUID as a 128-bit integer - urn the UUID as a URN as specified in RFC 4122 + urn the UUID as a URN as specified in RFC 4122/9562 variant the UUID variant (one of the constants RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) - version the UUID version number (1 through 5, meaningful only + version the UUID version number (1 through 8, meaningful only when the variant is RFC_4122) is_safe An enum indicating whether the UUID has been generated in @@ -214,9 +214,9 @@ def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None, if not 0 <= int < 1<<128: raise ValueError('int is out of range (need a 128-bit value)') if version is not None: - if not 1 <= version <= 5: + if not 1 <= version <= 8: raise ValueError('illegal version number') - # Set the variant to RFC 4122. + # Set the variant to RFC 4122/9562. int &= ~(0xc000 << 48) int |= 0x8000 << 48 # Set the version number. @@ -355,7 +355,7 @@ def variant(self): @property def version(self): - # The version bits are only meaningful for RFC 4122 UUIDs. + # The version bits are only meaningful for RFC 4122/9562 UUIDs. if self.variant == RFC_4122: return int((self.int >> 76) & 0xf) @@ -719,6 +719,28 @@ def uuid5(namespace, name): hash = sha1(namespace.bytes + name).digest() return UUID(bytes=hash[:16], version=5) +def uuid8(a=None, b=None, c=None): + """Generate a UUID from three custom blocks. + + * 'a' is the first 48-bit chunk of the UUID (octets 0-5); + * 'b' is the mid 12-bit chunk (octets 6-7); + * 'c' is the last 62-bit chunk (octets 8-15). + + When a value is not specified, a pseudo-random value is generated. 
+ """ + if a is None: + import random + a = random.getrandbits(48) + if b is None: + import random + b = random.getrandbits(12) + if c is None: + import random + c = random.getrandbits(62) + int_uuid_8 = (a & 0xffff_ffff_ffff) << 80 + int_uuid_8 |= (b & 0xfff) << 64 + int_uuid_8 |= c & 0x3fff_ffff_ffff_ffff + return UUID(int=int_uuid_8, version=8) def main(): """Run the uuid command line interface.""" @@ -726,7 +748,8 @@ def main(): "uuid1": uuid1, "uuid3": uuid3, "uuid4": uuid4, - "uuid5": uuid5 + "uuid5": uuid5, + "uuid8": uuid8, } uuid_namespace_funcs = ("uuid3", "uuid5") namespaces = { diff --git a/Lib/wsgiref/headers.py b/Lib/wsgiref/headers.py index 05d2ba4c664e5e0..c78879f80c7df24 100644 --- a/Lib/wsgiref/headers.py +++ b/Lib/wsgiref/headers.py @@ -1,7 +1,7 @@ """Manage HTTP Response Headers Much of this module is red-handedly pilfered from email.message in the stdlib, -so portions are Copyright (C) 2001,2002 Python Software Foundation, and were +so portions are Copyright (C) 2001 Python Software Foundation, and were written by Barry Warsaw. """ diff --git a/Mac/BuildScript/resources/License.rtf b/Mac/BuildScript/resources/License.rtf index 1255d1ce48ed6cf..b5cb8ec41c86e21 100644 --- a/Mac/BuildScript/resources/License.rtf +++ b/Mac/BuildScript/resources/License.rtf @@ -64,7 +64,7 @@ Some software incorporated into Python is under different licenses. The licenses \f1\b0 \ 1. This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and the Individual or Organization ("Licensee") accessing and otherwise using this software ("Python") in source or binary form and its associated documentation.\ \ -2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright \'a9 2001-2020 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.\ +2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright \'a9 2001 Python Software Foundation; All Rights Reserved" are retained in Python alone or in any derivative version prepared by Licensee.\ \ 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby agrees to include in any such work a brief summary of the changes made to Python.\ \ diff --git a/Mac/PythonLauncher/Info.plist.in b/Mac/PythonLauncher/Info.plist.in index 233694788ac2b7b..ce8f27cd7d4de70 100644 --- a/Mac/PythonLauncher/Info.plist.in +++ b/Mac/PythonLauncher/Info.plist.in @@ -40,9 +40,9 @@ CFBundleExecutable Python Launcher NSHumanReadableCopyright - Copyright © 2001-2024 Python Software Foundation + Copyright © 2001 Python Software Foundation CFBundleGetInfoString - %VERSION%, © 2001-2024 Python Software Foundation + %VERSION%, © 2001 Python Software Foundation CFBundleIconFile PythonLauncher.icns CFBundleIdentifier diff --git a/Makefile.pre.in b/Makefile.pre.in index a337223d4d8608c..8d94ba361fd934c 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2567,6 +2567,8 @@ TESTSUBDIRS= idlelib/idle_test \ test/tracedmodules \ test/translationdata \ test/translationdata/argparse \ + test/translationdata/getopt \ + test/translationdata/optparse \ test/typinganndata \ test/wheeldata \ test/xmltestdata \ diff --git a/Misc/ACKS b/Misc/ACKS index 1a25088052f4e10..08cd293eac38358 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -485,6 +485,7 @@ Luke Dunstan Virgil Dupras Bruno Dupuis Andy Dustman +Sayandip Dutta Gary Duzan Eugene Dvurechenski Karmen Dykstra @@ -732,6 +733,7 @@ Larry Hastings Tim Hatch Zac Hatfield-Dodds Shane Hathaway +Akinori Hattori Michael Haubenwallner Janko Hauser Flavian Hautbois @@ -953,6 +955,7 @@ Sanyam Khurana Tyler Kieft Mads Kiilerich Jason Killen +Beomsoo Bombs Kim Derek D. Kim Gihwan Kim Jan Kim diff --git a/Misc/NEWS.d/next/C_API/2024-09-03-13-33-33.gh-issue-123619.HhgUUI.rst b/Misc/NEWS.d/next/C_API/2024-09-03-13-33-33.gh-issue-123619.HhgUUI.rst new file mode 100644 index 000000000000000..ac821b5326026e6 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-09-03-13-33-33.gh-issue-123619.HhgUUI.rst @@ -0,0 +1,2 @@ +Added the :c:func:`PyUnstable_Object_EnableDeferredRefcount` function for +enabling :pep:`703` deferred reference counting. diff --git a/Misc/NEWS.d/next/C_API/2024-10-28-15-56-03.gh-issue-126061.Py51_1.rst b/Misc/NEWS.d/next/C_API/2024-10-28-15-56-03.gh-issue-126061.Py51_1.rst new file mode 100644 index 000000000000000..0a4ad4ea2874cfc --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-10-28-15-56-03.gh-issue-126061.Py51_1.rst @@ -0,0 +1,3 @@ +Add :c:func:`PyLong_IsPositive`, :c:func:`PyLong_IsNegative` +and :c:func:`PyLong_IsZero` for checking if a :c:type:`PyLongObject` +is positive, negative, or zero, respectively. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-18-16-10.gh-issue-126195.6ezBpr.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-18-16-10.gh-issue-126195.6ezBpr.rst new file mode 100644 index 000000000000000..01424d8a545d782 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-10-30-18-16-10.gh-issue-126195.6ezBpr.rst @@ -0,0 +1 @@ +Improve JIT performance by 1.4% on macOS Apple Silicon by using platform-specific memory protection APIs. Patch by Diego Russo. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-18-01-31.gh-issue-126209.2ZIhrS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-18-01-31.gh-issue-126209.2ZIhrS.rst new file mode 100644 index 000000000000000..727f7f8180ab22d --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-02-18-01-31.gh-issue-126209.2ZIhrS.rst @@ -0,0 +1,3 @@ +Fix an issue with the ``skip_file_prefixes`` parameter which resulted in inconsistent +behaviour between the C and Python implementations of :func:`warnings.warn`. +Patch by Daehee Kim. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-09-16-10-22.gh-issue-126066.9zs4m4.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-09-16-10-22.gh-issue-126066.9zs4m4.rst new file mode 100644 index 000000000000000..9c0072304ded638 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-09-16-10-22.gh-issue-126066.9zs4m4.rst @@ -0,0 +1,3 @@ +Fix :mod:`importlib` to not write incomplete .pyc files when a ulimit or some +other operating system mechanism prevents the write from going through +fully. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-11-17-02-48.gh-issue-126688.QiOXUi.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-11-17-02-48.gh-issue-126688.QiOXUi.rst new file mode 100644 index 000000000000000..30aa5722f0ea02f --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-11-17-02-48.gh-issue-126688.QiOXUi.rst @@ -0,0 +1,2 @@ +Fix a crash when calling :func:`os.fork` on some operating systems, +including SerenityOS. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-12-19-24-00.gh-issue-126341.5SdAe1.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-12-19-24-00.gh-issue-126341.5SdAe1.rst new file mode 100644 index 000000000000000..c2436d2ebf4d09f --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-12-19-24-00.gh-issue-126341.5SdAe1.rst @@ -0,0 +1 @@ +Now :exc:`ValueError` is raised instead of :exc:`SystemError` when trying to iterate over a released :class:`memoryview` object. diff --git a/Misc/NEWS.d/next/Documentation/2024-11-09-19-43-10.gh-issue-126622.YacfDc.rst b/Misc/NEWS.d/next/Documentation/2024-11-09-19-43-10.gh-issue-126622.YacfDc.rst new file mode 100644 index 000000000000000..a2181b5712873b4 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2024-11-09-19-43-10.gh-issue-126622.YacfDc.rst @@ -0,0 +1,3 @@ +Added stub pages for removed modules explaining their removal, where to find +replacements, and linking to the last Python version that supported them. +Contributed by Ned Batchelder. diff --git a/Misc/NEWS.d/next/Library/2020-05-19-01-12-47.gh-issue-84852.FEjHJW.rst b/Misc/NEWS.d/next/Library/2020-05-19-01-12-47.gh-issue-84852.FEjHJW.rst new file mode 100644 index 000000000000000..2581697591af629 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-19-01-12-47.gh-issue-84852.FEjHJW.rst @@ -0,0 +1,2 @@ +Add MIME types for MS Embedded OpenType, OpenType Layout, TrueType, +WOFF 1.0 and 2.0 fonts. Patch by Sahil Prajapati and Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2021-12-19-10-47-24.bpo-46128.Qv3EK1.rst b/Misc/NEWS.d/next/Library/2021-12-19-10-47-24.bpo-46128.Qv3EK1.rst new file mode 100644 index 000000000000000..7d11d20d94e8a3c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2021-12-19-10-47-24.bpo-46128.Qv3EK1.rst @@ -0,0 +1,2 @@ +Strip :class:`unittest.IsolatedAsyncioTestCase` stack frames from reported +stacktraces.
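To illustrate the :class:`memoryview` entry above (gh-issue-126341): iterating over a released view used to fail with :exc:`SystemError`, and now raises :exc:`ValueError` like other operations on released views. A minimal sketch, assuming an interpreter built with this fix::

    m = memoryview(b"abc")
    m.release()
    try:
        for _ in m:        # the iterator constructor now checks for release
            pass
    except ValueError:
        print("iteration over a released memoryview is rejected")
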
diff --git a/Misc/NEWS.d/next/Library/2022-10-15-10-18-20.gh-issue-71936.MzJjc_.rst b/Misc/NEWS.d/next/Library/2022-10-15-10-18-20.gh-issue-71936.MzJjc_.rst new file mode 100644 index 000000000000000..a0959cc086fa9e3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2022-10-15-10-18-20.gh-issue-71936.MzJjc_.rst @@ -0,0 +1 @@ +Fix a race condition in :class:`multiprocessing.pool.Pool`. diff --git a/Misc/NEWS.d/next/Library/2024-03-16-13-38-27.gh-issue-116897.UDQTjp.rst b/Misc/NEWS.d/next/Library/2024-03-16-13-38-27.gh-issue-116897.UDQTjp.rst new file mode 100644 index 000000000000000..6c8e4b16f20de86 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-03-16-13-38-27.gh-issue-116897.UDQTjp.rst @@ -0,0 +1,4 @@ +In the :mod:`urllib.parse` functions :func:`~urllib.parse.parse_qsl` and +:func:`~urllib.parse.parse_qs`, accepting objects with false values (like +``0`` and ``[]``), other than empty strings, byte-like objects and ``None``, +is now deprecated. diff --git a/Misc/NEWS.d/next/Library/2024-06-02-11-48-19.gh-issue-119826.N1obGa.rst b/Misc/NEWS.d/next/Library/2024-06-02-11-48-19.gh-issue-119826.N1obGa.rst new file mode 100644 index 000000000000000..6901e7475dd0827 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-02-11-48-19.gh-issue-119826.N1obGa.rst @@ -0,0 +1 @@ +Always return an absolute path for :func:`os.path.abspath` on Windows. diff --git a/Misc/NEWS.d/next/Library/2024-06-05-19-09-36.gh-issue-118289.moL9_d.rst b/Misc/NEWS.d/next/Library/2024-06-05-19-09-36.gh-issue-118289.moL9_d.rst new file mode 100644 index 000000000000000..522572e160ba7bc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-06-05-19-09-36.gh-issue-118289.moL9_d.rst @@ -0,0 +1,2 @@ +:func:`!posixpath.realpath` now raises :exc:`NotADirectoryError` when *strict* +mode is enabled and a non-directory path with a trailing slash is supplied. diff --git a/Misc/NEWS.d/next/Library/2024-08-22-12-12-35.gh-issue-89083.b6zFh0.rst b/Misc/NEWS.d/next/Library/2024-08-22-12-12-35.gh-issue-89083.b6zFh0.rst new file mode 100644 index 000000000000000..d37d585d51b4908 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-08-22-12-12-35.gh-issue-89083.b6zFh0.rst @@ -0,0 +1,2 @@ +Add :func:`uuid.uuid8` for generating UUIDv8 objects as specified in +:rfc:`9562`. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-10-23-20-44-30.gh-issue-117941.Y9jdlW.rst b/Misc/NEWS.d/next/Library/2024-10-23-20-44-30.gh-issue-117941.Y9jdlW.rst new file mode 100644 index 000000000000000..9c2553f0f0e8cd6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-23-20-44-30.gh-issue-117941.Y9jdlW.rst @@ -0,0 +1,2 @@ +:class:`!argparse.BooleanOptionalAction` now rejects option names starting +with ``--no-``. diff --git a/Misc/NEWS.d/next/Library/2024-10-24-13-40-20.gh-issue-126916.MAgz6D.rst b/Misc/NEWS.d/next/Library/2024-10-24-13-40-20.gh-issue-126916.MAgz6D.rst new file mode 100644 index 000000000000000..cbe2fc166ba6af5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-24-13-40-20.gh-issue-126916.MAgz6D.rst @@ -0,0 +1,2 @@ +Allow the *initial* parameter of :func:`functools.reduce` to be passed as a keyword argument. +Patch by Sayandip Dutta. diff --git a/Misc/NEWS.d/next/Library/2024-11-03-23-25-07.gh-issue-126374.Xu_THP.rst b/Misc/NEWS.d/next/Library/2024-11-03-23-25-07.gh-issue-126374.Xu_THP.rst new file mode 100644 index 000000000000000..ad7ecfb6af9ec8b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-03-23-25-07.gh-issue-126374.Xu_THP.rst @@ -0,0 +1 @@ +Add support for options with optional arguments in the :mod:`getopt` module.
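Two of the library entries above are easy to demonstrate together: :func:`uuid.uuid8` (gh-issue-89083) and the keyword form of *initial* in :func:`functools.reduce` (gh-issue-126916). A minimal sketch, assuming an interpreter built with these changes; the UUID value itself is random::

    import functools
    import uuid

    u = uuid.uuid8()        # a, b and c default to random 48-, 12- and 62-bit values
    print(u.version)        # 8

    total = functools.reduce(lambda acc, x: acc + x, [1, 2, 3], initial=10)
    print(total)            # 16
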
diff --git a/Misc/NEWS.d/next/Library/2024-11-04-13-16-18.gh-issue-126390.Cxvqa5.rst b/Misc/NEWS.d/next/Library/2024-11-04-13-16-18.gh-issue-126390.Cxvqa5.rst new file mode 100644 index 000000000000000..3b32bb512f65566 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-04-13-16-18.gh-issue-126390.Cxvqa5.rst @@ -0,0 +1,2 @@ +Add support for returning intermixed options and non-option arguments in +order in :func:`getopt.gnu_getopt`. diff --git a/Misc/NEWS.d/next/Library/2024-11-04-22-53-09.gh-issue-89416.YVQaas.rst b/Misc/NEWS.d/next/Library/2024-11-04-22-53-09.gh-issue-89416.YVQaas.rst new file mode 100644 index 000000000000000..f1a2fcbaff25640 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-04-22-53-09.gh-issue-89416.YVQaas.rst @@ -0,0 +1,2 @@ +Add :rfc:`9559` MIME types for Matroska audiovisual container formats. Patch +by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-11-05-09-54-49.gh-issue-126175.spnjJr.rst b/Misc/NEWS.d/next/Library/2024-11-05-09-54-49.gh-issue-126175.spnjJr.rst new file mode 100644 index 000000000000000..de7ce88c8d0f281 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-05-09-54-49.gh-issue-126175.spnjJr.rst @@ -0,0 +1,2 @@ +Add ``msg``, ``doc``, ``pos``, ``lineno`` and ``colno`` attributes to :exc:`tomllib.TOMLDecodeError`. +Deprecate instantiating with free-form arguments. diff --git a/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst b/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst index cdbe21f9f9a663b..d7d4477ec17814d 100644 --- a/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst +++ b/Misc/NEWS.d/next/Library/2024-11-07-01-40-11.gh-issue-117378.o9O5uM.rst @@ -11,7 +11,7 @@ It could also have a side effect of ``""`` remaining in :data:`sys.path` during forkserver preload imports instead of the absolute path from :func:`os.getcwd` at multiprocessing import time used in the worker ``sys.path``. -Potentially leading to incorrect imports from the wrong location during -preload. We are unaware of that actually happening. The issue was discovered -by someone observing unexpected preload performance gains. +The ``sys.path`` differences between phases in the child process could +potentially have caused preload to import incorrect things from the wrong +location. We are unaware of that actually having happened in practice. diff --git a/Misc/NEWS.d/next/Library/2024-11-07-22-41-47.gh-issue-126505.iztYE1.rst b/Misc/NEWS.d/next/Library/2024-11-07-22-41-47.gh-issue-126505.iztYE1.rst new file mode 100644 index 000000000000000..0a0f893a2688a0c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-07-22-41-47.gh-issue-126505.iztYE1.rst @@ -0,0 +1,4 @@ +Fix bugs in compiling case-insensitive :mod:`regular expressions <re>` with +character classes containing non-BMP characters: upper-case non-BMP +characters were ignored and the ASCII flag was ignored when +matching a character range whose upper bound is beyond the BMP region. diff --git a/Misc/NEWS.d/next/Library/2024-11-08-17-05-10.gh-issue-120423.7rdLVV.rst b/Misc/NEWS.d/next/Library/2024-11-08-17-05-10.gh-issue-120423.7rdLVV.rst new file mode 100644 index 000000000000000..b475257ceb6610d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-08-17-05-10.gh-issue-120423.7rdLVV.rst @@ -0,0 +1,2 @@ +Fix an issue where :func:`urllib.request.pathname2url` mishandled Windows paths +with embedded forward slashes.
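For the :exc:`tomllib.TOMLDecodeError` entry above (gh-issue-126175), the new attributes can be read off a caught exception instead of parsing the message string. A minimal sketch, assuming a build with the change; the exact message text is illustrative::

    import tomllib

    try:
        tomllib.loads("answer = # missing value")
    except tomllib.TOMLDecodeError as exc:
        print(exc.msg)                 # short description of the error
        print(exc.lineno, exc.colno)   # line and column where parsing failed
        print(exc.pos)                 # offset into exc.doc
        print(exc.doc)                 # the TOML source being parsed
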
diff --git a/Misc/NEWS.d/next/Library/2024-11-09-10-31-10.gh-issue-126595.A-7MyC.rst b/Misc/NEWS.d/next/Library/2024-11-09-10-31-10.gh-issue-126595.A-7MyC.rst new file mode 100644 index 000000000000000..84a5dc0b23922f5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-09-10-31-10.gh-issue-126595.A-7MyC.rst @@ -0,0 +1,2 @@ +Fix a crash when instantiating :class:`itertools.count` with an initial +count of :data:`sys.maxsize` on debug builds. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2024-11-10-18-14-51.gh-issue-104745.zAa5Ke.rst b/Misc/NEWS.d/next/Library/2024-11-10-18-14-51.gh-issue-104745.zAa5Ke.rst new file mode 100644 index 000000000000000..c83a10769820cfe --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-10-18-14-51.gh-issue-104745.zAa5Ke.rst @@ -0,0 +1,3 @@ +Limit starting a patcher (from :func:`unittest.mock.patch` or +:func:`unittest.mock.patch.object`) more than +once without stopping it. diff --git a/Misc/NEWS.d/next/Library/2024-11-11-13-00-21.gh-issue-126654.4gfP2y.rst b/Misc/NEWS.d/next/Library/2024-11-11-13-00-21.gh-issue-126654.4gfP2y.rst new file mode 100644 index 000000000000000..750158e6d4d3ae2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-11-13-00-21.gh-issue-126654.4gfP2y.rst @@ -0,0 +1,2 @@ +Fix a crash when a non-dict was passed to several functions in the ``_interpreters`` +module. diff --git a/Misc/NEWS.d/next/Library/2024-11-11-13-24-22.gh-issue-126699.ONGbMd.rst b/Misc/NEWS.d/next/Library/2024-11-11-13-24-22.gh-issue-126699.ONGbMd.rst new file mode 100644 index 000000000000000..9741294487d7160 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-11-13-24-22.gh-issue-126699.ONGbMd.rst @@ -0,0 +1 @@ +Allow :class:`collections.abc.AsyncIterator` to be a base for Protocols. diff --git a/Misc/NEWS.d/next/Library/2024-11-11-14-52-21.gh-issue-126705.0W7jFW.rst b/Misc/NEWS.d/next/Library/2024-11-11-14-52-21.gh-issue-126705.0W7jFW.rst new file mode 100644 index 000000000000000..f49c9c765d778f9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-11-14-52-21.gh-issue-126705.0W7jFW.rst @@ -0,0 +1 @@ +Allow :class:`os.PathLike` to be a base for Protocols.
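The last two entries above (gh-issue-126699 and gh-issue-126705) relax the rule that :class:`typing.Protocol` classes may only inherit from other protocols, specifically for :class:`collections.abc.AsyncIterator` and :class:`os.PathLike`. A minimal sketch of definitions that previously raised :exc:`TypeError` and should now be accepted, assuming a build with these fixes (the protocol names and members are hypothetical)::

    import os
    from collections.abc import AsyncIterator
    from typing import Protocol

    class NamedPath(os.PathLike, Protocol):
        # A path-like object that also carries a display name.
        name: str

        def __fspath__(self) -> str: ...

    class ClosableAsyncIterator(AsyncIterator, Protocol):
        # An async iterator that can be shut down early.
        def close(self) -> None: ...
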
diff --git a/Misc/NEWS.d/next/Library/2024-11-13-20-03-18.gh-issue-126188.RJLKk-.rst b/Misc/NEWS.d/next/Library/2024-11-13-20-03-18.gh-issue-126188.RJLKk-.rst new file mode 100644 index 000000000000000..bb13662e6ae62ce --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-13-20-03-18.gh-issue-126188.RJLKk-.rst @@ -0,0 +1 @@ +Update bundled pip to 24.3.1 diff --git a/Misc/NEWS.d/next/Security/2024-11-13-11-09-12.gh-issue-126623.TO7NnR.rst b/Misc/NEWS.d/next/Security/2024-11-13-11-09-12.gh-issue-126623.TO7NnR.rst new file mode 100644 index 000000000000000..f09a158af2a475f --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-11-13-11-09-12.gh-issue-126623.TO7NnR.rst @@ -0,0 +1 @@ +Upgrade libexpat to 2.6.4 diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index cc73e93009b43fe..583ad84e18fd4a3 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -48,11 +48,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "6aaee1b194bea30f0a60d1cce71eada8b14d3526" + "checksumValue": "373cc00d87782a736970644d863ff2ebbd0e4886" }, { "algorithm": "SHA256", - "checksumValue": "7bd4e53a8015534b5bbb58afe1a131b3989d3d4fca29bca685c44d34bcaa2555" + "checksumValue": "0f750bc336e510d14ac9a3e63fc2399f60f3f04f0061c426e86751ed5fba90e4" } ], "fileName": "Modules/expat/expat.h" @@ -62,11 +62,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "b70ce53fdc25ae482681ae2f6623c3c8edc9c1b7" + "checksumValue": "9e615c6e5c3ba00670f674a6b071bb855b0b563d" }, { "algorithm": "SHA256", - "checksumValue": "86afb425ec9999eb4f1ec9ab2fb41c58c4aa5cb9bf934b8c94264670fc5a961d" + "checksumValue": "3d90a4b65c40a3f848c36100f4d73b933a015c7b7cd85c28e4331a6b845c1ad0" } ], "fileName": "Modules/expat/expat_external.h" @@ -128,18 +128,18 @@ "fileName": "Modules/expat/nametab.h" }, { - "SPDXID": "SPDXRef-FILE-Modules-expat-pyexpatns.h", + "SPDXID": "SPDXRef-FILE-Modules-expat-refresh.sh", "checksums": [ { "algorithm": "SHA1", - "checksumValue": "f50c899172acd93fc539007bfb43315b83d407e4" + "checksumValue": "a9b0a33b8359cfe94b23972a1605daf8dcc605d9" }, { "algorithm": "SHA256", - "checksumValue": "d571b8258cfaa067a20adef553e5fcedd6671ca4a8841483496de031bd904567" + "checksumValue": "19eb541460bc2ca8b87118acd3c048f6af77affbf8719ac29aa7b6c8d70f83fd" } ], - "fileName": "Modules/expat/pyexpatns.h" + "fileName": "Modules/expat/refresh.sh" }, { "SPDXID": "SPDXRef-FILE-Modules-expat-siphash.h", @@ -188,11 +188,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "b2ec0ad170ccc21e63fbcfc8d7404cdd756eedd3" + "checksumValue": "3199fbd38b6fb158f73d5c8de6b6e6e3812ef803" }, { "algorithm": "SHA256", - "checksumValue": "92159d4e17393e56ee85f47d9fb31348695a58589899aa01e7536cdc88f60b85" + "checksumValue": "c1518244dd5ea397e345d00e12cc45d42f43453ed208218559c981c97a0583e2" } ], "fileName": "Modules/expat/xmlparse.c" @@ -1749,7 +1749,7 @@ "spdxElementId": "SPDXRef-PACKAGE-expat" }, { - "relatedSpdxElement": "SPDXRef-FILE-Modules-expat-pyexpatns.h", + "relatedSpdxElement": "SPDXRef-FILE-Modules-expat-refresh.sh", "relationshipType": "CONTAINS", "spdxElementId": "SPDXRef-PACKAGE-expat" }, diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 617a3dca35d9c25..f883125a2c70b26 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -1017,8 +1017,10 @@ _asyncio_Future_remove_done_callback_impl(FutureObj *self, PyTypeObject *cls, if (len == 1) { PyObject *cb_tup = PyList_GET_ITEM(self->fut_callbacks, 0); + Py_INCREF(cb_tup); int cmp = PyObject_RichCompareBool( PyTuple_GET_ITEM(cb_tup, 0), fn, Py_EQ); + 
Py_DECREF(cb_tup); if (cmp == -1) { return NULL; } diff --git a/Modules/_decimal/docstrings.h b/Modules/_decimal/docstrings.h index b34bff83d3f4e95..5abd7b9d807e195 100644 --- a/Modules/_decimal/docstrings.h +++ b/Modules/_decimal/docstrings.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2001-2012 Python Software Foundation. All Rights Reserved. + * Copyright (c) 2001 Python Software Foundation. All Rights Reserved. * Modified and extended by Stefan Krah. */ diff --git a/Modules/_decimal/tests/bench.py b/Modules/_decimal/tests/bench.py index 640290f2ec7962c..6605e9a92e2dde4 100644 --- a/Modules/_decimal/tests/bench.py +++ b/Modules/_decimal/tests/bench.py @@ -1,5 +1,5 @@ # -# Copyright (C) 2001-2012 Python Software Foundation. All Rights Reserved. +# Copyright (C) 2001 Python Software Foundation. All Rights Reserved. # Modified and extended by Stefan Krah. # diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index d2afe1a1bea018b..24b38063dde9e54 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -18,7 +18,7 @@ class _functools._lru_cache_wrapper "PyObject *" "&lru_cache_type_spec" /* _functools module written and maintained by Hye-Shik Chang with adaptations by Raymond Hettinger - Copyright (c) 2004, 2005, 2006 Python Software Foundation. + Copyright (c) 2004 Python Software Foundation. All rights reserved. */ @@ -937,8 +937,8 @@ _functools.reduce function as func: object iterable as seq: object - initial as result: object = NULL / + initial as result: object = NULL Apply a function of two arguments cumulatively to the items of an iterable, from left to right. @@ -953,7 +953,7 @@ calculates ((((1 + 2) + 3) + 4) + 5). static PyObject * _functools_reduce_impl(PyObject *module, PyObject *func, PyObject *seq, PyObject *result) -/*[clinic end generated code: output=30d898fe1267c79d input=d233c2670cba7f66]*/ +/*[clinic end generated code: output=30d898fe1267c79d input=1511e9a8c38581ac]*/ { PyObject *args, *it; diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c index b8d7dfb87cce0e1..75d69ade1d3c9b3 100644 --- a/Modules/_interpchannelsmodule.c +++ b/Modules/_interpchannelsmodule.c @@ -63,7 +63,7 @@ _globals (static struct globals): data (void *) obj (PyObject *) interpid (int64_t) - new_object (xid_newobjectfunc) + new_object (xid_newobjfunc) free (xid_freefunc) last (struct _channelitem *): ... @@ -1758,6 +1758,11 @@ channel_send(_channels *channels, int64_t cid, PyObject *obj, } int64_t interpid = PyInterpreterState_GetID(interp); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + // Look up the channel. PyThread_type_lock mutex = NULL; _channel_state *chan = NULL; @@ -1779,7 +1784,7 @@ channel_send(_channels *channels, int64_t cid, PyObject *obj, PyThread_release_lock(mutex); return -1; } - if (_PyObject_GetXIData(obj, data) != 0) { + if (_PyObject_GetXIData(&ctx, obj, data) != 0) { PyThread_release_lock(mutex); GLOBAL_FREE(data); return -1; diff --git a/Modules/_interpqueuesmodule.c b/Modules/_interpqueuesmodule.c index 8d0e223db7ff194..808938a9e8cd16a 100644 --- a/Modules/_interpqueuesmodule.c +++ b/Modules/_interpqueuesmodule.c @@ -1127,6 +1127,12 @@ queue_destroy(_queues *queues, int64_t qid) static int queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt, int unboundop) { + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + // Look up the queue. 
_queue *queue = NULL; int err = _queues_lookup(queues, qid, &queue); @@ -1141,13 +1147,12 @@ queue_put(_queues *queues, int64_t qid, PyObject *obj, int fmt, int unboundop) _queue_unmark_waiter(queue, queues->mutex); return -1; } - if (_PyObject_GetXIData(obj, data) != 0) { + if (_PyObject_GetXIData(&ctx, obj, data) != 0) { _queue_unmark_waiter(queue, queues->mutex); GLOBAL_FREE(data); return -1; } - assert(_PyXIData_INTERPID(data) == \ - PyInterpreterState_GetID(PyInterpreterState_Get())); + assert(_PyXIData_INTERPID(data) == PyInterpreterState_GetID(interp)); // Add the data to the queue. int64_t interpid = -1; // _queueitem_init() will set it. diff --git a/Modules/_interpreters_common.h b/Modules/_interpreters_common.h index b0e31a33734dabc..a6c639feea5d14a 100644 --- a/Modules/_interpreters_common.h +++ b/Modules/_interpreters_common.h @@ -8,15 +8,24 @@ static int ensure_xid_class(PyTypeObject *cls, xidatafunc getdata) { - //assert(cls->tp_flags & Py_TPFLAGS_HEAPTYPE); - return _PyXIData_RegisterClass(cls, getdata); + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + return _PyXIData_RegisterClass(&ctx, cls, getdata); } #ifdef REGISTERS_HEAP_TYPES static int clear_xid_class(PyTypeObject *cls) { - return _PyXIData_UnregisterClass(cls); + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + return _PyXIData_UnregisterClass(&ctx, cls); } #endif diff --git a/Modules/_interpretersmodule.c b/Modules/_interpretersmodule.c index 95acdd69e53260f..a36823c4bb982b4 100644 --- a/Modules/_interpretersmodule.c +++ b/Modules/_interpretersmodule.c @@ -936,6 +936,11 @@ static int _interp_exec(PyObject *self, PyInterpreterState *interp, PyObject *code_arg, PyObject *shared_arg, PyObject **p_excinfo) { + if (shared_arg != NULL && !PyDict_CheckExact(shared_arg)) { + PyErr_SetString(PyExc_TypeError, "expected 'shared' to be a dict"); + return -1; + } + // Extract code. 
Py_ssize_t codestrlen = -1; PyObject *bytes_obj = NULL; @@ -1181,7 +1186,13 @@ object_is_shareable(PyObject *self, PyObject *args, PyObject *kwds) return NULL; } - if (_PyObject_CheckXIData(obj) == 0) { + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return NULL; + } + + if (_PyObject_CheckXIData(&ctx, obj) == 0) { Py_RETURN_TRUE; } PyErr_Clear(); @@ -1480,6 +1491,11 @@ module_exec(PyObject *mod) PyInterpreterState *interp = PyInterpreterState_Get(); module_state *state = get_module_state(mod); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + #define ADD_WHENCE(NAME) \ if (PyModule_AddIntConstant(mod, "WHENCE_" #NAME, \ _PyInterpreterState_WHENCE_##NAME) < 0) \ @@ -1501,9 +1517,7 @@ module_exec(PyObject *mod) if (PyModule_AddType(mod, (PyTypeObject *)PyExc_InterpreterNotFoundError) < 0) { goto error; } - PyObject *PyExc_NotShareableError = \ - _PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError; - if (PyModule_AddType(mod, (PyTypeObject *)PyExc_NotShareableError) < 0) { + if (PyModule_AddType(mod, (PyTypeObject *)ctx.PyExc_NotShareableError) < 0) { goto error; } diff --git a/Modules/_json.c b/Modules/_json.c index ce0093ab431d05b..a99abbe72bf7a08 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -86,11 +86,11 @@ encoder_dealloc(PyObject *self); static int encoder_clear(PyEncoderObject *self); static int -encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *seq, PyObject *newline_indent); +encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *seq, Py_ssize_t indent_level, PyObject *indent_cache); static int -encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *obj, PyObject *newline_indent); +encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *obj, Py_ssize_t indent_level, PyObject *indent_cache); static int -encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *dct, PyObject *newline_indent); +encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *dct, Py_ssize_t indent_level, PyObject *indent_cache); static PyObject * _encoded_const(PyObject *obj); static void @@ -1252,17 +1252,92 @@ encoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return (PyObject *)s; } + +/* indent_cache is a list that contains intermixed values at even and odd + * positions: + * + * 2*k : '\n' + indent * (k + initial_indent_level) + * strings written after opening and before closing brackets + * 2*k-1 : item_separator + '\n' + indent * (k + initial_indent_level) + * strings written between items + * + * Its size is always an odd number. + */ static PyObject * -_create_newline_indent(PyObject *indent, Py_ssize_t indent_level) +create_indent_cache(PyEncoderObject *s, Py_ssize_t indent_level) { PyObject *newline_indent = PyUnicode_FromOrdinal('\n'); if (newline_indent != NULL && indent_level) { PyUnicode_AppendAndDel(&newline_indent, - PySequence_Repeat(indent, indent_level)); + PySequence_Repeat(s->indent, indent_level)); + } + if (newline_indent == NULL) { + return NULL; + } + PyObject *indent_cache = PyList_New(1); + if (indent_cache == NULL) { + Py_DECREF(newline_indent); + return NULL; } - return newline_indent; + PyList_SET_ITEM(indent_cache, 0, newline_indent); + return indent_cache; +} + +/* Extend indent_cache by adding values for the next level. 
+ * It should have values for the indent_level-1 level before the call. + */ +static int +update_indent_cache(PyEncoderObject *s, + Py_ssize_t indent_level, PyObject *indent_cache) +{ + assert(indent_level * 2 == PyList_GET_SIZE(indent_cache) + 1); + assert(indent_level > 0); + PyObject *newline_indent = PyList_GET_ITEM(indent_cache, (indent_level - 1)*2); + newline_indent = PyUnicode_Concat(newline_indent, s->indent); + if (newline_indent == NULL) { + return -1; + } + PyObject *separator_indent = PyUnicode_Concat(s->item_separator, newline_indent); + if (separator_indent == NULL) { + Py_DECREF(newline_indent); + return -1; + } + + if (PyList_Append(indent_cache, separator_indent) < 0 || + PyList_Append(indent_cache, newline_indent) < 0) + { + Py_DECREF(separator_indent); + Py_DECREF(newline_indent); + return -1; + } + Py_DECREF(separator_indent); + Py_DECREF(newline_indent); + return 0; } +static PyObject * +get_item_separator(PyEncoderObject *s, + Py_ssize_t indent_level, PyObject *indent_cache) +{ + assert(indent_level > 0); + if (indent_level * 2 > PyList_GET_SIZE(indent_cache)) { + if (update_indent_cache(s, indent_level, indent_cache) < 0) { + return NULL; + } + } + assert(indent_level * 2 < PyList_GET_SIZE(indent_cache)); + return PyList_GET_ITEM(indent_cache, indent_level * 2 - 1); +} + +static int +write_newline_indent(PyUnicodeWriter *writer, + Py_ssize_t indent_level, PyObject *indent_cache) +{ + PyObject *newline_indent = PyList_GET_ITEM(indent_cache, indent_level * 2); + return PyUnicodeWriter_WriteStr(writer, newline_indent); +} + + static PyObject * encoder_call(PyEncoderObject *self, PyObject *args, PyObject *kwds) { @@ -1280,20 +1355,20 @@ encoder_call(PyEncoderObject *self, PyObject *args, PyObject *kwds) return NULL; } - PyObject *newline_indent = NULL; + PyObject *indent_cache = NULL; if (self->indent != Py_None) { - newline_indent = _create_newline_indent(self->indent, indent_level); - if (newline_indent == NULL) { + indent_cache = create_indent_cache(self, indent_level); + if (indent_cache == NULL) { PyUnicodeWriter_Discard(writer); return NULL; } } - if (encoder_listencode_obj(self, writer, obj, newline_indent)) { + if (encoder_listencode_obj(self, writer, obj, indent_level, indent_cache)) { PyUnicodeWriter_Discard(writer); - Py_XDECREF(newline_indent); + Py_XDECREF(indent_cache); return NULL; } - Py_XDECREF(newline_indent); + Py_XDECREF(indent_cache); PyObject *str = PyUnicodeWriter_Finish(writer); if (str == NULL) { @@ -1381,7 +1456,8 @@ _steal_accumulate(PyUnicodeWriter *writer, PyObject *stolen) static int encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, - PyObject *obj, PyObject *newline_indent) + PyObject *obj, + Py_ssize_t indent_level, PyObject *indent_cache) { /* Encode Python object obj to a JSON term */ PyObject *newobj; @@ -1421,14 +1497,14 @@ encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, else if (PyList_Check(obj) || PyTuple_Check(obj)) { if (_Py_EnterRecursiveCall(" while encoding a JSON object")) return -1; - rv = encoder_listencode_list(s, writer, obj, newline_indent); + rv = encoder_listencode_list(s, writer, obj, indent_level, indent_cache); _Py_LeaveRecursiveCall(); return rv; } else if (PyDict_Check(obj)) { if (_Py_EnterRecursiveCall(" while encoding a JSON object")) return -1; - rv = encoder_listencode_dict(s, writer, obj, newline_indent); + rv = encoder_listencode_dict(s, writer, obj, indent_level, indent_cache); _Py_LeaveRecursiveCall(); return rv; } @@ -1462,7 +1538,7 @@ 
encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, Py_XDECREF(ident); return -1; } - rv = encoder_listencode_obj(s, writer, newobj, newline_indent); + rv = encoder_listencode_obj(s, writer, newobj, indent_level, indent_cache); _Py_LeaveRecursiveCall(); Py_DECREF(newobj); @@ -1485,7 +1561,7 @@ encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer, static int encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *first, PyObject *dct, PyObject *key, PyObject *value, - PyObject *newline_indent, + Py_ssize_t indent_level, PyObject *indent_cache, PyObject *item_separator) { PyObject *keystr = NULL; @@ -1541,7 +1617,7 @@ encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *firs if (PyUnicodeWriter_WriteStr(writer, s->key_separator) < 0) { return -1; } - if (encoder_listencode_obj(s, writer, value, newline_indent) < 0) { + if (encoder_listencode_obj(s, writer, value, indent_level, indent_cache) < 0) { _PyErr_FormatNote("when serializing %T item %R", dct, key); return -1; } @@ -1550,15 +1626,14 @@ encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *firs static int encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, - PyObject *dct, PyObject *newline_indent) + PyObject *dct, + Py_ssize_t indent_level, PyObject *indent_cache) { /* Encode Python dict dct a JSON term */ PyObject *ident = NULL; PyObject *items = NULL; PyObject *key, *value; bool first = true; - PyObject *new_newline_indent = NULL; - PyObject *separator_indent = NULL; if (PyDict_GET_SIZE(dct) == 0) { /* Fast path */ @@ -1585,19 +1660,13 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, goto bail; } - PyObject *current_item_separator = s->item_separator; // borrowed reference + PyObject *separator = s->item_separator; // borrowed reference if (s->indent != Py_None) { - new_newline_indent = PyUnicode_Concat(newline_indent, s->indent); - if (new_newline_indent == NULL) { - goto bail; - } - separator_indent = PyUnicode_Concat(current_item_separator, new_newline_indent); - if (separator_indent == NULL) { - goto bail; - } - // update item separator with a borrowed reference - current_item_separator = separator_indent; - if (PyUnicodeWriter_WriteStr(writer, new_newline_indent) < 0) { + indent_level++; + separator = get_item_separator(s, indent_level, indent_cache); + if (separator == NULL || + write_newline_indent(writer, indent_level, indent_cache) < 0) + { goto bail; } } @@ -1618,8 +1687,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, key = PyTuple_GET_ITEM(item, 0); value = PyTuple_GET_ITEM(item, 1); if (encoder_encode_key_value(s, writer, &first, dct, key, value, - new_newline_indent, - current_item_separator) < 0) + indent_level, indent_cache, + separator) < 0) goto bail; } Py_CLEAR(items); @@ -1628,8 +1697,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, Py_ssize_t pos = 0; while (PyDict_Next(dct, &pos, &key, &value)) { if (encoder_encode_key_value(s, writer, &first, dct, key, value, - new_newline_indent, - current_item_separator) < 0) + indent_level, indent_cache, + separator) < 0) goto bail; } } @@ -1640,10 +1709,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, Py_CLEAR(ident); } if (s->indent != Py_None) { - Py_CLEAR(new_newline_indent); - Py_CLEAR(separator_indent); - - if (PyUnicodeWriter_WriteStr(writer, newline_indent) < 0) { + indent_level--; + if (write_newline_indent(writer, indent_level, indent_cache) < 0) { goto 
bail; } } @@ -1656,20 +1723,17 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer, bail: Py_XDECREF(items); Py_XDECREF(ident); - Py_XDECREF(separator_indent); - Py_XDECREF(new_newline_indent); return -1; } static int encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, - PyObject *seq, PyObject *newline_indent) + PyObject *seq, + Py_ssize_t indent_level, PyObject *indent_cache) { PyObject *ident = NULL; PyObject *s_fast = NULL; Py_ssize_t i; - PyObject *new_newline_indent = NULL; - PyObject *separator_indent = NULL; ident = NULL; s_fast = PySequence_Fast(seq, "_iterencode_list needs a sequence"); @@ -1702,20 +1766,13 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, PyObject *separator = s->item_separator; // borrowed reference if (s->indent != Py_None) { - new_newline_indent = PyUnicode_Concat(newline_indent, s->indent); - if (new_newline_indent == NULL) { - goto bail; - } - - if (PyUnicodeWriter_WriteStr(writer, new_newline_indent) < 0) { - goto bail; - } - - separator_indent = PyUnicode_Concat(separator, new_newline_indent); - if (separator_indent == NULL) { + indent_level++; + separator = get_item_separator(s, indent_level, indent_cache); + if (separator == NULL || + write_newline_indent(writer, indent_level, indent_cache) < 0) + { goto bail; } - separator = separator_indent; // assign separator with borrowed reference } for (i = 0; i < PySequence_Fast_GET_SIZE(s_fast); i++) { PyObject *obj = PySequence_Fast_GET_ITEM(s_fast, i); @@ -1723,7 +1780,7 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, if (PyUnicodeWriter_WriteStr(writer, separator) < 0) goto bail; } - if (encoder_listencode_obj(s, writer, obj, new_newline_indent)) { + if (encoder_listencode_obj(s, writer, obj, indent_level, indent_cache)) { _PyErr_FormatNote("when serializing %T item %zd", seq, i); goto bail; } @@ -1735,9 +1792,8 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, } if (s->indent != Py_None) { - Py_CLEAR(new_newline_indent); - Py_CLEAR(separator_indent); - if (PyUnicodeWriter_WriteStr(writer, newline_indent) < 0) { + indent_level--; + if (write_newline_indent(writer, indent_level, indent_cache) < 0) { goto bail; } } @@ -1751,8 +1807,6 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer, bail: Py_XDECREF(ident); Py_DECREF(s_fast); - Py_XDECREF(separator_indent); - Py_XDECREF(new_newline_indent); return -1; } diff --git a/Modules/_testcapi/long.c b/Modules/_testcapi/long.c index 2b5e85d57075226..ebea09080ef11c7 100644 --- a/Modules/_testcapi/long.c +++ b/Modules/_testcapi/long.c @@ -105,6 +105,30 @@ pylong_getsign(PyObject *module, PyObject *arg) } +static PyObject * +pylong_ispositive(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyLong_IsPositive(arg)); +} + + +static PyObject * +pylong_isnegative(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyLong_IsNegative(arg)); +} + + +static PyObject * +pylong_iszero(PyObject *module, PyObject *arg) +{ + NULLABLE(arg); + RETURN_INT(PyLong_IsZero(arg)); +} + + static PyObject * pylong_aspid(PyObject *module, PyObject *arg) { @@ -124,6 +148,9 @@ static PyMethodDef test_methods[] = { {"pylong_fromnativebytes", pylong_fromnativebytes, METH_VARARGS}, {"pylong_getsign", pylong_getsign, METH_O}, {"pylong_aspid", pylong_aspid, METH_O}, + {"pylong_ispositive", pylong_ispositive, METH_O}, + {"pylong_isnegative", pylong_isnegative, METH_O}, + {"pylong_iszero", pylong_iszero, METH_O}, {NULL}, }; diff --git 
a/Modules/_testcapi/object.c b/Modules/_testcapi/object.c index 1c76e766a790f07..3af5429ef00985e 100644 --- a/Modules/_testcapi/object.c +++ b/Modules/_testcapi/object.c @@ -124,13 +124,20 @@ pyobject_clear_weakrefs_no_callbacks(PyObject *self, PyObject *obj) Py_RETURN_NONE; } +static PyObject * +pyobject_enable_deferred_refcount(PyObject *self, PyObject *obj) +{ + int result = PyUnstable_Object_EnableDeferredRefcount(obj); + return PyLong_FromLong(result); +} + static PyMethodDef test_methods[] = { {"call_pyobject_print", call_pyobject_print, METH_VARARGS}, {"pyobject_print_null", pyobject_print_null, METH_VARARGS}, {"pyobject_print_noref_object", pyobject_print_noref_object, METH_VARARGS}, {"pyobject_print_os_error", pyobject_print_os_error, METH_VARARGS}, {"pyobject_clear_weakrefs_no_callbacks", pyobject_clear_weakrefs_no_callbacks, METH_O}, - + {"pyobject_enable_deferred_refcount", pyobject_enable_deferred_refcount, METH_O}, {NULL}, }; diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 327a077671047c6..b02f794d27d5bd2 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -1797,6 +1797,12 @@ _xid_capsule_destructor(PyObject *capsule) static PyObject * get_crossinterp_data(PyObject *self, PyObject *args) { + PyInterpreterState *interp = PyInterpreterState_Get(); + _PyXIData_lookup_context_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return NULL; + } + PyObject *obj = NULL; if (!PyArg_ParseTuple(args, "O:get_crossinterp_data", &obj)) { return NULL; @@ -1806,7 +1812,7 @@ get_crossinterp_data(PyObject *self, PyObject *args) if (data == NULL) { return NULL; } - if (_PyObject_GetXIData(obj, data) != 0) { + if (_PyObject_GetXIData(&ctx, obj, data) != 0) { _PyXIData_Free(data); return NULL; } @@ -2063,6 +2069,14 @@ identify_type_slot_wrappers(PyObject *self, PyObject *Py_UNUSED(ignored)) return _PyType_GetSlotWrapperNames(); } + +static PyObject * +has_deferred_refcount(PyObject *self, PyObject *op) +{ + return PyBool_FromLong(_PyObject_HasDeferredRefcount(op)); +} + + static PyMethodDef module_functions[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, @@ -2159,6 +2173,7 @@ static PyMethodDef module_functions[] = { GH_119213_GETARGS_METHODDEF {"get_static_builtin_types", get_static_builtin_types, METH_NOARGS}, {"identify_type_slot_wrappers", identify_type_slot_wrappers, METH_NOARGS}, + {"has_deferred_refcount", has_deferred_refcount, METH_O}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/clinic/_functoolsmodule.c.h b/Modules/clinic/_functoolsmodule.c.h index 760877928db60d8..afd5eb4eb12b782 100644 --- a/Modules/clinic/_functoolsmodule.c.h +++ b/Modules/clinic/_functoolsmodule.c.h @@ -69,7 +69,7 @@ _functools_cmp_to_key(PyObject *module, PyObject *const *args, Py_ssize_t nargs, } PyDoc_STRVAR(_functools_reduce__doc__, -"reduce($module, function, iterable, initial=, /)\n" +"reduce($module, function, iterable, /, initial=)\n" "--\n" "\n" "Apply a function of two arguments cumulatively to the items of an iterable, from left to right.\n" @@ -82,30 +82,59 @@ PyDoc_STRVAR(_functools_reduce__doc__, "calculates ((((1 + 2) + 3) + 4) + 5)."); #define _FUNCTOOLS_REDUCE_METHODDEF \ - {"reduce", _PyCFunction_CAST(_functools_reduce), METH_FASTCALL, _functools_reduce__doc__}, + {"reduce", _PyCFunction_CAST(_functools_reduce), METH_FASTCALL|METH_KEYWORDS, _functools_reduce__doc__}, static PyObject * _functools_reduce_impl(PyObject *module, PyObject *func, PyObject *seq, PyObject 
*result); static PyObject * -_functools_reduce(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +_functools_reduce(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) + + #define NUM_KEYWORDS 1 + static struct { + PyGC_Head _this_is_not_used; + PyObject_VAR_HEAD + PyObject *ob_item[NUM_KEYWORDS]; + } _kwtuple = { + .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS) + .ob_item = { &_Py_ID(initial), }, + }; + #undef NUM_KEYWORDS + #define KWTUPLE (&_kwtuple.ob_base.ob_base) + + #else // !Py_BUILD_CORE + # define KWTUPLE NULL + #endif // !Py_BUILD_CORE + + static const char * const _keywords[] = {"", "", "initial", NULL}; + static _PyArg_Parser _parser = { + .keywords = _keywords, + .fname = "reduce", + .kwtuple = KWTUPLE, + }; + #undef KWTUPLE + PyObject *argsbuf[3]; + Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 2; PyObject *func; PyObject *seq; PyObject *result = NULL; - if (!_PyArg_CheckPositional("reduce", nargs, 2, 3)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, + /*minpos*/ 2, /*maxpos*/ 3, /*minkw*/ 0, /*varpos*/ 0, argsbuf); + if (!args) { goto exit; } func = args[0]; seq = args[1]; - if (nargs < 3) { - goto skip_optional; + if (!noptargs) { + goto skip_optional_pos; } result = args[2]; -skip_optional: +skip_optional_pos: return_value = _functools_reduce_impl(module, func, seq, result); exit: @@ -159,4 +188,4 @@ _functools__lru_cache_wrapper_cache_clear(PyObject *self, PyObject *Py_UNUSED(ig return return_value; } -/*[clinic end generated code: output=0c3df7e5131200b7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e6edcc01f0720daf input=a9049054013a1b77]*/ diff --git a/Modules/expat/expat.h b/Modules/expat/expat.h index d0d6015a66283f4..523b37d8d5787d8 100644 --- a/Modules/expat/expat.h +++ b/Modules/expat/expat.h @@ -130,7 +130,9 @@ enum XML_Error { /* Added in 2.3.0. */ XML_ERROR_NO_BUFFER, /* Added in 2.4.0. */ - XML_ERROR_AMPLIFICATION_LIMIT_BREACH + XML_ERROR_AMPLIFICATION_LIMIT_BREACH, + /* Added in 2.6.4. */ + XML_ERROR_NOT_STARTED, }; enum XML_Content_Type { @@ -1066,7 +1068,7 @@ XML_SetReparseDeferralEnabled(XML_Parser parser, XML_Bool enabled); */ #define XML_MAJOR_VERSION 2 #define XML_MINOR_VERSION 6 -#define XML_MICRO_VERSION 3 +#define XML_MICRO_VERSION 4 #ifdef __cplusplus } diff --git a/Modules/expat/expat_external.h b/Modules/expat/expat_external.h index 12c560e14716ff0..567872b09836e1c 100644 --- a/Modules/expat/expat_external.h +++ b/Modules/expat/expat_external.h @@ -40,6 +40,10 @@ #ifndef Expat_External_INCLUDED #define Expat_External_INCLUDED 1 +/* Namespace external symbols to allow multiple libexpat version to + co-exist. */ +#include "pyexpatns.h" + /* External API definitions */ /* Expat tries very hard to make the API boundary very specifically @@ -64,11 +68,6 @@ compiled with the cdecl calling convention as the default since system headers may assume the cdecl convention. */ - -/* Namespace external symbols to allow multiple libexpat version to - co-exist. 
*/ -#include "pyexpatns.h" - #ifndef XMLCALL # if defined(_MSC_VER) # define XMLCALL __cdecl diff --git a/Modules/expat/refresh.sh b/Modules/expat/refresh.sh new file mode 100755 index 000000000000000..82a9dbc23ad26b7 --- /dev/null +++ b/Modules/expat/refresh.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +# +# Use this script to update libexpat + +set -e +set -o pipefail + +if [[ "${BASH_VERSINFO[0]}" -lt 4 ]]; then + echo "A bash version >= 4 required. Got: $BASH_VERSION" >&2 + exit 1 +fi + +# Update this when updating to a new version after verifying that the changes +# the update brings in are good. These values are used for verifying the SBOM, too. +expected_libexpat_tag="R_2_6_4" +expected_libexpat_version="2.6.4" +expected_libexpat_sha256="fd03b7172b3bd7427a3e7a812063f74754f24542429b634e0db6511b53fb2278" + +expat_dir="$(realpath "$(dirname -- "${BASH_SOURCE[0]}")")" +cd ${expat_dir} + +# Step 1: download and copy files +curl --location "https://github.com/libexpat/libexpat/releases/download/${expected_libexpat_tag}/expat-${expected_libexpat_version}.tar.gz" > libexpat.tar.gz +echo "${expected_libexpat_sha256} libexpat.tar.gz" | sha256sum --check + +# Step 2: Pull files from the libexpat distribution +declare -a lib_files +lib_files=( + ascii.h + asciitab.h + expat.h + expat_external.h + iasciitab.h + internal.h + latin1tab.h + nametab.h + siphash.h + utf8tab.h + winconfig.h + xmlparse.c + xmlrole.c + xmlrole.h + xmltok.c + xmltok.h + xmltok_impl.c + xmltok_impl.h + xmltok_ns.c +) +for f in "${lib_files[@]}"; do + tar xzvf libexpat.tar.gz "expat-${expected_libexpat_version}/lib/${f}" --strip-components 2 +done +rm libexpat.tar.gz + +# Step 3: Add the namespacing include to expat_external.h +sed -i 's/#define Expat_External_INCLUDED 1/&\n\n\/* Namespace external symbols to allow multiple libexpat version to\n co-exist. \*\/\n#include "pyexpatns.h"/' expat_external.h + +echo "Updated; verify all is okay using git diff and git status." 
diff --git a/Modules/expat/xmlparse.c b/Modules/expat/xmlparse.c index d9285b213b38bd4..a4e091e7c33c0ae 100644 --- a/Modules/expat/xmlparse.c +++ b/Modules/expat/xmlparse.c @@ -1,4 +1,4 @@ -/* ba4cdf9bdb534f355a9def4c9e25d20ee8e72f95b0a4d930be52e563f5080196 (2.6.3+) +/* c5625880f4bf417c1463deee4eb92d86ff413f802048621c57e25fe483eb59e4 (2.6.4+) __ __ _ ___\ \/ /_ __ __ _| |_ / _ \\ /| '_ \ / _` | __| @@ -40,6 +40,7 @@ Copyright (c) 2023 Owain Davies Copyright (c) 2023-2024 Sony Corporation / Snild Dolkow Copyright (c) 2024 Berkay Eren Ürün + Copyright (c) 2024 Hanno Böck Licensed under the MIT license: Permission is hereby granted, free of charge, to any person obtaining @@ -2234,6 +2235,9 @@ XML_StopParser(XML_Parser parser, XML_Bool resumable) { if (parser == NULL) return XML_STATUS_ERROR; switch (parser->m_parsingStatus.parsing) { + case XML_INITIALIZED: + parser->m_errorCode = XML_ERROR_NOT_STARTED; + return XML_STATUS_ERROR; case XML_SUSPENDED: if (resumable) { parser->m_errorCode = XML_ERROR_SUSPENDED; @@ -2244,7 +2248,7 @@ XML_StopParser(XML_Parser parser, XML_Bool resumable) { case XML_FINISHED: parser->m_errorCode = XML_ERROR_FINISHED; return XML_STATUS_ERROR; - default: + case XML_PARSING: if (resumable) { #ifdef XML_DTD if (parser->m_isParamEntity) { @@ -2255,6 +2259,9 @@ XML_StopParser(XML_Parser parser, XML_Bool resumable) { parser->m_parsingStatus.parsing = XML_SUSPENDED; } else parser->m_parsingStatus.parsing = XML_FINISHED; + break; + default: + assert(0); } return XML_STATUS_OK; } @@ -2519,6 +2526,9 @@ XML_ErrorString(enum XML_Error code) { case XML_ERROR_AMPLIFICATION_LIMIT_BREACH: return XML_L( "limit on input amplification factor (from DTD and entities) breached"); + /* Added in 2.6.4. */ + case XML_ERROR_NOT_STARTED: + return XML_L("parser not started"); } return NULL; } @@ -7856,7 +7866,7 @@ accountingReportDiff(XML_Parser rootParser, assert(! rootParser->m_parentParser); fprintf(stderr, - " (+" EXPAT_FMT_PTRDIFF_T("6") " bytes %s|%d, xmlparse.c:%d) %*s\"", + " (+" EXPAT_FMT_PTRDIFF_T("6") " bytes %s|%u, xmlparse.c:%d) %*s\"", bytesMore, (account == XML_ACCOUNT_DIRECT) ? 
"DIR" : "EXP", levelsAwayFromRootParser, source_line, 10, ""); @@ -7969,7 +7979,7 @@ entityTrackingReportStats(XML_Parser rootParser, ENTITY *entity, fprintf( stderr, - "expat: Entities(%p): Count %9d, depth %2d/%2d %*s%s%s; %s length %d (xmlparse.c:%d)\n", + "expat: Entities(%p): Count %9u, depth %2u/%2u %*s%s%s; %s length %d (xmlparse.c:%d)\n", (void *)rootParser, rootParser->m_entity_stats.countEverOpened, rootParser->m_entity_stats.currentDepth, rootParser->m_entity_stats.maximumDepthSeen, diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 1201fa094902d7e..78fbdcdf77a9236 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -3291,6 +3291,9 @@ itertools_count_impl(PyTypeObject *type, PyObject *long_cnt, PyErr_Clear(); fast_mode = 0; } + else if (cnt == PY_SSIZE_T_MAX) { + fast_mode = 0; + } } } else { cnt = 0; diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 1ce2baecb8a9649..da7399de86f2137 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -678,6 +678,7 @@ PyOS_AfterFork_Child(void) _PyEval_StartTheWorldAll(&_PyRuntime); _PyThreadState_DeleteList(list); + _PyImport_ReInitLock(tstate->interp); _PyImport_ReleaseLock(tstate->interp); _PySignal_AfterFork(); diff --git a/Objects/longobject.c b/Objects/longobject.c index b4c0f63a9843ce5..4aa35685b509f27 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -784,6 +784,39 @@ PyLong_AsUnsignedLongMask(PyObject *op) return val; } +int +PyLong_IsPositive(PyObject *obj) +{ + assert(obj != NULL); + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %T", obj); + return -1; + } + return _PyLong_IsPositive((PyLongObject *)obj); +} + +int +PyLong_IsNegative(PyObject *obj) +{ + assert(obj != NULL); + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %T", obj); + return -1; + } + return _PyLong_IsNegative((PyLongObject *)obj); +} + +int +PyLong_IsZero(PyObject *obj) +{ + assert(obj != NULL); + if (!PyLong_Check(obj)) { + PyErr_Format(PyExc_TypeError, "expected int, got %T", obj); + return -1; + } + return _PyLong_IsZero((PyLongObject *)obj); +} + int _PyLong_Sign(PyObject *vv) { diff --git a/Objects/memoryobject.c b/Objects/memoryobject.c index d4672e8198cb24e..25634f997ac66b5 100644 --- a/Objects/memoryobject.c +++ b/Objects/memoryobject.c @@ -3356,6 +3356,7 @@ memory_iter(PyObject *seq) PyErr_BadInternalCall(); return NULL; } + CHECK_RELEASED(seq); PyMemoryViewObject *obj = (PyMemoryViewObject *)seq; int ndims = obj->view.ndim; if (ndims == 0) { diff --git a/Objects/object.c b/Objects/object.c index d976428f485176e..80a236c3b6db796 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -2520,6 +2520,35 @@ _PyObject_SetDeferredRefcount(PyObject *op) #endif } +int +PyUnstable_Object_EnableDeferredRefcount(PyObject *op) +{ +#ifdef Py_GIL_DISABLED + if (!PyType_IS_GC(Py_TYPE(op))) { + // Deferred reference counting doesn't work + // on untracked types. + return 0; + } + + uint8_t bits = _Py_atomic_load_uint8(&op->ob_gc_bits); + if ((bits & _PyGC_BITS_DEFERRED) != 0) + { + // Nothing to do. + return 0; + } + + if (_Py_atomic_compare_exchange_uint8(&op->ob_gc_bits, &bits, bits | _PyGC_BITS_DEFERRED) == 0) + { + // Someone beat us to it! 
+ return 0; + } + _Py_atomic_add_ssize(&op->ob_ref_shared, _Py_REF_SHARED(_Py_REF_DEFERRED, 0)); + return 1; +#else + return 0; +#endif +} + void _Py_ResurrectReference(PyObject *op) { diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 4af7f0273aae91a..a6cf3da542b6913 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -4761,10 +4761,10 @@ PyType_FromMetaclass( if (strcmp(memb->name, "__weaklistoffset__") == 0) { weaklistoffset_member = memb; } - if (strcmp(memb->name, "__dictoffset__") == 0) { + else if (strcmp(memb->name, "__dictoffset__") == 0) { dictoffset_member = memb; } - if (strcmp(memb->name, "__vectorcalloffset__") == 0) { + else if (strcmp(memb->name, "__vectorcalloffset__") == 0) { vectorcalloffset_member = memb; } } diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h index 08509f96ed1db89..ee867fe41224c37 100644 --- a/PC/python_ver_rc.h +++ b/PC/python_ver_rc.h @@ -5,7 +5,7 @@ #include "winver.h" #define PYTHON_COMPANY "Python Software Foundation" -#define PYTHON_COPYRIGHT "Copyright \xA9 2001-2024 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC." +#define PYTHON_COPYRIGHT "Copyright \xA9 2001 Python Software Foundation. Copyright \xA9 2000 BeOpen.com. Copyright \xA9 1995-2001 CNRI. Copyright \xA9 1991-1995 SMC." #define MS_WINDOWS #include "modsupport.h" diff --git a/PC/store_info.txt b/PC/store_info.txt index f6a85cb8ebec1fd..d150ba17cbe62dd 100644 --- a/PC/store_info.txt +++ b/PC/store_info.txt @@ -109,7 +109,7 @@ PSF LICENSE AGREEMENT FOR PYTHON 3.9 analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python 3.9 alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of - copyright, i.e., "Copyright © 2001-2018 Python Software Foundation; All Rights + copyright, i.e., "Copyright © 2001 Python Software Foundation; All Rights Reserved" are retained in Python 3.9 alone or in any derivative version prepared by Licensee. 
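The :mod:`itertools` hunk a little further up disables the C fast path when the starting value is already ``PY_SSIZE_T_MAX``, which is the crash described in the gh-issue-126595 entry earlier. A minimal sketch of the case it covers, assuming a build with the fix (previously this could crash debug builds)::

    import sys
    from itertools import count

    c = count(sys.maxsize)               # starts at the largest C Py_ssize_t value
    print(next(c) == sys.maxsize)        # True
    print(next(c) == sys.maxsize + 1)    # True; counting continues as Python ints
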
diff --git a/Python/_warnings.c b/Python/_warnings.c index 3f9e73b5376223b..e05ba99e8eaec4b 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -803,7 +803,8 @@ is_filename_to_skip(PyObject *filename, PyTupleObject *skip_file_prefixes) for (Py_ssize_t idx = 0; idx < prefixes; ++idx) { PyObject *prefix = PyTuple_GET_ITEM(skip_file_prefixes, idx); - Py_ssize_t found = PyUnicode_Tailmatch(filename, prefix, 0, -1, -1); + Py_ssize_t found = PyUnicode_Tailmatch(filename, prefix, + 0, PY_SSIZE_T_MAX, -1); if (found == 1) { return true; } diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 0fa8fed2a8c3a38..e8e42cc2f84692e 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -3339,7 +3339,7 @@ _PyBuiltin_Init(PyInterpreterState *interp) SETBUILTIN("False", Py_False); SETBUILTIN("True", Py_True); SETBUILTIN("bool", &PyBool_Type); - SETBUILTIN("memoryview", &PyMemoryView_Type); + SETBUILTIN("memoryview", &PyMemoryView_Type); SETBUILTIN("bytearray", &PyByteArray_Type); SETBUILTIN("bytes", &PyBytes_Type); SETBUILTIN("classmethod", &PyClassMethod_Type); diff --git a/Python/crossinterp.c b/Python/crossinterp.c index 2daba99988c12a1..fe7d75f6b72f68a 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -9,19 +9,15 @@ #include "pycore_pyerrors.h" // _PyErr_Clear() -#define _PyXI_GET_GLOBAL_STATE(interp) (&(interp)->runtime->xi) -#define _PyXI_GET_STATE(interp) (&(interp)->xi) - - /**************/ /* exceptions */ /**************/ -static int init_exceptions(PyInterpreterState *); -static void fini_exceptions(PyInterpreterState *); -static int _init_not_shareable_error_type(PyInterpreterState *); -static void _fini_not_shareable_error_type(PyInterpreterState *); -static PyObject * _get_not_shareable_error_type(PyInterpreterState *); +typedef struct xi_exceptions exceptions_t; +static int init_static_exctypes(exceptions_t *, PyInterpreterState *); +static void fini_static_exctypes(exceptions_t *, PyInterpreterState *); +static int init_heap_exctypes(exceptions_t *); +static void fini_heap_exctypes(exceptions_t *); #include "crossinterp_exceptions.h" @@ -68,7 +64,7 @@ _Py_CallInInterpreterAndRawFree(PyInterpreterState *interp, static void xid_lookup_init(_PyXIData_lookup_t *); static void xid_lookup_fini(_PyXIData_lookup_t *); -static xidatafunc lookup_getdata(PyInterpreterState *, PyObject *); +static xidatafunc lookup_getdata(_PyXIData_lookup_context_t *, PyObject *); #include "crossinterp_data_lookup.h" @@ -126,7 +122,7 @@ void _PyXIData_Init(_PyXIData_t *data, PyInterpreterState *interp, void *shared, PyObject *obj, - xid_newobjectfunc new_object) + xid_newobjfunc new_object) { assert(data != NULL); assert(new_object != NULL); @@ -150,7 +146,7 @@ int _PyXIData_InitWithSize(_PyXIData_t *data, PyInterpreterState *interp, const size_t size, PyObject *obj, - xid_newobjectfunc new_object) + xid_newobjfunc new_object) { assert(size > 0); // For now we always free the shared data in the same interpreter @@ -202,10 +198,9 @@ _check_xidata(PyThreadState *tstate, _PyXIData_t *data) } static inline void -_set_xid_lookup_failure(PyInterpreterState *interp, - PyObject *obj, const char *msg) +_set_xid_lookup_failure(dlcontext_t *ctx, PyObject *obj, const char *msg) { - PyObject *exctype = _get_not_shareable_error_type(interp); + PyObject *exctype = ctx->PyExc_NotShareableError; assert(exctype != NULL); if (msg != NULL) { assert(obj == NULL); @@ -222,13 +217,12 @@ _set_xid_lookup_failure(PyInterpreterState *interp, } int -_PyObject_CheckXIData(PyObject *obj) 
+_PyObject_CheckXIData(_PyXIData_lookup_context_t *ctx, PyObject *obj) { - PyInterpreterState *interp = PyInterpreterState_Get(); - xidatafunc getdata = lookup_getdata(interp, obj); + xidatafunc getdata = lookup_getdata(ctx, obj); if (getdata == NULL) { if (!PyErr_Occurred()) { - _set_xid_lookup_failure(interp, obj, NULL); + _set_xid_lookup_failure(ctx, obj, NULL); } return -1; } @@ -236,7 +230,8 @@ _PyObject_CheckXIData(PyObject *obj) } int -_PyObject_GetXIData(PyObject *obj, _PyXIData_t *data) +_PyObject_GetXIData(_PyXIData_lookup_context_t *ctx, + PyObject *obj, _PyXIData_t *data) { PyThreadState *tstate = PyThreadState_Get(); PyInterpreterState *interp = tstate->interp; @@ -247,11 +242,11 @@ _PyObject_GetXIData(PyObject *obj, _PyXIData_t *data) // Call the "getdata" func for the object. Py_INCREF(obj); - xidatafunc getdata = lookup_getdata(interp, obj); + xidatafunc getdata = lookup_getdata(ctx, obj); if (getdata == NULL) { Py_DECREF(obj); if (!PyErr_Occurred()) { - _set_xid_lookup_failure(interp, obj, NULL); + _set_xid_lookup_failure(ctx, obj, NULL); } return -1; } @@ -968,6 +963,8 @@ _PyXI_ClearExcInfo(_PyXI_excinfo *info) static int _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp) { + dlcontext_t ctx; + assert(!PyErr_Occurred()); switch (code) { case _PyXI_ERR_NO_ERROR: _Py_FALLTHROUGH; @@ -998,7 +995,10 @@ _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp) "failed to apply namespace to __main__"); break; case _PyXI_ERR_NOT_SHAREABLE: - _set_xid_lookup_failure(interp, NULL, NULL); + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + _set_xid_lookup_failure(&ctx, NULL, NULL); break; default: #ifdef Py_DEBUG @@ -1060,7 +1060,11 @@ _PyXI_ApplyError(_PyXI_error *error) } else if (error->code == _PyXI_ERR_NOT_SHAREABLE) { // Propagate the exception directly. - _set_xid_lookup_failure(error->interp, NULL, error->uncaught.msg); + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(error->interp, &ctx) < 0) { + return NULL; + } + _set_xid_lookup_failure(&ctx, NULL, error->uncaught.msg); } else { // Raise an exception corresponding to the code. @@ -1147,7 +1151,12 @@ _sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value) PyErr_NoMemory(); return -1; } - if (_PyObject_GetXIData(value, item->data) != 0) { + PyInterpreterState *interp = PyInterpreterState_Get(); + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + return -1; + } + if (_PyObject_GetXIData(&ctx, value, item->data) != 0) { PyMem_RawFree(item->data); item->data = NULL; // The caller may want to propagate PyExc_NotShareableError @@ -1605,7 +1614,13 @@ _propagate_not_shareable_error(_PyXI_session *session) return; } PyInterpreterState *interp = PyInterpreterState_Get(); - if (PyErr_ExceptionMatches(_get_not_shareable_error_type(interp))) { + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(interp, &ctx) < 0) { + PyErr_FormatUnraisable( + "Exception ignored while propagating not shareable error"); + return; + } + if (PyErr_ExceptionMatches(ctx.PyExc_NotShareableError)) { // We want to propagate the exception directly. 
session->_error_override = _PyXI_ERR_NOT_SHAREABLE; session->error_override = &session->_error_override; @@ -1773,18 +1788,87 @@ _PyXI_Exit(_PyXI_session *session) /* runtime lifecycle */ /*********************/ +int +_Py_xi_global_state_init(_PyXI_global_state_t *state) +{ + assert(state != NULL); + xid_lookup_init(&state->data_lookup); + return 0; +} + +void +_Py_xi_global_state_fini(_PyXI_global_state_t *state) +{ + assert(state != NULL); + xid_lookup_fini(&state->data_lookup); +} + +int +_Py_xi_state_init(_PyXI_state_t *state, PyInterpreterState *interp) +{ + assert(state != NULL); + assert(interp == NULL || state == _PyXI_GET_STATE(interp)); + + xid_lookup_init(&state->data_lookup); + + // Initialize exceptions. + if (interp != NULL) { + if (init_static_exctypes(&state->exceptions, interp) < 0) { + fini_heap_exctypes(&state->exceptions); + return -1; + } + } + if (init_heap_exctypes(&state->exceptions) < 0) { + return -1; + } + + return 0; +} + +void +_Py_xi_state_fini(_PyXI_state_t *state, PyInterpreterState *interp) +{ + assert(state != NULL); + assert(interp == NULL || state == _PyXI_GET_STATE(interp)); + + fini_heap_exctypes(&state->exceptions); + if (interp != NULL) { + fini_static_exctypes(&state->exceptions, interp); + } + + xid_lookup_fini(&state->data_lookup); +} + + PyStatus _PyXI_Init(PyInterpreterState *interp) { - // Initialize the XID lookup state (e.g. registry). if (_Py_IsMainInterpreter(interp)) { - xid_lookup_init(&_PyXI_GET_GLOBAL_STATE(interp)->data_lookup); + _PyXI_global_state_t *global_state = _PyXI_GET_GLOBAL_STATE(interp); + if (global_state == NULL) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to get global cross-interpreter state"); + } + if (_Py_xi_global_state_init(global_state) < 0) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to initialize global cross-interpreter state"); + } } - xid_lookup_init(&_PyXI_GET_STATE(interp)->data_lookup); - // Initialize exceptions (heap types). - if (_init_not_shareable_error_type(interp) < 0) { - return _PyStatus_ERR("failed to initialize NotShareableError"); + _PyXI_state_t *state = _PyXI_GET_STATE(interp); + if (state == NULL) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to get interpreter's cross-interpreter state"); + } + // The static types were already initialized in _PyXI_InitTypes(), + // so we pass in NULL here to avoid initializing them again. + if (_Py_xi_state_init(state, NULL) < 0) { + PyErr_PrintEx(0); + return _PyStatus_ERR( + "failed to initialize interpreter's cross-interpreter state"); } return _PyStatus_OK(); @@ -1796,30 +1880,42 @@ _PyXI_Init(PyInterpreterState *interp) void _PyXI_Fini(PyInterpreterState *interp) { - // Finalize exceptions (heap types). - _fini_not_shareable_error_type(interp); + _PyXI_state_t *state = _PyXI_GET_STATE(interp); +#ifndef NDEBUG + if (state == NULL) { + PyErr_PrintEx(0); + return; + } +#endif + // The static types will be finalized soon in _PyXI_FiniTypes(), + // so we pass in NULL here to avoid finalizing them right now. + _Py_xi_state_fini(state, NULL); - // Finalize the XID lookup state (e.g. registry). 
- xid_lookup_fini(&_PyXI_GET_STATE(interp)->data_lookup); if (_Py_IsMainInterpreter(interp)) { - xid_lookup_fini(&_PyXI_GET_GLOBAL_STATE(interp)->data_lookup); + _PyXI_global_state_t *global_state = _PyXI_GET_GLOBAL_STATE(interp); + _Py_xi_global_state_fini(global_state); } } PyStatus _PyXI_InitTypes(PyInterpreterState *interp) { - if (init_exceptions(interp) < 0) { + if (init_static_exctypes(&_PyXI_GET_STATE(interp)->exceptions, interp) < 0) { PyErr_PrintEx(0); - return _PyStatus_ERR("failed to initialize an exception type"); + return _PyStatus_ERR( + "failed to initialize the cross-interpreter exception types"); } + // We would initialize heap types here too but that leads to ref leaks. + // Instead, we initialize them in _PyXI_Init(). return _PyStatus_OK(); } void _PyXI_FiniTypes(PyInterpreterState *interp) { - fini_exceptions(interp); + // We would finalize heap types here too but that leads to ref leaks. + // Instead, we finalize them in _PyXI_Fini(). + fini_static_exctypes(&_PyXI_GET_STATE(interp)->exceptions, interp); } diff --git a/Python/crossinterp_data_lookup.h b/Python/crossinterp_data_lookup.h index 88c662a3df00d64..48e5d9762cd6970 100644 --- a/Python/crossinterp_data_lookup.h +++ b/Python/crossinterp_data_lookup.h @@ -1,14 +1,15 @@ #include "pycore_weakref.h" // _PyWeakref_GET_REF() -typedef struct _xidregistry dlregistry_t; -typedef struct _xidregitem dlregitem_t; +typedef _PyXIData_lookup_context_t dlcontext_t; +typedef _PyXIData_registry_t dlregistry_t; +typedef _PyXIData_regitem_t dlregitem_t; // forward static void _xidregistry_init(dlregistry_t *); static void _xidregistry_fini(dlregistry_t *); -static xidatafunc _lookup_getdata_from_registry(PyInterpreterState *, PyObject *); +static xidatafunc _lookup_getdata_from_registry(dlcontext_t *, PyObject *); /* used in crossinterp.c */ @@ -26,22 +27,43 @@ xid_lookup_fini(_PyXIData_lookup_t *state) } static xidatafunc -lookup_getdata(PyInterpreterState *interp, PyObject *obj) +lookup_getdata(dlcontext_t *ctx, PyObject *obj) { /* Cross-interpreter objects are looked up by exact match on the class. We can reassess this policy when we move from a global registry to a tp_* slot.
*/ - return _lookup_getdata_from_registry(interp, obj); + return _lookup_getdata_from_registry(ctx, obj); } /* exported API */ +int +_PyXIData_GetLookupContext(PyInterpreterState *interp, + _PyXIData_lookup_context_t *res) +{ + _PyXI_global_state_t *global = _PyXI_GET_GLOBAL_STATE(interp); + if (global == NULL) { + assert(PyErr_Occurred()); + return -1; + } + _PyXI_state_t *local = _PyXI_GET_STATE(interp); + if (local == NULL) { + assert(PyErr_Occurred()); + return -1; + } + *res = (dlcontext_t){ + .global = &global->data_lookup, + .local = &local->data_lookup, + .PyExc_NotShareableError = local->exceptions.PyExc_NotShareableError, + }; + return 0; +} + xidatafunc -_PyXIData_Lookup(PyObject *obj) +_PyXIData_Lookup(_PyXIData_lookup_context_t *ctx, PyObject *obj) { - PyInterpreterState *interp = PyInterpreterState_Get(); - return lookup_getdata(interp, obj); + return lookup_getdata(ctx, obj); } @@ -110,25 +132,12 @@ _xidregistry_unlock(dlregistry_t *registry) /* accessing the registry */ static inline dlregistry_t * -_get_global_xidregistry(_PyRuntimeState *runtime) +_get_xidregistry_for_type(dlcontext_t *ctx, PyTypeObject *cls) { - return &runtime->xi.data_lookup.registry; -} - -static inline dlregistry_t * -_get_xidregistry(PyInterpreterState *interp) -{ - return &interp->xi.data_lookup.registry; -} - -static inline dlregistry_t * -_get_xidregistry_for_type(PyInterpreterState *interp, PyTypeObject *cls) -{ - dlregistry_t *registry = _get_global_xidregistry(interp->runtime); if (cls->tp_flags & Py_TPFLAGS_HEAPTYPE) { - registry = _get_xidregistry(interp); + return &ctx->local->registry; } - return registry; + return &ctx->global->registry; } static dlregitem_t* _xidregistry_remove_entry(dlregistry_t *, dlregitem_t *); @@ -160,11 +169,11 @@ _xidregistry_find_type(dlregistry_t *xidregistry, PyTypeObject *cls) } static xidatafunc -_lookup_getdata_from_registry(PyInterpreterState *interp, PyObject *obj) +_lookup_getdata_from_registry(dlcontext_t *ctx, PyObject *obj) { PyTypeObject *cls = Py_TYPE(obj); - dlregistry_t *xidregistry = _get_xidregistry_for_type(interp, cls); + dlregistry_t *xidregistry = _get_xidregistry_for_type(ctx, cls); _xidregistry_lock(xidregistry); dlregitem_t *matched = _xidregistry_find_type(xidregistry, cls); @@ -241,7 +250,8 @@ _xidregistry_clear(dlregistry_t *xidregistry) } int -_PyXIData_RegisterClass(PyTypeObject *cls, xidatafunc getdata) +_PyXIData_RegisterClass(_PyXIData_lookup_context_t *ctx, + PyTypeObject *cls, xidatafunc getdata) { if (!PyType_Check(cls)) { PyErr_Format(PyExc_ValueError, "only classes may be registered"); @@ -253,8 +263,7 @@ _PyXIData_RegisterClass(PyTypeObject *cls, xidatafunc getdata) } int res = 0; - PyInterpreterState *interp = _PyInterpreterState_GET(); - dlregistry_t *xidregistry = _get_xidregistry_for_type(interp, cls); + dlregistry_t *xidregistry = _get_xidregistry_for_type(ctx, cls); _xidregistry_lock(xidregistry); dlregitem_t *matched = _xidregistry_find_type(xidregistry, cls); @@ -272,11 +281,10 @@ _PyXIData_RegisterClass(PyTypeObject *cls, xidatafunc getdata) } int -_PyXIData_UnregisterClass(PyTypeObject *cls) +_PyXIData_UnregisterClass(_PyXIData_lookup_context_t *ctx, PyTypeObject *cls) { int res = 0; - PyInterpreterState *interp = _PyInterpreterState_GET(); - dlregistry_t *xidregistry = _get_xidregistry_for_type(interp, cls); + dlregistry_t *xidregistry = _get_xidregistry_for_type(ctx, cls); _xidregistry_lock(xidregistry); dlregitem_t *matched = _xidregistry_find_type(xidregistry, cls); @@ -500,6 +508,11 @@ _tuple_shared_free(void* 
data) static int _tuple_shared(PyThreadState *tstate, PyObject *obj, _PyXIData_t *data) { + dlcontext_t ctx; + if (_PyXIData_GetLookupContext(tstate->interp, &ctx) < 0) { + return -1; + } + Py_ssize_t len = PyTuple_GET_SIZE(obj); if (len < 0) { return -1; @@ -526,7 +539,7 @@ _tuple_shared(PyThreadState *tstate, PyObject *obj, _PyXIData_t *data) int res = -1; if (!_Py_EnterRecursiveCallTstate(tstate, " while sharing a tuple")) { - res = _PyObject_GetXIData(item, data); + res = _PyObject_GetXIData(&ctx, item, data); _Py_LeaveRecursiveCallTstate(tstate); } if (res < 0) { diff --git a/Python/crossinterp_exceptions.h b/Python/crossinterp_exceptions.h index 278511da615c75d..3cb45d2067710b4 100644 --- a/Python/crossinterp_exceptions.h +++ b/Python/crossinterp_exceptions.h @@ -25,71 +25,78 @@ static PyTypeObject _PyExc_InterpreterNotFoundError = { }; PyObject *PyExc_InterpreterNotFoundError = (PyObject *)&_PyExc_InterpreterNotFoundError; -/* NotShareableError extends ValueError */ - -static int -_init_not_shareable_error_type(PyInterpreterState *interp) -{ - const char *name = "interpreters.NotShareableError"; - PyObject *base = PyExc_ValueError; - PyObject *ns = NULL; - PyObject *exctype = PyErr_NewException(name, base, ns); - if (exctype == NULL) { - return -1; - } - - _PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError = exctype; - return 0; -} - -static void -_fini_not_shareable_error_type(PyInterpreterState *interp) -{ - Py_CLEAR(_PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError); -} - -static PyObject * -_get_not_shareable_error_type(PyInterpreterState *interp) -{ - assert(_PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError != NULL); - return _PyInterpreterState_GetXIState(interp)->PyExc_NotShareableError; -} - /* lifecycle */ static int -init_exceptions(PyInterpreterState *interp) +init_static_exctypes(exceptions_t *state, PyInterpreterState *interp) { + assert(state == &_PyXI_GET_STATE(interp)->exceptions); PyTypeObject *base = (PyTypeObject *)PyExc_Exception; - // builtin static types - + // PyExc_InterpreterError _PyExc_InterpreterError.tp_base = base; _PyExc_InterpreterError.tp_traverse = base->tp_traverse; _PyExc_InterpreterError.tp_clear = base->tp_clear; if (_PyStaticType_InitBuiltin(interp, &_PyExc_InterpreterError) < 0) { - return -1; + goto error; } + state->PyExc_InterpreterError = (PyObject *)&_PyExc_InterpreterError; + // PyExc_InterpreterNotFoundError _PyExc_InterpreterNotFoundError.tp_traverse = base->tp_traverse; _PyExc_InterpreterNotFoundError.tp_clear = base->tp_clear; if (_PyStaticType_InitBuiltin(interp, &_PyExc_InterpreterNotFoundError) < 0) { - return -1; + goto error; } + state->PyExc_InterpreterNotFoundError = + (PyObject *)&_PyExc_InterpreterNotFoundError; - // heap types + return 0; - // We would call _init_not_shareable_error_type() here too, - // but that leads to ref leaks +error: + fini_static_exctypes(state, interp); + return -1; +} + +static void +fini_static_exctypes(exceptions_t *state, PyInterpreterState *interp) +{ + assert(state == &_PyXI_GET_STATE(interp)->exceptions); + if (state->PyExc_InterpreterNotFoundError != NULL) { + state->PyExc_InterpreterNotFoundError = NULL; + _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterNotFoundError); + } + if (state->PyExc_InterpreterError != NULL) { + state->PyExc_InterpreterError = NULL; + _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterError); + } +} + +static int +init_heap_exctypes(exceptions_t *state) +{ + PyObject *exctype; + + /* NotShareableError extends 
ValueError */ + const char *name = "interpreters.NotShareableError"; + PyObject *base = PyExc_ValueError; + PyObject *ns = NULL; + exctype = PyErr_NewException(name, base, ns); + if (exctype == NULL) { + goto error; + } + state->PyExc_NotShareableError = exctype; return 0; + +error: + fini_heap_exctypes(state); + return -1; } static void -fini_exceptions(PyInterpreterState *interp) +fini_heap_exctypes(exceptions_t *state) { - // Likewise with _fini_not_shareable_error_type(). - _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterNotFoundError); - _PyStaticType_FiniBuiltin(interp, &_PyExc_InterpreterError); + Py_CLEAR(state->PyExc_NotShareableError); } diff --git a/Python/getcopyright.c b/Python/getcopyright.c index 066c2ed66acddfe..964584ddf7998ea 100644 --- a/Python/getcopyright.c +++ b/Python/getcopyright.c @@ -4,7 +4,7 @@ static const char cprt[] = "\ -Copyright (c) 2001-2024 Python Software Foundation.\n\ +Copyright (c) 2001 Python Software Foundation.\n\ All Rights Reserved.\n\ \n\ Copyright (c) 2000 BeOpen.com.\n\ diff --git a/Python/import.c b/Python/import.c index 29bd8bf68ff5e1e..09fe95fa1fb6478 100644 --- a/Python/import.c +++ b/Python/import.c @@ -122,6 +122,13 @@ _PyImport_ReleaseLock(PyInterpreterState *interp) _PyRecursiveMutex_Unlock(&IMPORT_LOCK(interp)); } +void +_PyImport_ReInitLock(PyInterpreterState *interp) +{ + // gh-126688: Thread id may change after fork() on some operating systems. + IMPORT_LOCK(interp).thread = PyThread_get_thread_ident_ex(); +} + /***************/ /* sys.modules */ diff --git a/Python/jit.c b/Python/jit.c index 90f693dfb7c41b9..7dd0da7a45055a9 100644 --- a/Python/jit.c +++ b/Python/jit.c @@ -58,7 +58,12 @@ jit_alloc(size_t size) int failed = memory == NULL; #else int flags = MAP_ANONYMOUS | MAP_PRIVATE; - unsigned char *memory = mmap(NULL, size, PROT_READ | PROT_WRITE, flags, -1, 0); + int prot = PROT_READ | PROT_WRITE; +# ifdef MAP_JIT + flags |= MAP_JIT; + prot |= PROT_EXEC; +# endif + unsigned char *memory = mmap(NULL, size, prot, flags, -1, 0); int failed = memory == MAP_FAILED; #endif if (failed) { @@ -102,8 +107,11 @@ mark_executable(unsigned char *memory, size_t size) int old; int failed = !VirtualProtect(memory, size, PAGE_EXECUTE_READ, &old); #else + int failed = 0; __builtin___clear_cache((char *)memory, (char *)memory + size); - int failed = mprotect(memory, size, PROT_EXEC | PROT_READ); +#ifndef MAP_JIT + failed = mprotect(memory, size, PROT_EXEC | PROT_READ); +#endif #endif if (failed) { jit_error("unable to protect executable memory"); @@ -499,6 +507,9 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz if (memory == NULL) { return -1; } +#ifdef MAP_JIT + pthread_jit_write_protect_np(0); +#endif // Update the offsets of each instruction: for (size_t i = 0; i < length; i++) { state.instruction_starts[i] += (uintptr_t)memory; @@ -529,6 +540,9 @@ _PyJIT_Compile(_PyExecutorObject *executor, const _PyUOpInstruction trace[], siz data += group->data_size; assert(code == memory + code_size); assert(data == memory + code_size + data_size); +#ifdef MAP_JIT + pthread_jit_write_protect_np(1); +#endif if (mark_executable(memory, total_size)) { jit_free(memory, total_size); return -1; diff --git a/Python/parking_lot.c b/Python/parking_lot.c index bffc959e5d09784..8edf43235942ab3 100644 --- a/Python/parking_lot.c +++ b/Python/parking_lot.c @@ -221,8 +221,7 @@ _PySemaphore_Wait(_PySemaphore *sema, PyTime_t timeout, int detach) PyThreadState *tstate = NULL; if (detach) { tstate = _PyThreadState_GET(); - if (tstate && 
_Py_atomic_load_int_relaxed(&tstate->state) == - _Py_THREAD_ATTACHED) { + if (tstate && _PyThreadState_IsAttached(tstate)) { // Only detach if we are attached PyEval_ReleaseThread(tstate); } diff --git a/README.rst b/README.rst index 3f694771e090cb7..0134aafe2a969a5 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,7 @@ This is Python version 3.14.0 alpha 1 :target: https://discuss.python.org/ -Copyright © 2001-2024 Python Software Foundation. All rights reserved. +Copyright © 2001 Python Software Foundation. All rights reserved. See the end of this file for further copyright and license information. @@ -215,7 +215,7 @@ Copyright and License Information --------------------------------- -Copyright © 2001-2024 Python Software Foundation. All rights reserved. +Copyright © 2001 Python Software Foundation. All rights reserved. Copyright © 2000 BeOpen.com. All rights reserved. diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py index 020f874cffeaef7..5c4a725102d79aa 100644 --- a/Tools/build/generate_sbom.py +++ b/Tools/build/generate_sbom.py @@ -59,6 +59,8 @@ class PackageFiles(typing.NamedTuple): include=["Modules/expat/**"], exclude=[ "Modules/expat/expat_config.h", + "Modules/expat/pyexpatns.h", + "Modules/_hacl/refresh.sh", ] ), "macholib": PackageFiles( @@ -218,6 +220,32 @@ def check_sbom_packages(sbom_data: dict[str, typing.Any]) -> None: "HACL* SBOM version doesn't match value in 'Modules/_hacl/refresh.sh'" ) + # libexpat specifies its expected rev in a refresh script. + if package["name"] == "libexpat": + libexpat_refresh_sh = (CPYTHON_ROOT_DIR / "Modules/expat/refresh.sh").read_text() + libexpat_expected_version_match = re.search( + r"expected_libexpat_version=\"([0-9]+\.[0-9]+\.[0-9]+)\"", + libexpat_refresh_sh + ) + libexpat_expected_sha256_match = re.search( + r"expected_libexpat_sha256=\"([a-f0-9]{64})\"", + libexpat_refresh_sh + ) + libexpat_expected_version = libexpat_expected_version_match and libexpat_expected_version_match.group(1) + libexpat_expected_sha256 = libexpat_expected_sha256_match and libexpat_expected_sha256_match.group(1) + + error_if( + libexpat_expected_version != version, + "libexpat SBOM version doesn't match value in 'Modules/expat/refresh.sh'" + ) + error_if( + package["checksums"] != [{ + "algorithm": "SHA256", + "checksumValue": libexpat_expected_sha256 + }], + "libexpat SBOM checksum doesn't match value in 'Modules/expat/refresh.sh'" + ) + # License must be on the approved list for SPDX. license_concluded = package["licenseConcluded"] error_if( diff --git a/Tools/build/regen-configure.sh b/Tools/build/regen-configure.sh index e1ecefddeb87320..d2a613b1e40dc11 100755 --- a/Tools/build/regen-configure.sh +++ b/Tools/build/regen-configure.sh @@ -5,7 +5,7 @@ set -e -x # The check_autoconf_regen job of .github/workflows/build.yml must be kept in # sync with this script. Use the same container image as the job so the job # doesn't need to run autoreconf in a container.
-IMAGE="ghcr.io/python/autoconf:2024.10.16.11360930377" +IMAGE="ghcr.io/python/autoconf:2024.11.11.11786316759" AUTORECONF="autoreconf -ivf -Werror" WORK_DIR="/src" diff --git a/Tools/unicode/makeunicodedata.py b/Tools/unicode/makeunicodedata.py index c94de7f9377b749..889ae8fc869b8ad 100644 --- a/Tools/unicode/makeunicodedata.py +++ b/Tools/unicode/makeunicodedata.py @@ -35,7 +35,7 @@ from textwrap import dedent from typing import Iterator, List, Optional, Set, Tuple -SCRIPT = sys.argv[0] +SCRIPT = os.path.normpath(sys.argv[0]) VERSION = "3.3" # The Unicode Database diff --git a/aclocal.m4 b/aclocal.m4 index b082a5b1bc5e074..920c2b38560faa0 100644 --- a/aclocal.m4 +++ b/aclocal.m4 @@ -91,7 +91,7 @@ m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun # and this notice are preserved. This file is offered as-is, without any # warranty. -#serial 12 +#serial 14 AC_DEFUN([AX_C_FLOAT_WORDS_BIGENDIAN], [AC_CACHE_CHECK(whether float word ordering is bigendian, @@ -112,10 +112,10 @@ int main (int argc, char *argv[]) ]])], [ -if grep noonsees conftest$EXEEXT >/dev/null ; then +if grep noonsees conftest* > /dev/null ; then ax_cv_c_float_words_bigendian=yes fi -if grep seesnoon conftest$EXEEXT >/dev/null ; then +if grep seesnoon conftest* >/dev/null ; then if test "$ax_cv_c_float_words_bigendian" = unknown; then ax_cv_c_float_words_bigendian=no else @@ -398,7 +398,7 @@ AC_DEFUN([AX_CHECK_OPENSSL], [ AC_SUBST([OPENSSL_LDFLAGS]) ]) -# pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- +# pkg.m4 - Macros to locate and use pkg-config. -*- Autoconf -*- # serial 12 (pkg-config-0.29.2) dnl Copyright © 2004 Scott James Remnant . @@ -486,7 +486,7 @@ dnl Check to see whether a particular set of modules exists. Similar to dnl PKG_CHECK_MODULES(), but does not set variables or print errors. dnl dnl Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) -dnl only at the first occurence in configure.ac, so if the first place +dnl only at the first occurrence in configure.ac, so if the first place dnl it's called might be skipped (such as if it is within an "if", you dnl have to call PKG_CHECK_EXISTS manually AC_DEFUN([PKG_CHECK_EXISTS], @@ -555,14 +555,14 @@ if test $pkg_failed = yes; then AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then - $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` + $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` else - $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` + $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD + # Put the nasty error message in config.log where it belongs + echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD - m4_default([$4], [AC_MSG_ERROR( + m4_default([$4], [AC_MSG_ERROR( [Package requirements ($2) were not met: $$1_PKG_ERRORS @@ -574,7 +574,7 @@ _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then AC_MSG_RESULT([no]) - m4_default([$4], [AC_MSG_FAILURE( + m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. 
@@ -584,10 +584,10 @@ _PKG_TEXT To get pkg-config, see .])[]dnl ]) else - $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS - $1[]_LIBS=$pkg_cv_[]$1[]_LIBS + $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS + $1[]_LIBS=$pkg_cv_[]$1[]_LIBS AC_MSG_RESULT([yes]) - $3 + $3 fi[]dnl ])dnl PKG_CHECK_MODULES @@ -674,6 +674,74 @@ AS_VAR_COPY([$1], [pkg_cv_][$1]) AS_VAR_IF([$1], [""], [$5], [$4])dnl ])dnl PKG_CHECK_VAR +dnl PKG_WITH_MODULES(VARIABLE-PREFIX, MODULES, +dnl [ACTION-IF-FOUND],[ACTION-IF-NOT-FOUND], +dnl [DESCRIPTION], [DEFAULT]) +dnl ------------------------------------------ +dnl +dnl Prepare a "--with-" configure option using the lowercase +dnl [VARIABLE-PREFIX] name, merging the behaviour of AC_ARG_WITH and +dnl PKG_CHECK_MODULES in a single macro. +AC_DEFUN([PKG_WITH_MODULES], +[ +m4_pushdef([with_arg], m4_tolower([$1])) + +m4_pushdef([description], + [m4_default([$5], [build with ]with_arg[ support])]) + +m4_pushdef([def_arg], [m4_default([$6], [auto])]) +m4_pushdef([def_action_if_found], [AS_TR_SH([with_]with_arg)=yes]) +m4_pushdef([def_action_if_not_found], [AS_TR_SH([with_]with_arg)=no]) + +m4_case(def_arg, + [yes],[m4_pushdef([with_without], [--without-]with_arg)], + [m4_pushdef([with_without],[--with-]with_arg)]) + +AC_ARG_WITH(with_arg, + AS_HELP_STRING(with_without, description[ @<:@default=]def_arg[@:>@]),, + [AS_TR_SH([with_]with_arg)=def_arg]) + +AS_CASE([$AS_TR_SH([with_]with_arg)], + [yes],[PKG_CHECK_MODULES([$1],[$2],$3,$4)], + [auto],[PKG_CHECK_MODULES([$1],[$2], + [m4_n([def_action_if_found]) $3], + [m4_n([def_action_if_not_found]) $4])]) + +m4_popdef([with_arg]) +m4_popdef([description]) +m4_popdef([def_arg]) + +])dnl PKG_WITH_MODULES + +dnl PKG_HAVE_WITH_MODULES(VARIABLE-PREFIX, MODULES, +dnl [DESCRIPTION], [DEFAULT]) +dnl ----------------------------------------------- +dnl +dnl Convenience macro to trigger AM_CONDITIONAL after PKG_WITH_MODULES +dnl check._[VARIABLE-PREFIX] is exported as make variable. +AC_DEFUN([PKG_HAVE_WITH_MODULES], +[ +PKG_WITH_MODULES([$1],[$2],,,[$3],[$4]) + +AM_CONDITIONAL([HAVE_][$1], + [test "$AS_TR_SH([with_]m4_tolower([$1]))" = "yes"]) +])dnl PKG_HAVE_WITH_MODULES + +dnl PKG_HAVE_DEFINE_WITH_MODULES(VARIABLE-PREFIX, MODULES, +dnl [DESCRIPTION], [DEFAULT]) +dnl ------------------------------------------------------ +dnl +dnl Convenience macro to run AM_CONDITIONAL and AC_DEFINE after +dnl PKG_WITH_MODULES check. HAVE_[VARIABLE-PREFIX] is exported as make +dnl and preprocessor variable. +AC_DEFUN([PKG_HAVE_DEFINE_WITH_MODULES], +[ +PKG_HAVE_WITH_MODULES([$1],[$2],[$3],[$4]) + +AS_IF([test "$AS_TR_SH([with_]m4_tolower([$1]))" = "yes"], + [AC_DEFINE([HAVE_][$1], 1, [Enable ]m4_tolower([$1])[ support])]) +])dnl PKG_HAVE_DEFINE_WITH_MODULES + # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997-2021 Free Software Foundation, Inc. 
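The PKG_WITH_MODULES helpers added to aclocal.m4 above fold AC_ARG_WITH and PKG_CHECK_MODULES into a single macro. A minimal usage sketch from a configure.ac, assuming a hypothetical libfoo dependency (the LIBFOO prefix, the libfoo module name, and the version bound are illustrative only, not part of this change):

dnl Hypothetical configure.ac fragment exercising the new macro.
dnl Adds a --with-libfoo option (defaulting to "auto") and, when the
dnl pkg-config probe succeeds, sets LIBFOO_CFLAGS and LIBFOO_LIBS.
PKG_WITH_MODULES([LIBFOO], [libfoo >= 1.0],
                 [AC_MSG_NOTICE([building with libfoo])],
                 [AC_MSG_NOTICE([building without libfoo])],
                 [build with libfoo support], [auto])
dnl The PKG_HAVE_DEFINE_WITH_MODULES variant additionally exports
dnl HAVE_LIBFOO as a preprocessor define and an Automake conditional.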
diff --git a/configure b/configure index 7a9d9627e50dfcf..b1ced3106618ba6 100755 --- a/configure +++ b/configure @@ -13717,12 +13717,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBUUID_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "uuid >= 2.20" 2>&1` + LIBUUID_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "uuid >= 2.20" 2>&1` else - LIBUUID_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "uuid >= 2.20" 2>&1` + LIBUUID_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "uuid >= 2.20" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBUUID_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBUUID_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -13974,11 +13974,11 @@ LIBS=$save_LIBS else - LIBUUID_CFLAGS=$pkg_cv_LIBUUID_CFLAGS - LIBUUID_LIBS=$pkg_cv_LIBUUID_LIBS + LIBUUID_CFLAGS=$pkg_cv_LIBUUID_CFLAGS + LIBUUID_LIBS=$pkg_cv_LIBUUID_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_uuid=yes + have_uuid=yes printf "%s\n" "#define HAVE_UUID_H 1" >>confdefs.h printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE 1" >>confdefs.h @@ -14666,12 +14666,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBFFI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libffi" 2>&1` + LIBFFI_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libffi" 2>&1` else - LIBFFI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libffi" 2>&1` + LIBFFI_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libffi" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBFFI_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBFFI_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -14817,11 +14817,11 @@ LIBS=$save_LIBS else - LIBFFI_CFLAGS=$pkg_cv_LIBFFI_CFLAGS - LIBFFI_LIBS=$pkg_cv_LIBFFI_LIBS + LIBFFI_CFLAGS=$pkg_cv_LIBFFI_CFLAGS + LIBFFI_LIBS=$pkg_cv_LIBFFI_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_libffi=yes + have_libffi=yes fi fi @@ -15143,25 +15143,25 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` + LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` else - LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` + LIBMPDEC_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libmpdec >= 2.5.0" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBMPDEC_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBMPDEC_PKG_ERRORS" >&5 - LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} + LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} LIBMPDEC_LIBS=${LIBMPDEC_LIBS-"-lmpdec -lm"} LIBMPDEC_INTERNAL= elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} + LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""} LIBMPDEC_LIBS=${LIBMPDEC_LIBS-"-lmpdec -lm"} LIBMPDEC_INTERNAL= else - LIBMPDEC_CFLAGS=$pkg_cv_LIBMPDEC_CFLAGS - LIBMPDEC_LIBS=$pkg_cv_LIBMPDEC_LIBS + LIBMPDEC_CFLAGS=$pkg_cv_LIBMPDEC_CFLAGS + LIBMPDEC_LIBS=$pkg_cv_LIBMPDEC_LIBS 
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -15412,12 +15412,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` + LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` else - LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` + LIBSQLITE3_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "sqlite3 >= 3.15.2" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBSQLITE3_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBSQLITE3_PKG_ERRORS" >&5 LIBSQLITE3_CFLAGS=${LIBSQLITE3_CFLAGS-""} @@ -15433,8 +15433,8 @@ printf "%s\n" "no" >&6; } else - LIBSQLITE3_CFLAGS=$pkg_cv_LIBSQLITE3_CFLAGS - LIBSQLITE3_LIBS=$pkg_cv_LIBSQLITE3_LIBS + LIBSQLITE3_CFLAGS=$pkg_cv_LIBSQLITE3_CFLAGS + LIBSQLITE3_LIBS=$pkg_cv_LIBSQLITE3_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -16176,24 +16176,24 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - TCLTK_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$_QUERY" 2>&1` + TCLTK_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$_QUERY" 2>&1` else - TCLTK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$_QUERY" 2>&1` + TCLTK_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$_QUERY" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$TCLTK_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$TCLTK_PKG_ERRORS" >&5 - found_tcltk=no + found_tcltk=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - found_tcltk=no + found_tcltk=no else - TCLTK_CFLAGS=$pkg_cv_TCLTK_CFLAGS - TCLTK_LIBS=$pkg_cv_TCLTK_LIBS + TCLTK_CFLAGS=$pkg_cv_TCLTK_CFLAGS + TCLTK_LIBS=$pkg_cv_TCLTK_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - found_tcltk=yes + found_tcltk=yes fi fi @@ -16273,14 +16273,14 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - X11_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "x11" 2>&1` + X11_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "x11" 2>&1` else - X11_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "x11" 2>&1` + X11_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "x11" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$X11_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$X11_PKG_ERRORS" >&5 - as_fn_error $? "Package requirements (x11) were not met: + as_fn_error $? "Package requirements (x11) were not met: $X11_PKG_ERRORS @@ -16293,7 +16293,7 @@ See the pkg-config man page for more details." "$LINENO" 5 elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 + { { printf "%s\n" "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 printf "%s\n" "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "The pkg-config script could not be found or is too old. 
Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full @@ -16306,8 +16306,8 @@ See the pkg-config man page for more details. To get pkg-config, see . See \`config.log' for more details" "$LINENO" 5; } else - X11_CFLAGS=$pkg_cv_X11_CFLAGS - X11_LIBS=$pkg_cv_X11_LIBS + X11_CFLAGS=$pkg_cv_X11_CFLAGS + X11_LIBS=$pkg_cv_X11_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -20712,12 +20712,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - ZLIB_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` + ZLIB_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` else - ZLIB_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` + ZLIB_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "zlib >= 1.2.0" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$ZLIB_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$ZLIB_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -20975,8 +20975,8 @@ LIBS=$save_LIBS else - ZLIB_CFLAGS=$pkg_cv_ZLIB_CFLAGS - ZLIB_LIBS=$pkg_cv_ZLIB_LIBS + ZLIB_CFLAGS=$pkg_cv_ZLIB_CFLAGS + ZLIB_LIBS=$pkg_cv_ZLIB_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -21060,12 +21060,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - BZIP2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "bzip2" 2>&1` + BZIP2_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "bzip2" 2>&1` else - BZIP2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "bzip2" 2>&1` + BZIP2_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "bzip2" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$BZIP2_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$BZIP2_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -21229,11 +21229,11 @@ LIBS=$save_LIBS else - BZIP2_CFLAGS=$pkg_cv_BZIP2_CFLAGS - BZIP2_LIBS=$pkg_cv_BZIP2_LIBS + BZIP2_CFLAGS=$pkg_cv_BZIP2_CFLAGS + BZIP2_LIBS=$pkg_cv_BZIP2_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_bzip2=yes + have_bzip2=yes fi @@ -21288,12 +21288,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "liblzma" 2>&1` + LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "liblzma" 2>&1` else - LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "liblzma" 2>&1` + LIBLZMA_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "liblzma" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBLZMA_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBLZMA_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -21457,11 +21457,11 @@ LIBS=$save_LIBS else - LIBLZMA_CFLAGS=$pkg_cv_LIBLZMA_CFLAGS - LIBLZMA_LIBS=$pkg_cv_LIBLZMA_LIBS + LIBLZMA_CFLAGS=$pkg_cv_LIBLZMA_CFLAGS + LIBLZMA_LIBS=$pkg_cv_LIBLZMA_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } - have_liblzma=yes + have_liblzma=yes fi @@ -24174,10 +24174,10 @@ if ac_fn_c_try_link "$LINENO" then : -if grep noonsees conftest$EXEEXT >/dev/null ; then +if grep noonsees conftest* > /dev/null ; 
then ax_cv_c_float_words_bigendian=yes fi -if grep seesnoon conftest$EXEEXT >/dev/null ; then +if grep seesnoon conftest* >/dev/null ; then if test "$ax_cv_c_float_words_bigendian" = unknown; then ax_cv_c_float_words_bigendian=no else @@ -24213,10 +24213,6 @@ printf "%s\n" "#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754 1" >>confdefs.h # but if it's not big or little, then it must be this? printf "%s\n" "#define DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754 1" >>confdefs.h - ;; #( - wasm*) : - -printf "%s\n" "#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754 1" >>confdefs.h ;; #( *) : as_fn_error $? "Unknown float word ordering. You need to manually preset ax_cv_c_float_words_bigendian=no (or yes) according to your system." "$LINENO" 5 ;; @@ -25296,12 +25292,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "readline" 2>&1` + LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "readline" 2>&1` else - LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "readline" 2>&1` + LIBREADLINE_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "readline" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBREADLINE_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBREADLINE_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -25459,8 +25455,8 @@ LIBS=$save_LIBS else - LIBREADLINE_CFLAGS=$pkg_cv_LIBREADLINE_CFLAGS - LIBREADLINE_LIBS=$pkg_cv_LIBREADLINE_LIBS + LIBREADLINE_CFLAGS=$pkg_cv_LIBREADLINE_CFLAGS + LIBREADLINE_LIBS=$pkg_cv_LIBREADLINE_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -25527,12 +25523,12 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libedit" 2>&1` + LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libedit" 2>&1` else - LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libedit" 2>&1` + LIBEDIT_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libedit" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$LIBEDIT_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$LIBEDIT_PKG_ERRORS" >&5 save_CFLAGS=$CFLAGS @@ -25694,8 +25690,8 @@ LIBS=$save_LIBS else - LIBEDIT_CFLAGS=$pkg_cv_LIBEDIT_CFLAGS - LIBEDIT_LIBS=$pkg_cv_LIBEDIT_LIBS + LIBEDIT_CFLAGS=$pkg_cv_LIBEDIT_CFLAGS + LIBEDIT_LIBS=$pkg_cv_LIBEDIT_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26556,21 +26552,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncursesw" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncursesw" 2>&1` else - CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncursesw" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncursesw" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$CURSES_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$CURSES_PKG_ERRORS" >&5 - have_curses=no + have_curses=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; 
} - have_curses=no + have_curses=no else - CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS - CURSES_LIBS=$pkg_cv_CURSES_LIBS + CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS + CURSES_LIBS=$pkg_cv_CURSES_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26629,21 +26625,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panelw" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panelw" 2>&1` else - PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panelw" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panelw" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$PANEL_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$PANEL_PKG_ERRORS" >&5 - have_panel=no + have_panel=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_panel=no + have_panel=no else - PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS - PANEL_LIBS=$pkg_cv_PANEL_LIBS + PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS + PANEL_LIBS=$pkg_cv_PANEL_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26710,21 +26706,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncurses" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "ncurses" 2>&1` else - CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncurses" 2>&1` + CURSES_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "ncurses" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$CURSES_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$CURSES_PKG_ERRORS" >&5 - have_curses=no + have_curses=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_curses=no + have_curses=no else - CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS - CURSES_LIBS=$pkg_cv_CURSES_LIBS + CURSES_CFLAGS=$pkg_cv_CURSES_CFLAGS + CURSES_LIBS=$pkg_cv_CURSES_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } @@ -26783,21 +26779,21 @@ else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then - PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panel" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "panel" 2>&1` else - PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panel" 2>&1` + PANEL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "panel" 2>&1` fi - # Put the nasty error message in config.log where it belongs - echo "$PANEL_PKG_ERRORS" >&5 + # Put the nasty error message in config.log where it belongs + echo "$PANEL_PKG_ERRORS" >&5 - have_panel=no + have_panel=no elif test $pkg_failed = untried; then { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 printf "%s\n" "no" >&6; } - have_panel=no + have_panel=no else - PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS - PANEL_LIBS=$pkg_cv_PANEL_LIBS + PANEL_CFLAGS=$pkg_cv_PANEL_CFLAGS + PANEL_LIBS=$pkg_cv_PANEL_LIBS { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 printf "%s\n" "yes" >&6; } diff --git a/configure.ac b/configure.ac 
index bc3d2d0e63b77ae..3a55cbc13203933 100644 --- a/configure.ac +++ b/configure.ac @@ -5918,9 +5918,6 @@ AX_C_FLOAT_WORDS_BIGENDIAN( AC_DEFINE([DOUBLE_IS_ARM_MIXED_ENDIAN_IEEE754], [1], [Define if C doubles are 64-bit IEEE 754 binary format, stored in ARM mixed-endian order (byte order 45670123)])], - [wasm*], [AC_DEFINE([DOUBLE_IS_LITTLE_ENDIAN_IEEE754], [1], - [Define if C doubles are 64-bit IEEE 754 binary format, - stored with the least significant byte first])], [AC_MSG_ERROR([m4_normalize([ Unknown float word ordering. You need to manually preset ax_cv_c_float_words_bigendian=no (or yes)