diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 1abcd455eca11b..0add48e777866c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,11 +5,11 @@ # https://git-scm.com/docs/gitignore#_pattern_format # GitHub -.github/** @ezio-melotti @hugovk +.github/** @ezio-melotti @hugovk @AA-Turner # pre-commit .pre-commit-config.yaml @hugovk @AlexWaygood -.ruff.toml @hugovk @AlexWaygood +.ruff.toml @hugovk @AlexWaygood @AA-Turner # Build system configure* @erlend-aasland @corona10 @@ -56,6 +56,14 @@ Tools/c-analyzer/ @ericsnowcurrently # dbm **/*dbm* @corona10 @erlend-aasland @serhiy-storchaka +# Doc/ tools +Doc/conf.py @AA-Turner @hugovk +Doc/Makefile @AA-Turner @hugovk +Doc/make.bat @AA-Turner @hugovk +Doc/requirements.txt @AA-Turner @hugovk +Doc/_static/** @AA-Turner @hugovk +Doc/tools/** @AA-Turner @hugovk + # runtime state/lifecycle **/*pylifecycle* @ericsnowcurrently **/*pystate* @ericsnowcurrently @@ -293,6 +301,10 @@ Lib/configparser.py @jaraco Lib/test/test_configparser.py @jaraco # Doc sections -Doc/reference/ @willingc +Doc/reference/ @willingc @AA-Turner **/*weakref* @kumaraditya303 + +# Colorize +Lib/_colorize.py @hugovk +Lib/test/test__colorize.py @hugovk diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml index 3701f7297ffeb2..af125266ae7813 100644 --- a/.github/actionlint.yaml +++ b/.github/actionlint.yaml @@ -1,5 +1,5 @@ self-hosted-runner: - labels: ["ubuntu-24.04-aarch64", "windows-aarch64"] + labels: ["windows-aarch64"] config-variables: null @@ -7,4 +7,4 @@ paths: .github/workflows/**/*.yml: ignore: - 1st argument of function call is not assignable - - SC2(015|038|086|091|097|098|129|155) \ No newline at end of file + - SC2(015|038|086|091|097|098|129|155) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 9adf860632e8a2..0c20b85acbd565 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -18,6 +18,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}-reusable cancel-in-progress: true +env: + FORCE_COLOR: 1 + jobs: check_source: name: Change detection @@ -231,24 +234,31 @@ jobs: name: >- Ubuntu ${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }} + ${{ fromJSON(matrix.bolt) && '(bolt)' || '' }} needs: check_source if: needs.check_source.outputs.run_tests == 'true' strategy: matrix: + bolt: + - false + - true free-threading: - false - true os: - ubuntu-24.04 - - ubuntu-24.04-aarch64 - is-fork: # only used for the exclusion trick - - ${{ github.repository_owner != 'python' }} + - ubuntu-24.04-arm exclude: - - os: ubuntu-24.04-aarch64 - is-fork: true + # Do not test BOLT with free-threading, to conserve resources + - bolt: true + free-threading: true + # BOLT currently crashes during instrumentation on aarch64 + - os: ubuntu-24.04-arm + bolt: true uses: ./.github/workflows/reusable-ubuntu.yml with: config_hash: ${{ needs.check_source.outputs.config_hash }} + bolt-optimizations: ${{ matrix.bolt }} free-threading: ${{ matrix.free-threading }} os: ${{ matrix.os }} diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml index 9b84998a55666d..806a8524112d76 100644 --- a/.github/workflows/jit.yml +++ b/.github/workflows/jit.yml @@ -25,6 +25,9 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +env: + FORCE_COLOR: 1 + jobs: interpreter: name: Interpreter (Debug) @@ -83,8 +86,7 @@ jobs: runner: ubuntu-24.04 - target: aarch64-unknown-linux-gnu/gcc architecture: aarch64 - # Forks don't have access 
to our paid AArch64 runners. These jobs are skipped below: - runner: ${{ github.repository_owner == 'python' && 'ubuntu-24.04-aarch64' || 'ubuntu-24.04' }} + runner: ubuntu-24.04-arm steps: - uses: actions/checkout@v4 with: @@ -123,8 +125,7 @@ jobs: ./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3 - name: Native Linux - # Forks don't have access to our paid AArch64 runners. Skip those: - if: runner.os == 'Linux' && (matrix.architecture == 'x86_64' || github.repository_owner == 'python') + if: runner.os == 'Linux' run: | sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }} export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH" diff --git a/.github/workflows/reusable-docs.yml b/.github/workflows/reusable-docs.yml index 88da55bf08b8fe..6738acc98c6565 100644 --- a/.github/workflows/reusable-docs.yml +++ b/.github/workflows/reusable-docs.yml @@ -65,8 +65,8 @@ jobs: continue-on-error: true run: | set -Eeuo pipefail - # Build docs with the '-n' (nit-picky) option; write warnings to file - make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going -w sphinx-warnings.txt" html + # Build docs with the nit-picky option; write warnings to file + make -C Doc/ PYTHON=../python SPHINXOPTS="--quiet --nitpicky --fail-on-warning --keep-going --warning-file sphinx-warnings.txt" html - name: 'Check warnings' if: github.event_name == 'pull_request' run: | @@ -76,26 +76,6 @@ jobs: --fail-if-improved \ --fail-if-new-news-nit - # This build doesn't use problem matchers or check annotations - build_doc_oldest_supported_sphinx: - name: 'Docs (Oldest Sphinx)' - runs-on: ubuntu-latest - timeout-minutes: 60 - steps: - - uses: actions/checkout@v4 - with: - persist-credentials: false - - name: 'Set up Python' - uses: actions/setup-python@v5 - with: - python-version: '3.13' # known to work with Sphinx 7.2.6 - cache: 'pip' - cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt' - - name: 'Install build dependencies' - run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt" - - name: 'Build HTML documentation' - run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html - # Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release doctest: name: 'Doctest' @@ -121,4 +101,4 @@ jobs: run: make -C Doc/ PYTHON=../python venv # Use "xvfb-run" since some doctest tests open GUI windows - name: 'Run documentation doctest' - run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="-W --keep-going" doctest + run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="--fail-on-warning --keep-going" doctest diff --git a/.github/workflows/reusable-macos.yml b/.github/workflows/reusable-macos.yml index cdbe05e09fb8e7..4e7aced94a321c 100644 --- a/.github/workflows/reusable-macos.yml +++ b/.github/workflows/reusable-macos.yml @@ -15,6 +15,9 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_macos: name: build and test (${{ inputs.os }}) diff --git a/.github/workflows/reusable-tsan.yml b/.github/workflows/reusable-tsan.yml index b5144ca3e9efc4..269f479849f21e 100644 --- a/.github/workflows/reusable-tsan.yml +++ b/.github/workflows/reusable-tsan.yml @@ -18,6 +18,9 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_tsan_reusable: name: 'Thread sanitizer' diff --git a/.github/workflows/reusable-ubuntu.yml b/.github/workflows/reusable-ubuntu.yml index 46c542940c8483..aa8ba00f19d8ca 100644 --- a/.github/workflows/reusable-ubuntu.yml +++ 
b/.github/workflows/reusable-ubuntu.yml @@ -6,6 +6,11 @@ on: config_hash: required: true type: string + bolt-optimizations: + description: Whether to enable BOLT optimizations + required: false + type: boolean + default: false free-threading: description: Whether to use free-threaded mode required: false @@ -16,13 +21,15 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_ubuntu_reusable: name: build and test (${{ inputs.os }}) timeout-minutes: 60 runs-on: ${{ inputs.os }} env: - FORCE_COLOR: 1 OPENSSL_VER: 3.0.15 PYTHONSTRICTEXTENSIONBUILD: 1 TERM: linux @@ -34,6 +41,12 @@ jobs: run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install dependencies run: sudo ./.github/workflows/posix-deps-apt.sh + - name: Install Clang and BOLT + if: ${{ fromJSON(inputs.bolt-optimizations) }} + run: | + sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh 19 + sudo apt-get install bolt-19 + echo PATH="$(llvm-config-19 --bindir):$PATH" >> $GITHUB_ENV - name: Configure OpenSSL env vars run: | echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV" @@ -73,7 +86,10 @@ jobs: key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ inputs.config_hash }} - name: Configure CPython out-of-tree working-directory: ${{ env.CPYTHON_BUILDDIR }} + # `test_unpickle_module_race` writes to the source directory, which is + # read-only during builds — so we exclude it from profiling with BOLT. run: >- + PROFILE_TASK='-m test --pgo --ignore test_unpickle_module_race' ../cpython-ro-srcdir/configure --config-cache --with-pydebug @@ -81,6 +97,7 @@ jobs: --enable-safety --with-openssl="$OPENSSL_DIR" ${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }} + ${{ fromJSON(inputs.bolt-optimizations) && '--enable-bolt' || '' }} - name: Build CPython out-of-tree if: ${{ inputs.free-threading }} working-directory: ${{ env.CPYTHON_BUILDDIR }} diff --git a/.github/workflows/reusable-wasi.yml b/.github/workflows/reusable-wasi.yml index 4356d9c1c8795e..4456b83c8e0032 100644 --- a/.github/workflows/reusable-wasi.yml +++ b/.github/workflows/reusable-wasi.yml @@ -7,6 +7,9 @@ on: required: true type: string +env: + FORCE_COLOR: 1 + jobs: build_wasi_reusable: name: 'build and test' diff --git a/.github/workflows/reusable-windows-msi.yml b/.github/workflows/reusable-windows-msi.yml index a1c45d954247fb..bc0414d1bbcd8f 100644 --- a/.github/workflows/reusable-windows-msi.yml +++ b/.github/workflows/reusable-windows-msi.yml @@ -11,6 +11,9 @@ on: permissions: contents: read +env: + FORCE_COLOR: 1 + jobs: build: name: installer for ${{ inputs.arch }} diff --git a/.github/workflows/reusable-windows.yml b/.github/workflows/reusable-windows.yml index 459d2b29e5d42b..bfee3d2722cb44 100644 --- a/.github/workflows/reusable-windows.yml +++ b/.github/workflows/reusable-windows.yml @@ -18,6 +18,7 @@ on: default: false env: + FORCE_COLOR: 1 IncludeUwp: >- true diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af6accd89b5bd4..fb44c27704d455 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.9.1 hooks: - id: ruff name: Run Ruff (lint) on Doc/ @@ -29,12 +29,10 @@ repos: - id: black name: Run Black on Tools/build/check_warnings.py files: ^Tools/build/check_warnings.py - language_version: python3.12 args: [--line-length=79] - id: black name: Run Black on Tools/jit/ files: ^Tools/jit/ - language_version: python3.12 - repo: 
https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 @@ -51,19 +49,19 @@ repos: types_or: [c, inc, python, rst] - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.30.0 + rev: 0.31.0 hooks: - id: check-dependabot - id: check-github-workflows - id: check-readthedocs - repo: https://github.com/rhysd/actionlint - rev: v1.7.4 + rev: v1.7.7 hooks: - id: actionlint - repo: https://github.com/woodruffw/zizmor-pre-commit - rev: v0.8.0 + rev: v1.1.1 hooks: - id: zizmor diff --git a/Android/android-env.sh b/Android/android-env.sh index a0f23ef8c9fc52..bab4130c9e92d0 100644 --- a/Android/android-env.sh +++ b/Android/android-env.sh @@ -1,10 +1,10 @@ # This script must be sourced with the following variables already set: -: ${ANDROID_HOME:?} # Path to Android SDK -: ${HOST:?} # GNU target triplet +: "${ANDROID_HOME:?}" # Path to Android SDK +: "${HOST:?}" # GNU target triplet # You may also override the following: -: ${api_level:=24} # Minimum Android API level the build will run on -: ${PREFIX:-} # Path in which to find required libraries +: "${api_level:=24}" # Minimum Android API level the build will run on +: "${PREFIX:-}" # Path in which to find required libraries # Print all messages on stderr so they're visible when running within build-wheel. @@ -27,20 +27,20 @@ fail() { ndk_version=27.1.12297006 ndk=$ANDROID_HOME/ndk/$ndk_version -if ! [ -e $ndk ]; then +if ! [ -e "$ndk" ]; then log "Installing NDK - this may take several minutes" - yes | $ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager "ndk;$ndk_version" + yes | "$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" "ndk;$ndk_version" fi -if [ $HOST = "arm-linux-androideabi" ]; then +if [ "$HOST" = "arm-linux-androideabi" ]; then clang_triplet=armv7a-linux-androideabi else - clang_triplet=$HOST + clang_triplet="$HOST" fi # These variables are based on BuildSystemMaintainers.md above, and # $ndk/build/cmake/android.toolchain.cmake. -toolchain=$(echo $ndk/toolchains/llvm/prebuilt/*) +toolchain=$(echo "$ndk"/toolchains/llvm/prebuilt/*) export AR="$toolchain/bin/llvm-ar" export AS="$toolchain/bin/llvm-as" export CC="$toolchain/bin/${clang_triplet}${api_level}-clang" @@ -72,12 +72,12 @@ LDFLAGS="$LDFLAGS -lm" # -mstackrealign is included where necessary in the clang launcher scripts which are # pointed to by $CC, so we don't need to include it here. -if [ $HOST = "arm-linux-androideabi" ]; then +if [ "$HOST" = "arm-linux-androideabi" ]; then CFLAGS="$CFLAGS -march=armv7-a -mthumb" fi if [ -n "${PREFIX:-}" ]; then - abs_prefix=$(realpath $PREFIX) + abs_prefix="$(realpath "$PREFIX")" CFLAGS="$CFLAGS -I$abs_prefix/include" LDFLAGS="$LDFLAGS -L$abs_prefix/lib" @@ -87,11 +87,13 @@ fi # When compiling C++, some build systems will combine CFLAGS and CXXFLAGS, and some will # use CXXFLAGS alone. -export CXXFLAGS=$CFLAGS +export CXXFLAGS="$CFLAGS" # Use the same variable name as conda-build -if [ $(uname) = "Darwin" ]; then - export CPU_COUNT=$(sysctl -n hw.ncpu) +if [ "$(uname)" = "Darwin" ]; then + CPU_COUNT="$(sysctl -n hw.ncpu)" + export CPU_COUNT else - export CPU_COUNT=$(nproc) + CPU_COUNT="$(nproc)" + export CPU_COUNT fi diff --git a/Doc/Makefile b/Doc/Makefile index 4a704ad58b33d3..b8896da4a91869 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -14,15 +14,15 @@ PAPER = SOURCES = DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py) REQUIREMENTS = requirements.txt -SPHINXERRORHANDLING = -W +SPHINXERRORHANDLING = --fail-on-warning # Internal variables. 
-PAPEROPT_a4 = -D latex_elements.papersize=a4paper -PAPEROPT_letter = -D latex_elements.papersize=letterpaper +PAPEROPT_a4 = --define latex_elements.papersize=a4paper +PAPEROPT_letter = --define latex_elements.papersize=letterpaper -ALLSPHINXOPTS = -b $(BUILDER) \ - -d build/doctrees \ - -j $(JOBS) \ +ALLSPHINXOPTS = --builder $(BUILDER) \ + --doctree-dir build/doctrees \ + --jobs $(JOBS) \ $(PAPEROPT_$(PAPER)) \ $(SPHINXOPTS) $(SPHINXERRORHANDLING) \ . build/$(BUILDER) $(SOURCES) @@ -144,7 +144,7 @@ pydoc-topics: build .PHONY: gettext gettext: BUILDER = gettext -gettext: override SPHINXOPTS := -d build/doctrees-gettext $(SPHINXOPTS) +gettext: override SPHINXOPTS := --doctree-dir build/doctrees-gettext $(SPHINXOPTS) gettext: build .PHONY: htmlview @@ -172,7 +172,7 @@ venv: else \ echo "Creating venv in $(VENVDIR)"; \ if $(UV) --version >/dev/null 2>&1; then \ - $(UV) venv $(VENVDIR); \ + $(UV) venv --python=$(PYTHON) $(VENVDIR); \ VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \ else \ $(PYTHON) -m venv $(VENVDIR); \ @@ -300,20 +300,20 @@ serve: # By default, Sphinx only rebuilds pages where the page content has changed. # This means it doesn't always pick up changes to preferred link targets, etc # To ensure such changes are picked up, we build the published docs with -# `-E` (to ignore the cached environment) and `-a` (to ignore already existing -# output files) +# ``--fresh-env`` (to ignore the cached environment) and ``--write-all`` +# (to ignore already existing output files) # for development releases: always build .PHONY: autobuild-dev autobuild-dev: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short) autobuild-dev: - $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION) + $(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION) # for HTML-only rebuilds .PHONY: autobuild-dev-html autobuild-dev-html: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short) autobuild-dev-html: - $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1' DISTVERSION=$(DISTVERSION) + $(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION) # for stable releases: only build if not in pre-release stage (alpha, beta) # release candidate downloads are okay, since the stable tree can be in that stage diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 97996a6f69dd22..dc44f3eaf87765 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -109,7 +109,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-b` option. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_DebugFlag @@ -123,7 +123,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-d` option and the :envvar:`PYTHONDEBUG` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_DontWriteBytecodeFlag @@ -137,7 +137,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-B` option and the :envvar:`PYTHONDONTWRITEBYTECODE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_FrozenFlag @@ -150,7 +150,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Private flag used by ``_freeze_module`` and ``frozenmain`` programs. - .. deprecated-removed:: 3.12 3.14 + .. 
deprecated-removed:: 3.12 3.15 .. c:var:: int Py_HashRandomizationFlag @@ -165,7 +165,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. If the flag is non-zero, read the :envvar:`PYTHONHASHSEED` environment variable to initialize the secret hash seed. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_IgnoreEnvironmentFlag @@ -178,7 +178,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-E` and :option:`-I` options. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_InspectFlag @@ -193,7 +193,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-i` option and the :envvar:`PYTHONINSPECT` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_InteractiveFlag @@ -218,7 +218,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. versionadded:: 3.4 - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_LegacyWindowsFSEncodingFlag @@ -237,7 +237,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. availability:: Windows. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_LegacyWindowsStdioFlag @@ -255,7 +255,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. availability:: Windows. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_NoSiteFlag @@ -270,7 +270,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-S` option. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_NoUserSiteDirectory @@ -284,7 +284,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-s` and :option:`-I` options, and the :envvar:`PYTHONNOUSERSITE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_OptimizeFlag @@ -295,7 +295,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-O` option and the :envvar:`PYTHONOPTIMIZE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_QuietFlag @@ -309,7 +309,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. .. versionadded:: 3.2 - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_UnbufferedStdioFlag @@ -322,7 +322,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-u` option and the :envvar:`PYTHONUNBUFFERED` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 .. c:var:: int Py_VerboseFlag @@ -338,7 +338,7 @@ to 1 and ``-bb`` sets :c:data:`Py_BytesWarningFlag` to 2. Set by the :option:`-v` option and the :envvar:`PYTHONVERBOSE` environment variable. - .. deprecated-removed:: 3.12 3.14 + .. deprecated-removed:: 3.12 3.15 Initializing and finalizing the interpreter @@ -606,7 +606,7 @@ Process-wide parameters Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a :c:expr:`wchar_*` string. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: wchar_t* Py_GetProgramName() @@ -868,7 +868,7 @@ Process-wide parameters .. XXX impl. doesn't seem consistent in allowing ``0``/``NULL`` for the params; check w/ Guido. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. 
c:function:: void PySys_SetArgv(int argc, wchar_t **argv) @@ -889,7 +889,7 @@ Process-wide parameters .. versionchanged:: 3.4 The *updatepath* value depends on :option:`-I`. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: void Py_SetPythonHome(const wchar_t *home) @@ -910,7 +910,7 @@ Process-wide parameters Use :c:func:`Py_DecodeLocale` to decode a bytes string to get a :c:expr:`wchar_*` string. - .. deprecated:: 3.11 + .. deprecated-removed:: 3.11 3.15 .. c:function:: wchar_t* Py_GetPythonHome() diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 6b33d93a9f2af9..b791d3cdc5d95c 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -6,28 +6,15 @@ Python Initialization Configuration *********************************** -.. _pyconfig_api: - -PyConfig C API -============== -.. versionadded:: 3.8 - -Python can be initialized with :c:func:`Py_InitializeFromConfig` and the -:c:type:`PyConfig` structure. It can be preinitialized with -:c:func:`Py_PreInitialize` and the :c:type:`PyPreConfig` structure. +.. _pyinitconfig_api: -There are two kinds of configuration: +PyInitConfig C API +================== -* The :ref:`Python Configuration ` can be used to build a - customized Python which behaves as the regular Python. For example, - environment variables and command line arguments are used to configure - Python. +.. versionadded:: 3.14 -* The :ref:`Isolated Configuration ` can be used to embed - Python into an application. It isolates Python from the system. For example, - environment variables are ignored, the LC_CTYPE locale is left unchanged and - no signal handler is registered. +Python can be initialized with :c:func:`Py_InitializeFromInitConfig`. The :c:func:`Py_RunMain` function can be used to write a customized Python program. @@ -35,2000 +22,2267 @@ program. See also :ref:`Initialization, Finalization, and Threads `. .. seealso:: - :pep:`587` "Python Initialization Configuration". + :pep:`741` "Python Configuration C API". Example ------- -Example of customized Python always running in isolated mode:: - - int main(int argc, char **argv) - { - PyStatus status; +Example of customized Python always running with the :ref:`Python Development +Mode ` enabled; return ``-1`` on error: - PyConfig config; - PyConfig_InitPythonConfig(&config); - config.isolated = 1; +.. code-block:: c - /* Decode command line arguments. - Implicitly preinitialize Python (in isolated mode). */ - status = PyConfig_SetBytesArgv(&config, argc, argv); - if (PyStatus_Exception(status)) { - goto exception; + int init_python(void) + { + PyInitConfig *config = PyInitConfig_Create(); + if (config == NULL) { + printf("PYTHON INIT ERROR: memory allocation failed\n"); + return -1; } - status = Py_InitializeFromConfig(&config); - if (PyStatus_Exception(status)) { - goto exception; + // Enable the Python Development Mode + if (PyInitConfig_SetInt(config, "dev_mode", 1) < 0) { + goto error; } - PyConfig_Clear(&config); - return Py_RunMain(); + // Initialize Python with the configuration + if (Py_InitializeFromInitConfig(config) < 0) { + goto error; + } + PyInitConfig_Free(config); + return 0; - exception: - PyConfig_Clear(&config); - if (PyStatus_IsExit(status)) { - return status.exitcode; + error: + { + // Display the error message. + // + // This uncommon braces style is used, because you cannot make + // goto targets point to variable declarations. 
+ const char *err_msg; + (void)PyInitConfig_GetError(config, &err_msg); + printf("PYTHON INIT ERROR: %s\n", err_msg); + PyInitConfig_Free(config); + return -1; } - /* Display the error message and exit the process with - non-zero exit code */ - Py_ExitStatusException(status); } +Create Config +------------- -PyWideStringList ----------------- +.. c:struct:: PyInitConfig -.. c:type:: PyWideStringList + Opaque structure to configure the Python initialization. - List of ``wchar_t*`` strings. - If *length* is non-zero, *items* must be non-``NULL`` and all strings must be - non-``NULL``. +.. c:function:: PyInitConfig* PyInitConfig_Create(void) - .. c:namespace:: NULL + Create a new initialization configuration using :ref:`Isolated Configuration + ` default values. - Methods: + It must be freed by :c:func:`PyInitConfig_Free`. - .. c:function:: PyStatus PyWideStringList_Append(PyWideStringList *list, const wchar_t *item) + Return ``NULL`` on memory allocation failure. - Append *item* to *list*. - Python must be preinitialized to call this function. +.. c:function:: void PyInitConfig_Free(PyInitConfig *config) - .. c:function:: PyStatus PyWideStringList_Insert(PyWideStringList *list, Py_ssize_t index, const wchar_t *item) + Free memory of the initialization configuration *config*. - Insert *item* into *list* at *index*. + If *config* is ``NULL``, no operation is performed. - If *index* is greater than or equal to *list* length, append *item* to - *list*. - *index* must be greater than or equal to ``0``. +Error Handling +-------------- - Python must be preinitialized to call this function. +.. c:function:: int PyInitConfig_GetError(PyInitConfig* config, const char **err_msg) - .. c:namespace:: PyWideStringList + Get the *config* error message. - Structure fields: + * Set *\*err_msg* and return ``1`` if an error is set. + * Set *\*err_msg* to ``NULL`` and return ``0`` otherwise. - .. c:member:: Py_ssize_t length + An error message is an UTF-8 encoded string. - List length. + If *config* has an exit code, format the exit code as an error + message. - .. c:member:: wchar_t** items + The error message remains valid until another ``PyInitConfig`` + function is called with *config*. The caller doesn't have to free the + error message. - List items. -PyStatus --------- +.. c:function:: int PyInitConfig_GetExitCode(PyInitConfig* config, int *exitcode) -.. c:type:: PyStatus + Get the *config* exit code. - Structure to store an initialization function status: success, error - or exit. + * Set *\*exitcode* and return ``1`` if *config* has an exit code set. + * Return ``0`` if *config* has no exit code set. - For an error, it can store the C function name which created the error. + Only the ``Py_InitializeFromInitConfig()`` function can set an exit + code if the ``parse_argv`` option is non-zero. - Structure fields: + An exit code can be set when parsing the command line failed (exit + code ``2``) or when a command line option asks to display the command + line help (exit code ``0``). - .. c:member:: int exitcode - Exit code. Argument passed to ``exit()``. +Get Options +----------- - .. c:member:: const char *err_msg +The configuration option *name* parameter must be a non-NULL null-terminated +UTF-8 encoded string. See :ref:`Configuration Options `. - Error message. +.. c:function:: int PyInitConfig_HasOption(PyInitConfig *config, const char *name) - .. c:member:: const char *func + Test if the configuration has an option called *name*. - Name of the function which created an error, can be ``NULL``. 
+ Return ``1`` if the option exists, or return ``0`` otherwise. - .. c:namespace:: NULL - Functions to create a status: +.. c:function:: int PyInitConfig_GetInt(PyInitConfig *config, const char *name, int64_t *value) - .. c:function:: PyStatus PyStatus_Ok(void) + Get an integer configuration option. - Success. + * Set *\*value*, and return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - .. c:function:: PyStatus PyStatus_Error(const char *err_msg) - Initialization error with a message. +.. c:function:: int PyInitConfig_GetStr(PyInitConfig *config, const char *name, char **value) - *err_msg* must not be ``NULL``. + Get a string configuration option as a null-terminated UTF-8 + encoded string. - .. c:function:: PyStatus PyStatus_NoMemory(void) + * Set *\*value*, and return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - Memory allocation failure (out of memory). + *\*value* can be set to ``NULL`` if the option is an optional string and the + option is unset. - .. c:function:: PyStatus PyStatus_Exit(int exitcode) + On success, the string must be released with ``free(value)`` if it's not + ``NULL``. - Exit Python with the specified exit code. - Functions to handle a status: +.. c:function:: int PyInitConfig_GetStrList(PyInitConfig *config, const char *name, size_t *length, char ***items) - .. c:function:: int PyStatus_Exception(PyStatus status) + Get a string list configuration option as an array of + null-terminated UTF-8 encoded strings. - Is the status an error or an exit? If true, the exception must be - handled; by calling :c:func:`Py_ExitStatusException` for example. + * Set *\*length* and *\*value*, and return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - .. c:function:: int PyStatus_IsError(PyStatus status) + On success, the string list must be released with + ``PyInitConfig_FreeStrList(length, items)``. - Is the result an error? - .. c:function:: int PyStatus_IsExit(PyStatus status) +.. c:function:: void PyInitConfig_FreeStrList(size_t length, char **items) - Is the result an exit? + Free memory of a string list created by + ``PyInitConfig_GetStrList()``. - .. c:function:: void Py_ExitStatusException(PyStatus status) - Call ``exit(exitcode)`` if *status* is an exit. Print the error - message and exit with a non-zero exit code if *status* is an error. Must - only be called if ``PyStatus_Exception(status)`` is non-zero. +Set Options +----------- -.. note:: - Internally, Python uses macros which set ``PyStatus.func``, - whereas functions to create a status set ``func`` to ``NULL``. +The configuration option *name* parameter must be a non-NULL null-terminated +UTF-8 encoded string. See :ref:`Configuration Options `. -Example:: +Some configuration options have side effects on other options. This logic is +only implemented when ``Py_InitializeFromInitConfig()`` is called, not by the +"Set" functions below. For example, setting ``dev_mode`` to ``1`` does not set +``faulthandler`` to ``1``. - PyStatus alloc(void **ptr, size_t size) - { - *ptr = PyMem_RawMalloc(size); - if (*ptr == NULL) { - return PyStatus_NoMemory(); - } - return PyStatus_Ok(); - } +.. c:function:: int PyInitConfig_SetInt(PyInitConfig *config, const char *name, int64_t value) - int main(int argc, char **argv) - { - void *ptr; - PyStatus status = alloc(&ptr, 16); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - PyMem_Free(ptr); - return 0; - } + Set an integer configuration option. + * Return ``0`` on success. 
+ * Set an error in *config* and return ``-1`` on error. -PyPreConfig ------------ -.. c:type:: PyPreConfig +.. c:function:: int PyInitConfig_SetStr(PyInitConfig *config, const char *name, const char *value) - Structure used to preinitialize Python. + Set a string configuration option from a null-terminated UTF-8 + encoded string. The string is copied. - .. c:namespace:: NULL + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - Function to initialize a preconfiguration: - .. c:function:: void PyPreConfig_InitPythonConfig(PyPreConfig *preconfig) +.. c:function:: int PyInitConfig_SetStrList(PyInitConfig *config, const char *name, size_t length, char * const *items) - Initialize the preconfiguration with :ref:`Python Configuration - `. + Set a string list configuration option from an array of + null-terminated UTF-8 encoded strings. The string list is copied. - .. c:function:: void PyPreConfig_InitIsolatedConfig(PyPreConfig *preconfig) + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. - Initialize the preconfiguration with :ref:`Isolated Configuration - `. - .. c:namespace:: PyPreConfig +Module +------ - Structure fields: +.. c:function:: int PyInitConfig_AddModule(PyInitConfig *config, const char *name, PyObject* (*initfunc)(void)) - .. c:member:: int allocator + Add a built-in extension module to the table of built-in modules. - Name of the Python memory allocators: + The new module can be imported by the name *name*, and uses the function + *initfunc* as the initialization function called on the first attempted + import. - * ``PYMEM_ALLOCATOR_NOT_SET`` (``0``): don't change memory allocators - (use defaults). - * ``PYMEM_ALLOCATOR_DEFAULT`` (``1``): :ref:`default memory allocators - `. - * ``PYMEM_ALLOCATOR_DEBUG`` (``2``): :ref:`default memory allocators - ` with :ref:`debug hooks - `. - * ``PYMEM_ALLOCATOR_MALLOC`` (``3``): use ``malloc()`` of the C library. - * ``PYMEM_ALLOCATOR_MALLOC_DEBUG`` (``4``): force usage of - ``malloc()`` with :ref:`debug hooks `. - * ``PYMEM_ALLOCATOR_PYMALLOC`` (``5``): :ref:`Python pymalloc memory - allocator `. - * ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` (``6``): :ref:`Python pymalloc - memory allocator ` with :ref:`debug hooks - `. - * ``PYMEM_ALLOCATOR_MIMALLOC`` (``6``): use ``mimalloc``, a fast - malloc replacement. - * ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` (``7``): use ``mimalloc``, a fast - malloc replacement with :ref:`debug hooks `. + * Return ``0`` on success. + * Set an error in *config* and return ``-1`` on error. + If Python is initialized multiple times, ``PyInitConfig_AddModule()`` must + be called at each Python initialization. - ``PYMEM_ALLOCATOR_PYMALLOC`` and ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` are - not supported if Python is :option:`configured using --without-pymalloc - <--without-pymalloc>`. + Similar to the :c:func:`PyImport_AppendInittab` function. - ``PYMEM_ALLOCATOR_MIMALLOC`` and ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` are - not supported if Python is :option:`configured using --without-mimalloc - <--without-mimalloc>` or if the underlying atomic support isn't - available. - See :ref:`Memory Management `. +Initialize Python +----------------- - Default: ``PYMEM_ALLOCATOR_NOT_SET``. +.. c:function:: int Py_InitializeFromInitConfig(PyInitConfig *config) - .. c:member:: int configure_locale + Initialize Python from the initialization configuration. - Set the LC_CTYPE locale to the user preferred locale. + * Return ``0`` on success. 
+ * Set an error in *config* and return ``-1`` on error. + * Set an exit code in *config* and return ``-1`` if Python wants to + exit. - If equals to ``0``, set :c:member:`~PyPreConfig.coerce_c_locale` and - :c:member:`~PyPreConfig.coerce_c_locale_warn` members to ``0``. + See ``PyInitConfig_GetExitcode()`` for the exit code case. - See the :term:`locale encoding`. - Default: ``1`` in Python config, ``0`` in isolated config. +.. _pyinitconfig-opts: + +Configuration Options +===================== + +.. list-table:: + :header-rows: 1 + + * - Option + - PyConfig/PyPreConfig member + - Type + - Visibility + * - ``"allocator"`` + - :c:member:`allocator ` + - ``int`` + - Read-only + * - ``"argv"`` + - :c:member:`argv ` + - ``list[str]`` + - Public + * - ``"base_exec_prefix"`` + - :c:member:`base_exec_prefix ` + - ``str`` + - Public + * - ``"base_executable"`` + - :c:member:`base_executable ` + - ``str`` + - Public + * - ``"base_prefix"`` + - :c:member:`base_prefix ` + - ``str`` + - Public + * - ``"buffered_stdio"`` + - :c:member:`buffered_stdio ` + - ``bool`` + - Read-only + * - ``"bytes_warning"`` + - :c:member:`bytes_warning ` + - ``int`` + - Public + * - ``"check_hash_pycs_mode"`` + - :c:member:`check_hash_pycs_mode ` + - ``str`` + - Read-only + * - ``"code_debug_ranges"`` + - :c:member:`code_debug_ranges ` + - ``bool`` + - Read-only + * - ``"coerce_c_locale"`` + - :c:member:`coerce_c_locale ` + - ``bool`` + - Read-only + * - ``"coerce_c_locale_warn"`` + - :c:member:`coerce_c_locale_warn ` + - ``bool`` + - Read-only + * - ``"configure_c_stdio"`` + - :c:member:`configure_c_stdio ` + - ``bool`` + - Read-only + * - ``"configure_locale"`` + - :c:member:`configure_locale ` + - ``bool`` + - Read-only + * - ``"cpu_count"`` + - :c:member:`cpu_count ` + - ``int`` + - Read-only + * - ``"dev_mode"`` + - :c:member:`dev_mode ` + - ``bool`` + - Read-only + * - ``"dump_refs"`` + - :c:member:`dump_refs ` + - ``bool`` + - Read-only + * - ``"dump_refs_file"`` + - :c:member:`dump_refs_file ` + - ``str`` + - Read-only + * - ``"exec_prefix"`` + - :c:member:`exec_prefix ` + - ``str`` + - Public + * - ``"executable"`` + - :c:member:`executable ` + - ``str`` + - Public + * - ``"faulthandler"`` + - :c:member:`faulthandler ` + - ``bool`` + - Read-only + * - ``"filesystem_encoding"`` + - :c:member:`filesystem_encoding ` + - ``str`` + - Read-only + * - ``"filesystem_errors"`` + - :c:member:`filesystem_errors ` + - ``str`` + - Read-only + * - ``"hash_seed"`` + - :c:member:`hash_seed ` + - ``int`` + - Read-only + * - ``"home"`` + - :c:member:`home ` + - ``str`` + - Read-only + * - ``"import_time"`` + - :c:member:`import_time ` + - ``bool`` + - Read-only + * - ``"inspect"`` + - :c:member:`inspect ` + - ``bool`` + - Public + * - ``"install_signal_handlers"`` + - :c:member:`install_signal_handlers ` + - ``bool`` + - Read-only + * - ``"int_max_str_digits"`` + - :c:member:`int_max_str_digits ` + - ``int`` + - Public + * - ``"interactive"`` + - :c:member:`interactive ` + - ``bool`` + - Public + * - ``"isolated"`` + - :c:member:`isolated ` + - ``bool`` + - Read-only + * - ``"legacy_windows_fs_encoding"`` + - :c:member:`legacy_windows_fs_encoding ` + - ``bool`` + - Read-only + * - ``"legacy_windows_stdio"`` + - :c:member:`legacy_windows_stdio ` + - ``bool`` + - Read-only + * - ``"malloc_stats"`` + - :c:member:`malloc_stats ` + - ``bool`` + - Read-only + * - ``"module_search_paths"`` + - :c:member:`module_search_paths ` + - ``list[str]`` + - Public + * - ``"optimization_level"`` + - :c:member:`optimization_level ` + - ``int`` + - 
Public + * - ``"orig_argv"`` + - :c:member:`orig_argv ` + - ``list[str]`` + - Read-only + * - ``"parse_argv"`` + - :c:member:`parse_argv ` + - ``bool`` + - Read-only + * - ``"parser_debug"`` + - :c:member:`parser_debug ` + - ``bool`` + - Public + * - ``"pathconfig_warnings"`` + - :c:member:`pathconfig_warnings ` + - ``bool`` + - Read-only + * - ``"perf_profiling"`` + - :c:member:`perf_profiling ` + - ``bool`` + - Read-only + * - ``"platlibdir"`` + - :c:member:`platlibdir ` + - ``str`` + - Public + * - ``"prefix"`` + - :c:member:`prefix ` + - ``str`` + - Public + * - ``"program_name"`` + - :c:member:`program_name ` + - ``str`` + - Read-only + * - ``"pycache_prefix"`` + - :c:member:`pycache_prefix ` + - ``str`` + - Public + * - ``"quiet"`` + - :c:member:`quiet ` + - ``bool`` + - Public + * - ``"run_command"`` + - :c:member:`run_command ` + - ``str`` + - Read-only + * - ``"run_filename"`` + - :c:member:`run_filename ` + - ``str`` + - Read-only + * - ``"run_module"`` + - :c:member:`run_module ` + - ``str`` + - Read-only + * - ``"run_presite"`` + - :c:member:`run_presite ` + - ``str`` + - Read-only + * - ``"safe_path"`` + - :c:member:`safe_path ` + - ``bool`` + - Read-only + * - ``"show_ref_count"`` + - :c:member:`show_ref_count ` + - ``bool`` + - Read-only + * - ``"site_import"`` + - :c:member:`site_import ` + - ``bool`` + - Read-only + * - ``"skip_source_first_line"`` + - :c:member:`skip_source_first_line ` + - ``bool`` + - Read-only + * - ``"stdio_encoding"`` + - :c:member:`stdio_encoding ` + - ``str`` + - Read-only + * - ``"stdio_errors"`` + - :c:member:`stdio_errors ` + - ``str`` + - Read-only + * - ``"stdlib_dir"`` + - :c:member:`stdlib_dir ` + - ``str`` + - Public + * - ``"tracemalloc"`` + - :c:member:`tracemalloc ` + - ``int`` + - Read-only + * - ``"use_environment"`` + - :c:member:`use_environment ` + - ``bool`` + - Public + * - ``"use_frozen_modules"`` + - :c:member:`use_frozen_modules ` + - ``bool`` + - Read-only + * - ``"use_hash_seed"`` + - :c:member:`use_hash_seed ` + - ``bool`` + - Read-only + * - ``"user_site_directory"`` + - :c:member:`user_site_directory ` + - ``bool`` + - Read-only + * - ``"utf8_mode"`` + - :c:member:`utf8_mode ` + - ``bool`` + - Read-only + * - ``"verbose"`` + - :c:member:`verbose ` + - ``int`` + - Public + * - ``"warn_default_encoding"`` + - :c:member:`warn_default_encoding ` + - ``bool`` + - Read-only + * - ``"warnoptions"`` + - :c:member:`warnoptions ` + - ``list[str]`` + - Public + * - ``"write_bytecode"`` + - :c:member:`write_bytecode ` + - ``bool`` + - Public + * - ``"xoptions"`` + - :c:member:`xoptions ` + - ``dict[str, str]`` + - Public + * - ``"_pystats"`` + - :c:member:`_pystats ` + - ``bool`` + - Read-only + +Visibility: + +* Public: Can by get by :c:func:`PyConfig_Get` and set by + :c:func:`PyConfig_Set`. +* Read-only: Can by get by :c:func:`PyConfig_Get`, but cannot be set by + :c:func:`PyConfig_Set`. - .. c:member:: int coerce_c_locale - If equals to ``2``, coerce the C locale. +Runtime Python configuration API +================================ - If equals to ``1``, read the LC_CTYPE locale to decide if it should be - coerced. +At runtime, it's possible to get and set configuration options using +:c:func:`PyConfig_Get` and :c:func:`PyConfig_Set` functions. - See the :term:`locale encoding`. +The configuration option *name* parameter must be a non-NULL null-terminated +UTF-8 encoded string. See :ref:`Configuration Options `. - Default: ``-1`` in Python config, ``0`` in isolated config. +Some options are read from the :mod:`sys` attributes. 
For example, the option +``"argv"`` is read from :data:`sys.argv`. - .. c:member:: int coerce_c_locale_warn - If non-zero, emit a warning if the C locale is coerced. +.. c:function:: PyObject* PyConfig_Get(const char *name) - Default: ``-1`` in Python config, ``0`` in isolated config. + Get the current runtime value of a configuration option as a Python object. - .. c:member:: int dev_mode + * Return a new reference on success. + * Set an exception and return ``NULL`` on error. - :ref:`Python Development Mode `: see - :c:member:`PyConfig.dev_mode`. + The object type depends on the configuration option. It can be: - Default: ``-1`` in Python mode, ``0`` in isolated mode. + * ``bool`` + * ``int`` + * ``str`` + * ``list[str]`` + * ``dict[str, str]`` - .. c:member:: int isolated + The caller must hold the GIL. The function cannot be called before + Python initialization nor after Python finalization. - Isolated mode: see :c:member:`PyConfig.isolated`. + .. versionadded:: 3.14 - Default: ``0`` in Python mode, ``1`` in isolated mode. - .. c:member:: int legacy_windows_fs_encoding +.. c:function:: int PyConfig_GetInt(const char *name, int *value) - If non-zero: + Similar to :c:func:`PyConfig_Get`, but get the value as a C int. - * Set :c:member:`PyPreConfig.utf8_mode` to ``0``, - * Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``, - * Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``. + * Return ``0`` on success. + * Set an exception and return ``-1`` on error. - Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment - variable value. + .. versionadded:: 3.14 - Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for - Windows specific code. - Default: ``0``. +.. c:function:: PyObject* PyConfig_Names(void) - .. c:member:: int parse_argv + Get all configuration option names as a ``frozenset``. - If non-zero, :c:func:`Py_PreInitializeFromArgs` and - :c:func:`Py_PreInitializeFromBytesArgs` parse their ``argv`` argument the - same way the regular Python parses command line arguments: see - :ref:`Command Line Arguments `. + * Return a new reference on success. + * Set an exception and return ``NULL`` on error. - Default: ``1`` in Python config, ``0`` in isolated config. + The caller must hold the GIL. The function cannot be called before + Python initialization nor after Python finalization. - .. c:member:: int use_environment + .. versionadded:: 3.14 - Use :ref:`environment variables `? See - :c:member:`PyConfig.use_environment`. - Default: ``1`` in Python config and ``0`` in isolated config. +.. c:function:: int PyConfig_Set(const char *name, PyObject *value) - .. c:member:: int utf8_mode + Set the current runtime value of a configuration option. - If non-zero, enable the :ref:`Python UTF-8 Mode `. + * Raise a :exc:`ValueError` if there is no option *name*. + * Raise a :exc:`ValueError` if *value* is an invalid value. + * Raise a :exc:`ValueError` if the option is read-only (cannot be set). + * Raise a :exc:`TypeError` if *value* has not the proper type. - Set to ``0`` or ``1`` by the :option:`-X utf8 <-X>` command line option - and the :envvar:`PYTHONUTF8` environment variable. + The caller must hold the GIL. The function cannot be called before + Python initialization nor after Python finalization. - Also set to ``1`` if the ``LC_CTYPE`` locale is ``C`` or ``POSIX``. + .. versionadded:: 3.14 - Default: ``-1`` in Python config and ``0`` in isolated config. +.. _pyconfig_api: -.. 
_c-preinit: +PyConfig C API +============== -Preinitialize Python with PyPreConfig -------------------------------------- +.. versionadded:: 3.8 -The preinitialization of Python: +Python can be initialized with :c:func:`Py_InitializeFromConfig` and the +:c:type:`PyConfig` structure. It can be preinitialized with +:c:func:`Py_PreInitialize` and the :c:type:`PyPreConfig` structure. -* Set the Python memory allocators (:c:member:`PyPreConfig.allocator`) -* Configure the LC_CTYPE locale (:term:`locale encoding`) -* Set the :ref:`Python UTF-8 Mode ` - (:c:member:`PyPreConfig.utf8_mode`) +There are two kinds of configuration: -The current preconfiguration (``PyPreConfig`` type) is stored in -``_PyRuntime.preconfig``. +* The :ref:`Python Configuration ` can be used to build a + customized Python which behaves as the regular Python. For example, + environment variables and command line arguments are used to configure + Python. -Functions to preinitialize Python: +* The :ref:`Isolated Configuration ` can be used to embed + Python into an application. It isolates Python from the system. For example, + environment variables are ignored, the LC_CTYPE locale is left unchanged and + no signal handler is registered. -.. c:function:: PyStatus Py_PreInitialize(const PyPreConfig *preconfig) +The :c:func:`Py_RunMain` function can be used to write a customized Python +program. - Preinitialize Python from *preconfig* preconfiguration. +See also :ref:`Initialization, Finalization, and Threads `. - *preconfig* must not be ``NULL``. +.. seealso:: + :pep:`587` "Python Initialization Configuration". -.. c:function:: PyStatus Py_PreInitializeFromBytesArgs(const PyPreConfig *preconfig, int argc, char * const *argv) - Preinitialize Python from *preconfig* preconfiguration. +Example +------- - Parse *argv* command line arguments (bytes strings) if - :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. +Example of customized Python always running in isolated mode:: - *preconfig* must not be ``NULL``. + int main(int argc, char **argv) + { + PyStatus status; -.. c:function:: PyStatus Py_PreInitializeFromArgs(const PyPreConfig *preconfig, int argc, wchar_t * const * argv) + PyConfig config; + PyConfig_InitPythonConfig(&config); + config.isolated = 1; - Preinitialize Python from *preconfig* preconfiguration. + /* Decode command line arguments. + Implicitly preinitialize Python (in isolated mode). */ + status = PyConfig_SetBytesArgv(&config, argc, argv); + if (PyStatus_Exception(status)) { + goto exception; + } - Parse *argv* command line arguments (wide strings) if - :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. + status = Py_InitializeFromConfig(&config); + if (PyStatus_Exception(status)) { + goto exception; + } + PyConfig_Clear(&config); - *preconfig* must not be ``NULL``. + return Py_RunMain(); -The caller is responsible to handle exceptions (error or exit) using -:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. + exception: + PyConfig_Clear(&config); + if (PyStatus_IsExit(status)) { + return status.exitcode; + } + /* Display the error message and exit the process with + non-zero exit code */ + Py_ExitStatusException(status); + } -For :ref:`Python Configuration ` -(:c:func:`PyPreConfig_InitPythonConfig`), if Python is initialized with -command line arguments, the command line arguments must also be passed to -preinitialize Python, since they have an effect on the pre-configuration -like encodings. 
For example, the :option:`-X utf8 <-X>` command line option -enables the :ref:`Python UTF-8 Mode `. -``PyMem_SetAllocator()`` can be called after :c:func:`Py_PreInitialize` and -before :c:func:`Py_InitializeFromConfig` to install a custom memory allocator. -It can be called before :c:func:`Py_PreInitialize` if -:c:member:`PyPreConfig.allocator` is set to ``PYMEM_ALLOCATOR_NOT_SET``. +PyWideStringList +---------------- -Python memory allocation functions like :c:func:`PyMem_RawMalloc` must not be -used before the Python preinitialization, whereas calling directly ``malloc()`` -and ``free()`` is always safe. :c:func:`Py_DecodeLocale` must not be called -before the Python preinitialization. +.. c:type:: PyWideStringList -Example using the preinitialization to enable -the :ref:`Python UTF-8 Mode `:: + List of ``wchar_t*`` strings. - PyStatus status; - PyPreConfig preconfig; - PyPreConfig_InitPythonConfig(&preconfig); + If *length* is non-zero, *items* must be non-``NULL`` and all strings must be + non-``NULL``. - preconfig.utf8_mode = 1; + .. c:namespace:: NULL - status = Py_PreInitialize(&preconfig); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } + Methods: - /* at this point, Python speaks UTF-8 */ + .. c:function:: PyStatus PyWideStringList_Append(PyWideStringList *list, const wchar_t *item) - Py_Initialize(); - /* ... use Python API here ... */ - Py_Finalize(); + Append *item* to *list*. + Python must be preinitialized to call this function. -PyConfig --------- + .. c:function:: PyStatus PyWideStringList_Insert(PyWideStringList *list, Py_ssize_t index, const wchar_t *item) -.. c:type:: PyConfig + Insert *item* into *list* at *index*. - Structure containing most parameters to configure Python. + If *index* is greater than or equal to *list* length, append *item* to + *list*. - When done, the :c:func:`PyConfig_Clear` function must be used to release the - configuration memory. + *index* must be greater than or equal to ``0``. - .. c:namespace:: NULL + Python must be preinitialized to call this function. - Structure methods: + .. c:namespace:: PyWideStringList - .. c:function:: void PyConfig_InitPythonConfig(PyConfig *config) + Structure fields: - Initialize configuration with the :ref:`Python Configuration - `. + .. c:member:: Py_ssize_t length - .. c:function:: void PyConfig_InitIsolatedConfig(PyConfig *config) + List length. - Initialize configuration with the :ref:`Isolated Configuration - `. + .. c:member:: wchar_t** items - .. c:function:: PyStatus PyConfig_SetString(PyConfig *config, wchar_t * const *config_str, const wchar_t *str) + List items. - Copy the wide character string *str* into ``*config_str``. +PyStatus +-------- - :ref:`Preinitialize Python ` if needed. +.. c:type:: PyStatus - .. c:function:: PyStatus PyConfig_SetBytesString(PyConfig *config, wchar_t * const *config_str, const char *str) + Structure to store an initialization function status: success, error + or exit. - Decode *str* using :c:func:`Py_DecodeLocale` and set the result into - ``*config_str``. + For an error, it can store the C function name which created the error. - :ref:`Preinitialize Python ` if needed. + Structure fields: - .. c:function:: PyStatus PyConfig_SetArgv(PyConfig *config, int argc, wchar_t * const *argv) + .. c:member:: int exitcode - Set command line arguments (:c:member:`~PyConfig.argv` member of - *config*) from the *argv* list of wide character strings. + Exit code. Argument passed to ``exit()``. - :ref:`Preinitialize Python ` if needed. + .. 
c:member:: const char *err_msg - .. c:function:: PyStatus PyConfig_SetBytesArgv(PyConfig *config, int argc, char * const *argv) + Error message. - Set command line arguments (:c:member:`~PyConfig.argv` member of - *config*) from the *argv* list of bytes strings. Decode bytes using - :c:func:`Py_DecodeLocale`. + .. c:member:: const char *func - :ref:`Preinitialize Python ` if needed. + Name of the function which created an error, can be ``NULL``. - .. c:function:: PyStatus PyConfig_SetWideStringList(PyConfig *config, PyWideStringList *list, Py_ssize_t length, wchar_t **items) + .. c:namespace:: NULL - Set the list of wide strings *list* to *length* and *items*. + Functions to create a status: - :ref:`Preinitialize Python ` if needed. + .. c:function:: PyStatus PyStatus_Ok(void) - .. c:function:: PyStatus PyConfig_Read(PyConfig *config) + Success. - Read all Python configuration. + .. c:function:: PyStatus PyStatus_Error(const char *err_msg) - Fields which are already initialized are left unchanged. + Initialization error with a message. - Fields for :ref:`path configuration ` are no longer - calculated or modified when calling this function, as of Python 3.11. + *err_msg* must not be ``NULL``. - The :c:func:`PyConfig_Read` function only parses - :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` - is set to ``2`` after arguments are parsed. Since Python arguments are - stripped from :c:member:`PyConfig.argv`, parsing arguments twice would - parse the application options as Python options. + .. c:function:: PyStatus PyStatus_NoMemory(void) - :ref:`Preinitialize Python ` if needed. + Memory allocation failure (out of memory). - .. versionchanged:: 3.10 - The :c:member:`PyConfig.argv` arguments are now only parsed once, - :c:member:`PyConfig.parse_argv` is set to ``2`` after arguments are - parsed, and arguments are only parsed if - :c:member:`PyConfig.parse_argv` equals ``1``. + .. c:function:: PyStatus PyStatus_Exit(int exitcode) - .. versionchanged:: 3.11 - :c:func:`PyConfig_Read` no longer calculates all paths, and so fields - listed under :ref:`Python Path Configuration ` may - no longer be updated until :c:func:`Py_InitializeFromConfig` is - called. + Exit Python with the specified exit code. - .. c:function:: void PyConfig_Clear(PyConfig *config) + Functions to handle a status: - Release configuration memory. + .. c:function:: int PyStatus_Exception(PyStatus status) - Most ``PyConfig`` methods :ref:`preinitialize Python ` if needed. - In that case, the Python preinitialization configuration - (:c:type:`PyPreConfig`) in based on the :c:type:`PyConfig`. If configuration - fields which are in common with :c:type:`PyPreConfig` are tuned, they must - be set before calling a :c:type:`PyConfig` method: + Is the status an error or an exit? If true, the exception must be + handled; by calling :c:func:`Py_ExitStatusException` for example. - * :c:member:`PyConfig.dev_mode` - * :c:member:`PyConfig.isolated` - * :c:member:`PyConfig.parse_argv` - * :c:member:`PyConfig.use_environment` + .. c:function:: int PyStatus_IsError(PyStatus status) - Moreover, if :c:func:`PyConfig_SetArgv` or :c:func:`PyConfig_SetBytesArgv` - is used, this method must be called before other methods, since the - preinitialization configuration depends on command line arguments (if - :c:member:`~PyConfig.parse_argv` is non-zero). + Is the result an error? - The caller of these methods is responsible to handle exceptions (error or - exit) using ``PyStatus_Exception()`` and ``Py_ExitStatusException()``. 
+ .. c:function:: int PyStatus_IsExit(PyStatus status) - .. c:namespace:: PyConfig + Is the result an exit? - Structure fields: + .. c:function:: void Py_ExitStatusException(PyStatus status) - .. c:member:: PyWideStringList argv + Call ``exit(exitcode)`` if *status* is an exit. Print the error + message and exit with a non-zero exit code if *status* is an error. Must + only be called if ``PyStatus_Exception(status)`` is non-zero. - .. index:: - single: main() - single: argv (in module sys) +.. note:: + Internally, Python uses macros which set ``PyStatus.func``, + whereas functions to create a status set ``func`` to ``NULL``. - Set :data:`sys.argv` command line arguments based on - :c:member:`~PyConfig.argv`. These parameters are similar to those passed - to the program's :c:func:`main` function with the difference that the - first entry should refer to the script file to be executed rather than - the executable hosting the Python interpreter. If there isn't a script - that will be run, the first entry in :c:member:`~PyConfig.argv` can be an - empty string. +Example:: - Set :c:member:`~PyConfig.parse_argv` to ``1`` to parse - :c:member:`~PyConfig.argv` the same way the regular Python parses Python - command line arguments and then to strip Python arguments from - :c:member:`~PyConfig.argv`. + PyStatus alloc(void **ptr, size_t size) + { + *ptr = PyMem_RawMalloc(size); + if (*ptr == NULL) { + return PyStatus_NoMemory(); + } + return PyStatus_Ok(); + } - If :c:member:`~PyConfig.argv` is empty, an empty string is added to - ensure that :data:`sys.argv` always exists and is never empty. + int main(int argc, char **argv) + { + void *ptr; + PyStatus status = alloc(&ptr, 16); + if (PyStatus_Exception(status)) { + Py_ExitStatusException(status); + } + PyMem_Free(ptr); + return 0; + } - Default: ``NULL``. - See also the :c:member:`~PyConfig.orig_argv` member. +PyPreConfig +----------- - .. c:member:: int safe_path +.. c:type:: PyPreConfig - If equals to zero, ``Py_RunMain()`` prepends a potentially unsafe path to - :data:`sys.path` at startup: + Structure used to preinitialize Python. - * If :c:member:`argv[0] ` is equal to ``L"-m"`` - (``python -m module``), prepend the current working directory. - * If running a script (``python script.py``), prepend the script's - directory. If it's a symbolic link, resolve symbolic links. - * Otherwise (``python -c code`` and ``python``), prepend an empty string, - which means the current working directory. + .. c:namespace:: NULL - Set to ``1`` by the :option:`-P` command line option and the - :envvar:`PYTHONSAFEPATH` environment variable. + Function to initialize a preconfiguration: - Default: ``0`` in Python config, ``1`` in isolated config. + .. c:function:: void PyPreConfig_InitPythonConfig(PyPreConfig *preconfig) - .. versionadded:: 3.11 + Initialize the preconfiguration with :ref:`Python Configuration + `. - .. c:member:: wchar_t* base_exec_prefix + .. c:function:: void PyPreConfig_InitIsolatedConfig(PyPreConfig *preconfig) - :data:`sys.base_exec_prefix`. + Initialize the preconfiguration with :ref:`Isolated Configuration + `. - Default: ``NULL``. + .. c:namespace:: PyPreConfig - Part of the :ref:`Python Path Configuration ` output. + Structure fields: - See also :c:member:`PyConfig.exec_prefix`. + .. c:member:: int allocator - .. c:member:: wchar_t* base_executable + Name of the Python memory allocators: - Python base executable: :data:`sys._base_executable`. + * ``PYMEM_ALLOCATOR_NOT_SET`` (``0``): don't change memory allocators + (use defaults). 
+ * ``PYMEM_ALLOCATOR_DEFAULT`` (``1``): :ref:`default memory allocators + `. + * ``PYMEM_ALLOCATOR_DEBUG`` (``2``): :ref:`default memory allocators + ` with :ref:`debug hooks + `. + * ``PYMEM_ALLOCATOR_MALLOC`` (``3``): use ``malloc()`` of the C library. + * ``PYMEM_ALLOCATOR_MALLOC_DEBUG`` (``4``): force usage of + ``malloc()`` with :ref:`debug hooks `. + * ``PYMEM_ALLOCATOR_PYMALLOC`` (``5``): :ref:`Python pymalloc memory + allocator `. + * ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` (``6``): :ref:`Python pymalloc + memory allocator ` with :ref:`debug hooks + `. + * ``PYMEM_ALLOCATOR_MIMALLOC`` (``7``): use ``mimalloc``, a fast + malloc replacement. + * ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` (``8``): use ``mimalloc``, a fast + malloc replacement with :ref:`debug hooks `. - Set by the :envvar:`__PYVENV_LAUNCHER__` environment variable. - Set from :c:member:`PyConfig.executable` if ``NULL``. + ``PYMEM_ALLOCATOR_PYMALLOC`` and ``PYMEM_ALLOCATOR_PYMALLOC_DEBUG`` are + not supported if Python is :option:`configured using --without-pymalloc + <--without-pymalloc>`. - Default: ``NULL``. + ``PYMEM_ALLOCATOR_MIMALLOC`` and ``PYMEM_ALLOCATOR_MIMALLOC_DEBUG`` are + not supported if Python is :option:`configured using --without-mimalloc + <--without-mimalloc>` or if the underlying atomic support isn't + available. - Part of the :ref:`Python Path Configuration ` output. + See :ref:`Memory Management `. - See also :c:member:`PyConfig.executable`. + Default: ``PYMEM_ALLOCATOR_NOT_SET``. - .. c:member:: wchar_t* base_prefix + .. c:member:: int configure_locale - :data:`sys.base_prefix`. + Set the LC_CTYPE locale to the user preferred locale. - Default: ``NULL``. + If equals to ``0``, set :c:member:`~PyPreConfig.coerce_c_locale` and + :c:member:`~PyPreConfig.coerce_c_locale_warn` members to ``0``. - Part of the :ref:`Python Path Configuration ` output. + See the :term:`locale encoding`. - See also :c:member:`PyConfig.prefix`. + Default: ``1`` in Python config, ``0`` in isolated config. - .. c:member:: int buffered_stdio + .. c:member:: int coerce_c_locale - If equals to ``0`` and :c:member:`~PyConfig.configure_c_stdio` is non-zero, - disable buffering on the C streams stdout and stderr. + If equals to ``2``, coerce the C locale. - Set to ``0`` by the :option:`-u` command line option and the - :envvar:`PYTHONUNBUFFERED` environment variable. + If equals to ``1``, read the LC_CTYPE locale to decide if it should be + coerced. - stdin is always opened in buffered mode. + See the :term:`locale encoding`. - Default: ``1``. + Default: ``-1`` in Python config, ``0`` in isolated config. - .. c:member:: int bytes_warning + .. c:member:: int coerce_c_locale_warn - If equals to ``1``, issue a warning when comparing :class:`bytes` or - :class:`bytearray` with :class:`str`, or comparing :class:`bytes` with - :class:`int`. + If non-zero, emit a warning if the C locale is coerced. - If equal or greater to ``2``, raise a :exc:`BytesWarning` exception in these - cases. + Default: ``-1`` in Python config, ``0`` in isolated config. - Incremented by the :option:`-b` command line option. + .. c:member:: int dev_mode - Default: ``0``. + :ref:`Python Development Mode `: see + :c:member:`PyConfig.dev_mode`. - .. c:member:: int warn_default_encoding + Default: ``-1`` in Python mode, ``0`` in isolated mode. - If non-zero, emit a :exc:`EncodingWarning` warning when :class:`io.TextIOWrapper` - uses its default encoding. See :ref:`io-encoding-warning` for details. + .. c:member:: int isolated - Default: ``0``.
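As an illustrative sketch (mirroring the preinitialization example shown later in this section), an allocator can be selected on the preconfiguration before Python is preinitialized; the debug-hooks allocator is used here purely as an example::

    PyStatus status;
    PyPreConfig preconfig;
    PyPreConfig_InitPythonConfig(&preconfig);

    /* Use the default memory allocators with debug hooks. */
    preconfig.allocator = PYMEM_ALLOCATOR_DEBUG;

    status = Py_PreInitialize(&preconfig);
    if (PyStatus_Exception(status)) {
        Py_ExitStatusException(status);
    }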
+ Isolated mode: see :c:member:`PyConfig.isolated`. - .. versionadded:: 3.10 + Default: ``0`` in Python mode, ``1`` in isolated mode. - .. c:member:: int code_debug_ranges + .. c:member:: int legacy_windows_fs_encoding - If equals to ``0``, disables the inclusion of the end line and column - mappings in code objects. Also disables traceback printing carets to - specific error locations. + If non-zero: - Set to ``0`` by the :envvar:`PYTHONNODEBUGRANGES` environment variable - and by the :option:`-X no_debug_ranges <-X>` command line option. + * Set :c:member:`PyPreConfig.utf8_mode` to ``0``, + * Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``, + * Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``. - Default: ``1``. + Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment + variable value. - .. versionadded:: 3.11 + Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for + Windows specific code. - .. c:member:: wchar_t* check_hash_pycs_mode + Default: ``0``. - Control the validation behavior of hash-based ``.pyc`` files: - value of the :option:`--check-hash-based-pycs` command line option. + .. c:member:: int parse_argv - Valid values: + If non-zero, :c:func:`Py_PreInitializeFromArgs` and + :c:func:`Py_PreInitializeFromBytesArgs` parse their ``argv`` argument the + same way the regular Python parses command line arguments: see + :ref:`Command Line Arguments `. - - ``L"always"``: Hash the source file for invalidation regardless of - value of the 'check_source' flag. - - ``L"never"``: Assume that hash-based pycs always are valid. - - ``L"default"``: The 'check_source' flag in hash-based pycs - determines invalidation. + Default: ``1`` in Python config, ``0`` in isolated config. - Default: ``L"default"``. + .. c:member:: int use_environment - See also :pep:`552` "Deterministic pycs". + Use :ref:`environment variables `? See + :c:member:`PyConfig.use_environment`. - .. c:member:: int configure_c_stdio + Default: ``1`` in Python config and ``0`` in isolated config. - If non-zero, configure C standard streams: + .. c:member:: int utf8_mode - * On Windows, set the binary mode (``O_BINARY``) on stdin, stdout and - stderr. - * If :c:member:`~PyConfig.buffered_stdio` equals zero, disable buffering - of stdin, stdout and stderr streams. - * If :c:member:`~PyConfig.interactive` is non-zero, enable stream - buffering on stdin and stdout (only stdout on Windows). + If non-zero, enable the :ref:`Python UTF-8 Mode `. - Default: ``1`` in Python config, ``0`` in isolated config. + Set to ``0`` or ``1`` by the :option:`-X utf8 <-X>` command line option + and the :envvar:`PYTHONUTF8` environment variable. - .. c:member:: int dev_mode + Also set to ``1`` if the ``LC_CTYPE`` locale is ``C`` or ``POSIX``. - If non-zero, enable the :ref:`Python Development Mode `. + Default: ``-1`` in Python config and ``0`` in isolated config. - Set to ``1`` by the :option:`-X dev <-X>` option and the - :envvar:`PYTHONDEVMODE` environment variable. - Default: ``-1`` in Python mode, ``0`` in isolated mode. +.. _c-preinit: - .. c:member:: int dump_refs +Preinitialize Python with PyPreConfig +------------------------------------- - Dump Python references? +The preinitialization of Python: - If non-zero, dump all objects which are still alive at exit. 
+* Set the Python memory allocators (:c:member:`PyPreConfig.allocator`) +* Configure the LC_CTYPE locale (:term:`locale encoding`) +* Set the :ref:`Python UTF-8 Mode ` + (:c:member:`PyPreConfig.utf8_mode`) - Set to ``1`` by the :envvar:`PYTHONDUMPREFS` environment variable. +The current preconfiguration (``PyPreConfig`` type) is stored in +``_PyRuntime.preconfig``. - Needs a special build of Python with the ``Py_TRACE_REFS`` macro defined: - see the :option:`configure --with-trace-refs option <--with-trace-refs>`. +Functions to preinitialize Python: - Default: ``0``. +.. c:function:: PyStatus Py_PreInitialize(const PyPreConfig *preconfig) - .. c:member:: wchar_t* exec_prefix + Preinitialize Python from *preconfig* preconfiguration. - The site-specific directory prefix where the platform-dependent Python - files are installed: :data:`sys.exec_prefix`. + *preconfig* must not be ``NULL``. - Default: ``NULL``. +.. c:function:: PyStatus Py_PreInitializeFromBytesArgs(const PyPreConfig *preconfig, int argc, char * const *argv) - Part of the :ref:`Python Path Configuration ` output. + Preinitialize Python from *preconfig* preconfiguration. - See also :c:member:`PyConfig.base_exec_prefix`. + Parse *argv* command line arguments (bytes strings) if + :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. - .. c:member:: wchar_t* executable + *preconfig* must not be ``NULL``. - The absolute path of the executable binary for the Python interpreter: - :data:`sys.executable`. +.. c:function:: PyStatus Py_PreInitializeFromArgs(const PyPreConfig *preconfig, int argc, wchar_t * const * argv) - Default: ``NULL``. + Preinitialize Python from *preconfig* preconfiguration. - Part of the :ref:`Python Path Configuration ` output. + Parse *argv* command line arguments (wide strings) if + :c:member:`~PyPreConfig.parse_argv` of *preconfig* is non-zero. - See also :c:member:`PyConfig.base_executable`. + *preconfig* must not be ``NULL``. - .. c:member:: int faulthandler +The caller is responsible to handle exceptions (error or exit) using +:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. - Enable faulthandler? +For :ref:`Python Configuration ` +(:c:func:`PyPreConfig_InitPythonConfig`), if Python is initialized with +command line arguments, the command line arguments must also be passed to +preinitialize Python, since they have an effect on the pre-configuration +like encodings. For example, the :option:`-X utf8 <-X>` command line option +enables the :ref:`Python UTF-8 Mode `. - If non-zero, call :func:`faulthandler.enable` at startup. +``PyMem_SetAllocator()`` can be called after :c:func:`Py_PreInitialize` and +before :c:func:`Py_InitializeFromConfig` to install a custom memory allocator. +It can be called before :c:func:`Py_PreInitialize` if +:c:member:`PyPreConfig.allocator` is set to ``PYMEM_ALLOCATOR_NOT_SET``. - Set to ``1`` by :option:`-X faulthandler <-X>` and the - :envvar:`PYTHONFAULTHANDLER` environment variable. +Python memory allocation functions like :c:func:`PyMem_RawMalloc` must not be +used before the Python preinitialization, whereas calling directly ``malloc()`` +and ``free()`` is always safe. :c:func:`Py_DecodeLocale` must not be called +before the Python preinitialization. - Default: ``-1`` in Python mode, ``0`` in isolated mode. +Example using the preinitialization to enable +the :ref:`Python UTF-8 Mode `:: - .. 
c:member:: wchar_t* filesystem_encoding + PyStatus status; + PyPreConfig preconfig; + PyPreConfig_InitPythonConfig(&preconfig); - :term:`Filesystem encoding `: - :func:`sys.getfilesystemencoding`. + preconfig.utf8_mode = 1; - On macOS, Android and VxWorks: use ``"utf-8"`` by default. + status = Py_PreInitialize(&preconfig); + if (PyStatus_Exception(status)) { + Py_ExitStatusException(status); + } - On Windows: use ``"utf-8"`` by default, or ``"mbcs"`` if - :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of - :c:type:`PyPreConfig` is non-zero. + /* at this point, Python speaks UTF-8 */ - Default encoding on other platforms: + Py_Initialize(); + /* ... use Python API here ... */ + Py_Finalize(); - * ``"utf-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. - * ``"ascii"`` if Python detects that ``nl_langinfo(CODESET)`` announces - the ASCII encoding, whereas the ``mbstowcs()`` function - decodes from a different encoding (usually Latin1). - * ``"utf-8"`` if ``nl_langinfo(CODESET)`` returns an empty string. - * Otherwise, use the :term:`locale encoding`: - ``nl_langinfo(CODESET)`` result. - At Python startup, the encoding name is normalized to the Python codec - name. For example, ``"ANSI_X3.4-1968"`` is replaced with ``"ascii"``. +PyConfig +-------- - See also the :c:member:`~PyConfig.filesystem_errors` member. +.. c:type:: PyConfig - .. c:member:: wchar_t* filesystem_errors + Structure containing most parameters to configure Python. - :term:`Filesystem error handler `: - :func:`sys.getfilesystemencodeerrors`. + When done, the :c:func:`PyConfig_Clear` function must be used to release the + configuration memory. - On Windows: use ``"surrogatepass"`` by default, or ``"replace"`` if - :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of - :c:type:`PyPreConfig` is non-zero. + .. c:namespace:: NULL - On other platforms: use ``"surrogateescape"`` by default. + Structure methods: - Supported error handlers: + .. c:function:: void PyConfig_InitPythonConfig(PyConfig *config) - * ``"strict"`` - * ``"surrogateescape"`` - * ``"surrogatepass"`` (only supported with the UTF-8 encoding) + Initialize configuration with the :ref:`Python Configuration + `. - See also the :c:member:`~PyConfig.filesystem_encoding` member. + .. c:function:: void PyConfig_InitIsolatedConfig(PyConfig *config) - .. c:member:: unsigned long hash_seed - .. c:member:: int use_hash_seed + Initialize configuration with the :ref:`Isolated Configuration + `. - Randomized hash function seed. + .. c:function:: PyStatus PyConfig_SetString(PyConfig *config, wchar_t * const *config_str, const wchar_t *str) - If :c:member:`~PyConfig.use_hash_seed` is zero, a seed is chosen randomly - at Python startup, and :c:member:`~PyConfig.hash_seed` is ignored. + Copy the wide character string *str* into ``*config_str``. - Set by the :envvar:`PYTHONHASHSEED` environment variable. + :ref:`Preinitialize Python ` if needed. - Default *use_hash_seed* value: ``-1`` in Python mode, ``0`` in isolated - mode. + .. c:function:: PyStatus PyConfig_SetBytesString(PyConfig *config, wchar_t * const *config_str, const char *str) - .. c:member:: wchar_t* home + Decode *str* using :c:func:`Py_DecodeLocale` and set the result into + ``*config_str``. - Set the default Python "home" directory, that is, the location of the - standard Python libraries (see :envvar:`PYTHONHOME`). + :ref:`Preinitialize Python ` if needed. - Set by the :envvar:`PYTHONHOME` environment variable. + .. 
c:function:: PyStatus PyConfig_SetArgv(PyConfig *config, int argc, wchar_t * const *argv) - Default: ``NULL``. + Set command line arguments (:c:member:`~PyConfig.argv` member of + *config*) from the *argv* list of wide character strings. - Part of the :ref:`Python Path Configuration ` input. + :ref:`Preinitialize Python ` if needed. - .. c:member:: int import_time + .. c:function:: PyStatus PyConfig_SetBytesArgv(PyConfig *config, int argc, char * const *argv) - If non-zero, profile import time. + Set command line arguments (:c:member:`~PyConfig.argv` member of + *config*) from the *argv* list of bytes strings. Decode bytes using + :c:func:`Py_DecodeLocale`. - Set the ``1`` by the :option:`-X importtime <-X>` option and the - :envvar:`PYTHONPROFILEIMPORTTIME` environment variable. + :ref:`Preinitialize Python ` if needed. - Default: ``0``. + .. c:function:: PyStatus PyConfig_SetWideStringList(PyConfig *config, PyWideStringList *list, Py_ssize_t length, wchar_t **items) - .. c:member:: int inspect + Set the list of wide strings *list* to *length* and *items*. - Enter interactive mode after executing a script or a command. + :ref:`Preinitialize Python ` if needed. - If greater than ``0``, enable inspect: when a script is passed as first - argument or the -c option is used, enter interactive mode after executing - the script or the command, even when :data:`sys.stdin` does not appear to - be a terminal. + .. c:function:: PyStatus PyConfig_Read(PyConfig *config) - Incremented by the :option:`-i` command line option. Set to ``1`` if the - :envvar:`PYTHONINSPECT` environment variable is non-empty. + Read all Python configuration. - Default: ``0``. + Fields which are already initialized are left unchanged. - .. c:member:: int install_signal_handlers + Fields for :ref:`path configuration ` are no longer + calculated or modified when calling this function, as of Python 3.11. - Install Python signal handlers? + The :c:func:`PyConfig_Read` function only parses + :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` + is set to ``2`` after arguments are parsed. Since Python arguments are + stripped from :c:member:`PyConfig.argv`, parsing arguments twice would + parse the application options as Python options. - Default: ``1`` in Python mode, ``0`` in isolated mode. + :ref:`Preinitialize Python ` if needed. - .. c:member:: int interactive + .. versionchanged:: 3.10 + The :c:member:`PyConfig.argv` arguments are now only parsed once, + :c:member:`PyConfig.parse_argv` is set to ``2`` after arguments are + parsed, and arguments are only parsed if + :c:member:`PyConfig.parse_argv` equals ``1``. - If greater than ``0``, enable the interactive mode (REPL). + .. versionchanged:: 3.11 + :c:func:`PyConfig_Read` no longer calculates all paths, and so fields + listed under :ref:`Python Path Configuration ` may + no longer be updated until :c:func:`Py_InitializeFromConfig` is + called. - Incremented by the :option:`-i` command line option. + .. c:function:: void PyConfig_Clear(PyConfig *config) - Default: ``0``. + Release configuration memory. - .. c:member:: int int_max_str_digits + Most ``PyConfig`` methods :ref:`preinitialize Python ` if needed. + In that case, the Python preinitialization configuration + (:c:type:`PyPreConfig`) is based on the :c:type:`PyConfig`. If configuration + fields which are in common with :c:type:`PyPreConfig` are tuned, they must + be set before calling a :c:type:`PyConfig` method: - Configures the :ref:`integer string conversion length limitation - `.
An initial value of ``-1`` means the value will - be taken from the command line or environment or otherwise default to - 4300 (:data:`sys.int_info.default_max_str_digits`). A value of ``0`` - disables the limitation. Values greater than zero but less than 640 - (:data:`sys.int_info.str_digits_check_threshold`) are unsupported and - will produce an error. + * :c:member:`PyConfig.dev_mode` + * :c:member:`PyConfig.isolated` + * :c:member:`PyConfig.parse_argv` + * :c:member:`PyConfig.use_environment` - Configured by the :option:`-X int_max_str_digits <-X>` command line - flag or the :envvar:`PYTHONINTMAXSTRDIGITS` environment variable. + Moreover, if :c:func:`PyConfig_SetArgv` or :c:func:`PyConfig_SetBytesArgv` + is used, this method must be called before other methods, since the + preinitialization configuration depends on command line arguments (if + :c:member:`~PyConfig.parse_argv` is non-zero). - Default: ``-1`` in Python mode. 4300 - (:data:`sys.int_info.default_max_str_digits`) in isolated mode. + The caller of these methods is responsible to handle exceptions (error or + exit) using ``PyStatus_Exception()`` and ``Py_ExitStatusException()``. - .. versionadded:: 3.12 + .. c:namespace:: PyConfig - .. c:member:: int cpu_count + Structure fields: - If the value of :c:member:`~PyConfig.cpu_count` is not ``-1`` then it will - override the return values of :func:`os.cpu_count`, - :func:`os.process_cpu_count`, and :func:`multiprocessing.cpu_count`. + .. c:member:: PyWideStringList argv - Configured by the :samp:`-X cpu_count={n|default}` command line - flag or the :envvar:`PYTHON_CPU_COUNT` environment variable. + .. index:: + single: main() + single: argv (in module sys) - Default: ``-1``. + Set :data:`sys.argv` command line arguments based on + :c:member:`~PyConfig.argv`. These parameters are similar to those passed + to the program's :c:func:`main` function with the difference that the + first entry should refer to the script file to be executed rather than + the executable hosting the Python interpreter. If there isn't a script + that will be run, the first entry in :c:member:`~PyConfig.argv` can be an + empty string. - .. versionadded:: 3.13 + Set :c:member:`~PyConfig.parse_argv` to ``1`` to parse + :c:member:`~PyConfig.argv` the same way the regular Python parses Python + command line arguments and then to strip Python arguments from + :c:member:`~PyConfig.argv`. - .. c:member:: int isolated + If :c:member:`~PyConfig.argv` is empty, an empty string is added to + ensure that :data:`sys.argv` always exists and is never empty. - If greater than ``0``, enable isolated mode: + Default: ``NULL``. - * Set :c:member:`~PyConfig.safe_path` to ``1``: - don't prepend a potentially unsafe path to :data:`sys.path` at Python - startup, such as the current directory, the script's directory or an - empty string. - * Set :c:member:`~PyConfig.use_environment` to ``0``: ignore ``PYTHON`` - environment variables. - * Set :c:member:`~PyConfig.user_site_directory` to ``0``: don't add the user - site directory to :data:`sys.path`. - * Python REPL doesn't import :mod:`readline` nor enable default readline - configuration on interactive prompts. + See also the :c:member:`~PyConfig.orig_argv` member. - Set to ``1`` by the :option:`-I` command line option. + .. c:member:: int safe_path - Default: ``0`` in Python mode, ``1`` in isolated mode. 
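For example, a minimal sketch that sets a field shared with :c:type:`PyPreConfig` before the first method call and then passes Python-style arguments; the program name, option and script name are illustrative::

    wchar_t *argv[] = {L"my_program", L"-X", L"utf8", L"script.py"};

    PyConfig config;
    PyConfig_InitPythonConfig(&config);

    /* Shared with PyPreConfig: set it before calling any PyConfig method. */
    config.parse_argv = 1;

    /* First method call: preinitializes Python (using the field above and
       the arguments) and stores the arguments in config.argv. */
    PyStatus status = PyConfig_SetArgv(&config, 4, argv);
    if (PyStatus_Exception(status)) {
        PyConfig_Clear(&config);
        Py_ExitStatusException(status);
    }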
+ If equals to zero, ``Py_RunMain()`` prepends a potentially unsafe path to + :data:`sys.path` at startup: - See also the :ref:`Isolated Configuration ` and - :c:member:`PyPreConfig.isolated`. + * If :c:member:`argv[0] ` is equal to ``L"-m"`` + (``python -m module``), prepend the current working directory. + * If running a script (``python script.py``), prepend the script's + directory. If it's a symbolic link, resolve symbolic links. + * Otherwise (``python -c code`` and ``python``), prepend an empty string, + which means the current working directory. - .. c:member:: int legacy_windows_stdio + Set to ``1`` by the :option:`-P` command line option and the + :envvar:`PYTHONSAFEPATH` environment variable. - If non-zero, use :class:`io.FileIO` instead of - :class:`!io._WindowsConsoleIO` for :data:`sys.stdin`, :data:`sys.stdout` - and :data:`sys.stderr`. + Default: ``0`` in Python config, ``1`` in isolated config. - Set to ``1`` if the :envvar:`PYTHONLEGACYWINDOWSSTDIO` environment - variable is set to a non-empty string. + .. versionadded:: 3.11 - Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for - Windows specific code. + .. c:member:: wchar_t* base_exec_prefix - Default: ``0``. + :data:`sys.base_exec_prefix`. - See also the :pep:`528` (Change Windows console encoding to UTF-8). + Default: ``NULL``. - .. c:member:: int malloc_stats + Part of the :ref:`Python Path Configuration ` output. - If non-zero, dump statistics on :ref:`Python pymalloc memory allocator - ` at exit. + See also :c:member:`PyConfig.exec_prefix`. - Set to ``1`` by the :envvar:`PYTHONMALLOCSTATS` environment variable. + .. c:member:: wchar_t* base_executable - The option is ignored if Python is :option:`configured using - the --without-pymalloc option <--without-pymalloc>`. + Python base executable: :data:`sys._base_executable`. - Default: ``0``. + Set by the :envvar:`__PYVENV_LAUNCHER__` environment variable. - .. c:member:: wchar_t* platlibdir + Set from :c:member:`PyConfig.executable` if ``NULL``. - Platform library directory name: :data:`sys.platlibdir`. + Default: ``NULL``. - Set by the :envvar:`PYTHONPLATLIBDIR` environment variable. + Part of the :ref:`Python Path Configuration ` output. - Default: value of the ``PLATLIBDIR`` macro which is set by the - :option:`configure --with-platlibdir option <--with-platlibdir>` - (default: ``"lib"``, or ``"DLLs"`` on Windows). + See also :c:member:`PyConfig.executable`. - Part of the :ref:`Python Path Configuration ` input. + .. c:member:: wchar_t* base_prefix - .. versionadded:: 3.9 + :data:`sys.base_prefix`. - .. versionchanged:: 3.11 - This macro is now used on Windows to locate the standard - library extension modules, typically under ``DLLs``. However, - for compatibility, note that this value is ignored for any - non-standard layouts, including in-tree builds and virtual - environments. + Default: ``NULL``. - .. c:member:: wchar_t* pythonpath_env + Part of the :ref:`Python Path Configuration ` output. - Module search paths (:data:`sys.path`) as a string separated by ``DELIM`` - (:data:`os.pathsep`). + See also :c:member:`PyConfig.prefix`. - Set by the :envvar:`PYTHONPATH` environment variable. + .. c:member:: int buffered_stdio - Default: ``NULL``. + If equals to ``0`` and :c:member:`~PyConfig.configure_c_stdio` is non-zero, + disable buffering on the C streams stdout and stderr. - Part of the :ref:`Python Path Configuration ` input. + Set to ``0`` by the :option:`-u` command line option and the + :envvar:`PYTHONUNBUFFERED` environment variable. - .. 
c:member:: PyWideStringList module_search_paths - .. c:member:: int module_search_paths_set + stdin is always opened in buffered mode. - Module search paths: :data:`sys.path`. + Default: ``1``. - If :c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, - :c:func:`Py_InitializeFromConfig` will replace - :c:member:`~PyConfig.module_search_paths` and sets - :c:member:`~PyConfig.module_search_paths_set` to ``1``. + .. c:member:: int bytes_warning - Default: empty list (``module_search_paths``) and ``0`` - (``module_search_paths_set``). + If equals to ``1``, issue a warning when comparing :class:`bytes` or + :class:`bytearray` with :class:`str`, or comparing :class:`bytes` with + :class:`int`. - Part of the :ref:`Python Path Configuration ` output. + If equal or greater to ``2``, raise a :exc:`BytesWarning` exception in these + cases. - .. c:member:: int optimization_level + Incremented by the :option:`-b` command line option. - Compilation optimization level: + Default: ``0``. - * ``0``: Peephole optimizer, set ``__debug__`` to ``True``. - * ``1``: Level 0, remove assertions, set ``__debug__`` to ``False``. - * ``2``: Level 1, strip docstrings. + .. c:member:: int warn_default_encoding - Incremented by the :option:`-O` command line option. Set to the - :envvar:`PYTHONOPTIMIZE` environment variable value. + If non-zero, emit a :exc:`EncodingWarning` warning when :class:`io.TextIOWrapper` + uses its default encoding. See :ref:`io-encoding-warning` for details. Default: ``0``. - .. c:member:: PyWideStringList orig_argv + .. versionadded:: 3.10 - The list of the original command line arguments passed to the Python - executable: :data:`sys.orig_argv`. + .. c:member:: int code_debug_ranges - If :c:member:`~PyConfig.orig_argv` list is empty and - :c:member:`~PyConfig.argv` is not a list only containing an empty - string, :c:func:`PyConfig_Read` copies :c:member:`~PyConfig.argv` into - :c:member:`~PyConfig.orig_argv` before modifying - :c:member:`~PyConfig.argv` (if :c:member:`~PyConfig.parse_argv` is - non-zero). + If equals to ``0``, disables the inclusion of the end line and column + mappings in code objects. Also disables traceback printing carets to + specific error locations. - See also the :c:member:`~PyConfig.argv` member and the - :c:func:`Py_GetArgcArgv` function. + Set to ``0`` by the :envvar:`PYTHONNODEBUGRANGES` environment variable + and by the :option:`-X no_debug_ranges <-X>` command line option. - Default: empty list. + Default: ``1``. - .. versionadded:: 3.10 + .. versionadded:: 3.11 - .. c:member:: int parse_argv + .. c:member:: wchar_t* check_hash_pycs_mode - Parse command line arguments? + Control the validation behavior of hash-based ``.pyc`` files: + value of the :option:`--check-hash-based-pycs` command line option. - If equals to ``1``, parse :c:member:`~PyConfig.argv` the same way the regular - Python parses :ref:`command line arguments `, and strip - Python arguments from :c:member:`~PyConfig.argv`. + Valid values: - The :c:func:`PyConfig_Read` function only parses - :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` - is set to ``2`` after arguments are parsed. Since Python arguments are - stripped from :c:member:`PyConfig.argv`, parsing arguments twice would - parse the application options as Python options. + - ``L"always"``: Hash the source file for invalidation regardless of + value of the 'check_source' flag. + - ``L"never"``: Assume that hash-based pycs always are valid. 
+ - ``L"default"``: The 'check_source' flag in hash-based pycs + determines invalidation. - Default: ``1`` in Python mode, ``0`` in isolated mode. + Default: ``L"default"``. - .. versionchanged:: 3.10 - The :c:member:`PyConfig.argv` arguments are now only parsed if - :c:member:`PyConfig.parse_argv` equals to ``1``. + See also :pep:`552` "Deterministic pycs". - .. c:member:: int parser_debug + .. c:member:: int configure_c_stdio - Parser debug mode. If greater than ``0``, turn on parser debugging output (for expert only, depending - on compilation options). + If non-zero, configure C standard streams: - Incremented by the :option:`-d` command line option. Set to the - :envvar:`PYTHONDEBUG` environment variable value. + * On Windows, set the binary mode (``O_BINARY``) on stdin, stdout and + stderr. + * If :c:member:`~PyConfig.buffered_stdio` equals zero, disable buffering + of stdin, stdout and stderr streams. + * If :c:member:`~PyConfig.interactive` is non-zero, enable stream + buffering on stdin and stdout (only stdout on Windows). - Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro - must be defined). + Default: ``1`` in Python config, ``0`` in isolated config. - Default: ``0``. - - .. c:member:: int pathconfig_warnings + .. c:member:: int dev_mode - If non-zero, calculation of path configuration is allowed to log - warnings into ``stderr``. If equals to ``0``, suppress these warnings. + If non-zero, enable the :ref:`Python Development Mode `. - Default: ``1`` in Python mode, ``0`` in isolated mode. + Set to ``1`` by the :option:`-X dev <-X>` option and the + :envvar:`PYTHONDEVMODE` environment variable. - Part of the :ref:`Python Path Configuration ` input. + Default: ``-1`` in Python mode, ``0`` in isolated mode. - .. versionchanged:: 3.11 - Now also applies on Windows. + .. c:member:: int dump_refs - .. c:member:: wchar_t* prefix + Dump Python references? - The site-specific directory prefix where the platform independent Python - files are installed: :data:`sys.prefix`. + If non-zero, dump all objects which are still alive at exit. - Default: ``NULL``. + Set to ``1`` by the :envvar:`PYTHONDUMPREFS` environment variable. - Part of the :ref:`Python Path Configuration ` output. + Needs a special build of Python with the ``Py_TRACE_REFS`` macro defined: + see the :option:`configure --with-trace-refs option <--with-trace-refs>`. - See also :c:member:`PyConfig.base_prefix`. + Default: ``0``. - .. c:member:: wchar_t* program_name + .. c:member:: wchar_t* dump_refs_file - Program name used to initialize :c:member:`~PyConfig.executable` and in - early error messages during Python initialization. + Filename where to dump Python references. - * On macOS, use :envvar:`PYTHONEXECUTABLE` environment variable if set. - * If the ``WITH_NEXT_FRAMEWORK`` macro is defined, use - :envvar:`__PYVENV_LAUNCHER__` environment variable if set. - * Use ``argv[0]`` of :c:member:`~PyConfig.argv` if available and - non-empty. - * Otherwise, use ``L"python"`` on Windows, or ``L"python3"`` on other - platforms. + Set by the :envvar:`PYTHONDUMPREFSFILE` environment variable. Default: ``NULL``. - Part of the :ref:`Python Path Configuration ` input. - - .. c:member:: wchar_t* pycache_prefix - - Directory where cached ``.pyc`` files are written: - :data:`sys.pycache_prefix`. + .. versionadded:: 3.11 - Set by the :option:`-X pycache_prefix=PATH <-X>` command line option and - the :envvar:`PYTHONPYCACHEPREFIX` environment variable. - The command-line option takes precedence. + .. 
c:member:: wchar_t* exec_prefix - If ``NULL``, :data:`sys.pycache_prefix` is set to ``None``. + The site-specific directory prefix where the platform-dependent Python + files are installed: :data:`sys.exec_prefix`. Default: ``NULL``. - .. c:member:: int quiet - - Quiet mode. If greater than ``0``, don't display the copyright and version at - Python startup in interactive mode. - - Incremented by the :option:`-q` command line option. - - Default: ``0``. + Part of the :ref:`Python Path Configuration ` output. - .. c:member:: wchar_t* run_command + See also :c:member:`PyConfig.base_exec_prefix`. - Value of the :option:`-c` command line option. + .. c:member:: wchar_t* executable - Used by :c:func:`Py_RunMain`. + The absolute path of the executable binary for the Python interpreter: + :data:`sys.executable`. Default: ``NULL``. - .. c:member:: wchar_t* run_filename - - Filename passed on the command line: trailing command line argument - without :option:`-c` or :option:`-m`. It is used by the - :c:func:`Py_RunMain` function. + Part of the :ref:`Python Path Configuration ` output. - For example, it is set to ``script.py`` by the ``python3 script.py arg`` - command line. + See also :c:member:`PyConfig.base_executable`. - See also the :c:member:`PyConfig.skip_source_first_line` option. + .. c:member:: int faulthandler - Default: ``NULL``. + Enable faulthandler? - .. c:member:: wchar_t* run_module + If non-zero, call :func:`faulthandler.enable` at startup. - Value of the :option:`-m` command line option. + Set to ``1`` by :option:`-X faulthandler <-X>` and the + :envvar:`PYTHONFAULTHANDLER` environment variable. - Used by :c:func:`Py_RunMain`. + Default: ``-1`` in Python mode, ``0`` in isolated mode. - Default: ``NULL``. + .. c:member:: wchar_t* filesystem_encoding - .. c:member:: wchar_t* run_presite + :term:`Filesystem encoding `: + :func:`sys.getfilesystemencoding`. - ``package.module`` path to module that should be imported before - ``site.py`` is run. + On macOS, Android and VxWorks: use ``"utf-8"`` by default. - Set by the :option:`-X presite=package.module <-X>` command-line - option and the :envvar:`PYTHON_PRESITE` environment variable. - The command-line option takes precedence. + On Windows: use ``"utf-8"`` by default, or ``"mbcs"`` if + :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of + :c:type:`PyPreConfig` is non-zero. - Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro - must be defined). + Default encoding on other platforms: - Default: ``NULL``. + * ``"utf-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. + * ``"ascii"`` if Python detects that ``nl_langinfo(CODESET)`` announces + the ASCII encoding, whereas the ``mbstowcs()`` function + decodes from a different encoding (usually Latin1). + * ``"utf-8"`` if ``nl_langinfo(CODESET)`` returns an empty string. + * Otherwise, use the :term:`locale encoding`: + ``nl_langinfo(CODESET)`` result. - .. c:member:: int show_ref_count + At Python startup, the encoding name is normalized to the Python codec + name. For example, ``"ANSI_X3.4-1968"`` is replaced with ``"ascii"``. - Show total reference count at exit (excluding :term:`immortal` objects)? + See also the :c:member:`~PyConfig.filesystem_errors` member. - Set to ``1`` by :option:`-X showrefcount <-X>` command line option. + .. c:member:: wchar_t* filesystem_errors - Needs a :ref:`debug build of Python ` (the ``Py_REF_DEBUG`` - macro must be defined). + :term:`Filesystem error handler `: + :func:`sys.getfilesystemencodeerrors`. - Default: ``0``. 
+ On Windows: use ``"surrogatepass"`` by default, or ``"replace"`` if + :c:member:`~PyPreConfig.legacy_windows_fs_encoding` of + :c:type:`PyPreConfig` is non-zero. - .. c:member:: int site_import + On other platforms: use ``"surrogateescape"`` by default. - Import the :mod:`site` module at startup? + Supported error handlers: - If equal to zero, disable the import of the module site and the - site-dependent manipulations of :data:`sys.path` that it entails. + * ``"strict"`` + * ``"surrogateescape"`` + * ``"surrogatepass"`` (only supported with the UTF-8 encoding) - Also disable these manipulations if the :mod:`site` module is explicitly - imported later (call :func:`site.main` if you want them to be triggered). + See also the :c:member:`~PyConfig.filesystem_encoding` member. - Set to ``0`` by the :option:`-S` command line option. + .. c:member:: int use_frozen_modules - :data:`sys.flags.no_site ` is set to the inverted value of - :c:member:`~PyConfig.site_import`. + If non-zero, use frozen modules. - Default: ``1``. + Set by the :envvar:`PYTHON_FROZEN_MODULES` environment variable. - .. c:member:: int skip_source_first_line + Default: ``1`` in a release build, or ``0`` in a :ref:`debug build + `. - If non-zero, skip the first line of the :c:member:`PyConfig.run_filename` - source. + .. c:member:: unsigned long hash_seed + .. c:member:: int use_hash_seed - It allows the usage of non-Unix forms of ``#!cmd``. This is intended for - a DOS specific hack only. + Randomized hash function seed. - Set to ``1`` by the :option:`-x` command line option. + If :c:member:`~PyConfig.use_hash_seed` is zero, a seed is chosen randomly + at Python startup, and :c:member:`~PyConfig.hash_seed` is ignored. - Default: ``0``. + Set by the :envvar:`PYTHONHASHSEED` environment variable. - .. c:member:: wchar_t* stdio_encoding - .. c:member:: wchar_t* stdio_errors + Default *use_hash_seed* value: ``-1`` in Python mode, ``0`` in isolated + mode. - Encoding and encoding errors of :data:`sys.stdin`, :data:`sys.stdout` and - :data:`sys.stderr` (but :data:`sys.stderr` always uses - ``"backslashreplace"`` error handler). + .. c:member:: wchar_t* home - Use the :envvar:`PYTHONIOENCODING` environment variable if it is - non-empty. + Set the default Python "home" directory, that is, the location of the + standard Python libraries (see :envvar:`PYTHONHOME`). - Default encoding: + Set by the :envvar:`PYTHONHOME` environment variable. - * ``"UTF-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. - * Otherwise, use the :term:`locale encoding`. + Default: ``NULL``. - Default error handler: + Part of the :ref:`Python Path Configuration ` input. - * On Windows: use ``"surrogateescape"``. - * ``"surrogateescape"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero, - or if the LC_CTYPE locale is "C" or "POSIX". - * ``"strict"`` otherwise. + .. c:member:: int import_time - See also :c:member:`PyConfig.legacy_windows_stdio`. + If non-zero, profile import time. - .. c:member:: int tracemalloc + Set to ``1`` by the :option:`-X importtime <-X>` option and the + :envvar:`PYTHONPROFILEIMPORTTIME` environment variable. - Enable tracemalloc? + Default: ``0``. - If non-zero, call :func:`tracemalloc.start` at startup. + .. c:member:: int inspect - Set by :option:`-X tracemalloc=N <-X>` command line option and by the - :envvar:`PYTHONTRACEMALLOC` environment variable. + Enter interactive mode after executing a script or a command. - Default: ``-1`` in Python mode, ``0`` in isolated mode.
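For example, a minimal sketch pointing :c:member:`~PyConfig.home` at an embedded runtime, which (as noted under the Isolated Configuration) also avoids computing the default path configuration; the ``/opt/myapp/python`` path is illustrative::

    PyConfig config;
    PyConfig_InitIsolatedConfig(&config);

    /* Illustrative location of the embedded standard library. */
    PyStatus status = PyConfig_SetString(&config, &config.home,
                                         L"/opt/myapp/python");
    if (PyStatus_Exception(status)) {
        PyConfig_Clear(&config);
        Py_ExitStatusException(status);
    }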
+ If greater than ``0``, enable inspect: when a script is passed as first + argument or the -c option is used, enter interactive mode after executing + the script or the command, even when :data:`sys.stdin` does not appear to + be a terminal. - .. c:member:: int perf_profiling + Incremented by the :option:`-i` command line option. Set to ``1`` if the + :envvar:`PYTHONINSPECT` environment variable is non-empty. - Enable the Linux ``perf`` profiler support? + Default: ``0``. - If equals to ``1``, enable support for the Linux ``perf`` profiler. + .. c:member:: int install_signal_handlers - If equals to ``2``, enable support for the Linux ``perf`` profiler with - DWARF JIT support. + Install Python signal handlers? - Set to ``1`` by :option:`-X perf <-X>` command-line option and the - :envvar:`PYTHONPERFSUPPORT` environment variable. + Default: ``1`` in Python mode, ``0`` in isolated mode. - Set to ``2`` by the :option:`-X perf_jit <-X>` command-line option and - the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable. + .. c:member:: int interactive - Default: ``-1``. + If greater than ``0``, enable the interactive mode (REPL). - .. seealso:: - See :ref:`perf_profiling` for more information. + Incremented by the :option:`-i` command line option. - .. versionadded:: 3.12 + Default: ``0``. - .. c:member:: int use_environment + .. c:member:: int int_max_str_digits - Use :ref:`environment variables `? + Configures the :ref:`integer string conversion length limitation + `. An initial value of ``-1`` means the value will + be taken from the command line or environment or otherwise default to + 4300 (:data:`sys.int_info.default_max_str_digits`). A value of ``0`` + disables the limitation. Values greater than zero but less than 640 + (:data:`sys.int_info.str_digits_check_threshold`) are unsupported and + will produce an error. - If equals to zero, ignore the :ref:`environment variables - `. + Configured by the :option:`-X int_max_str_digits <-X>` command line + flag or the :envvar:`PYTHONINTMAXSTRDIGITS` environment variable. - Set to ``0`` by the :option:`-E` environment variable. + Default: ``-1`` in Python mode. 4300 + (:data:`sys.int_info.default_max_str_digits`) in isolated mode. - Default: ``1`` in Python config and ``0`` in isolated config. + .. versionadded:: 3.12 - .. c:member:: int use_system_logger + .. c:member:: int cpu_count - If non-zero, ``stdout`` and ``stderr`` will be redirected to the system - log. + If the value of :c:member:`~PyConfig.cpu_count` is not ``-1`` then it will + override the return values of :func:`os.cpu_count`, + :func:`os.process_cpu_count`, and :func:`multiprocessing.cpu_count`. - Only available on macOS 10.12 and later, and on iOS. + Configured by the :samp:`-X cpu_count={n|default}` command line + flag or the :envvar:`PYTHON_CPU_COUNT` environment variable. - Default: ``0`` (don't use system log). + Default: ``-1``. - .. versionadded:: 3.13.2 + .. versionadded:: 3.13 - .. c:member:: int user_site_directory + .. c:member:: int isolated - If non-zero, add the user site directory to :data:`sys.path`. + If greater than ``0``, enable isolated mode: - Set to ``0`` by the :option:`-s` and :option:`-I` command line options. + * Set :c:member:`~PyConfig.safe_path` to ``1``: + don't prepend a potentially unsafe path to :data:`sys.path` at Python + startup, such as the current directory, the script's directory or an + empty string. + * Set :c:member:`~PyConfig.use_environment` to ``0``: ignore ``PYTHON`` + environment variables. 
+ * Set :c:member:`~PyConfig.user_site_directory` to ``0``: don't add the user + site directory to :data:`sys.path`. + * Python REPL doesn't import :mod:`readline` nor enable default readline + configuration on interactive prompts. - Set to ``0`` by the :envvar:`PYTHONNOUSERSITE` environment variable. + Set to ``1`` by the :option:`-I` command line option. - Default: ``1`` in Python mode, ``0`` in isolated mode. + Default: ``0`` in Python mode, ``1`` in isolated mode. - .. c:member:: int verbose + See also the :ref:`Isolated Configuration ` and + :c:member:`PyPreConfig.isolated`. - Verbose mode. If greater than ``0``, print a message each time a module is - imported, showing the place (filename or built-in module) from which - it is loaded. + .. c:member:: int legacy_windows_stdio - If greater than or equal to ``2``, print a message for each file that is - checked for when searching for a module. Also provides information on - module cleanup at exit. + If non-zero, use :class:`io.FileIO` instead of + :class:`!io._WindowsConsoleIO` for :data:`sys.stdin`, :data:`sys.stdout` + and :data:`sys.stderr`. - Incremented by the :option:`-v` command line option. + Set to ``1`` if the :envvar:`PYTHONLEGACYWINDOWSSTDIO` environment + variable is set to a non-empty string. - Set by the :envvar:`PYTHONVERBOSE` environment variable value. + Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for + Windows specific code. Default: ``0``. - .. c:member:: PyWideStringList warnoptions + See also the :pep:`528` (Change Windows console encoding to UTF-8). - Options of the :mod:`warnings` module to build warnings filters, lowest - to highest priority: :data:`sys.warnoptions`. + .. c:member:: int malloc_stats - The :mod:`warnings` module adds :data:`sys.warnoptions` in the reverse - order: the last :c:member:`PyConfig.warnoptions` item becomes the first - item of :data:`warnings.filters` which is checked first (highest - priority). + If non-zero, dump statistics on :ref:`Python pymalloc memory allocator + ` at exit. - The :option:`-W` command line options adds its value to - :c:member:`~PyConfig.warnoptions`, it can be used multiple times. + Set to ``1`` by the :envvar:`PYTHONMALLOCSTATS` environment variable. - The :envvar:`PYTHONWARNINGS` environment variable can also be used to add - warning options. Multiple options can be specified, separated by commas - (``,``). + The option is ignored if Python is :option:`configured using + the --without-pymalloc option <--without-pymalloc>`. - Default: empty list. + Default: ``0``. - .. c:member:: int write_bytecode + .. c:member:: wchar_t* platlibdir - If equal to ``0``, Python won't try to write ``.pyc`` files on the import of - source modules. + Platform library directory name: :data:`sys.platlibdir`. - Set to ``0`` by the :option:`-B` command line option and the - :envvar:`PYTHONDONTWRITEBYTECODE` environment variable. + Set by the :envvar:`PYTHONPLATLIBDIR` environment variable. - :data:`sys.dont_write_bytecode` is initialized to the inverted value of - :c:member:`~PyConfig.write_bytecode`. + Default: value of the ``PLATLIBDIR`` macro which is set by the + :option:`configure --with-platlibdir option <--with-platlibdir>` + (default: ``"lib"``, or ``"DLLs"`` on Windows). - Default: ``1``. + Part of the :ref:`Python Path Configuration ` input. - .. c:member:: PyWideStringList xoptions + .. versionadded:: 3.9 - Values of the :option:`-X` command line options: :data:`sys._xoptions`. + .. 
versionchanged:: 3.11 + This macro is now used on Windows to locate the standard + library extension modules, typically under ``DLLs``. However, + for compatibility, note that this value is ignored for any + non-standard layouts, including in-tree builds and virtual + environments. - Default: empty list. + .. c:member:: wchar_t* pythonpath_env -If :c:member:`~PyConfig.parse_argv` is non-zero, :c:member:`~PyConfig.argv` -arguments are parsed the same way the regular Python parses :ref:`command line -arguments `, and Python arguments are stripped from -:c:member:`~PyConfig.argv`. + Module search paths (:data:`sys.path`) as a string separated by ``DELIM`` + (:data:`os.pathsep`). -The :c:member:`~PyConfig.xoptions` options are parsed to set other options: see -the :option:`-X` command line option. + Set by the :envvar:`PYTHONPATH` environment variable. -.. versionchanged:: 3.9 + Default: ``NULL``. - The ``show_alloc_count`` field has been removed. + Part of the :ref:`Python Path Configuration ` input. + .. c:member:: PyWideStringList module_search_paths + .. c:member:: int module_search_paths_set -.. _init-from-config: + Module search paths: :data:`sys.path`. -Initialization with PyConfig ----------------------------- + If :c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, + :c:func:`Py_InitializeFromConfig` will replace + :c:member:`~PyConfig.module_search_paths` and sets + :c:member:`~PyConfig.module_search_paths_set` to ``1``. -Initializing the interpreter from a populated configuration struct is handled -by calling :c:func:`Py_InitializeFromConfig`. + Default: empty list (``module_search_paths``) and ``0`` + (``module_search_paths_set``). -The caller is responsible to handle exceptions (error or exit) using -:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. + Part of the :ref:`Python Path Configuration ` output. -If :c:func:`PyImport_FrozenModules`, :c:func:`PyImport_AppendInittab` or -:c:func:`PyImport_ExtendInittab` are used, they must be set or called after -Python preinitialization and before the Python initialization. If Python is -initialized multiple times, :c:func:`PyImport_AppendInittab` or -:c:func:`PyImport_ExtendInittab` must be called before each Python -initialization. + .. c:member:: int optimization_level -The current configuration (``PyConfig`` type) is stored in -``PyInterpreterState.config``. + Compilation optimization level: -Example setting the program name:: + * ``0``: Peephole optimizer, set ``__debug__`` to ``True``. + * ``1``: Level 0, remove assertions, set ``__debug__`` to ``False``. + * ``2``: Level 1, strip docstrings. - void init_python(void) - { - PyStatus status; + Incremented by the :option:`-O` command line option. Set to the + :envvar:`PYTHONOPTIMIZE` environment variable value. - PyConfig config; - PyConfig_InitPythonConfig(&config); + Default: ``0``. - /* Set the program name. Implicitly preinitialize Python. */ - status = PyConfig_SetString(&config, &config.program_name, - L"/path/to/my_program"); - if (PyStatus_Exception(status)) { - goto exception; - } + .. c:member:: PyWideStringList orig_argv - status = Py_InitializeFromConfig(&config); - if (PyStatus_Exception(status)) { - goto exception; - } - PyConfig_Clear(&config); - return; + The list of the original command line arguments passed to the Python + executable: :data:`sys.orig_argv`. 
- exception: - PyConfig_Clear(&config); - Py_ExitStatusException(status); - } + If :c:member:`~PyConfig.orig_argv` list is empty and + :c:member:`~PyConfig.argv` is not a list only containing an empty + string, :c:func:`PyConfig_Read` copies :c:member:`~PyConfig.argv` into + :c:member:`~PyConfig.orig_argv` before modifying + :c:member:`~PyConfig.argv` (if :c:member:`~PyConfig.parse_argv` is + non-zero). -More complete example modifying the default configuration, read the -configuration, and then override some parameters. Note that since -3.11, many parameters are not calculated until initialization, and -so values cannot be read from the configuration structure. Any values -set before initialize is called will be left unchanged by -initialization:: + See also the :c:member:`~PyConfig.argv` member and the + :c:func:`Py_GetArgcArgv` function. - PyStatus init_python(const char *program_name) - { - PyStatus status; + Default: empty list. - PyConfig config; - PyConfig_InitPythonConfig(&config); + .. versionadded:: 3.10 - /* Set the program name before reading the configuration - (decode byte string from the locale encoding). + .. c:member:: int parse_argv - Implicitly preinitialize Python. */ - status = PyConfig_SetBytesString(&config, &config.program_name, - program_name); - if (PyStatus_Exception(status)) { - goto done; - } + Parse command line arguments? - /* Read all configuration at once */ - status = PyConfig_Read(&config); - if (PyStatus_Exception(status)) { - goto done; - } + If equals to ``1``, parse :c:member:`~PyConfig.argv` the same way the regular + Python parses :ref:`command line arguments `, and strip + Python arguments from :c:member:`~PyConfig.argv`. - /* Specify sys.path explicitly */ - /* If you want to modify the default set of paths, finish - initialization first and then use PySys_GetObject("path") */ - config.module_search_paths_set = 1; - status = PyWideStringList_Append(&config.module_search_paths, - L"/path/to/stdlib"); - if (PyStatus_Exception(status)) { - goto done; - } - status = PyWideStringList_Append(&config.module_search_paths, - L"/path/to/more/modules"); - if (PyStatus_Exception(status)) { - goto done; - } + The :c:func:`PyConfig_Read` function only parses + :c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv` + is set to ``2`` after arguments are parsed. Since Python arguments are + stripped from :c:member:`PyConfig.argv`, parsing arguments twice would + parse the application options as Python options. - /* Override executable computed by PyConfig_Read() */ - status = PyConfig_SetString(&config, &config.executable, - L"/path/to/my_executable"); - if (PyStatus_Exception(status)) { - goto done; - } + Default: ``1`` in Python mode, ``0`` in isolated mode. - status = Py_InitializeFromConfig(&config); + .. versionchanged:: 3.10 + The :c:member:`PyConfig.argv` arguments are now only parsed if + :c:member:`PyConfig.parse_argv` equals to ``1``. - done: - PyConfig_Clear(&config); - return status; - } + .. c:member:: int parser_debug + Parser debug mode. If greater than ``0``, turn on parser debugging output (for expert only, depending + on compilation options). -.. _init-isolated-conf: + Incremented by the :option:`-d` command line option. Set to the + :envvar:`PYTHONDEBUG` environment variable value. -Isolated Configuration ----------------------- + Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro + must be defined). 
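As a sketch of the approach mentioned in the comment of the example above, the default :data:`sys.path` can instead be extended after initialization has finished; the ``/path/to/plugins`` entry is illustrative::

    /* After Py_InitializeFromConfig() has succeeded: extend the default
       sys.path instead of replacing it. */
    PyObject *path = PySys_GetObject("path");   /* borrowed reference */
    if (path != NULL) {
        PyObject *entry = PyUnicode_FromString("/path/to/plugins");
        if (entry == NULL || PyList_Append(path, entry) < 0) {
            PyErr_Print();
        }
        Py_XDECREF(entry);
    }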
-:c:func:`PyPreConfig_InitIsolatedConfig` and -:c:func:`PyConfig_InitIsolatedConfig` functions create a configuration to -isolate Python from the system. For example, to embed Python into an -application. + Default: ``0``. -This configuration ignores global configuration variables, environment -variables, command line arguments (:c:member:`PyConfig.argv` is not parsed) -and user site directory. The C standard streams (ex: ``stdout``) and the -LC_CTYPE locale are left unchanged. Signal handlers are not installed. + .. c:member:: int pathconfig_warnings -Configuration files are still used with this configuration to determine -paths that are unspecified. Ensure :c:member:`PyConfig.home` is specified -to avoid computing the default path configuration. + If non-zero, calculation of path configuration is allowed to log + warnings into ``stderr``. If equals to ``0``, suppress these warnings. + Default: ``1`` in Python mode, ``0`` in isolated mode. -.. _init-python-config: + Part of the :ref:`Python Path Configuration ` input. -Python Configuration --------------------- + .. versionchanged:: 3.11 + Now also applies on Windows. -:c:func:`PyPreConfig_InitPythonConfig` and :c:func:`PyConfig_InitPythonConfig` -functions create a configuration to build a customized Python which behaves as -the regular Python. + .. c:member:: wchar_t* prefix -Environments variables and command line arguments are used to configure -Python, whereas global configuration variables are ignored. + The site-specific directory prefix where the platform independent Python + files are installed: :data:`sys.prefix`. -This function enables C locale coercion (:pep:`538`) -and :ref:`Python UTF-8 Mode ` -(:pep:`540`) depending on the LC_CTYPE locale, :envvar:`PYTHONUTF8` and -:envvar:`PYTHONCOERCECLOCALE` environment variables. + Default: ``NULL``. + Part of the :ref:`Python Path Configuration ` output. -.. _init-path-config: + See also :c:member:`PyConfig.base_prefix`. -Python Path Configuration -------------------------- + .. c:member:: wchar_t* program_name -:c:type:`PyConfig` contains multiple fields for the path configuration: + Program name used to initialize :c:member:`~PyConfig.executable` and in + early error messages during Python initialization. -* Path configuration inputs: + * On macOS, use :envvar:`PYTHONEXECUTABLE` environment variable if set. + * If the ``WITH_NEXT_FRAMEWORK`` macro is defined, use + :envvar:`__PYVENV_LAUNCHER__` environment variable if set. + * Use ``argv[0]`` of :c:member:`~PyConfig.argv` if available and + non-empty. + * Otherwise, use ``L"python"`` on Windows, or ``L"python3"`` on other + platforms. - * :c:member:`PyConfig.home` - * :c:member:`PyConfig.platlibdir` - * :c:member:`PyConfig.pathconfig_warnings` - * :c:member:`PyConfig.program_name` - * :c:member:`PyConfig.pythonpath_env` - * current working directory: to get absolute paths - * ``PATH`` environment variable to get the program full path - (from :c:member:`PyConfig.program_name`) - * ``__PYVENV_LAUNCHER__`` environment variable - * (Windows only) Application paths in the registry under - "Software\Python\PythonCore\X.Y\PythonPath" of HKEY_CURRENT_USER and - HKEY_LOCAL_MACHINE (where X.Y is the Python version). + Default: ``NULL``. -* Path configuration output fields: + Part of the :ref:`Python Path Configuration ` input. 
- * :c:member:`PyConfig.base_exec_prefix` - * :c:member:`PyConfig.base_executable` - * :c:member:`PyConfig.base_prefix` - * :c:member:`PyConfig.exec_prefix` - * :c:member:`PyConfig.executable` - * :c:member:`PyConfig.module_search_paths_set`, - :c:member:`PyConfig.module_search_paths` - * :c:member:`PyConfig.prefix` + .. c:member:: wchar_t* pycache_prefix -If at least one "output field" is not set, Python calculates the path -configuration to fill unset fields. If -:c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, -:c:member:`~PyConfig.module_search_paths` is overridden and -:c:member:`~PyConfig.module_search_paths_set` is set to ``1``. + Directory where cached ``.pyc`` files are written: + :data:`sys.pycache_prefix`. -It is possible to completely ignore the function calculating the default -path configuration by setting explicitly all path configuration output -fields listed above. A string is considered as set even if it is non-empty. -``module_search_paths`` is considered as set if -``module_search_paths_set`` is set to ``1``. In this case, -``module_search_paths`` will be used without modification. + Set by the :option:`-X pycache_prefix=PATH <-X>` command line option and + the :envvar:`PYTHONPYCACHEPREFIX` environment variable. + The command-line option takes precedence. -Set :c:member:`~PyConfig.pathconfig_warnings` to ``0`` to suppress warnings when -calculating the path configuration (Unix only, Windows does not log any warning). + If ``NULL``, :data:`sys.pycache_prefix` is set to ``None``. -If :c:member:`~PyConfig.base_prefix` or :c:member:`~PyConfig.base_exec_prefix` -fields are not set, they inherit their value from :c:member:`~PyConfig.prefix` -and :c:member:`~PyConfig.exec_prefix` respectively. + Default: ``NULL``. -:c:func:`Py_RunMain` and :c:func:`Py_Main` modify :data:`sys.path`: + .. c:member:: int quiet -* If :c:member:`~PyConfig.run_filename` is set and is a directory which contains a - ``__main__.py`` script, prepend :c:member:`~PyConfig.run_filename` to - :data:`sys.path`. -* If :c:member:`~PyConfig.isolated` is zero: + Quiet mode. If greater than ``0``, don't display the copyright and version at + Python startup in interactive mode. - * If :c:member:`~PyConfig.run_module` is set, prepend the current directory - to :data:`sys.path`. Do nothing if the current directory cannot be read. - * If :c:member:`~PyConfig.run_filename` is set, prepend the directory of the - filename to :data:`sys.path`. - * Otherwise, prepend an empty string to :data:`sys.path`. + Incremented by the :option:`-q` command line option. -If :c:member:`~PyConfig.site_import` is non-zero, :data:`sys.path` can be -modified by the :mod:`site` module. If -:c:member:`~PyConfig.user_site_directory` is non-zero and the user's -site-package directory exists, the :mod:`site` module appends the user's -site-package directory to :data:`sys.path`. + Default: ``0``. -The following configuration files are used by the path configuration: + .. c:member:: wchar_t* run_command -* ``pyvenv.cfg`` -* ``._pth`` file (ex: ``python._pth``) -* ``pybuilddir.txt`` (Unix only) + Value of the :option:`-c` command line option. -If a ``._pth`` file is present: + Used by :c:func:`Py_RunMain`. -* Set :c:member:`~PyConfig.isolated` to ``1``. -* Set :c:member:`~PyConfig.use_environment` to ``0``. -* Set :c:member:`~PyConfig.site_import` to ``0``. -* Set :c:member:`~PyConfig.safe_path` to ``1``. + Default: ``NULL``. 
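For example, a minimal sketch that embeds Python to run a single command via :c:member:`~PyConfig.run_command` and :c:func:`Py_RunMain`, roughly equivalent to ``python -c``; the command string is illustrative::

    int run_command_example(void)
    {
        PyConfig config;
        PyConfig_InitPythonConfig(&config);

        /* Illustrative command, as if passed with -c. */
        PyStatus status = PyConfig_SetString(&config, &config.run_command,
                                             L"print('embedded')\n");
        if (PyStatus_Exception(status)) {
            goto fail;
        }

        status = Py_InitializeFromConfig(&config);
        if (PyStatus_Exception(status)) {
            goto fail;
        }
        PyConfig_Clear(&config);

        /* Runs the command, finalizes Python and returns an exit code. */
        return Py_RunMain();

    fail:
        PyConfig_Clear(&config);
        Py_ExitStatusException(status);
    }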
-If :c:member:`~PyConfig.home` is not set and a ``pyvenv.cfg`` file is present in -the same directory as :c:member:`~PyConfig.executable`, or its parent, -:c:member:`~PyConfig.prefix` and :c:member:`~PyConfig.exec_prefix` are set that -location. When this happens, :c:member:`~PyConfig.base_prefix` and -:c:member:`~PyConfig.base_exec_prefix` still keep their value, pointing to the -base installation. See :ref:`sys-path-init-virtual-environments` for more -information. + .. c:member:: wchar_t* run_filename -The ``__PYVENV_LAUNCHER__`` environment variable is used to set -:c:member:`PyConfig.base_executable`. + Filename passed on the command line: trailing command line argument + without :option:`-c` or :option:`-m`. It is used by the + :c:func:`Py_RunMain` function. + + For example, it is set to ``script.py`` by the ``python3 script.py arg`` + command line. + + See also the :c:member:`PyConfig.skip_source_first_line` option. + + Default: ``NULL``. + + .. c:member:: wchar_t* run_module + + Value of the :option:`-m` command line option. + + Used by :c:func:`Py_RunMain`. + + Default: ``NULL``. + + .. c:member:: wchar_t* run_presite + + ``package.module`` path to module that should be imported before + ``site.py`` is run. + + Set by the :option:`-X presite=package.module <-X>` command-line + option and the :envvar:`PYTHON_PRESITE` environment variable. + The command-line option takes precedence. + + Needs a :ref:`debug build of Python ` (the ``Py_DEBUG`` macro + must be defined). + + Default: ``NULL``. + + .. c:member:: int show_ref_count + + Show total reference count at exit (excluding :term:`immortal` objects)? + + Set to ``1`` by :option:`-X showrefcount <-X>` command line option. + + Needs a :ref:`debug build of Python ` (the ``Py_REF_DEBUG`` + macro must be defined). -.. versionchanged:: 3.14 + Default: ``0``. - :c:member:`~PyConfig.prefix`, and :c:member:`~PyConfig.exec_prefix`, are now - set to the ``pyvenv.cfg`` directory. This was previously done by :mod:`site`, - therefore affected by :option:`-S`. + .. c:member:: int site_import -.. _pyinitconfig_api: + Import the :mod:`site` module at startup? -PyInitConfig C API -================== + If equal to zero, disable the import of the module site and the + site-dependent manipulations of :data:`sys.path` that it entails. -C API to configure the Python initialization (:pep:`741`). + Also disable these manipulations if the :mod:`site` module is explicitly + imported later (call :func:`site.main` if you want them to be triggered). -.. versionadded:: 3.14 + Set to ``0`` by the :option:`-S` command line option. -Create Config -------------- + :data:`sys.flags.no_site ` is set to the inverted value of + :c:member:`~PyConfig.site_import`. -.. c:struct:: PyInitConfig + Default: ``1``. - Opaque structure to configure the Python initialization. + .. c:member:: int skip_source_first_line + If non-zero, skip the first line of the :c:member:`PyConfig.run_filename` + source. -.. c:function:: PyInitConfig* PyInitConfig_Create(void) + It allows the usage of non-Unix forms of ``#!cmd``. This is intended for + a DOS specific hack only. - Create a new initialization configuration using :ref:`Isolated Configuration - ` default values. + Set to ``1`` by the :option:`-x` command line option. - It must be freed by :c:func:`PyInitConfig_Free`. + Default: ``0``. - Return ``NULL`` on memory allocation failure. + .. c:member:: wchar_t* stdio_encoding + .. 
c:member:: wchar_t* stdio_errors + Encoding and encoding errors of :data:`sys.stdin`, :data:`sys.stdout` and + :data:`sys.stderr` (but :data:`sys.stderr` always uses + ``"backslashreplace"`` error handler). -.. c:function:: void PyInitConfig_Free(PyInitConfig *config) + Use the :envvar:`PYTHONIOENCODING` environment variable if it is + non-empty. - Free memory of the initialization configuration *config*. + Default encoding: - If *config* is ``NULL``, no operation is performed. + * ``"UTF-8"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero. + * Otherwise, use the :term:`locale encoding`. + Default error handler: -Error Handling --------------- + * On Windows: use ``"surrogateescape"``. + * ``"surrogateescape"`` if :c:member:`PyPreConfig.utf8_mode` is non-zero, + or if the LC_CTYPE locale is "C" or "POSIX". + * ``"strict"`` otherwise. -.. c:function:: int PyInitConfig_GetError(PyInitConfig* config, const char **err_msg) + See also :c:member:`PyConfig.legacy_windows_stdio`. - Get the *config* error message. + .. c:member:: int tracemalloc - * Set *\*err_msg* and return ``1`` if an error is set. - * Set *\*err_msg* to ``NULL`` and return ``0`` otherwise. + Enable tracemalloc? - An error message is an UTF-8 encoded string. + If non-zero, call :func:`tracemalloc.start` at startup. - If *config* has an exit code, format the exit code as an error - message. + Set by :option:`-X tracemalloc=N <-X>` command line option and by the + :envvar:`PYTHONTRACEMALLOC` environment variable. - The error message remains valid until another ``PyInitConfig`` - function is called with *config*. The caller doesn't have to free the - error message. + Default: ``-1`` in Python mode, ``0`` in isolated mode. + .. c:member:: int perf_profiling -.. c:function:: int PyInitConfig_GetExitCode(PyInitConfig* config, int *exitcode) + Enable the Linux ``perf`` profiler support? - Get the *config* exit code. + If equals to ``1``, enable support for the Linux ``perf`` profiler. - * Set *\*exitcode* and return ``1`` if *config* has an exit code set. - * Return ``0`` if *config* has no exit code set. + If equals to ``2``, enable support for the Linux ``perf`` profiler with + DWARF JIT support. - Only the ``Py_InitializeFromInitConfig()`` function can set an exit - code if the ``parse_argv`` option is non-zero. + Set to ``1`` by :option:`-X perf <-X>` command-line option and the + :envvar:`PYTHONPERFSUPPORT` environment variable. - An exit code can be set when parsing the command line failed (exit - code ``2``) or when a command line option asks to display the command - line help (exit code ``0``). + Set to ``2`` by the :option:`-X perf_jit <-X>` command-line option and + the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable. + Default: ``-1``. -Get Options ------------ + .. seealso:: + See :ref:`perf_profiling` for more information. -The configuration option *name* parameter must be a non-NULL -null-terminated UTF-8 encoded string. + .. versionadded:: 3.12 -.. c:function:: int PyInitConfig_HasOption(PyInitConfig *config, const char *name) + .. c:member:: wchar_t* stdlib_dir - Test if the configuration has an option called *name*. + Directory of the Python standard library. - Return ``1`` if the option exists, or return ``0`` otherwise. + Default: ``NULL``. + .. versionadded:: 3.11 -.. c:function:: int PyInitConfig_GetInt(PyInitConfig *config, const char *name, int64_t *value) + .. c:member:: int use_environment - Get an integer configuration option. + Use :ref:`environment variables `? 
- * Set *\*value*, and return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + If equals to zero, ignore the :ref:`environment variables + `. + Set to ``0`` by the :option:`-E` environment variable. -.. c:function:: int PyInitConfig_GetStr(PyInitConfig *config, const char *name, char **value) + Default: ``1`` in Python config and ``0`` in isolated config. - Get a string configuration option as a null-terminated UTF-8 - encoded string. + .. c:member:: int use_system_logger - * Set *\*value*, and return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + If non-zero, ``stdout`` and ``stderr`` will be redirected to the system + log. - *\*value* can be set to ``NULL`` if the option is an optional string and the - option is unset. + Only available on macOS 10.12 and later, and on iOS. - On success, the string must be released with ``free(value)`` if it's not - ``NULL``. + Default: ``0`` (don't use system log). + .. versionadded:: 3.13.2 -.. c:function:: int PyInitConfig_GetStrList(PyInitConfig *config, const char *name, size_t *length, char ***items) + .. c:member:: int user_site_directory - Get a string list configuration option as an array of - null-terminated UTF-8 encoded strings. + If non-zero, add the user site directory to :data:`sys.path`. - * Set *\*length* and *\*value*, and return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + Set to ``0`` by the :option:`-s` and :option:`-I` command line options. - On success, the string list must be released with - ``PyInitConfig_FreeStrList(length, items)``. + Set to ``0`` by the :envvar:`PYTHONNOUSERSITE` environment variable. + Default: ``1`` in Python mode, ``0`` in isolated mode. -.. c:function:: void PyInitConfig_FreeStrList(size_t length, char **items) + .. c:member:: int verbose - Free memory of a string list created by - ``PyInitConfig_GetStrList()``. + Verbose mode. If greater than ``0``, print a message each time a module is + imported, showing the place (filename or built-in module) from which + it is loaded. + If greater than or equal to ``2``, print a message for each file that is + checked for when searching for a module. Also provides information on + module cleanup at exit. -Set Options ------------ + Incremented by the :option:`-v` command line option. -The configuration option *name* parameter must be a non-NULL null-terminated -UTF-8 encoded string. + Set by the :envvar:`PYTHONVERBOSE` environment variable value. -Some configuration options have side effects on other options. This logic is -only implemented when ``Py_InitializeFromInitConfig()`` is called, not by the -"Set" functions below. For example, setting ``dev_mode`` to ``1`` does not set -``faulthandler`` to ``1``. + Default: ``0``. -.. c:function:: int PyInitConfig_SetInt(PyInitConfig *config, const char *name, int64_t value) + .. c:member:: PyWideStringList warnoptions - Set an integer configuration option. + Options of the :mod:`warnings` module to build warnings filters, lowest + to highest priority: :data:`sys.warnoptions`. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + The :mod:`warnings` module adds :data:`sys.warnoptions` in the reverse + order: the last :c:member:`PyConfig.warnoptions` item becomes the first + item of :data:`warnings.filters` which is checked first (highest + priority). + The :option:`-W` command line options adds its value to + :c:member:`~PyConfig.warnoptions`, it can be used multiple times. -.. 
c:function:: int PyInitConfig_SetStr(PyInitConfig *config, const char *name, const char *value) + The :envvar:`PYTHONWARNINGS` environment variable can also be used to add + warning options. Multiple options can be specified, separated by commas + (``,``). - Set a string configuration option from a null-terminated UTF-8 - encoded string. The string is copied. + Default: empty list. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + .. c:member:: int write_bytecode + If equal to ``0``, Python won't try to write ``.pyc`` files on the import of + source modules. -.. c:function:: int PyInitConfig_SetStrList(PyInitConfig *config, const char *name, size_t length, char * const *items) + Set to ``0`` by the :option:`-B` command line option and the + :envvar:`PYTHONDONTWRITEBYTECODE` environment variable. - Set a string list configuration option from an array of - null-terminated UTF-8 encoded strings. The string list is copied. + :data:`sys.dont_write_bytecode` is initialized to the inverted value of + :c:member:`~PyConfig.write_bytecode`. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + Default: ``1``. + .. c:member:: PyWideStringList xoptions -Module ------- + Values of the :option:`-X` command line options: :data:`sys._xoptions`. -.. c:function:: int PyInitConfig_AddModule(PyInitConfig *config, const char *name, PyObject* (*initfunc)(void)) + Default: empty list. - Add a built-in extension module to the table of built-in modules. + .. c:member:: int _pystats - The new module can be imported by the name *name*, and uses the function - *initfunc* as the initialization function called on the first attempted - import. + If non-zero, write performance statistics at Python exit. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. + Need a special build with the ``Py_STATS`` macro: + see :option:`--enable-pystats`. - If Python is initialized multiple times, ``PyInitConfig_AddModule()`` must - be called at each Python initialization. + Default: ``0``. - Similar to the :c:func:`PyImport_AppendInittab` function. +If :c:member:`~PyConfig.parse_argv` is non-zero, :c:member:`~PyConfig.argv` +arguments are parsed the same way the regular Python parses :ref:`command line +arguments `, and Python arguments are stripped from +:c:member:`~PyConfig.argv`. +The :c:member:`~PyConfig.xoptions` options are parsed to set other options: see +the :option:`-X` command line option. -Initialize Python ------------------ +.. versionchanged:: 3.9 -.. c:function:: int Py_InitializeFromInitConfig(PyInitConfig *config) + The ``show_alloc_count`` field has been removed. - Initialize Python from the initialization configuration. - * Return ``0`` on success. - * Set an error in *config* and return ``-1`` on error. - * Set an exit code in *config* and return ``-1`` if Python wants to - exit. +.. _init-from-config: - See ``PyInitConfig_GetExitcode()`` for the exit code case. +Initialization with PyConfig +---------------------------- +Initializing the interpreter from a populated configuration struct is handled +by calling :c:func:`Py_InitializeFromConfig`. -Example -------- +The caller is responsible to handle exceptions (error or exit) using +:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`. 
-Example initializing Python, set configuration options of various types, -return ``-1`` on error: +If :c:func:`PyImport_FrozenModules`, :c:func:`PyImport_AppendInittab` or +:c:func:`PyImport_ExtendInittab` are used, they must be set or called after +Python preinitialization and before the Python initialization. If Python is +initialized multiple times, :c:func:`PyImport_AppendInittab` or +:c:func:`PyImport_ExtendInittab` must be called before each Python +initialization. -.. code-block:: c +The current configuration (``PyConfig`` type) is stored in +``PyInterpreterState.config``. - int init_python(void) - { - PyInitConfig *config = PyInitConfig_Create(); - if (config == NULL) { - printf("PYTHON INIT ERROR: memory allocation failed\n"); - return -1; - } +Example setting the program name:: - // Set an integer (dev mode) - if (PyInitConfig_SetInt(config, "dev_mode", 1) < 0) { - goto error; - } + void init_python(void) + { + PyStatus status; - // Set a list of UTF-8 strings (argv) - char *argv[] = {"my_program", "-c", "pass"}; - if (PyInitConfig_SetStrList(config, "argv", - Py_ARRAY_LENGTH(argv), argv) < 0) { - goto error; - } + PyConfig config; + PyConfig_InitPythonConfig(&config); - // Set a UTF-8 string (program name) - if (PyInitConfig_SetStr(config, "program_name", L"my_program") < 0) { - goto error; + /* Set the program name. Implicitly preinitialize Python. */ + status = PyConfig_SetString(&config, &config.program_name, + L"/path/to/my_program"); + if (PyStatus_Exception(status)) { + goto exception; } - // Initialize Python with the configuration - if (Py_InitializeFromInitConfig(config) < 0) { - goto error; + status = Py_InitializeFromConfig(&config); + if (PyStatus_Exception(status)) { + goto exception; } - PyInitConfig_Free(config); - return 0; - - error: - { - // Display the error message - // This uncommon braces style is used, because you cannot make - // goto targets point to variable declarations. - const char *err_msg; - (void)PyInitConfig_GetError(config, &err_msg); - printf("PYTHON INIT ERROR: %s\n", err_msg); - PyInitConfig_Free(config); + PyConfig_Clear(&config); + return; - return -1; - } + exception: + PyConfig_Clear(&config); + Py_ExitStatusException(status); } +More complete example modifying the default configuration, read the +configuration, and then override some parameters. Note that since +3.11, many parameters are not calculated until initialization, and +so values cannot be read from the configuration structure. Any values +set before initialize is called will be left unchanged by +initialization:: -Runtime Python configuration API -================================ - -The configuration option *name* parameter must be a non-NULL null-terminated -UTF-8 encoded string. - -Some options are read from the :mod:`sys` attributes. For example, the option -``"argv"`` is read from :data:`sys.argv`. + PyStatus init_python(const char *program_name) + { + PyStatus status; + PyConfig config; + PyConfig_InitPythonConfig(&config); -.. c:function:: PyObject* PyConfig_Get(const char *name) + /* Set the program name before reading the configuration + (decode byte string from the locale encoding). - Get the current runtime value of a configuration option as a Python object. + Implicitly preinitialize Python. */ + status = PyConfig_SetBytesString(&config, &config.program_name, + program_name); + if (PyStatus_Exception(status)) { + goto done; + } - * Return a new reference on success. - * Set an exception and return ``NULL`` on error. 
+ /* Read all configuration at once */ + status = PyConfig_Read(&config); + if (PyStatus_Exception(status)) { + goto done; + } - The object type depends on the configuration option. It can be: + /* Specify sys.path explicitly */ + /* If you want to modify the default set of paths, finish + initialization first and then use PySys_GetObject("path") */ + config.module_search_paths_set = 1; + status = PyWideStringList_Append(&config.module_search_paths, + L"/path/to/stdlib"); + if (PyStatus_Exception(status)) { + goto done; + } + status = PyWideStringList_Append(&config.module_search_paths, + L"/path/to/more/modules"); + if (PyStatus_Exception(status)) { + goto done; + } - * ``bool`` - * ``int`` - * ``str`` - * ``list[str]`` - * ``dict[str, str]`` + /* Override executable computed by PyConfig_Read() */ + status = PyConfig_SetString(&config, &config.executable, + L"/path/to/my_executable"); + if (PyStatus_Exception(status)) { + goto done; + } - The caller must hold the GIL. The function cannot be called before - Python initialization nor after Python finalization. + status = Py_InitializeFromConfig(&config); - .. versionadded:: 3.14 + done: + PyConfig_Clear(&config); + return status; + } -.. c:function:: int PyConfig_GetInt(const char *name, int *value) +.. _init-isolated-conf: - Similar to :c:func:`PyConfig_Get`, but get the value as a C int. +Isolated Configuration +---------------------- - * Return ``0`` on success. - * Set an exception and return ``-1`` on error. +:c:func:`PyPreConfig_InitIsolatedConfig` and +:c:func:`PyConfig_InitIsolatedConfig` functions create a configuration to +isolate Python from the system. For example, to embed Python into an +application. - .. versionadded:: 3.14 +This configuration ignores global configuration variables, environment +variables, command line arguments (:c:member:`PyConfig.argv` is not parsed) +and user site directory. The C standard streams (ex: ``stdout``) and the +LC_CTYPE locale are left unchanged. Signal handlers are not installed. +Configuration files are still used with this configuration to determine +paths that are unspecified. Ensure :c:member:`PyConfig.home` is specified +to avoid computing the default path configuration. -.. c:function:: PyObject* PyConfig_Names(void) - Get all configuration option names as a ``frozenset``. +.. _init-python-config: - * Return a new reference on success. - * Set an exception and return ``NULL`` on error. +Python Configuration +-------------------- - The caller must hold the GIL. The function cannot be called before - Python initialization nor after Python finalization. +:c:func:`PyPreConfig_InitPythonConfig` and :c:func:`PyConfig_InitPythonConfig` +functions create a configuration to build a customized Python which behaves as +the regular Python. - .. versionadded:: 3.14 +Environments variables and command line arguments are used to configure +Python, whereas global configuration variables are ignored. +This function enables C locale coercion (:pep:`538`) +and :ref:`Python UTF-8 Mode ` +(:pep:`540`) depending on the LC_CTYPE locale, :envvar:`PYTHONUTF8` and +:envvar:`PYTHONCOERCECLOCALE` environment variables. -.. c:function:: int PyConfig_Set(const char *name, PyObject *value) - Set the current runtime value of a configuration option. +.. _init-path-config: - * Raise a :exc:`ValueError` if there is no option *name*. - * Raise a :exc:`ValueError` if *value* is an invalid value. - * Raise a :exc:`ValueError` if the option is read-only (cannot be set). 
- * Raise a :exc:`TypeError` if *value* has not the proper type. +Python Path Configuration +------------------------- - The caller must hold the GIL. The function cannot be called before - Python initialization nor after Python finalization. +:c:type:`PyConfig` contains multiple fields for the path configuration: - .. versionadded:: 3.14 +* Path configuration inputs: + * :c:member:`PyConfig.home` + * :c:member:`PyConfig.platlibdir` + * :c:member:`PyConfig.pathconfig_warnings` + * :c:member:`PyConfig.program_name` + * :c:member:`PyConfig.pythonpath_env` + * current working directory: to get absolute paths + * ``PATH`` environment variable to get the program full path + (from :c:member:`PyConfig.program_name`) + * ``__PYVENV_LAUNCHER__`` environment variable + * (Windows only) Application paths in the registry under + "Software\Python\PythonCore\X.Y\PythonPath" of HKEY_CURRENT_USER and + HKEY_LOCAL_MACHINE (where X.Y is the Python version). -Py_GetArgcArgv() -================ +* Path configuration output fields: -.. c:function:: void Py_GetArgcArgv(int *argc, wchar_t ***argv) + * :c:member:`PyConfig.base_exec_prefix` + * :c:member:`PyConfig.base_executable` + * :c:member:`PyConfig.base_prefix` + * :c:member:`PyConfig.exec_prefix` + * :c:member:`PyConfig.executable` + * :c:member:`PyConfig.module_search_paths_set`, + :c:member:`PyConfig.module_search_paths` + * :c:member:`PyConfig.prefix` - Get the original command line arguments, before Python modified them. +If at least one "output field" is not set, Python calculates the path +configuration to fill unset fields. If +:c:member:`~PyConfig.module_search_paths_set` is equal to ``0``, +:c:member:`~PyConfig.module_search_paths` is overridden and +:c:member:`~PyConfig.module_search_paths_set` is set to ``1``. - See also :c:member:`PyConfig.orig_argv` member. +It is possible to completely ignore the function calculating the default +path configuration by setting explicitly all path configuration output +fields listed above. A string is considered as set even if it is non-empty. +``module_search_paths`` is considered as set if +``module_search_paths_set`` is set to ``1``. In this case, +``module_search_paths`` will be used without modification. +Set :c:member:`~PyConfig.pathconfig_warnings` to ``0`` to suppress warnings when +calculating the path configuration (Unix only, Windows does not log any warning). -Multi-Phase Initialization Private Provisional API -================================================== +If :c:member:`~PyConfig.base_prefix` or :c:member:`~PyConfig.base_exec_prefix` +fields are not set, they inherit their value from :c:member:`~PyConfig.prefix` +and :c:member:`~PyConfig.exec_prefix` respectively. -This section is a private provisional API introducing multi-phase -initialization, the core feature of :pep:`432`: +:c:func:`Py_RunMain` and :c:func:`Py_Main` modify :data:`sys.path`: -* "Core" initialization phase, "bare minimum Python": +* If :c:member:`~PyConfig.run_filename` is set and is a directory which contains a + ``__main__.py`` script, prepend :c:member:`~PyConfig.run_filename` to + :data:`sys.path`. +* If :c:member:`~PyConfig.isolated` is zero: - * Builtin types; - * Builtin exceptions; - * Builtin and frozen modules; - * The :mod:`sys` module is only partially initialized - (ex: :data:`sys.path` doesn't exist yet). + * If :c:member:`~PyConfig.run_module` is set, prepend the current directory + to :data:`sys.path`. Do nothing if the current directory cannot be read. 
+ * If :c:member:`~PyConfig.run_filename` is set, prepend the directory of the + filename to :data:`sys.path`. + * Otherwise, prepend an empty string to :data:`sys.path`. -* "Main" initialization phase, Python is fully initialized: +If :c:member:`~PyConfig.site_import` is non-zero, :data:`sys.path` can be +modified by the :mod:`site` module. If +:c:member:`~PyConfig.user_site_directory` is non-zero and the user's +site-package directory exists, the :mod:`site` module appends the user's +site-package directory to :data:`sys.path`. - * Install and configure :mod:`importlib`; - * Apply the :ref:`Path Configuration `; - * Install signal handlers; - * Finish :mod:`sys` module initialization (ex: create :data:`sys.stdout` - and :data:`sys.path`); - * Enable optional features like :mod:`faulthandler` and :mod:`tracemalloc`; - * Import the :mod:`site` module; - * etc. +The following configuration files are used by the path configuration: -Private provisional API: +* ``pyvenv.cfg`` +* ``._pth`` file (ex: ``python._pth``) +* ``pybuilddir.txt`` (Unix only) -* :c:member:`PyConfig._init_main`: if set to ``0``, - :c:func:`Py_InitializeFromConfig` stops at the "Core" initialization phase. +If a ``._pth`` file is present: -.. c:function:: PyStatus _Py_InitializeMain(void) +* Set :c:member:`~PyConfig.isolated` to ``1``. +* Set :c:member:`~PyConfig.use_environment` to ``0``. +* Set :c:member:`~PyConfig.site_import` to ``0``. +* Set :c:member:`~PyConfig.safe_path` to ``1``. - Move to the "Main" initialization phase, finish the Python initialization. +If :c:member:`~PyConfig.home` is not set and a ``pyvenv.cfg`` file is present in +the same directory as :c:member:`~PyConfig.executable`, or its parent, +:c:member:`~PyConfig.prefix` and :c:member:`~PyConfig.exec_prefix` are set that +location. When this happens, :c:member:`~PyConfig.base_prefix` and +:c:member:`~PyConfig.base_exec_prefix` still keep their value, pointing to the +base installation. See :ref:`sys-path-init-virtual-environments` for more +information. -No module is imported during the "Core" phase and the ``importlib`` module is -not configured: the :ref:`Path Configuration ` is only -applied during the "Main" phase. It may allow to customize Python in Python to -override or tune the :ref:`Path Configuration `, maybe -install a custom :data:`sys.meta_path` importer or an import hook, etc. +The ``__PYVENV_LAUNCHER__`` environment variable is used to set +:c:member:`PyConfig.base_executable`. -It may become possible to calculate the :ref:`Path Configuration -` in Python, after the Core phase and before the Main phase, -which is one of the :pep:`432` motivation. +.. versionchanged:: 3.14 -The "Core" phase is not properly defined: what should be and what should -not be available at this phase is not specified yet. The API is marked -as private and provisional: the API can be modified or even be removed -anytime until a proper public API is designed. + :c:member:`~PyConfig.prefix`, and :c:member:`~PyConfig.exec_prefix`, are now + set to the ``pyvenv.cfg`` directory. This was previously done by :mod:`site`, + therefore affected by :option:`-S`. -Example running Python code between "Core" and "Main" initialization -phases:: - void init_python(void) - { - PyStatus status; +Py_GetArgcArgv() +================ - PyConfig config; - PyConfig_InitPythonConfig(&config); - config._init_main = 0; +.. c:function:: void Py_GetArgcArgv(int *argc, wchar_t ***argv) - /* ... customize 'config' configuration ... 
*/ + Get the original command line arguments, before Python modified them. - status = Py_InitializeFromConfig(&config); - PyConfig_Clear(&config); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } + See also :c:member:`PyConfig.orig_argv` member. - /* Use sys.stderr because sys.stdout is only created - by _Py_InitializeMain() */ - int res = PyRun_SimpleString( - "import sys; " - "print('Run Python code before _Py_InitializeMain', " - "file=sys.stderr)"); - if (res < 0) { - exit(1); - } +Delaying main module execution +============================== - /* ... put more configuration code here ... */ +In some embedding use cases, it may be desirable to separate interpreter initialization +from the execution of the main module. - status = _Py_InitializeMain(); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - } +This separation can be achieved by setting ``PyConfig.run_command`` to the empty +string during initialization (to prevent the interpreter from dropping into the +interactive prompt), and then subsequently executing the desired main module +code using ``__main__.__dict__`` as the global namespace. diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index 084ba513493ffe..25d9e62e387279 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -824,6 +824,6 @@ The :c:type:`PyLongWriter` API can be used to import an integer. Discard a :c:type:`PyLongWriter` created by :c:func:`PyLongWriter_Create`. - *writer* must not be ``NULL``. + If *writer* is ``NULL``, no operation is performed. The writer instance and the *digits* array are invalid after the call. diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index f19b86a8dbfb66..94110d48ed7d85 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -1054,6 +1054,15 @@ These are the UTF-8 codec APIs: As :c:func:`PyUnicode_AsUTF8AndSize`, but does not store the size. + .. warning:: + + This function does not have any special behavior for + `null characters `_ embedded within + *unicode*. As a result, strings containing null characters will remain in the returned + string, which some C functions might interpret as the end of the string, leading to + truncation. If truncation is an issue, it is recommended to use :c:func:`PyUnicode_AsUTF8AndSize` + instead. + .. versionadded:: 3.3 .. versionchanged:: 3.7 diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst index 9f02bdb5896563..1ef4181d52eb10 100644 --- a/Doc/c-api/veryhigh.rst +++ b/Doc/c-api/veryhigh.rst @@ -348,8 +348,20 @@ the same library that the Python runtime is using. .. versionchanged:: 3.8 Added *cf_feature_version* field. + The available compiler flags are accessible as macros: -.. c:var:: int CO_FUTURE_DIVISION + .. c:namespace:: NULL - This bit can be set in *flags* to cause division operator ``/`` to be - interpreted as "true division" according to :pep:`238`. + .. c:macro:: PyCF_ALLOW_TOP_LEVEL_AWAIT + PyCF_ONLY_AST + PyCF_OPTIMIZED_AST + PyCF_TYPE_COMMENTS + + See :ref:`compiler flags ` in documentation of the + :py:mod:`!ast` Python module, which exports these constants under + the same names. + + .. c:var:: int CO_FUTURE_DIVISION + + This bit can be set in *flags* to cause division operator ``/`` to be + interpreted as "true division" according to :pep:`238`. 
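Editor's note: the compiler-flag macros added to ``Doc/c-api/veryhigh.rst`` above are passed through :c:type:`PyCompilerFlags`. As a rough sketch (assuming an already-initialized interpreter; the helper name and the ``"<embedded>"`` filename are illustrative), ``PyCF_ONLY_AST`` can be combined with :c:func:`Py_CompileStringFlags` to obtain an AST object instead of a code object:

.. code-block:: c

   #include <Python.h>

   /* Sketch: compile source to an ast.Module object rather than a code
      object by setting PyCF_ONLY_AST in PyCompilerFlags.
      Returns a new reference, or NULL with an exception set. */
   static PyObject *
   compile_to_ast(const char *source)
   {
       PyCompilerFlags flags = {0};
       flags.cf_flags = PyCF_ONLY_AST;
       flags.cf_feature_version = PY_MINOR_VERSION;
       return Py_CompileStringFlags(source, "<embedded>", Py_file_input, &flags);
   }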
diff --git a/Doc/conf.py b/Doc/conf.py index ae08c7fa288080..1aeecaeb3073f5 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -9,9 +9,6 @@ import importlib import os import sys -import time - -import sphinx # Make our custom extensions available to Sphinx sys.path.append(os.path.abspath('tools/extensions')) @@ -28,8 +25,10 @@ 'audit_events', 'availability', 'c_annotations', + 'changes', 'glossary_search', 'lexers', + 'pydoc_topics', 'pyspecific', 'sphinx.ext.coverage', 'sphinx.ext.doctest', @@ -97,7 +96,8 @@ highlight_language = 'python3' # Minimum version of sphinx required -needs_sphinx = '7.2.6' +# Keep this version in sync with ``Doc/requirements.txt``. +needs_sphinx = '8.1.3' # Create table of contents entries for domain objects (e.g. functions, classes, # attributes, etc.). Default is True. @@ -376,13 +376,7 @@ # This 'Last updated on:' timestamp is inserted at the bottom of every page. html_last_updated_fmt = '%b %d, %Y (%H:%M UTC)' -if sphinx.version_info[:2] >= (8, 1): - html_last_updated_use_utc = True -else: - html_time = int(os.environ.get('SOURCE_DATE_EPOCH', time.time())) - html_last_updated_fmt = time.strftime( - html_last_updated_fmt, time.gmtime(html_time) - ) +html_last_updated_use_utc = True # Path to find HTML templates to override theme templates_path = ['tools/templates'] @@ -566,8 +560,6 @@ r'https://github.com/python/cpython/tree/.*': 'https://github.com/python/cpython/blob/.*', # Intentional HTTP use at Misc/NEWS.d/3.5.0a1.rst r'http://www.python.org/$': 'https://www.python.org/$', - # Used in license page, keep as is - r'https://www.zope.org/': r'https://www.zope.dev/', # Microsoft's redirects to learn.microsoft.com r'https://msdn.microsoft.com/.*': 'https://learn.microsoft.com/.*', r'https://docs.microsoft.com/.*': 'https://learn.microsoft.com/.*', @@ -619,16 +611,6 @@ } extlinks_detect_hardcoded_links = True -if sphinx.version_info[:2] < (8, 1): - # Sphinx 8.1 has in-built CVE and CWE roles. - extlinks |= { - "cve": ( - "https://www.cve.org/CVERecord?id=CVE-%s", - "CVE-%s", - ), - "cwe": ("https://cwe.mitre.org/data/definitions/%s.html", "CWE-%s"), - } - # Options for c_annotations extension # ----------------------------------- diff --git a/Doc/constraints.txt b/Doc/constraints.txt index 26ac1862dbac0b..29cd4be1d3c8db 100644 --- a/Doc/constraints.txt +++ b/Doc/constraints.txt @@ -13,14 +13,12 @@ packaging<25 Pygments<3 requests<3 snowballstemmer<3 -# keep lower-bounds until Sphinx 8.1 is released -# https://github.com/sphinx-doc/sphinx/pull/12756 -sphinxcontrib-applehelp>=1.0.7,<3 -sphinxcontrib-devhelp>=1.0.6,<3 -sphinxcontrib-htmlhelp>=2.0.6,<3 -sphinxcontrib-jsmath>=1.0.1,<2 -sphinxcontrib-qthelp>=1.0.6,<3 -sphinxcontrib-serializinghtml>=1.1.9,<3 +sphinxcontrib-applehelp<3 +sphinxcontrib-devhelp<3 +sphinxcontrib-htmlhelp<3 +sphinxcontrib-jsmath<2 +sphinxcontrib-qthelp<3 +sphinxcontrib-serializinghtml<3 # Direct dependencies of Jinja2 (Jinja is a dependency of Sphinx, see above) MarkupSafe<3 diff --git a/Doc/deprecations/c-api-pending-removal-in-3.14.rst b/Doc/deprecations/c-api-pending-removal-in-3.14.rst index 9e10bf2691e5c8..c805074669811a 100644 --- a/Doc/deprecations/c-api-pending-removal-in-3.14.rst +++ b/Doc/deprecations/c-api-pending-removal-in-3.14.rst @@ -6,67 +6,3 @@ Pending removal in Python 3.14 * Creating :c:data:`immutable types ` with mutable bases (:gh:`95388`). - -* Functions to configure Python's initialization, deprecated in Python 3.11: - - * :c:func:`!PySys_SetArgvEx()`: - Set :c:member:`PyConfig.argv` instead. 
- * :c:func:`!PySys_SetArgv()`: - Set :c:member:`PyConfig.argv` instead. - * :c:func:`!Py_SetProgramName()`: - Set :c:member:`PyConfig.program_name` instead. - * :c:func:`!Py_SetPythonHome()`: - Set :c:member:`PyConfig.home` instead. - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. - -* Global configuration variables: - - * :c:var:`Py_DebugFlag`: - Use :c:member:`PyConfig.parser_debug` instead. - * :c:var:`Py_VerboseFlag`: - Use :c:member:`PyConfig.verbose` instead. - * :c:var:`Py_QuietFlag`: - Use :c:member:`PyConfig.quiet` instead. - * :c:var:`Py_InteractiveFlag`: - Use :c:member:`PyConfig.interactive` instead. - * :c:var:`Py_InspectFlag`: - Use :c:member:`PyConfig.inspect` instead. - * :c:var:`Py_OptimizeFlag`: - Use :c:member:`PyConfig.optimization_level` instead. - * :c:var:`Py_NoSiteFlag`: - Use :c:member:`PyConfig.site_import` instead. - * :c:var:`Py_BytesWarningFlag`: - Use :c:member:`PyConfig.bytes_warning` instead. - * :c:var:`Py_FrozenFlag`: - Use :c:member:`PyConfig.pathconfig_warnings` instead. - * :c:var:`Py_IgnoreEnvironmentFlag`: - Use :c:member:`PyConfig.use_environment` instead. - * :c:var:`Py_DontWriteBytecodeFlag`: - Use :c:member:`PyConfig.write_bytecode` instead. - * :c:var:`Py_NoUserSiteDirectory`: - Use :c:member:`PyConfig.user_site_directory` instead. - * :c:var:`Py_UnbufferedStdioFlag`: - Use :c:member:`PyConfig.buffered_stdio` instead. - * :c:var:`Py_HashRandomizationFlag`: - Use :c:member:`PyConfig.use_hash_seed` - and :c:member:`PyConfig.hash_seed` instead. - * :c:var:`Py_IsolatedFlag`: - Use :c:member:`PyConfig.isolated` instead. - * :c:var:`Py_LegacyWindowsFSEncodingFlag`: - Use :c:member:`PyPreConfig.legacy_windows_fs_encoding` instead. - * :c:var:`Py_LegacyWindowsStdioFlag`: - Use :c:member:`PyConfig.legacy_windows_stdio` instead. - * :c:var:`!Py_FileSystemDefaultEncoding`: - Use :c:member:`PyConfig.filesystem_encoding` instead. - * :c:var:`!Py_HasFileSystemDefaultEncoding`: - Use :c:member:`PyConfig.filesystem_encoding` instead. - * :c:var:`!Py_FileSystemDefaultEncodeErrors`: - Use :c:member:`PyConfig.filesystem_errors` instead. - * :c:var:`!Py_UTF8Mode`: - Use :c:member:`PyPreConfig.utf8_mode` instead. - (see :c:func:`Py_PreInitialize`) - - The :c:func:`Py_InitializeFromConfig` API should be used with - :c:type:`PyConfig` instead. diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst index 0ce0f9c118c094..ac31b3cc8cd451 100644 --- a/Doc/deprecations/c-api-pending-removal-in-3.15.rst +++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst @@ -5,7 +5,9 @@ Pending removal in Python 3.15 * The :c:func:`PyImport_ImportModuleNoBlock`: Use :c:func:`PyImport_ImportModule` instead. * :c:func:`PyWeakref_GetObject` and :c:func:`PyWeakref_GET_OBJECT`: - Use :c:func:`PyWeakref_GetRef` instead. + Use :c:func:`PyWeakref_GetRef` instead. The `pythoncapi-compat project + `__ can be used to get + :c:func:`PyWeakref_GetRef` on Python 3.12 and older. * :c:type:`Py_UNICODE` type and the :c:macro:`!Py_UNICODE_WIDE` macro: Use :c:type:`wchar_t` instead. * Python initialization functions: @@ -23,5 +25,90 @@ Pending removal in Python 3.15 * :c:func:`Py_GetProgramName`: Get :data:`sys.executable` instead. * :c:func:`Py_GetPythonHome`: - Get :c:member:`PyConfig.home` + Get :c:func:`PyConfig_Get("home") ` or the :envvar:`PYTHONHOME` environment variable instead. + + See also the :c:func:`PyConfig_Get` function. 
+ +* Functions to configure Python's initialization, deprecated in Python 3.11: + + * :c:func:`!PySys_SetArgvEx()`: + Set :c:member:`PyConfig.argv` instead. + * :c:func:`!PySys_SetArgv()`: + Set :c:member:`PyConfig.argv` instead. + * :c:func:`!Py_SetProgramName()`: + Set :c:member:`PyConfig.program_name` instead. + * :c:func:`!Py_SetPythonHome()`: + Set :c:member:`PyConfig.home` instead. + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` instead. + +* Global configuration variables: + + * :c:var:`Py_DebugFlag`: + Use :c:member:`PyConfig.parser_debug` or + :c:func:`PyConfig_Get("parser_debug") ` instead. + * :c:var:`Py_VerboseFlag`: + Use :c:member:`PyConfig.verbose` or + :c:func:`PyConfig_Get("verbose") ` instead. + * :c:var:`Py_QuietFlag`: + Use :c:member:`PyConfig.quiet` or + :c:func:`PyConfig_Get("quiet") ` instead. + * :c:var:`Py_InteractiveFlag`: + Use :c:member:`PyConfig.interactive` or + :c:func:`PyConfig_Get("interactive") ` instead. + * :c:var:`Py_InspectFlag`: + Use :c:member:`PyConfig.inspect` or + :c:func:`PyConfig_Get("inspect") ` instead. + * :c:var:`Py_OptimizeFlag`: + Use :c:member:`PyConfig.optimization_level` or + :c:func:`PyConfig_Get("optimization_level") ` instead. + * :c:var:`Py_NoSiteFlag`: + Use :c:member:`PyConfig.site_import` or + :c:func:`PyConfig_Get("site_import") ` instead. + * :c:var:`Py_BytesWarningFlag`: + Use :c:member:`PyConfig.bytes_warning` or + :c:func:`PyConfig_Get("bytes_warning") ` instead. + * :c:var:`Py_FrozenFlag`: + Use :c:member:`PyConfig.pathconfig_warnings` or + :c:func:`PyConfig_Get("pathconfig_warnings") ` instead. + * :c:var:`Py_IgnoreEnvironmentFlag`: + Use :c:member:`PyConfig.use_environment` or + :c:func:`PyConfig_Get("use_environment") ` instead. + * :c:var:`Py_DontWriteBytecodeFlag`: + Use :c:member:`PyConfig.write_bytecode` or + :c:func:`PyConfig_Get("write_bytecode") ` instead. + * :c:var:`Py_NoUserSiteDirectory`: + Use :c:member:`PyConfig.user_site_directory` or + :c:func:`PyConfig_Get("user_site_directory") ` instead. + * :c:var:`Py_UnbufferedStdioFlag`: + Use :c:member:`PyConfig.buffered_stdio` or + :c:func:`PyConfig_Get("buffered_stdio") ` instead. + * :c:var:`Py_HashRandomizationFlag`: + Use :c:member:`PyConfig.use_hash_seed` + and :c:member:`PyConfig.hash_seed` or + :c:func:`PyConfig_Get("hash_seed") ` instead. + * :c:var:`Py_IsolatedFlag`: + Use :c:member:`PyConfig.isolated` or + :c:func:`PyConfig_Get("isolated") ` instead. + * :c:var:`Py_LegacyWindowsFSEncodingFlag`: + Use :c:member:`PyPreConfig.legacy_windows_fs_encoding` or + :c:func:`PyConfig_Get("legacy_windows_fs_encoding") ` instead. + * :c:var:`Py_LegacyWindowsStdioFlag`: + Use :c:member:`PyConfig.legacy_windows_stdio` or + :c:func:`PyConfig_Get("legacy_windows_stdio") ` instead. + * :c:var:`!Py_FileSystemDefaultEncoding`, :c:var:`!Py_HasFileSystemDefaultEncoding`: + Use :c:member:`PyConfig.filesystem_encoding` or + :c:func:`PyConfig_Get("filesystem_encoding") ` instead. + * :c:var:`!Py_FileSystemDefaultEncodeErrors`: + Use :c:member:`PyConfig.filesystem_errors` or + :c:func:`PyConfig_Get("filesystem_errors") ` instead. + * :c:var:`!Py_UTF8Mode`: + Use :c:member:`PyPreConfig.utf8_mode` or + :c:func:`PyConfig_Get("utf8_mode") ` instead. + (see :c:func:`Py_PreInitialize`) + + The :c:func:`Py_InitializeFromConfig` API should be used with + :c:type:`PyConfig` to set these options. Or :c:func:`PyConfig_Get` can be + used to get these options at runtime. 
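Editor's note: the replacements listed above lean on the runtime configuration API. A minimal sketch (assuming Python is already initialized and the caller holds the GIL; the helper name is illustrative) reads one of these options with ``PyConfig_GetInt()`` instead of the deprecated global flag:

.. code-block:: c

   #include <Python.h>

   /* Sketch: read the "verbose" option at runtime instead of using the
      deprecated Py_VerboseFlag global. */
   static int
   get_verbose_option(void)
   {
       int verbose;
       if (PyConfig_GetInt("verbose", &verbose) < 0) {
           /* An exception is set on failure. */
           return -1;
       }
       return verbose;
   }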
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.18.rst b/Doc/deprecations/c-api-pending-removal-in-3.18.rst new file mode 100644 index 00000000000000..0689d8b4f9e959 --- /dev/null +++ b/Doc/deprecations/c-api-pending-removal-in-3.18.rst @@ -0,0 +1,19 @@ +Pending removal in Python 3.18 +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Deprecated private functions (:gh:`128863`): + + * :c:func:`!_PyBytes_Join`: use :c:func:`PyBytes_Join`. + * :c:func:`!_PyDict_GetItemStringWithError`: use :c:func:`PyDict_GetItemStringRef`. + * :c:func:`!_PyDict_Pop()`: :c:func:`PyDict_Pop`. + * :c:func:`!_PyLong_Sign()`: use :c:func:`PyLong_GetSign`. + * :c:func:`!_PyLong_FromDigits` and :c:func:`!_PyLong_New`: + use :c:func:`PyLongWriter_Create`. + * :c:func:`!_PyThreadState_UncheckedGet`: use :c:func:`PyThreadState_GetUnchecked`. + * :c:func:`!_PyUnicode_AsString`: use :c:func:`PyUnicode_AsUTF8`. + * :c:func:`!_Py_HashPointer`: use :c:func:`Py_HashPointer`. + * :c:func:`!_Py_fopen_obj`: use :c:func:`Py_fopen`. + + The `pythoncapi-compat project + `__ can be used to get these + new public functions on Python 3.13 and older. diff --git a/Doc/deprecations/pending-removal-in-3.15.rst b/Doc/deprecations/pending-removal-in-3.15.rst index 3b03e1f49e6754..390bbff2835cf8 100644 --- a/Doc/deprecations/pending-removal-in-3.15.rst +++ b/Doc/deprecations/pending-removal-in-3.15.rst @@ -51,6 +51,11 @@ Pending removal in Python 3.15 This function is only useful for Jython support, has a confusing API, and is largely untested. +* :mod:`sysconfig`: + + * The ``check_home`` argument of :func:`sysconfig.is_python_build` has been + deprecated since Python 3.12. + * :mod:`threading`: * :func:`~threading.RLock` will take no arguments in Python 3.15. diff --git a/Doc/deprecations/pending-removal-in-3.16.rst b/Doc/deprecations/pending-removal-in-3.16.rst index 41b30defdba0bc..b408a6d72febe0 100644 --- a/Doc/deprecations/pending-removal-in-3.16.rst +++ b/Doc/deprecations/pending-removal-in-3.16.rst @@ -80,6 +80,12 @@ Pending removal in Python 3.16 has been deprecated since Python 3.13. Use the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment variable instead. +* :mod:`sysconfig`: + + * The :func:`!sysconfig.expand_makefile_vars` function + has been deprecated since Python 3.14. + Use the ``vars`` argument of :func:`sysconfig.get_paths` instead. + * :mod:`tarfile`: * The undocumented and unused :attr:`!TarFile.tarfile` attribute diff --git a/Doc/faq/programming.rst b/Doc/faq/programming.rst index fa7b22bde1dc6f..776bab1ed5b779 100644 --- a/Doc/faq/programming.rst +++ b/Doc/faq/programming.rst @@ -1906,28 +1906,30 @@ In the standard library code, you will see several common patterns for correctly using identity tests: 1) As recommended by :pep:`8`, an identity test is the preferred way to check -for ``None``. This reads like plain English in code and avoids confusion with -other objects that may have boolean values that evaluate to false. + for ``None``. This reads like plain English in code and avoids confusion + with other objects that may have boolean values that evaluate to false. 2) Detecting optional arguments can be tricky when ``None`` is a valid input -value. In those situations, you can create a singleton sentinel object -guaranteed to be distinct from other objects. For example, here is how -to implement a method that behaves like :meth:`dict.pop`:: + value. In those situations, you can create a singleton sentinel object + guaranteed to be distinct from other objects. 
For example, here is how + to implement a method that behaves like :meth:`dict.pop`: - _sentinel = object() + .. code-block:: python - def pop(self, key, default=_sentinel): - if key in self: - value = self[key] - del self[key] - return value - if default is _sentinel: - raise KeyError(key) - return default + _sentinel = object() + + def pop(self, key, default=_sentinel): + if key in self: + value = self[key] + del self[key] + return value + if default is _sentinel: + raise KeyError(key) + return default 3) Container implementations sometimes need to augment equality tests with -identity tests. This prevents the code from being confused by objects such as -``float('NaN')`` that are not equal to themselves. + identity tests. This prevents the code from being confused by objects + such as ``float('NaN')`` that are not equal to themselves. For example, here is the implementation of :meth:`!collections.abc.Sequence.__contains__`:: diff --git a/Doc/glossary.rst b/Doc/glossary.rst index f67f3ecad0bc40..e3a14601398e89 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -115,7 +115,7 @@ Glossary :keyword:`yield` expression. Each :keyword:`yield` temporarily suspends processing, remembering the - location execution state (including local variables and pending + execution state (including local variables and pending try-statements). When the *asynchronous generator iterator* effectively resumes with another awaitable returned by :meth:`~object.__anext__`, it picks up where it left off. See :pep:`492` and :pep:`525`. @@ -564,7 +564,7 @@ Glossary An object created by a :term:`generator` function. Each :keyword:`yield` temporarily suspends processing, remembering the - location execution state (including local variables and pending + execution state (including local variables and pending try-statements). When the *generator iterator* resumes, it picks up where it left off (in contrast to functions which start fresh on every invocation). @@ -811,9 +811,11 @@ Glossary processed. loader - An object that loads a module. It must define a method named - :meth:`load_module`. A loader is typically returned by a - :term:`finder`. See also: + An object that loads a module. + It must define the :meth:`!exec_module` and :meth:`!create_module` methods + to implement the :class:`~importlib.abc.Loader` interface. + A loader is typically returned by a :term:`finder`. 
+ See also: * :ref:`finders-and-loaders` * :class:`importlib.abc.Loader` diff --git a/Doc/howto/mro.rst b/Doc/howto/mro.rst index 46db516e16dae4..0872bedcd3a2d3 100644 --- a/Doc/howto/mro.rst +++ b/Doc/howto/mro.rst @@ -398,7 +398,7 @@ with inheritance diagram We see that class G inherits from F and E, with F *before* E: therefore we would expect the attribute *G.remember2buy* to be inherited by -*F.rembermer2buy* and not by *E.remember2buy*: nevertheless Python 2.2 +*F.remember2buy* and not by *E.remember2buy*: nevertheless Python 2.2 gives >>> G.remember2buy # doctest: +SKIP diff --git a/Doc/library/__main__.rst b/Doc/library/__main__.rst index 647ff9da04d10d..4407ba2f7714dd 100644 --- a/Doc/library/__main__.rst +++ b/Doc/library/__main__.rst @@ -292,10 +292,7 @@ Here is an example module that consumes the ``__main__`` namespace:: if not did_user_define_their_name(): raise ValueError('Define the variable `my_name`!') - if '__file__' in dir(__main__): - print(__main__.my_name, "found in file", __main__.__file__) - else: - print(__main__.my_name) + print(__main__.my_name) Example usage of this module could be as follows:: @@ -330,7 +327,7 @@ status code 0, indicating success: .. code-block:: shell-session $ python start.py - Dinsdale found in file /path/to/start.py + Dinsdale Note that importing ``__main__`` doesn't cause any issues with unintentionally running top-level code meant for script use which is put in the @@ -361,8 +358,5 @@ defined in the REPL becomes part of the ``__main__`` scope:: >>> namely.print_user_name() Jabberwocky -Note that in this case the ``__main__`` scope doesn't contain a ``__file__`` -attribute as it's interactive. - The ``__main__`` scope is used in the implementation of :mod:`pdb` and :mod:`rlcompleter`. diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 3bf38a2212c0e0..15ef33e195904d 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -392,9 +392,9 @@ Creating Futures and Tasks If *factory* is ``None`` the default task factory will be set. Otherwise, *factory* must be a *callable* with the signature matching - ``(loop, coro, context=None)``, where *loop* is a reference to the active + ``(loop, coro, **kwargs)``, where *loop* is a reference to the active event loop, and *coro* is a coroutine object. The callable - must return a :class:`asyncio.Future`-compatible object. + must pass on all *kwargs*, and return a :class:`asyncio.Task`-compatible object. .. method:: loop.get_task_factory() diff --git a/Doc/library/asyncio-graph.rst b/Doc/library/asyncio-graph.rst new file mode 100644 index 00000000000000..814bfe6e269bf9 --- /dev/null +++ b/Doc/library/asyncio-graph.rst @@ -0,0 +1,145 @@ +.. currentmodule:: asyncio + + +.. _asyncio-graph: + +======================== +Call Graph Introspection +======================== + +**Source code:** :source:`Lib/asyncio/graph.py` + +------------------------------------- + +asyncio has powerful runtime call graph introspection utilities +to trace the entire call graph of a running *coroutine* or *task*, or +a suspended *future*. These utilities and the underlying machinery +can be used from within a Python program or by external profilers +and debuggers. + +.. versionadded:: next + + +.. function:: print_call_graph(future=None, /, *, file=None, depth=1, limit=None) + + Print the async call graph for the current task or the provided + :class:`Task` or :class:`Future`. 
+ + This function prints entries starting from the top frame and going + down towards the invocation point. + + The function receives an optional *future* argument. + If not passed, the current running task will be used. + + If the function is called on *the current task*, the optional + keyword-only *depth* argument can be used to skip the specified + number of frames from top of the stack. + + If the optional keyword-only *limit* argument is provided, each call stack + in the resulting graph is truncated to include at most ``abs(limit)`` + entries. If *limit* is positive, the entries left are the closest to + the invocation point. If *limit* is negative, the topmost entries are + left. If *limit* is omitted or ``None``, all entries are present. + If *limit* is ``0``, the call stack is not printed at all, only + "awaited by" information is printed. + + If *file* is omitted or ``None``, the function will print + to :data:`sys.stdout`. + + **Example:** + + The following Python code: + + .. code-block:: python + + import asyncio + + async def test(): + asyncio.print_call_graph() + + async def main(): + async with asyncio.TaskGroup() as g: + g.create_task(test(), name='test') + + asyncio.run(main()) + + will print:: + + * Task(name='test', id=0x1039f0fe0) + + Call stack: + | File 't2.py', line 4, in async test() + + Awaited by: + * Task(name='Task-1', id=0x103a5e060) + + Call stack: + | File 'taskgroups.py', line 107, in async TaskGroup.__aexit__() + | File 't2.py', line 7, in async main() + +.. function:: format_call_graph(future=None, /, *, depth=1, limit=None) + + Like :func:`print_call_graph`, but returns a string. + If *future* is ``None`` and there's no current task, + the function returns an empty string. + + +.. function:: capture_call_graph(future=None, /, *, depth=1, limit=None) + + Capture the async call graph for the current task or the provided + :class:`Task` or :class:`Future`. + + The function receives an optional *future* argument. + If not passed, the current running task will be used. If there's no + current task, the function returns ``None``. + + If the function is called on *the current task*, the optional + keyword-only *depth* argument can be used to skip the specified + number of frames from top of the stack. + + Returns a ``FutureCallGraph`` data class object: + + * ``FutureCallGraph(future, call_stack, awaited_by)`` + + Where *future* is a reference to a :class:`Future` or + a :class:`Task` (or their subclasses.) + + ``call_stack`` is a tuple of ``FrameCallGraphEntry`` objects. + + ``awaited_by`` is a tuple of ``FutureCallGraph`` objects. + + * ``FrameCallGraphEntry(frame)`` + + Where *frame* is a frame object of a regular Python function + in the call stack. + + +Low level utility functions +=========================== + +To introspect an async call graph asyncio requires cooperation from +control flow structures, such as :func:`shield` or :class:`TaskGroup`. +Any time an intermediate :class:`Future` object with low-level APIs like +:meth:`Future.add_done_callback() ` is +involved, the following two functions should be used to inform asyncio +about how exactly such intermediate future objects are connected with +the tasks they wrap or control. + + +.. function:: future_add_to_awaited_by(future, waiter, /) + + Record that *future* is awaited on by *waiter*. + + Both *future* and *waiter* must be instances of + :class:`Future` or :class:`Task` or their subclasses, + otherwise the call would have no effect. 
+ + A call to ``future_add_to_awaited_by()`` must be followed by an + eventual call to the :func:`future_discard_from_awaited_by` function + with the same arguments. + + +.. function:: future_discard_from_awaited_by(future, waiter, /) + + Record that *future* is no longer awaited on by *waiter*. + + Both *future* and *waiter* must be instances of + :class:`Future` or :class:`Task` or their subclasses, otherwise + the call would have no effect. diff --git a/Doc/library/asyncio.rst b/Doc/library/asyncio.rst index 5f83b3a2658da4..7d368dae49dc1d 100644 --- a/Doc/library/asyncio.rst +++ b/Doc/library/asyncio.rst @@ -99,6 +99,7 @@ You can experiment with an ``asyncio`` concurrent context in the :term:`REPL`: asyncio-subprocess.rst asyncio-queue.rst asyncio-exceptions.rst + asyncio-graph.rst .. toctree:: :caption: Low-level APIs diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 164d706e7738e2..615138302e1379 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -266,8 +266,8 @@ Fundamental data types (1) The constructor accepts any object with a truth value. -Additionally, if IEC 60559 compatible complex arithmetic (Annex G) is supported, the following -complex types are available: +Additionally, if IEC 60559 compatible complex arithmetic (Annex G) is supported +in both C and ``libffi``, the following complex types are available: +----------------------------------+---------------------------------+-----------------+ | ctypes type | C type | Python type | diff --git a/Doc/library/email.contentmanager.rst b/Doc/library/email.contentmanager.rst index a86e227429b06d..b33fe82a6e4c9f 100644 --- a/Doc/library/email.contentmanager.rst +++ b/Doc/library/email.contentmanager.rst @@ -157,7 +157,13 @@ Currently the email package provides only one concrete content manager, :exc:`ValueError`. * For ``str`` objects, if *cte* is not set use heuristics to - determine the most compact encoding. + determine the most compact encoding. Prior to encoding, + :meth:`str.splitlines` is used to normalize all line boundaries, + ensuring that each line of the payload is terminated by the + current policy's :data:`~email.policy.Policy.linesep` property + (even if the original string did not end with one). + * For ``bytes`` objects, *cte* is taken to be base64 if not set, + and the aforementioned newline translation is not performed. * For :class:`~email.message.EmailMessage`, per :rfc:`2046`, raise an error if a *cte* of ``quoted-printable`` or ``base64`` is requested for *subtype* ``rfc822``, and for any *cte* other than diff --git a/Doc/library/http.cookies.rst b/Doc/library/http.cookies.rst index ad37a0fca4742d..eb196320721194 100644 --- a/Doc/library/http.cookies.rst +++ b/Doc/library/http.cookies.rst @@ -142,6 +142,7 @@ Morsel Objects version httponly samesite + partitioned The attribute :attr:`httponly` specifies that the cookie is only transferred in HTTP requests, and is not accessible through JavaScript. This is intended @@ -151,6 +152,19 @@ Morsel Objects send the cookie along with cross-site requests. This helps to mitigate CSRF attacks. Valid values for this attribute are "Strict" and "Lax". + The attribute :attr:`partitioned` indicates to user agents that these + cross-site cookies *should* only be available in the same top-level context + that the cookie was first set in. For this to be accepted by the user agent, + you **must** also set ``Secure``. 
+ + In addition, it is recommended to use the ``__Host`` prefix when setting + partitioned cookies to make them bound to the hostname and not the + registrable domain. Read + `CHIPS (Cookies Having Independent Partitioned State)`_ + for full details and examples. + + .. _CHIPS (Cookies Having Independent Partitioned State): https://github.com/privacycg/CHIPS/blob/main/README.md + The keys are case-insensitive and their default value is ``''``. .. versionchanged:: 3.5 @@ -165,6 +179,9 @@ Morsel Objects .. versionchanged:: 3.8 Added support for the :attr:`samesite` attribute. + .. versionchanged:: 3.14 + Added support for the :attr:`partitioned` attribute. + .. attribute:: Morsel.value diff --git a/Doc/library/importlib.rst b/Doc/library/importlib.rst index 9e088a598a6c08..b935fc0e42a4bd 100644 --- a/Doc/library/importlib.rst +++ b/Doc/library/importlib.rst @@ -380,13 +380,15 @@ ABC hierarchy:: .. class:: ResourceLoader + *Superseded by TraversableResources* + An abstract base class for a :term:`loader` which implements the optional :pep:`302` protocol for loading arbitrary resources from the storage back-end. .. deprecated:: 3.7 This ABC is deprecated in favour of supporting resource loading - through :class:`importlib.resources.abc.ResourceReader`. + through :class:`importlib.resources.abc.TraversableResources`. .. abstractmethod:: get_data(path) diff --git a/Doc/library/inspect.rst b/Doc/library/inspect.rst index 0085207d3055f2..544efed1a76b96 100644 --- a/Doc/library/inspect.rst +++ b/Doc/library/inspect.rst @@ -150,6 +150,12 @@ attributes (see :ref:`import-mod-attrs` for module attributes): | | f_locals | local namespace seen by | | | | this frame | +-----------------+-------------------+---------------------------+ +| | f_generator | returns the generator or | +| | | coroutine object that | +| | | owns this frame, or | +| | | ``None`` if the frame is | +| | | of a regular function | ++-----------------+-------------------+---------------------------+ | | f_trace | tracing function for this | | | | frame, or ``None`` | +-----------------+-------------------+---------------------------+ @@ -310,6 +316,10 @@ attributes (see :ref:`import-mod-attrs` for module attributes): Add ``__builtins__`` attribute to functions. +.. versionchanged:: next + + Add ``f_generator`` attribute to frames. + .. function:: getmembers(object[, predicate]) Return all the members of an object in a list of ``(name, value)`` diff --git a/Doc/library/os.rst b/Doc/library/os.rst index 6fec27bc6f68a2..7d3596622862ea 100644 --- a/Doc/library/os.rst +++ b/Doc/library/os.rst @@ -1659,6 +1659,33 @@ or `the MSDN `_ on Windo :exc:`InterruptedError` exception (see :pep:`475` for the rationale). +.. function:: readinto(fd, buffer, /) + + Read from a file descriptor *fd* into a mutable + :ref:`buffer object ` *buffer*. + + The *buffer* should be mutable and :term:`bytes-like `. On + success, returns the number of bytes read. Less bytes may be read than the + size of the buffer. The underlying system call will be retried when + interrupted by a signal, unless the signal handler raises an exception. + Other errors will not be retried and an error will be raised. + + Returns 0 if *fd* is at end of file or if the provided *buffer* has + length 0 (which can be used to check for errors without reading data). + Never returns negative. + + .. note:: + + This function is intended for low-level I/O and must be applied to a file + descriptor as returned by :func:`os.open` or :func:`os.pipe`. 
To read a + "file object" returned by the built-in function :func:`open`, or + :data:`sys.stdin`, use its member functions, for example + :meth:`io.BufferedIOBase.readinto`, :meth:`io.BufferedIOBase.read`, or + :meth:`io.TextIOBase.read` + + .. versionadded:: next + + .. function:: sendfile(out_fd, in_fd, offset, count) sendfile(out_fd, in_fd, offset, count, headers=(), trailers=(), flags=0) @@ -5411,6 +5438,8 @@ information, consult your Unix manpages. The following scheduling policies are exposed if they are supported by the operating system. +.. _os-scheduling-policy: + .. data:: SCHED_OTHER The default scheduling policy. @@ -5514,7 +5543,7 @@ operating system. .. function:: sched_yield() - Voluntarily relinquish the CPU. + Voluntarily relinquish the CPU. See :manpage:`sched_yield(2)` for details. .. function:: sched_setaffinity(pid, mask, /) diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index dd6293c722e7ad..151fd60532048a 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -8,7 +8,7 @@ This module provides access to some variables used or maintained by the interpreter and to functions that interact strongly with the interpreter. It is -always available. +always available. Unless explicitly noted otherwise, all variables are read-only. .. data:: abiflags diff --git a/Doc/library/time.rst b/Doc/library/time.rst index 6265c2214eaa0d..804e2679027bd4 100644 --- a/Doc/library/time.rst +++ b/Doc/library/time.rst @@ -385,6 +385,8 @@ Functions The suspension time may be longer than requested by an arbitrary amount, because of the scheduling of other activity in the system. + .. rubric:: Windows implementation + On Windows, if *secs* is zero, the thread relinquishes the remainder of its time slice to any other thread that is ready to run. If there are no other threads ready to run, the function returns immediately, and the thread @@ -393,12 +395,19 @@ Functions `_ which provides resolution of 100 nanoseconds. If *secs* is zero, ``Sleep(0)`` is used. - Unix implementation: + .. rubric:: Unix implementation * Use ``clock_nanosleep()`` if available (resolution: 1 nanosecond); * Or use ``nanosleep()`` if available (resolution: 1 nanosecond); * Or use ``select()`` (resolution: 1 microsecond). + .. note:: + + To emulate a "no-op", use :keyword:`pass` instead of ``time.sleep(0)``. + + To voluntarily relinquish the CPU, specify a real-time :ref:`scheduling + policy ` and use :func:`os.sched_yield` instead. + .. audit-event:: time.sleep secs .. versionchanged:: 3.5 diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst index f719319a302a23..b80917eae66f8b 100644 --- a/Doc/library/tokenize.rst +++ b/Doc/library/tokenize.rst @@ -91,11 +91,10 @@ write back the modified script. sequences with at least two elements, the token type and the token string. Any additional sequence elements are ignored. - The reconstructed script is returned as a single string. The result is - guaranteed to tokenize back to match the input so that the conversion is - lossless and round-trips are assured. The guarantee applies only to the - token type and token string as the spacing between tokens (column - positions) may change. + The result is guaranteed to tokenize back to match the input so that the + conversion is lossless and round-trips are assured. The guarantee applies + only to the token type and token string as the spacing between tokens + (column positions) may change. 
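+
+   For illustration, a sketch of a round trip that preserves token types
+   and strings even though the exact spacing of the output may differ from
+   the input:
+
+   .. code-block:: python
+
+      import io
+      import tokenize
+
+      source = b"x = 1 +  2\n"   # note the extra spacing
+      tokens = list(tokenize.tokenize(io.BytesIO(source).readline))
+
+      # Feed back only (type, string) pairs; column positions are discarded.
+      result = tokenize.untokenize((tok.type, tok.string) for tok in tokens)
+      again = list(tokenize.tokenize(io.BytesIO(result).readline))
+
+      # Token types and strings round-trip, even if whitespace changed.
+      assert [t[:2] for t in tokens] == [t[:2] for t in again]
+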
It returns bytes, encoded using the :data:`~token.ENCODING` token, which is the first token sequence output by :func:`.tokenize`. If there is no diff --git a/Doc/library/turtle.rst b/Doc/library/turtle.rst index 548b9d3543fb94..98dfa0f53bc6a5 100644 --- a/Doc/library/turtle.rst +++ b/Doc/library/turtle.rst @@ -213,6 +213,31 @@ useful when working with learners for whom typing is not a skill. use turtle graphics with a learner. +Automatically begin and end filling +----------------------------------- + +Starting with Python 3.14, you can use the :func:`fill` :term:`context manager` +instead of :func:`begin_fill` and :func:`end_fill` to automatically begin and +end fill. Here is an example:: + + with fill(): + for i in range(4): + forward(100) + right(90) + + forward(200) + +The code above is equivalent to:: + + begin_fill() + for i in range(4): + forward(100) + right(90) + end_fill() + + forward(200) + + Use the ``turtle`` module namespace ----------------------------------- @@ -351,6 +376,7 @@ Pen control Filling | :func:`filling` + | :func:`fill` | :func:`begin_fill` | :func:`end_fill` @@ -381,6 +407,7 @@ Using events | :func:`ondrag` Special Turtle methods + | :func:`poly` | :func:`begin_poly` | :func:`end_poly` | :func:`get_poly` @@ -403,6 +430,7 @@ Window control | :func:`setworldcoordinates` Animation control + | :func:`no_animation` | :func:`delay` | :func:`tracer` | :func:`update` @@ -1275,6 +1303,29 @@ Filling ... else: ... turtle.pensize(3) +.. function:: fill() + + Fill the shape drawn in the ``with turtle.fill():`` block. + + .. doctest:: + :skipif: _tkinter is None + + >>> turtle.color("black", "red") + >>> with turtle.fill(): + ... turtle.circle(80) + + Using :func:`!fill` is equivalent to adding the :func:`begin_fill` before the + fill-block and :func:`end_fill` after the fill-block: + + .. doctest:: + :skipif: _tkinter is None + + >>> turtle.color("black", "red") + >>> turtle.begin_fill() + >>> turtle.circle(80) + >>> turtle.end_fill() + + .. versionadded:: next .. function:: begin_fill() @@ -1648,6 +1699,23 @@ Using events Special Turtle methods ---------------------- + +.. function:: poly() + + Record the vertices of a polygon drawn in the ``with turtle.poly():`` block. + The first and last vertices will be connected. + + .. doctest:: + :skipif: _tkinter is None + + >>> with turtle.poly(): + ... turtle.forward(100) + ... turtle.right(60) + ... turtle.forward(100) + + .. versionadded:: next + + .. function:: begin_poly() Start recording the vertices of a polygon. Current turtle position is first @@ -1926,6 +1994,23 @@ Window control Animation control ----------------- +.. function:: no_animation() + + Temporarily disable turtle animation. The code written inside the + ``no_animation`` block will not be animated; + once the code block is exited, the drawing will appear. + + .. doctest:: + :skipif: _tkinter is None + + >>> with screen.no_animation(): + ... for dist in range(2, 400, 2): + ... fd(dist) + ... rt(90) + + .. versionadded:: next + + .. function:: delay(delay=None) :param delay: positive integer diff --git a/Doc/license.rst b/Doc/license.rst index 428dc22b817ebe..61a26ab2867498 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -11,59 +11,63 @@ History of the software ======================= Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see https://www.cwi.nl/) in the Netherlands as a +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands as a successor of a language called ABC. 
Guido remains Python's principal author, although it includes many contributions from others. In 1995, Guido continued his work on Python at the Corporation for National -Research Initiatives (CNRI, see https://www.cnri.reston.va.us/) in Reston, +Research Initiatives (CNRI, see https://www.cnri.reston.va.us) in Reston, Virginia where he released several versions of the software. In May 2000, Guido and the Python core development team moved to BeOpen.com to form the BeOpen PythonLabs team. In October of the same year, the PythonLabs -team moved to Digital Creations (now Zope Corporation; see -https://www.zope.org/). In 2001, the Python Software Foundation (PSF, see +team moved to Digital Creations, which became +Zope Corporation. In 2001, the Python Software Foundation (PSF, see https://www.python.org/psf/) was formed, a non-profit organization created -specifically to own Python-related Intellectual Property. Zope Corporation is a +specifically to own Python-related Intellectual Property. Zope Corporation was a sponsoring member of the PSF. -All Python releases are Open Source (see https://opensource.org/ for the Open +All Python releases are Open Source (see https://opensource.org for the Open Source Definition). Historically, most, but not all, Python releases have also been GPL-compatible; the table below summarizes the various releases. -+----------------+--------------+------------+------------+-----------------+ -| Release | Derived from | Year | Owner | GPL compatible? | -+================+==============+============+============+=================+ -| 0.9.0 thru 1.2 | n/a | 1991-1995 | CWI | yes | -+----------------+--------------+------------+------------+-----------------+ -| 1.3 thru 1.5.2 | 1.2 | 1995-1999 | CNRI | yes | -+----------------+--------------+------------+------------+-----------------+ -| 1.6 | 1.5.2 | 2000 | CNRI | no | -+----------------+--------------+------------+------------+-----------------+ -| 2.0 | 1.6 | 2000 | BeOpen.com | no | -+----------------+--------------+------------+------------+-----------------+ -| 1.6.1 | 1.6 | 2001 | CNRI | no | -+----------------+--------------+------------+------------+-----------------+ -| 2.1 | 2.0+1.6.1 | 2001 | PSF | no | -+----------------+--------------+------------+------------+-----------------+ -| 2.0.1 | 2.0+1.6.1 | 2001 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.1.1 | 2.1+2.0.1 | 2001 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.1.2 | 2.1.1 | 2002 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.1.3 | 2.1.2 | 2002 | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ -| 2.2 and above | 2.1.1 | 2001-now | PSF | yes | -+----------------+--------------+------------+------------+-----------------+ ++----------------+--------------+------------+------------+---------------------+ +| Release | Derived from | Year | Owner | GPL-compatible? 
(1) | ++================+==============+============+============+=====================+ +| 0.9.0 thru 1.2 | n/a | 1991-1995 | CWI | yes | ++----------------+--------------+------------+------------+---------------------+ +| 1.3 thru 1.5.2 | 1.2 | 1995-1999 | CNRI | yes | ++----------------+--------------+------------+------------+---------------------+ +| 1.6 | 1.5.2 | 2000 | CNRI | no | ++----------------+--------------+------------+------------+---------------------+ +| 2.0 | 1.6 | 2000 | BeOpen.com | no | ++----------------+--------------+------------+------------+---------------------+ +| 1.6.1 | 1.6 | 2001 | CNRI | yes (2) | ++----------------+--------------+------------+------------+---------------------+ +| 2.1 | 2.0+1.6.1 | 2001 | PSF | no | ++----------------+--------------+------------+------------+---------------------+ +| 2.0.1 | 2.0+1.6.1 | 2001 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.1.1 | 2.1+2.0.1 | 2001 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.1.2 | 2.1.1 | 2002 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.1.3 | 2.1.2 | 2002 | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ +| 2.2 and above | 2.1.1 | 2001-now | PSF | yes | ++----------------+--------------+------------+------------+---------------------+ .. note:: - GPL-compatible doesn't mean that we're distributing Python under the GPL. All - Python licenses, unlike the GPL, let you distribute a modified version without - making your changes open source. The GPL-compatible licenses make it possible to - combine Python with other software that is released under the GPL; the others - don't. + (1) GPL-compatible doesn't mean that we're distributing Python under the GPL. + All Python licenses, unlike the GPL, let you distribute a modified version + without making your changes open source. The GPL-compatible licenses make + it possible to combine Python with other software that is released under + the GPL; the others don't. + + (2) According to Richard Stallman, 1.6.1 is not GPL-compatible, because its license + has a choice of law clause. According to CNRI, however, Stallman's lawyer has + told CNRI's lawyer that 1.6.1 is "not incompatible" with the GPL. Thanks to the many outside volunteers who have worked under Guido's direction to make these releases possible. @@ -73,10 +77,10 @@ Terms and conditions for accessing or otherwise using Python ============================================================ Python software and documentation are licensed under the -:ref:`PSF License Agreement `. +Python Software Foundation License Version 2. Starting with Python 3.8.6, examples, recipes, and other code in -the documentation are dual licensed under the PSF License Agreement +the documentation are dual licensed under the PSF License Version 2 and the :ref:`Zero-Clause BSD license `. Some software incorporated into Python is under different licenses. @@ -86,39 +90,38 @@ See :ref:`OtherLicenses` for an incomplete list of these licenses. .. _PSF-license: -PSF LICENSE AGREEMENT FOR PYTHON |release| ------------------------------------------- +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- .. parsed-literal:: 1. 
This LICENSE AGREEMENT is between the Python Software Foundation ("PSF"), and - the Individual or Organization ("Licensee") accessing and otherwise using Python - |release| software in source or binary form and its associated documentation. + the Individual or Organization ("Licensee") accessing and otherwise using this + software ("Python") in source or binary form and its associated documentation. 2. Subject to the terms and conditions of this License Agreement, PSF hereby grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, analyze, test, perform and/or display publicly, prepare derivative works, - distribute, and otherwise use Python |release| alone or in any derivative + distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright © 2001 Python Software Foundation; All Rights - Reserved" are retained in Python |release| alone or in any derivative version + Reserved" are retained in Python alone or in any derivative version prepared by Licensee. 3. In the event Licensee prepares a derivative work that is based on or - incorporates Python |release| or any part thereof, and wants to make the + incorporates Python or any part thereof, and wants to make the derivative work available to others as provided herein, then Licensee hereby - agrees to include in any such work a brief summary of the changes made to Python - |release|. + agrees to include in any such work a brief summary of the changes made to Python. - 4. PSF is making Python |release| available to Licensee on an "AS IS" basis. + 4. PSF is making Python available to Licensee on an "AS IS" basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE - USE OF PYTHON |release| WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. + USE OF PYTHON WILL NOT INFRINGE ANY THIRD PARTY RIGHTS. - 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON |release| + 5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS A RESULT OF - MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON |release|, OR ANY DERIVATIVE + MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. 6. This License Agreement will automatically terminate upon a material breach of @@ -130,7 +133,7 @@ PSF LICENSE AGREEMENT FOR PYTHON |release| trademark sense to endorse or promote products or services of Licensee, or any third party. - 8. By copying, installing or otherwise using Python |release|, Licensee agrees + 8. By copying, installing or otherwise using Python, Licensee agrees to be bound by the terms and conditions of this License Agreement. @@ -205,7 +208,7 @@ CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 Agreement. This Agreement together with Python 1.6.1 may be located on the internet using the following unique, persistent identifier (known as a handle): 1895.22/1013. This Agreement may also be obtained from a proxy server on the - internet using the following URL: http://hdl.handle.net/1895.22/1013." + internet using the following URL: http://hdl.handle.net/1895.22/1013". 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python 1.6.1 or any part thereof, and wants to make the derivative @@ -273,8 +276,8 @@ CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 .. _BSD0: -ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON |release| DOCUMENTATION ----------------------------------------------------------------------- +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +------------------------------------------------------------ .. parsed-literal:: diff --git a/Doc/make.bat b/Doc/make.bat index 87d8359ef112bb..ede793ed3c6d70 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -144,12 +144,12 @@ if exist ..\Misc\NEWS ( ) if defined PAPER ( - set SPHINXOPTS=-D latex_elements.papersize=%PAPER% %SPHINXOPTS% + set SPHINXOPTS=--define latex_elements.papersize=%PAPER% %SPHINXOPTS% ) if "%1" EQU "htmlhelp" ( - set SPHINXOPTS=-D html_theme_options.body_max_width=none %SPHINXOPTS% + set SPHINXOPTS=--define html_theme_options.body_max_width=none %SPHINXOPTS% ) -cmd /S /C "%SPHINXBUILD% %SPHINXOPTS% -b%1 -dbuild\doctrees . "%BUILDDIR%\%1" %2 %3 %4 %5 %6 %7 %8 %9" +cmd /S /C "%SPHINXBUILD% %SPHINXOPTS% --builder %1 --doctree-dir build\doctrees . "%BUILDDIR%\%1" %2 %3 %4 %5 %6 %7 %8 %9" if "%1" EQU "htmlhelp" ( "%HTMLHELP%" "%BUILDDIR%\htmlhelp\python%DISTVERSION:.=%.hhp" diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst index 1b1e9f479cbe08..71cc0c83de567e 100644 --- a/Doc/reference/compound_stmts.rst +++ b/Doc/reference/compound_stmts.rst @@ -1217,8 +1217,10 @@ A function definition defines a user-defined function object (see section : | `parameter_list_no_posonly` parameter_list_no_posonly: `defparameter` ("," `defparameter`)* ["," [`parameter_list_starargs`]] : | `parameter_list_starargs` - parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," ["**" `parameter` [","]]] - : | "**" `parameter` [","] + parameter_list_starargs: "*" [`star_parameter`] ("," `defparameter`)* ["," [`parameter_star_kwargs`]] + : "*" ("," `defparameter`)+ ["," [`parameter_star_kwargs`]] + : | `parameter_star_kwargs` + parameter_star_kwargs: "**" `parameter` [","] parameter: `identifier` [":" `expression`] star_parameter: `identifier` [":" ["*"] `expression`] defparameter: `parameter` ["=" `expression`] diff --git a/Doc/reference/import.rst b/Doc/reference/import.rst index ac363e8cfa00dc..48fdd0f5d021c7 100644 --- a/Doc/reference/import.rst +++ b/Doc/reference/import.rst @@ -762,10 +762,10 @@ module. The current working directory -- denoted by an empty string -- is handled slightly differently from other entries on :data:`sys.path`. First, if the -current working directory is found to not exist, no value is stored in -:data:`sys.path_importer_cache`. Second, the value for the current working -directory is looked up fresh for each module lookup. Third, the path used for -:data:`sys.path_importer_cache` and returned by +current working directory cannot be determined or is found not to exist, no +value is stored in :data:`sys.path_importer_cache`. Second, the value for the +current working directory is looked up fresh for each module lookup. Third, +the path used for :data:`sys.path_importer_cache` and returned by :meth:`importlib.machinery.PathFinder.find_spec` will be the actual current working directory and not the empty string. 
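
To make the working-directory behaviour described above concrete, here is an
illustrative sketch (``example_mod`` is an invented name; assume
``example_mod.py`` exists in the current working directory, which
:data:`sys.path` denotes by the empty string):

.. code-block:: python

   import os
   import sys
   from importlib.machinery import PathFinder

   # Look the module up via the '' entry, as the import system would.
   spec = PathFinder.find_spec("example_mod", path=[""])
   if spec is not None:
       print(spec.origin)  # an absolute path, not one relative to ''
       # The cache is keyed on the real directory, not the empty string.
       print(os.getcwd() in sys.path_importer_cache)
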
diff --git a/Doc/requirements-oldest-sphinx.txt b/Doc/requirements-oldest-sphinx.txt deleted file mode 100644 index c8027a05706c21..00000000000000 --- a/Doc/requirements-oldest-sphinx.txt +++ /dev/null @@ -1,35 +0,0 @@ -# Requirements to build the Python documentation, for the oldest supported -# Sphinx version. -# -# We pin Sphinx and all of its dependencies to ensure a consistent environment. - -blurb -python-docs-theme>=2022.1 - -# Generated from: -# pip install "Sphinx~=7.2.6" -# pip freeze -# -# Sphinx 7.2.6 comes from ``needs_sphinx = '7.2.6'`` in ``Doc/conf.py``. - -alabaster==0.7.16 -babel==2.16.0 -certifi==2024.12.14 -charset-normalizer==3.4.0 -docutils==0.20.1 -idna==3.10 -imagesize==1.4.1 -Jinja2==3.1.5 -MarkupSafe==3.0.2 -packaging==24.2 -Pygments==2.18.0 -requests==2.32.3 -snowballstemmer==2.2.0 -Sphinx==7.2.6 -sphinxcontrib-applehelp==2.0.0 -sphinxcontrib-devhelp==2.0.0 -sphinxcontrib-htmlhelp==2.1.0 -sphinxcontrib-jsmath==1.0.1 -sphinxcontrib-qthelp==2.0.0 -sphinxcontrib-serializinghtml==2.0.0 -urllib3==2.3.0 diff --git a/Doc/requirements.txt b/Doc/requirements.txt index 5105786ccf283c..32ff8f74d05bb6 100644 --- a/Doc/requirements.txt +++ b/Doc/requirements.txt @@ -3,9 +3,10 @@ # Note that when updating this file, you will likely also have to update # the Doc/constraints.txt file. -# Sphinx version is pinned so that new versions that introduce new warnings +# The Sphinx version is pinned so that new versions that introduce new warnings # won't suddenly cause build failures. Updating the version is fine as long # as no warnings are raised by doing so. +# Keep this version in sync with ``Doc/conf.py``. sphinx~=8.1.0 blurb diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore index 6940c95ab2c9a1..ad24fe82f754fc 100644 --- a/Doc/tools/.nitignore +++ b/Doc/tools/.nitignore @@ -12,7 +12,6 @@ Doc/c-api/stable.rst Doc/c-api/type.rst Doc/c-api/typeobj.rst Doc/extending/extending.rst -Doc/glossary.rst Doc/library/ast.rst Doc/library/asyncio-extending.rst Doc/library/asyncio-subprocess.rst diff --git a/Doc/tools/extensions/c_annotations.py b/Doc/tools/extensions/c_annotations.py index 50065d34a2c27a..089614a1f6c421 100644 --- a/Doc/tools/extensions/c_annotations.py +++ b/Doc/tools/extensions/c_annotations.py @@ -16,7 +16,6 @@ from pathlib import Path from typing import TYPE_CHECKING -import sphinx from docutils import nodes from docutils.statemachine import StringList from sphinx import addnodes @@ -285,16 +284,6 @@ def setup(app: Sphinx) -> ExtensionMetadata: app.connect("builder-inited", init_annotations) app.connect("doctree-read", add_annotations) - if sphinx.version_info[:2] < (7, 2): - from docutils.parsers.rst import directives - from sphinx.domains.c import CObject - - # monkey-patch C object... 
- CObject.option_spec |= { - "no-index-entry": directives.flag, - "no-contents-entry": directives.flag, - } - return { "version": "1.0", "parallel_read_safe": True, diff --git a/Doc/tools/extensions/changes.py b/Doc/tools/extensions/changes.py new file mode 100644 index 00000000000000..8de5e7f78c6627 --- /dev/null +++ b/Doc/tools/extensions/changes.py @@ -0,0 +1,90 @@ +"""Support for documenting version of changes, additions, deprecations.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +from sphinx.domains.changeset import ( + VersionChange, + versionlabel_classes, + versionlabels, +) +from sphinx.locale import _ as sphinx_gettext + +if TYPE_CHECKING: + from docutils.nodes import Node + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + + +def expand_version_arg(argument: str, release: str) -> str: + """Expand "next" to the current version""" + if argument == "next": + return sphinx_gettext("{} (unreleased)").format(release) + return argument + + +class PyVersionChange(VersionChange): + def run(self) -> list[Node]: + # Replace the 'next' special token with the current development version + self.arguments[0] = expand_version_arg( + self.arguments[0], self.config.release + ) + return super().run() + + +class DeprecatedRemoved(VersionChange): + required_arguments = 2 + + _deprecated_label = sphinx_gettext( + "Deprecated since version %s, will be removed in version %s" + ) + _removed_label = sphinx_gettext( + "Deprecated since version %s, removed in version %s" + ) + + def run(self) -> list[Node]: + # Replace the first two arguments (deprecated version and removed version) + # with a single tuple of both versions. + version_deprecated = expand_version_arg( + self.arguments[0], self.config.release + ) + version_removed = self.arguments.pop(1) + if version_removed == "next": + raise ValueError( + "deprecated-removed:: second argument cannot be `next`" + ) + self.arguments[0] = version_deprecated, version_removed + + # Set the label based on if we have reached the removal version + current_version = tuple(map(int, self.config.version.split("."))) + removed_version = tuple(map(int, version_removed.split("."))) + if current_version < removed_version: + versionlabels[self.name] = self._deprecated_label + versionlabel_classes[self.name] = "deprecated" + else: + versionlabels[self.name] = self._removed_label + versionlabel_classes[self.name] = "removed" + try: + return super().run() + finally: + # reset versionlabels and versionlabel_classes + versionlabels[self.name] = "" + versionlabel_classes[self.name] = "" + + +def setup(app: Sphinx) -> ExtensionMetadata: + # Override Sphinx's directives with support for 'next' + app.add_directive("versionadded", PyVersionChange, override=True) + app.add_directive("versionchanged", PyVersionChange, override=True) + app.add_directive("versionremoved", PyVersionChange, override=True) + app.add_directive("deprecated", PyVersionChange, override=True) + + # Register the ``.. 
deprecated-removed::`` directive + app.add_directive("deprecated-removed", DeprecatedRemoved) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/pydoc_topics.py b/Doc/tools/extensions/pydoc_topics.py new file mode 100644 index 00000000000000..6e43df2e4bf9de --- /dev/null +++ b/Doc/tools/extensions/pydoc_topics.py @@ -0,0 +1,187 @@ +"""Support for building "topic help" for pydoc.""" + +from __future__ import annotations + +from time import asctime +from typing import TYPE_CHECKING + +from sphinx.builders.text import TextBuilder +from sphinx.util import logging +from sphinx.util.display import status_iterator +from sphinx.util.docutils import new_document +from sphinx.writers.text import TextTranslator + +if TYPE_CHECKING: + from collections.abc import Sequence, Set + + from sphinx.application import Sphinx + from sphinx.util.typing import ExtensionMetadata + +logger = logging.getLogger(__name__) + +_PYDOC_TOPIC_LABELS: Sequence[str] = sorted({ + "assert", + "assignment", + "assignment-expressions", + "async", + "atom-identifiers", + "atom-literals", + "attribute-access", + "attribute-references", + "augassign", + "await", + "binary", + "bitwise", + "bltin-code-objects", + "bltin-ellipsis-object", + "bltin-null-object", + "bltin-type-objects", + "booleans", + "break", + "callable-types", + "calls", + "class", + "comparisons", + "compound", + "context-managers", + "continue", + "conversions", + "customization", + "debugger", + "del", + "dict", + "dynamic-features", + "else", + "exceptions", + "execmodel", + "exprlists", + "floating", + "for", + "formatstrings", + "function", + "global", + "id-classes", + "identifiers", + "if", + "imaginary", + "import", + "in", + "integers", + "lambda", + "lists", + "naming", + "nonlocal", + "numbers", + "numeric-types", + "objects", + "operator-summary", + "pass", + "power", + "raise", + "return", + "sequence-types", + "shifting", + "slicings", + "specialattrs", + "specialnames", + "string-methods", + "strings", + "subscriptions", + "truth", + "try", + "types", + "typesfunctions", + "typesmapping", + "typesmethods", + "typesmodules", + "typesseq", + "typesseq-mutable", + "unary", + "while", + "with", + "yield", +}) + + +class PydocTopicsBuilder(TextBuilder): + name = "pydoc-topics" + + def init(self) -> None: + super().init() + self.topics: dict[str, str] = {} + + def get_outdated_docs(self) -> str: + # Return a string describing what an update build will build. + return "all pydoc topics" + + def write_documents(self, _docnames: Set[str]) -> None: + env = self.env + + labels: dict[str, tuple[str, str, str]] + labels = env.domains.standard_domain.labels + + # docname -> list of (topic_label, label_id) pairs + doc_labels: dict[str, list[tuple[str, str]]] = {} + for topic_label in _PYDOC_TOPIC_LABELS: + try: + docname, label_id, _section_name = labels[topic_label] + except KeyError: + logger.warning("label %r not in documentation", topic_label) + continue + doc_labels.setdefault(docname, []).append((topic_label, label_id)) + + for docname, label_ids in status_iterator( + doc_labels.items(), + "building topics... ", + length=len(doc_labels), + stringify_func=_display_labels, + ): + doctree = env.get_and_resolve_doctree(docname, builder=self) + doc_ids = doctree.ids + for topic_label, label_id in label_ids: + document = new_document("
") + document.append(doc_ids[label_id]) + visitor = TextTranslator(document, builder=self) + document.walkabout(visitor) + self.topics[topic_label] = visitor.body + + def finish(self) -> None: + topics_repr = "\n".join( + f" '{topic}': {_repr(self.topics[topic])}," + for topic in sorted(self.topics) + ) + topics = f"""\ +# Autogenerated by Sphinx on {asctime()} +# as part of the release process. + +topics = {{ +{topics_repr} +}} +""" + self.outdir.joinpath("topics.py").write_text(topics, encoding="utf-8") + + +def _display_labels(item: tuple[str, Sequence[tuple[str, str]]]) -> str: + _docname, label_ids = item + labels = [name for name, _id in label_ids] + if len(labels) > 4: + return f"{labels[0]}, {labels[1]}, ..., {labels[-2]}, {labels[-1]}" + return ", ".join(labels) + + +def _repr(text: str, /) -> str: + """Return a triple-single-quoted representation of text.""" + if "'''" not in text: + return f"r'''{text}'''" + text = text.replace("\\", "\\\\").replace("'''", r"\'\'\'") + return f"'''{text}'''" + + +def setup(app: Sphinx) -> ExtensionMetadata: + app.add_builder(PydocTopicsBuilder) + + return { + "version": "1.0", + "parallel_read_safe": True, + "parallel_write_safe": True, + } diff --git a/Doc/tools/extensions/pyspecific.py b/Doc/tools/extensions/pyspecific.py index bcb8a421e32d09..f363dfd4216929 100644 --- a/Doc/tools/extensions/pyspecific.py +++ b/Doc/tools/extensions/pyspecific.py @@ -12,21 +12,14 @@ import re import io from os import getenv, path -from time import asctime -from pprint import pformat from docutils import nodes -from docutils.io import StringOutput from docutils.parsers.rst import directives -from docutils.utils import new_document, unescape +from docutils.utils import unescape from sphinx import addnodes -from sphinx.builders import Builder -from sphinx.domains.changeset import VersionChange, versionlabels, versionlabel_classes from sphinx.domains.python import PyFunction, PyMethod, PyModule from sphinx.locale import _ as sphinx_gettext from sphinx.util.docutils import SphinxDirective -from sphinx.writers.text import TextWriter, TextTranslator -from sphinx.util.display import status_iterator ISSUE_URI = 'https://bugs.python.org/issue?@action=redirect&bpo=%s' @@ -41,16 +34,6 @@ Body.enum.converters['lowerroman'] = \ Body.enum.converters['upperroman'] = lambda x: None -# monkey-patch the productionlist directive to allow hyphens in group names -# https://github.com/sphinx-doc/sphinx/issues/11854 -from sphinx.domains import std - -std.token_re = re.compile(r'`((~?[\w-]*:)?\w+)`') - -# backport :no-index: -PyModule.option_spec['no-index'] = directives.flag - - # Support for marking up and linking to bugs.python.org issues def issue_role(typ, rawtext, text, lineno, inliner, options={}, content=[]): @@ -107,32 +90,6 @@ def run(self): return [pnode] -# Support for documenting decorators - -class PyDecoratorMixin(object): - def handle_signature(self, sig, signode): - ret = super(PyDecoratorMixin, self).handle_signature(sig, signode) - signode.insert(0, addnodes.desc_addname('@', '@')) - return ret - - def needs_arglist(self): - return False - - -class PyDecoratorFunction(PyDecoratorMixin, PyFunction): - def run(self): - # a decorator function is a function after all - self.name = 'py:function' - return PyFunction.run(self) - - -# TODO: Use sphinx.domains.python.PyDecoratorMethod when possible -class PyDecoratorMethod(PyDecoratorMixin, PyMethod): - def run(self): - self.name = 'py:method' - return PyMethod.run(self) - - class PyCoroutineMixin(object): def 
handle_signature(self, sig, signode): ret = super(PyCoroutineMixin, self).handle_signature(sig, signode) @@ -184,57 +141,6 @@ def run(self): return PyMethod.run(self) -# Support for documenting version of changes, additions, deprecations - -def expand_version_arg(argument, release): - """Expand "next" to the current version""" - if argument == 'next': - return sphinx_gettext('{} (unreleased)').format(release) - return argument - - -class PyVersionChange(VersionChange): - def run(self): - # Replace the 'next' special token with the current development version - self.arguments[0] = expand_version_arg(self.arguments[0], - self.config.release) - return super().run() - - -class DeprecatedRemoved(VersionChange): - required_arguments = 2 - - _deprecated_label = sphinx_gettext('Deprecated since version %s, will be removed in version %s') - _removed_label = sphinx_gettext('Deprecated since version %s, removed in version %s') - - def run(self): - # Replace the first two arguments (deprecated version and removed version) - # with a single tuple of both versions. - version_deprecated = expand_version_arg(self.arguments[0], - self.config.release) - version_removed = self.arguments.pop(1) - if version_removed == 'next': - raise ValueError( - 'deprecated-removed:: second argument cannot be `next`') - self.arguments[0] = version_deprecated, version_removed - - # Set the label based on if we have reached the removal version - current_version = tuple(map(int, self.config.version.split('.'))) - removed_version = tuple(map(int, version_removed.split('.'))) - if current_version < removed_version: - versionlabels[self.name] = self._deprecated_label - versionlabel_classes[self.name] = 'deprecated' - else: - versionlabels[self.name] = self._removed_label - versionlabel_classes[self.name] = 'removed' - try: - return super().run() - finally: - # reset versionlabels and versionlabel_classes - versionlabels[self.name] = '' - versionlabel_classes[self.name] = '' - - # Support for including Misc/NEWS issue_re = re.compile('(?:[Ii]ssue #|bpo-)([0-9]+)', re.I) @@ -275,69 +181,6 @@ def run(self): return [] -# Support for building "topic help" for pydoc - -pydoc_topic_labels = [ - 'assert', 'assignment', 'assignment-expressions', 'async', 'atom-identifiers', - 'atom-literals', 'attribute-access', 'attribute-references', 'augassign', 'await', - 'binary', 'bitwise', 'bltin-code-objects', 'bltin-ellipsis-object', - 'bltin-null-object', 'bltin-type-objects', 'booleans', - 'break', 'callable-types', 'calls', 'class', 'comparisons', 'compound', - 'context-managers', 'continue', 'conversions', 'customization', 'debugger', - 'del', 'dict', 'dynamic-features', 'else', 'exceptions', 'execmodel', - 'exprlists', 'floating', 'for', 'formatstrings', 'function', 'global', - 'id-classes', 'identifiers', 'if', 'imaginary', 'import', 'in', 'integers', - 'lambda', 'lists', 'naming', 'nonlocal', 'numbers', 'numeric-types', - 'objects', 'operator-summary', 'pass', 'power', 'raise', 'return', - 'sequence-types', 'shifting', 'slicings', 'specialattrs', 'specialnames', - 'string-methods', 'strings', 'subscriptions', 'truth', 'try', 'types', - 'typesfunctions', 'typesmapping', 'typesmethods', 'typesmodules', - 'typesseq', 'typesseq-mutable', 'unary', 'while', 'with', 'yield' -] - - -class PydocTopicsBuilder(Builder): - name = 'pydoc-topics' - - default_translator_class = TextTranslator - - def init(self): - self.topics = {} - self.secnumbers = {} - - def get_outdated_docs(self): - return 'all pydoc topics' - - def get_target_uri(self, docname, 
typ=None): - return '' # no URIs - - def write(self, *ignored): - writer = TextWriter(self) - for label in status_iterator(pydoc_topic_labels, - 'building topics... ', - length=len(pydoc_topic_labels)): - if label not in self.env.domaindata['std']['labels']: - self.env.logger.warning(f'label {label!r} not in documentation') - continue - docname, labelid, sectname = self.env.domaindata['std']['labels'][label] - doctree = self.env.get_and_resolve_doctree(docname, self) - document = new_document('
') - document.append(doctree.ids[labelid]) - destination = StringOutput(encoding='utf-8') - writer.write(document, destination) - self.topics[label] = writer.output - - def finish(self): - f = open(path.join(self.outdir, 'topics.py'), 'wb') - try: - f.write('# -*- coding: utf-8 -*-\n'.encode('utf-8')) - f.write(('# Autogenerated by Sphinx on %s\n' % asctime()).encode('utf-8')) - f.write('# as part of the release process.\n'.encode('utf-8')) - f.write(('topics = ' + pformat(self.topics) + '\n').encode('utf-8')) - finally: - f.close() - - # Support for documenting Opcodes opcode_sig_re = re.compile(r'(\w+(?:\+\d)?)(?:\s*\((.*)\))?') @@ -417,17 +260,9 @@ def setup(app): app.add_role('issue', issue_role) app.add_role('gh', gh_issue_role) app.add_directive('impl-detail', ImplementationDetail) - app.add_directive('versionadded', PyVersionChange, override=True) - app.add_directive('versionchanged', PyVersionChange, override=True) - app.add_directive('versionremoved', PyVersionChange, override=True) - app.add_directive('deprecated', PyVersionChange, override=True) - app.add_directive('deprecated-removed', DeprecatedRemoved) - app.add_builder(PydocTopicsBuilder) app.add_object_type('opcode', 'opcode', '%s (opcode)', parse_opcode_signature) app.add_object_type('pdbcommand', 'pdbcmd', '%s (pdb command)', parse_pdb_command) app.add_object_type('monitoring-event', 'monitoring-event', '%s (monitoring event)', parse_monitoring_event) - app.add_directive_to_domain('py', 'decorator', PyDecoratorFunction) - app.add_directive_to_domain('py', 'decoratormethod', PyDecoratorMethod) app.add_directive_to_domain('py', 'coroutinefunction', PyCoroutineFunction) app.add_directive_to_domain('py', 'coroutinemethod', PyCoroutineMethod) app.add_directive_to_domain('py', 'awaitablefunction', PyAwaitableFunction) diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 7db2f4820f346a..2a59cf3f62d4c5 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -1195,7 +1195,7 @@ conflict. .. envvar:: PYTHON_BASIC_REPL - If this variable is set to ``1``, the interpreter will not attempt to + If this variable is set to any value, the interpreter will not attempt to load the Python-based :term:`REPL` that requires :mod:`curses` and :mod:`readline`, and will instead use the traditional parser-based :term:`REPL`. diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index c2cfdf3186f86d..f2a8c711f4df88 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -1499,8 +1499,20 @@ All of the following modules were deprecated in Python 3.11, and are now removed: * :mod:`!aifc` + + * :pypi:`standard-aifc`: + Use the redistribution of ``aifc`` library from PyPI. + * :mod:`!audioop` + + * :pypi:`audioop-lts`: + Use ``audioop-lts`` library from PyPI. + * :mod:`!chunk` + + * :pypi:`standard-chunk`: + Use the redistribution of ``chunk`` library from PyPI. + * :mod:`!cgi` and :mod:`!cgitb` * :class:`!cgi.FieldStorage` can typically be replaced with @@ -1531,6 +1543,9 @@ and are now removed: For example, the :class:`email.message.EmailMessage` and :class:`email.message.Message` classes. + * :pypi:`standard-cgi`: and :pypi:`standard-cgitb`: + Use the redistribution of ``cgi`` and ``cgitb`` library from PyPI. + * :mod:`!crypt` and the private :mod:`!_crypt` extension. The :mod:`hashlib` module may be an appropriate replacement when simply hashing a value is required. 
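
As a concrete illustration of the :mod:`hashlib` suggestion above, a sketch
(the parameters are examples only, not a vetted password-storage recipe):

.. code-block:: python

   import hashlib
   import os

   # Illustrative stand-in for the removed crypt.crypt(): a salted,
   # memory-hard hash of a secret.
   salt = os.urandom(16)
   digest = hashlib.scrypt(b"correct horse battery staple",
                           salt=salt, n=2**14, r=8, p=1)
   print(salt.hex(), digest.hex())
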
@@ -1549,6 +1564,8 @@ and are now removed: Fork of the :mod:`!crypt` module, wrapper to the :manpage:`crypt_r(3)` library call and associated functionality. + * :pypi:`standard-crypt` and :pypi:`deprecated-crypt-alternative`: + Use the redistribution of ``crypt`` and reimplementation of ``_crypt`` libraries from PyPI. * :mod:`!imghdr`: The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries @@ -1556,30 +1573,65 @@ and are now removed: For example, the :func:`!puremagic.what` function can be used to replace the :func:`!imghdr.what` function for all file formats that were supported by :mod:`!imghdr`. + + * :pypi:`standard-imghdr`: + Use the redistribution of ``imghdr`` library from PyPI. + * :mod:`!mailcap`: Use the :mod:`mimetypes` module instead. + + * :pypi:`standard-mailcap`: + Use the redistribution of ``mailcap`` library from PyPI. + * :mod:`!msilib` * :mod:`!nis` * :mod:`!nntplib`: Use the :pypi:`pynntp` library from PyPI instead. + + * :pypi:`standard-nntplib`: + Use the redistribution of ``nntplib`` library from PyPI. + * :mod:`!ossaudiodev`: For audio playback, use the :pypi:`pygame` library from PyPI instead. * :mod:`!pipes`: Use the :mod:`subprocess` module instead. Use :func:`shlex.quote` to replace the undocumented ``pipes.quote`` function. + + * :pypi:`standard-pipes`: + Use the redistribution of ``pipes`` library from PyPI. + * :mod:`!sndhdr`: The :pypi:`filetype`, :pypi:`puremagic`, or :pypi:`python-magic` libraries should be used as replacements. + + * :pypi:`standard-sndhdr`: + Use the redistribution of ``sndhdr`` library from PyPI. + * :mod:`!spwd`: Use the :pypi:`python-pam` library from PyPI instead. * :mod:`!sunau` + + * :pypi:`standard-sunau`: + Use the redistribution of ``sunau`` library from PyPI. + * :mod:`!telnetlib`, Use the :pypi:`telnetlib3` or :pypi:`Exscript` libraries from PyPI instead. + + * :pypi:`standard-telnetlib`: + Use the redistribution of ``telnetlib`` library from PyPI. + * :mod:`!uu`: Use the :mod:`base64` module instead, as a modern alternative. + + * :pypi:`standard-uu`: + Use the redistribution of ``uu`` library from PyPI. + * :mod:`!xdrlib` + * :pypi:`standard-xdrlib`: + Use the redistribution of ``xdrlib`` library from PyPI. + (Contributed by Victor Stinner and Zachary Ware in :gh:`104773` and :gh:`104780`.) diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index 9f7ef101e56478..7b9e5aca782d06 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -351,6 +351,12 @@ ctypes callback functions that are passed dynamically-sized buffers. (Contributed by Rian Hunter in :gh:`112018`.) +* Complex types, :class:`~ctypes.c_float_complex`, + :class:`~ctypes.c_double_complex` and :class:`~ctypes.c_longdouble_complex`, + are now available if both the compiler and the ``libffi`` library support + complex C types. + (Contributed by Sergey B Kirpichev in :gh:`61103`). + datetime -------- @@ -555,6 +561,10 @@ os to the :mod:`os` module. (Contributed by James Roy in :gh:`127688`.) +* Add the :func:`os.readinto` function to read into a + :ref:`buffer object ` from a file descriptor. + (Contributed by Cody Maloney in :gh:`129205`.) + pathlib ------- @@ -660,6 +670,14 @@ tkinter (Contributed by Zhikang Yan in :gh:`126899`.) +turtle +------ + +* Add context managers for :func:`turtle.fill`, :func:`turtle.poly` + and :func:`turtle.no_animation`. + (Contributed by Marie Roald and Yngve Mardal Moe in :gh:`126350`.) + + unicodedata ----------- @@ -722,6 +740,12 @@ zipinfo (Contributed by Bénédikt Tran in :gh:`123424`.) 
+* :meth:`zipfile.ZipFile.writestr` now respect ``SOURCE_DATE_EPOCH`` that + distributions can set centrally and have build tools consume this in order + to produce reproducible output. + + (Contributed by Jiahao Li in :gh:`91279`.) + .. Add improved modules above alphabetically, not here at the end. Optimizations @@ -735,6 +759,11 @@ asyncio reduces memory usage. (Contributed by Kumar Aditya in :gh:`107803`.) +* :mod:`asyncio` has new utility functions for introspecting and printing + the program's call graph: :func:`asyncio.capture_call_graph` and + :func:`asyncio.print_call_graph`. + (Contributed by Yury Selivanov, Pablo Galindo Salgado, and Łukasz Langa + in :gh:`91048`.) base64 ------ @@ -1263,7 +1292,7 @@ New features * :c:func:`PyLongWriter_Finish`; * :c:func:`PyLongWriter_Discard`. - (Contributed by Victor Stinner in :gh:`102471`.) + (Contributed by Sergey B Kirpichev and Victor Stinner in :gh:`102471`.) * Add :c:func:`PyType_GetBaseByToken` and :c:data:`Py_tp_token` slot for easier superclass identification, which attempts to resolve the `type checking issue @@ -1356,14 +1385,40 @@ Deprecated .. include:: ../deprecations/c-api-pending-removal-in-3.15.rst +.. include:: ../deprecations/c-api-pending-removal-in-3.18.rst + .. include:: ../deprecations/c-api-pending-removal-in-future.rst * The ``PyMonitoring_FireBranchEvent`` function is deprecated and should be replaced with calls to :c:func:`PyMonitoring_FireBranchLeftEvent` and :c:func:`PyMonitoring_FireBranchRightEvent`. +* The following private functions are deprecated and planned for removal in + Python 3.18: + + * :c:func:`!_PyBytes_Join`: use :c:func:`PyBytes_Join`. + * :c:func:`!_PyDict_GetItemStringWithError`: use :c:func:`PyDict_GetItemStringRef`. + * :c:func:`!_PyDict_Pop()`: use :c:func:`PyDict_Pop`. + * :c:func:`!_PyLong_Sign()`: use :c:func:`PyLong_GetSign`. + * :c:func:`!_PyLong_FromDigits` and :c:func:`!_PyLong_New`: + use :c:func:`PyLongWriter_Create`. + * :c:func:`!_PyThreadState_UncheckedGet`: use :c:func:`PyThreadState_GetUnchecked`. + * :c:func:`!_PyUnicode_AsString`: use :c:func:`PyUnicode_AsUTF8`. + * :c:func:`!_Py_HashPointer`: use :c:func:`Py_HashPointer`. + * :c:func:`!_Py_fopen_obj`: use :c:func:`Py_fopen`. + + The `pythoncapi-compat project`_ can be used to get these new public + functions on Python 3.13 and older. + + (Contributed by Victor Stinner in :gh:`128863`.) + + Removed ------- * Creating :c:data:`immutable types ` with mutable bases was deprecated since 3.12 and now raises a :exc:`TypeError`. + +* Remove the private ``_Py_InitializeMain()`` function. It was a + :term:`provisional API` added to Python 3.8 by :pep:`587`. + (Contributed by Victor Stinner in :gh:`129033`.) 
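
To illustrate the new asyncio call-graph helpers listed above, a sketch
(assuming Python 3.14+; the task name and the printed output are
illustrative):

.. code-block:: python

   import asyncio

   async def leaf():
       graph = asyncio.capture_call_graph()
       if graph is not None:
           # Frame names on the current task's call stack, e.g. ['leaf'].
           print([entry.frame.f_code.co_name for entry in graph.call_stack])
           for awaiter in graph.awaited_by:
               print("awaited by:", awaiter.future)

   async def main():
       async with asyncio.TaskGroup() as tg:
           tg.create_task(leaf(), name="leaf-task")

   asyncio.run(main())
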
diff --git a/Include/cpython/bytesobject.h b/Include/cpython/bytesobject.h index cf3f0387ecf323..71c133f173f157 100644 --- a/Include/cpython/bytesobject.h +++ b/Include/cpython/bytesobject.h @@ -34,5 +34,9 @@ static inline Py_ssize_t PyBytes_GET_SIZE(PyObject *op) { PyAPI_FUNC(PyObject*) PyBytes_Join(PyObject *sep, PyObject *iterable); -// Alias kept for backward compatibility -#define _PyBytes_Join PyBytes_Join +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline PyObject* +_PyBytes_Join(PyObject *sep, PyObject *iterable) +{ + return PyBytes_Join(sep, iterable); +} diff --git a/Include/cpython/code.h b/Include/cpython/code.h index cb6261ddde941b..2bd3e08631f0ad 100644 --- a/Include/cpython/code.h +++ b/Include/cpython/code.h @@ -35,11 +35,12 @@ typedef struct { } _PyCoCached; /* Ancillary data structure used for instrumentation. - Line instrumentation creates an array of - these. One entry per code unit.*/ + Line instrumentation creates this with sufficient + space for one entry per code unit. The total size + of the data will be `bytes_per_entry * Py_SIZE(code)` */ typedef struct { - uint8_t original_opcode; - int8_t line_delta; + uint8_t bytes_per_entry; + uint8_t data[1]; } _PyCoLineInstrumentationData; diff --git a/Include/cpython/dictobject.h b/Include/cpython/dictobject.h index 78473e54898fa5..df9ec7050fca1a 100644 --- a/Include/cpython/dictobject.h +++ b/Include/cpython/dictobject.h @@ -68,7 +68,12 @@ PyAPI_FUNC(PyObject *) _PyDict_NewPresized(Py_ssize_t minused); PyAPI_FUNC(int) PyDict_Pop(PyObject *dict, PyObject *key, PyObject **result); PyAPI_FUNC(int) PyDict_PopString(PyObject *dict, const char *key, PyObject **result); -PyAPI_FUNC(PyObject *) _PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value); + +// Use PyDict_Pop() instead +Py_DEPRECATED(3.14) PyAPI_FUNC(PyObject *) _PyDict_Pop( + PyObject *dict, + PyObject *key, + PyObject *default_value); /* Dictionary watchers */ diff --git a/Include/cpython/fileutils.h b/Include/cpython/fileutils.h index 702f89aca324c5..626b1ad57b3846 100644 --- a/Include/cpython/fileutils.h +++ b/Include/cpython/fileutils.h @@ -6,9 +6,11 @@ PyAPI_FUNC(FILE*) Py_fopen( PyObject *path, const char *mode); -// Deprecated alias to Py_fopen() kept for backward compatibility -Py_DEPRECATED(3.14) PyAPI_FUNC(FILE*) _Py_fopen_obj( - PyObject *path, - const char *mode); +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline FILE* +_Py_fopen_obj(PyObject *path, const char *mode) +{ + return Py_fopen(path, mode); +} PyAPI_FUNC(int) Py_fclose(FILE *file); diff --git a/Include/cpython/import.h b/Include/cpython/import.h index 7daf0b84fcf71b..0fd61c28cafa0e 100644 --- a/Include/cpython/import.h +++ b/Include/cpython/import.h @@ -2,8 +2,6 @@ # error "this header file must not be included directly" #endif -PyMODINIT_FUNC PyInit__imp(void); - struct _inittab { const char *name; /* ASCII encoded string */ PyObject* (*initfunc)(void); diff --git a/Include/cpython/longintrepr.h b/Include/cpython/longintrepr.h index 357477b60d9a5a..4dd82600d562ee 100644 --- a/Include/cpython/longintrepr.h +++ b/Include/cpython/longintrepr.h @@ -100,12 +100,12 @@ struct _longobject { _PyLongValue long_value; }; -PyAPI_FUNC(PyLongObject*) _PyLong_New(Py_ssize_t); +Py_DEPRECATED(3.14) PyAPI_FUNC(PyLongObject*) _PyLong_New(Py_ssize_t); // Return a copy of src. 
PyAPI_FUNC(PyObject*) _PyLong_Copy(PyLongObject *src); -PyAPI_FUNC(PyLongObject*) _PyLong_FromDigits( +Py_DEPRECATED(3.14) PyAPI_FUNC(PyLongObject*) _PyLong_FromDigits( int negative, Py_ssize_t digit_count, digit *digits); diff --git a/Include/cpython/longobject.h b/Include/cpython/longobject.h index 4d6e618f831ad8..7f28ad60b7467b 100644 --- a/Include/cpython/longobject.h +++ b/Include/cpython/longobject.h @@ -86,7 +86,7 @@ PyAPI_FUNC(int) PyLong_IsZero(PyObject *obj); - On failure, set an exception, and return -1. */ PyAPI_FUNC(int) PyLong_GetSign(PyObject *v, int *sign); -PyAPI_FUNC(int) _PyLong_Sign(PyObject *v); +Py_DEPRECATED(3.14) PyAPI_FUNC(int) _PyLong_Sign(PyObject *v); /* _PyLong_NumBits. Return the number of bits needed to represent the absolute value of a long. For example, this returns 1 for 1 and -1, 2 diff --git a/Include/cpython/object.h b/Include/cpython/object.h index c8c6bc97fa32ee..ba31e2464abf84 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -475,9 +475,6 @@ partially-deallocated object. To check this, the tp_dealloc function must be passed as second argument to Py_TRASHCAN_BEGIN(). */ -/* Python 3.9 private API, invoked by the macros below. */ -PyAPI_FUNC(int) _PyTrash_begin(PyThreadState *tstate, PyObject *op); -PyAPI_FUNC(void) _PyTrash_end(PyThreadState *tstate); PyAPI_FUNC(void) _PyTrash_thread_deposit_object(PyThreadState *tstate, PyObject *op); PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(PyThreadState *tstate); diff --git a/Include/cpython/pyhash.h b/Include/cpython/pyhash.h index 876a7f0ea44f4d..a33ba10b8d3a37 100644 --- a/Include/cpython/pyhash.h +++ b/Include/cpython/pyhash.h @@ -29,9 +29,6 @@ /* Helpers for hash functions */ PyAPI_FUNC(Py_hash_t) _Py_HashDouble(PyObject *, double); -// Kept for backward compatibility -#define _Py_HashPointer Py_HashPointer - /* hash function definition */ typedef struct { @@ -44,6 +41,14 @@ typedef struct { PyAPI_FUNC(PyHash_FuncDef*) PyHash_GetFuncDef(void); PyAPI_FUNC(Py_hash_t) Py_HashPointer(const void *ptr); + +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline Py_hash_t +_Py_HashPointer(const void *ptr) +{ + return Py_HashPointer(ptr); +} + PyAPI_FUNC(Py_hash_t) PyObject_GenericHash(PyObject *); PyAPI_FUNC(Py_hash_t) Py_HashBuffer(const void *ptr, Py_ssize_t len); diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index e46dfe59ec4630..86ce6e6f79824a 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -25,9 +25,6 @@ PyAPI_FUNC(PyStatus) Py_PreInitializeFromArgs( PyAPI_FUNC(PyStatus) Py_InitializeFromConfig( const PyConfig *config); -// Python 3.8 provisional API (PEP 587) -PyAPI_FUNC(PyStatus) _Py_InitializeMain(void); - PyAPI_FUNC(int) Py_RunMain(void); diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index 32f68378ea5d72..cd6d9582496850 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -239,8 +239,12 @@ struct _ts { * if it is NULL. */ PyAPI_FUNC(PyThreadState *) PyThreadState_GetUnchecked(void); -// Alias kept for backward compatibility -#define _PyThreadState_UncheckedGet PyThreadState_GetUnchecked +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline PyThreadState* +_PyThreadState_UncheckedGet(void) +{ + return PyThreadState_GetUnchecked(); +} // Disable tracing and profiling. 
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 46a01c8e591709..287de52b96202c 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -630,8 +630,12 @@ _PyUnicodeWriter_Dealloc(_PyUnicodeWriter *writer); PyAPI_FUNC(const char *) PyUnicode_AsUTF8(PyObject *unicode); -// Alias kept for backward compatibility -#define _PyUnicode_AsString PyUnicode_AsUTF8 +// Deprecated alias kept for backward compatibility +Py_DEPRECATED(3.14) static inline const char* +_PyUnicode_AsString(PyObject *unicode) +{ + return PyUnicode_AsUTF8(unicode); +} /* === Characters Type APIs =============================================== */ diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 80bd19a887871c..fea8665ae39ab5 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -264,7 +264,7 @@ PyAPI_DATA(const size_t) _Py_FunctionAttributeOffsets[]; PyAPI_FUNC(int) _PyEval_CheckExceptStarTypeValid(PyThreadState *tstate, PyObject* right); PyAPI_FUNC(int) _PyEval_CheckExceptTypeValid(PyThreadState *tstate, PyObject* right); -PyAPI_FUNC(int) _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, PyObject **match, PyObject **rest); +PyAPI_FUNC(int) _PyEval_ExceptionGroupMatch(_PyInterpreterFrame *, PyObject* exc_value, PyObject *match_type, PyObject **match, PyObject **rest); PyAPI_FUNC(void) _PyEval_FormatAwaitableError(PyThreadState *tstate, PyTypeObject *type, int oparg); PyAPI_FUNC(void) _PyEval_FormatExcCheckArg(PyThreadState *tstate, PyObject *exc, const char *format_str, PyObject *obj); PyAPI_FUNC(void) _PyEval_FormatExcUnbound(PyThreadState *tstate, PyCodeObject *co, int oparg); diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h index d97fe81a2fc54a..01d41446fdb0cf 100644 --- a/Include/internal/pycore_code.h +++ b/Include/internal/pycore_code.h @@ -100,6 +100,7 @@ typedef struct { typedef struct { _Py_BackoffCounter counter; + uint16_t external_cache[4]; } _PyBinaryOpCache; #define INLINE_CACHE_ENTRIES_BINARY_OP CACHE_ENTRIES(_PyBinaryOpCache) @@ -438,7 +439,7 @@ write_u64(uint16_t *p, uint64_t val) } static inline void -write_obj(uint16_t *p, PyObject *val) +write_ptr(uint16_t *p, void *val) { memcpy(p, &val, sizeof(val)); } @@ -576,6 +577,16 @@ adaptive_counter_backoff(_Py_BackoffCounter counter) { return restart_backoff_counter(counter); } +/* Specialization Extensions */ + +/* callbacks for an external specialization */ +typedef int (*binaryopguardfunc)(PyObject *lhs, PyObject *rhs); +typedef PyObject *(*binaryopactionfunc)(PyObject *lhs, PyObject *rhs); + +typedef struct { + binaryopguardfunc guard; + binaryopactionfunc action; +} _PyBinaryOpSpecializationDescr; /* Comparison bit masks. */ diff --git a/Include/internal/pycore_debug_offsets.h b/Include/internal/pycore_debug_offsets.h index 184f4b9360b6d3..44feb079571a73 100644 --- a/Include/internal/pycore_debug_offsets.h +++ b/Include/internal/pycore_debug_offsets.h @@ -11,6 +11,42 @@ extern "C" { #define _Py_Debug_Cookie "xdebugpy" +#if defined(__APPLE__) +# include +#endif + +// Macros to burn global values in custom sections so out-of-process +// profilers can locate them easily. 
+#define GENERATE_DEBUG_SECTION(name, declaration) \ + _GENERATE_DEBUG_SECTION_WINDOWS(name) \ + _GENERATE_DEBUG_SECTION_APPLE(name) \ + declaration \ + _GENERATE_DEBUG_SECTION_LINUX(name) + +#if defined(MS_WINDOWS) +#define _GENERATE_DEBUG_SECTION_WINDOWS(name) \ + _Pragma(Py_STRINGIFY(section(Py_STRINGIFY(name), read, write))) \ + __declspec(allocate(Py_STRINGIFY(name))) +#else +#define _GENERATE_DEBUG_SECTION_WINDOWS(name) +#endif + +#if defined(__APPLE__) +#define _GENERATE_DEBUG_SECTION_APPLE(name) \ + __attribute__((section(SEG_DATA "," Py_STRINGIFY(name)))) \ + __attribute__((used)) +#else +#define _GENERATE_DEBUG_SECTION_APPLE(name) +#endif + +#if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) +#define _GENERATE_DEBUG_SECTION_LINUX(name) \ + __attribute__((section("." Py_STRINGIFY(name)))) \ + __attribute__((used)) +#else +#define _GENERATE_DEBUG_SECTION_LINUX(name) +#endif + #ifdef Py_GIL_DISABLED # define _Py_Debug_gilruntimestate_enabled offsetof(struct _gil_runtime_state, enabled) # define _Py_Debug_Free_Threaded 1 @@ -69,6 +105,7 @@ typedef struct _Py_DebugOffsets { uint64_t instr_ptr; uint64_t localsplus; uint64_t owner; + uint64_t stackpointer; } interpreter_frame; // Code object offset; @@ -113,6 +150,14 @@ typedef struct _Py_DebugOffsets { uint64_t ob_size; } list_object; + // PySet object offset; + struct _set_object { + uint64_t size; + uint64_t used; + uint64_t table; + uint64_t mask; + } set_object; + // PyDict object offset; struct _dict_object { uint64_t size; @@ -153,6 +198,14 @@ typedef struct _Py_DebugOffsets { uint64_t size; uint64_t collecting; } gc; + + // Generator object offset; + struct _gen_object { + uint64_t size; + uint64_t gi_name; + uint64_t gi_iframe; + uint64_t gi_frame_state; + } gen_object; } _Py_DebugOffsets; @@ -198,6 +251,7 @@ typedef struct _Py_DebugOffsets { .instr_ptr = offsetof(_PyInterpreterFrame, instr_ptr), \ .localsplus = offsetof(_PyInterpreterFrame, localsplus), \ .owner = offsetof(_PyInterpreterFrame, owner), \ + .stackpointer = offsetof(_PyInterpreterFrame, stackpointer), \ }, \ .code_object = { \ .size = sizeof(PyCodeObject), \ @@ -231,6 +285,12 @@ typedef struct _Py_DebugOffsets { .ob_item = offsetof(PyListObject, ob_item), \ .ob_size = offsetof(PyListObject, ob_base.ob_size), \ }, \ + .set_object = { \ + .size = sizeof(PySetObject), \ + .used = offsetof(PySetObject, used), \ + .table = offsetof(PySetObject, table), \ + .mask = offsetof(PySetObject, mask), \ + }, \ .dict_object = { \ .size = sizeof(PyDictObject), \ .ma_keys = offsetof(PyDictObject, ma_keys), \ @@ -260,6 +320,12 @@ typedef struct _Py_DebugOffsets { .size = sizeof(struct _gc_runtime_state), \ .collecting = offsetof(struct _gc_runtime_state, collecting), \ }, \ + .gen_object = { \ + .size = sizeof(PyGenObject), \ + .gi_name = offsetof(PyGenObject, gi_name), \ + .gi_iframe = offsetof(PyGenObject, gi_iframe), \ + .gi_frame_state = offsetof(PyGenObject, gi_frame_state), \ + }, \ } diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 74ac8f2148174c..f4c55ca6cf64d2 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -347,8 +347,7 @@ PyDictObject *_PyObject_MaterializeManagedDict_LockHeld(PyObject *); static inline Py_ssize_t _PyDict_UniqueId(PyDictObject *mp) { - // Offset by one so that _ma_watcher_tag=0 represents an unassigned id - return (Py_ssize_t)(mp->_ma_watcher_tag >> DICT_UNIQUE_ID_SHIFT) - 1; + return (Py_ssize_t)(mp->_ma_watcher_tag >> DICT_UNIQUE_ID_SHIFT); } static inline void diff 
--git a/Include/internal/pycore_frame.h b/Include/internal/pycore_frame.h index 96ae4dd22ecb43..14dc91e54298ce 100644 --- a/Include/internal/pycore_frame.h +++ b/Include/internal/pycore_frame.h @@ -56,7 +56,8 @@ enum _frameowner { FRAME_OWNED_BY_THREAD = 0, FRAME_OWNED_BY_GENERATOR = 1, FRAME_OWNED_BY_FRAME_OBJECT = 2, - FRAME_OWNED_BY_CSTACK = 3, + FRAME_OWNED_BY_INTERPRETER = 3, + FRAME_OWNED_BY_CSTACK = 4, }; typedef struct _PyInterpreterFrame { @@ -75,7 +76,12 @@ typedef struct _PyInterpreterFrame { _PyStackRef *stackpointer; uint16_t return_offset; /* Only relevant during a function call */ char owner; - char visited; +#ifdef Py_DEBUG + uint8_t visited:1; + uint8_t lltrace:7; +#else + uint8_t visited; +#endif /* Locals and stack */ _PyStackRef localsplus[1]; } _PyInterpreterFrame; @@ -264,7 +270,7 @@ _PyFrame_SetStackPointer(_PyInterpreterFrame *frame, _PyStackRef *stack_pointer) static inline bool _PyFrame_IsIncomplete(_PyInterpreterFrame *frame) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { return true; } return frame->owner != FRAME_OWNED_BY_GENERATOR && diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 4ff34bf8ead7d0..b1806df2706097 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -45,12 +45,13 @@ static inline PyObject* _Py_FROM_GC(PyGC_Head *gc) { * the per-object lock. */ #ifdef Py_GIL_DISABLED -# define _PyGC_BITS_TRACKED (1) // Tracked by the GC -# define _PyGC_BITS_FINALIZED (2) // tp_finalize was called -# define _PyGC_BITS_UNREACHABLE (4) -# define _PyGC_BITS_FROZEN (8) -# define _PyGC_BITS_SHARED (16) -# define _PyGC_BITS_DEFERRED (64) // Use deferred reference counting +# define _PyGC_BITS_TRACKED (1<<0) // Tracked by the GC +# define _PyGC_BITS_FINALIZED (1<<1) // tp_finalize was called +# define _PyGC_BITS_UNREACHABLE (1<<2) +# define _PyGC_BITS_FROZEN (1<<3) +# define _PyGC_BITS_SHARED (1<<4) +# define _PyGC_BITS_ALIVE (1<<5) // Reachable from a known root. +# define _PyGC_BITS_DEFERRED (1<<6) // Use deferred reference counting #endif #ifdef Py_GIL_DISABLED @@ -330,6 +331,9 @@ struct _gc_runtime_state { collections, and are awaiting to undergo a full collection for the first time. */ Py_ssize_t long_lived_pending; + + /* True if gc.freeze() has been used. */ + int freeze_active; #endif }; diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index a3c14dceffd7a0..f745b09796753b 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -341,43 +341,6 @@ extern void _PyInterpreterState_SetWhence( extern const PyConfig* _PyInterpreterState_GetConfig(PyInterpreterState *interp); -// Get a copy of the current interpreter configuration. -// -// Return 0 on success. Raise an exception and return -1 on error. -// -// The caller must initialize 'config', using PyConfig_InitPythonConfig() -// for example. -// -// Python must be preinitialized to call this method. -// The caller must hold the GIL. -// -// Once done with the configuration, PyConfig_Clear() must be called to clear -// it. -// -// Export for '_testinternalcapi' shared extension. -PyAPI_FUNC(int) _PyInterpreterState_GetConfigCopy( - struct PyConfig *config); - -// Set the configuration of the current interpreter. -// -// This function should be called during or just after the Python -// initialization. -// -// Update the sys module with the new configuration. 
If the sys module was -// modified directly after the Python initialization, these changes are lost. -// -// Some configuration like faulthandler or warnoptions can be updated in the -// configuration, but don't reconfigure Python (don't enable/disable -// faulthandler and don't reconfigure warnings filters). -// -// Return 0 on success. Raise an exception and return -1 on error. -// -// The configuration should come from _PyInterpreterState_GetConfigCopy(). -// -// Export for '_testinternalcapi' shared extension. -PyAPI_FUNC(int) _PyInterpreterState_SetConfig( - const struct PyConfig *config); - /* Runtime Feature Flags diff --git a/Include/internal/pycore_list.h b/Include/internal/pycore_list.h index 836ff30abfcedb..5d817891408481 100644 --- a/Include/internal/pycore_list.h +++ b/Include/internal/pycore_list.h @@ -61,7 +61,7 @@ typedef struct { union _PyStackRef; -PyAPI_FUNC(PyObject *)_PyList_FromStackRefSteal(const union _PyStackRef *src, Py_ssize_t n); +PyAPI_FUNC(PyObject *)_PyList_FromStackRefStealOnSuccess(const union _PyStackRef *src, Py_ssize_t n); PyAPI_FUNC(PyObject *)_PyList_AsTupleAndClear(PyListObject *v); #ifdef __cplusplus diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h index 8b3d6285c1e4e7..1dd155abf3babf 100644 --- a/Include/internal/pycore_magic_number.h +++ b/Include/internal/pycore_magic_number.h @@ -266,6 +266,7 @@ Known values: Python 3.14a4 3611 (Add NOT_TAKEN instruction) Python 3.14a4 3612 (Add POP_ITER and INSTRUMENTED_POP_ITER) Python 3.14a4 3613 (Add LOAD_CONST_MORTAL instruction) + Python 3.14a5 3614 (Add BINARY_OP_EXTEND) Python 3.15 will start with 3650 @@ -278,7 +279,7 @@ PC/launcher.c must also be updated. */ -#define PYC_MAGIC_NUMBER 3613 +#define PYC_MAGIC_NUMBER 3614 /* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes (little-endian) and then appending b'\r\n'. */ #define PYC_MAGIC_NUMBER_TOKEN \ diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index e26cb7673f939c..0b1df7e68b8dfa 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -62,7 +62,7 @@ extern void _Py_ForgetReference(PyObject *); PyAPI_FUNC(int) _PyObject_IsFreed(PyObject *); /* We need to maintain an internal copy of Py{Var}Object_HEAD_INIT to avoid - designated initializer conflicts in C++20. If we use the deinition in + designated initializer conflicts in C++20. If we use the definition in object.h, we will be mixing designated and non-designated initializers in pycore objects which is forbiddent in C++20. However, if we then use designated initializers in object.h then Extensions without designated break. @@ -299,12 +299,6 @@ Py_ssize_t _Py_ExplicitMergeRefcount(PyObject *op, Py_ssize_t extra); extern int _PyType_CheckConsistency(PyTypeObject *type); extern int _PyDict_CheckConsistency(PyObject *mp, int check_content); -/* Update the Python traceback of an object. This function must be called - when a memory block is reused from a free list. - - Internal function called by _Py_NewReference(). 
*/ -extern int _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void*); - // Fast inlined version of PyType_HasFeature() static inline int _PyType_HasFeature(PyTypeObject *type, unsigned long feature) { @@ -342,20 +336,20 @@ _Py_THREAD_INCREF_OBJECT(PyObject *obj, Py_ssize_t unique_id) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET(); - // Unsigned comparison so that `unique_id=-1`, which indicates that - // per-thread refcounting has been disabled on this object, is handled by - // the "else". - if ((size_t)unique_id < (size_t)tstate->refcounts.size) { + // The table index is `unique_id - 1` because 0 is not a valid unique id. + // Unsigned comparison so that `idx=-1` is handled by the "else". + size_t idx = (size_t)(unique_id - 1); + if (idx < (size_t)tstate->refcounts.size) { # ifdef Py_REF_DEBUG _Py_INCREF_IncRefTotal(); # endif _Py_INCREF_STAT_INC(); - tstate->refcounts.values[unique_id]++; + tstate->refcounts.values[idx]++; } else { // The slow path resizes the per-thread refcount array if necessary. - // It handles the unique_id=-1 case to keep the inlinable function smaller. - _PyObject_ThreadIncrefSlow(obj, unique_id); + // It handles the unique_id=0 case to keep the inlinable function smaller. + _PyObject_ThreadIncrefSlow(obj, idx); } } @@ -392,15 +386,15 @@ _Py_THREAD_DECREF_OBJECT(PyObject *obj, Py_ssize_t unique_id) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET(); - // Unsigned comparison so that `unique_id=-1`, which indicates that - // per-thread refcounting has been disabled on this object, is handled by - // the "else". - if ((size_t)unique_id < (size_t)tstate->refcounts.size) { + // The table index is `unique_id - 1` because 0 is not a valid unique id. + // Unsigned comparison so that `idx=-1` is handled by the "else". 
+ size_t idx = (size_t)(unique_id - 1); + if (idx < (size_t)tstate->refcounts.size) { # ifdef Py_REF_DEBUG _Py_DECREF_DecRefTotal(); # endif _Py_DECREF_STAT_INC(); - tstate->refcounts.values[unique_id]--; + tstate->refcounts.values[idx]--; } else { // Directly decref the object if the id is not assigned or if diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 0c0a6145bdbb27..10061c988d5f4b 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -43,6 +43,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) { return 2; case BINARY_OP_ADD_UNICODE: return 2; + case BINARY_OP_EXTEND: + return 2; case BINARY_OP_INPLACE_ADD_UNICODE: return 2; case BINARY_OP_MULTIPLY_FLOAT: @@ -512,6 +514,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) { return 1; case BINARY_OP_ADD_UNICODE: return 1; + case BINARY_OP_EXTEND: + return 1; case BINARY_OP_INPLACE_ADD_UNICODE: return 0; case BINARY_OP_MULTIPLY_FLOAT: @@ -989,6 +993,10 @@ int _PyOpcode_max_stack_effect(int opcode, int oparg, int *effect) { *effect = 0; return 0; } + case BINARY_OP_EXTEND: { + *effect = 0; + return 0; + } case BINARY_OP_INPLACE_ADD_UNICODE: { *effect = 0; return 0; @@ -1919,11 +1927,13 @@ enum InstructionFormat { INSTR_FMT_IBC = 2, INSTR_FMT_IBC00 = 3, INSTR_FMT_IBC000 = 4, - INSTR_FMT_IBC00000000 = 5, - INSTR_FMT_IX = 6, - INSTR_FMT_IXC = 7, - INSTR_FMT_IXC00 = 8, - INSTR_FMT_IXC000 = 9, + INSTR_FMT_IBC0000 = 5, + INSTR_FMT_IBC00000000 = 6, + INSTR_FMT_IX = 7, + INSTR_FMT_IXC = 8, + INSTR_FMT_IXC00 = 9, + INSTR_FMT_IXC000 = 10, + INSTR_FMT_IXC0000 = 11, }; #define IS_VALID_OPCODE(OP) \ @@ -1981,15 +1991,16 @@ struct opcode_metadata { extern const struct opcode_metadata _PyOpcode_opcode_metadata[266]; #ifdef NEED_OPCODE_METADATA const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { - [BINARY_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, - [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP] = { true, INSTR_FMT_IBC0000, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_EXTEND] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, + 
[BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, @@ -1997,12 +2008,12 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = { [BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, - [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, [BUILD_MAP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_SET] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_SLICE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, [BUILD_STRING] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, - [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG }, + [BUILD_TUPLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG }, [CACHE] = { true, INSTR_FMT_IX, 0 }, [CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [CALL_ALLOC_AND_ENTER_INIT] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, @@ -2227,6 +2238,7 @@ _PyOpcode_macro_expansion[256] = { [BINARY_OP_ADD_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_ADD_FLOAT, 0, 0 } } }, [BINARY_OP_ADD_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_ADD_INT, 0, 0 } } }, [BINARY_OP_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_ADD_UNICODE, 0, 0 } } }, + [BINARY_OP_EXTEND] = { .nuops = 2, .uops = { { _GUARD_BINARY_OP_EXTEND, 4, 1 }, { _BINARY_OP_EXTEND, 4, 1 } } }, [BINARY_OP_INPLACE_ADD_UNICODE] = { .nuops = 2, .uops = { { _GUARD_BOTH_UNICODE, 0, 0 }, { _BINARY_OP_INPLACE_ADD_UNICODE, 0, 0 } } }, [BINARY_OP_MULTIPLY_FLOAT] = { .nuops = 2, .uops = { { _GUARD_BOTH_FLOAT, 0, 0 }, { _BINARY_OP_MULTIPLY_FLOAT, 0, 0 } } }, [BINARY_OP_MULTIPLY_INT] = { .nuops = 2, .uops = { { _GUARD_BOTH_INT, 0, 0 }, { _BINARY_OP_MULTIPLY_INT, 0, 0 } } }, @@ -2411,6 +2423,7 @@ const char *_PyOpcode_OpName[266] = { [BINARY_OP_ADD_FLOAT] = "BINARY_OP_ADD_FLOAT", [BINARY_OP_ADD_INT] = "BINARY_OP_ADD_INT", [BINARY_OP_ADD_UNICODE] = "BINARY_OP_ADD_UNICODE", + [BINARY_OP_EXTEND] = "BINARY_OP_EXTEND", [BINARY_OP_INPLACE_ADD_UNICODE] = "BINARY_OP_INPLACE_ADD_UNICODE", [BINARY_OP_MULTIPLY_FLOAT] = "BINARY_OP_MULTIPLY_FLOAT", [BINARY_OP_MULTIPLY_INT] = "BINARY_OP_MULTIPLY_INT", @@ -2661,7 +2674,7 @@ const uint8_t _PyOpcode_Caches[256] = { [FOR_ITER] = 1, [CALL] = 3, [CALL_KW] = 3, - [BINARY_OP] = 1, + [BINARY_OP] = 5, }; #endif @@ -2672,6 +2685,7 @@ const uint8_t _PyOpcode_Deopt[256] = { [BINARY_OP_ADD_FLOAT] = BINARY_OP, [BINARY_OP_ADD_INT] = BINARY_OP, [BINARY_OP_ADD_UNICODE] = BINARY_OP, + [BINARY_OP_EXTEND] = BINARY_OP, [BINARY_OP_INPLACE_ADD_UNICODE] = BINARY_OP, [BINARY_OP_MULTIPLY_FLOAT] = BINARY_OP, [BINARY_OP_MULTIPLY_INT] = BINARY_OP, @@ -2923,7 +2937,6 @@ const uint8_t _PyOpcode_Deopt[256] = { case 146: \ case 147: \ case 148: \ - case 
229: \ case 230: \ case 231: \ case 232: \ diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index bc7cfcde613d65..03ce4d4491acd7 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -98,11 +98,6 @@ struct _PyOptimizerObject { }; /** Test support **/ -typedef struct { - _PyOptimizerObject base; - int64_t count; -} _PyCounterOptimizerObject; - _PyOptimizerObject *_Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject* optimizer); @@ -119,7 +114,6 @@ PyAPI_FUNC(void) _Py_Executor_DependsOn(_PyExecutorObject *executor, void *obj); // Export for '_testinternalcapi' shared extension. PyAPI_FUNC(_PyOptimizerObject *) _Py_GetOptimizer(void); PyAPI_FUNC(int) _Py_SetTier2Optimizer(_PyOptimizerObject* optimizer); -PyAPI_FUNC(PyObject *) _PyOptimizer_NewCounter(void); PyAPI_FUNC(PyObject *) _PyOptimizer_NewUOpOptimizer(void); #define _Py_MAX_ALLOWED_BUILTINS_MODIFICATIONS 3 @@ -150,21 +144,10 @@ int _Py_uop_analyze_and_optimize(struct _PyInterpreterFrame *frame, _PyUOpInstruction *trace, int trace_len, int curr_stackentries, _PyBloomFilter *dependencies); -extern PyTypeObject _PyCounterExecutor_Type; -extern PyTypeObject _PyCounterOptimizer_Type; extern PyTypeObject _PyDefaultOptimizer_Type; extern PyTypeObject _PyUOpExecutor_Type; extern PyTypeObject _PyUOpOptimizer_Type; -/* Symbols */ -/* See explanation in optimizer_symbols.c */ - -struct _Py_UopsSymbol { - int flags; // 0 bits: Top; 2 or more bits: Bottom - PyTypeObject *typ; // Borrowed reference - PyObject *const_val; // Owned reference (!) - unsigned int type_version; // currently stores type version -}; #define UOP_FORMAT_TARGET 0 #define UOP_FORMAT_JUMP 1 @@ -201,16 +184,63 @@ static inline uint16_t uop_get_error_target(const _PyUOpInstruction *inst) // handle before rejoining the rest of the program. 
#define MAX_CHAIN_DEPTH 4 -typedef struct _Py_UopsSymbol _Py_UopsSymbol; +/* Symbols */ +/* See explanation in optimizer_symbols.c */ + + +typedef enum _JitSymType { + JIT_SYM_UNKNOWN_TAG = 1, + JIT_SYM_NULL_TAG = 2, + JIT_SYM_NON_NULL_TAG = 3, + JIT_SYM_BOTTOM_TAG = 4, + JIT_SYM_TYPE_VERSION_TAG = 5, + JIT_SYM_KNOWN_CLASS_TAG = 6, + JIT_SYM_KNOWN_VALUE_TAG = 7, + JIT_SYM_TUPLE_TAG = 8, +} JitSymType; + +typedef struct _jit_opt_known_class { + uint8_t tag; + uint32_t version; + PyTypeObject *type; +} JitOptKnownClass; + +typedef struct _jit_opt_known_version { + uint8_t tag; + uint32_t version; +} JitOptKnownVersion; + +typedef struct _jit_opt_known_value { + uint8_t tag; + PyObject *value; +} JitOptKnownValue; + +#define MAX_SYMBOLIC_TUPLE_SIZE 7 + +typedef struct _jit_opt_tuple { + uint8_t tag; + uint8_t length; + uint16_t items[MAX_SYMBOLIC_TUPLE_SIZE]; +} JitOptTuple; + +typedef union _jit_opt_symbol { + uint8_t tag; + JitOptKnownClass cls; + JitOptKnownValue value; + JitOptKnownVersion version; + JitOptTuple tuple; +} JitOptSymbol; + + struct _Py_UOpsAbstractFrame { // Max stacklen int stack_len; int locals_len; - _Py_UopsSymbol **stack_pointer; - _Py_UopsSymbol **stack; - _Py_UopsSymbol **locals; + JitOptSymbol **stack_pointer; + JitOptSymbol **stack; + JitOptSymbol **locals; }; typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; @@ -218,10 +248,10 @@ typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; typedef struct ty_arena { int ty_curr_number; int ty_max_number; - _Py_UopsSymbol arena[TY_ARENA_SIZE]; + JitOptSymbol arena[TY_ARENA_SIZE]; } ty_arena; -struct _Py_UOpsContext { +typedef struct _JitOptContext { char done; char out_of_space; bool contradiction; @@ -233,46 +263,47 @@ struct _Py_UOpsContext { // Arena for the symbolic types. 
ty_arena t_arena; - _Py_UopsSymbol **n_consumed; - _Py_UopsSymbol **limit; - _Py_UopsSymbol *locals_and_stack[MAX_ABSTRACT_INTERP_SIZE]; -}; - -typedef struct _Py_UOpsContext _Py_UOpsContext; - -extern bool _Py_uop_sym_is_null(_Py_UopsSymbol *sym); -extern bool _Py_uop_sym_is_not_null(_Py_UopsSymbol *sym); -extern bool _Py_uop_sym_is_const(_Py_UopsSymbol *sym); -extern PyObject *_Py_uop_sym_get_const(_Py_UopsSymbol *sym); -extern _Py_UopsSymbol *_Py_uop_sym_new_unknown(_Py_UOpsContext *ctx); -extern _Py_UopsSymbol *_Py_uop_sym_new_not_null(_Py_UOpsContext *ctx); -extern _Py_UopsSymbol *_Py_uop_sym_new_type( - _Py_UOpsContext *ctx, PyTypeObject *typ); -extern _Py_UopsSymbol *_Py_uop_sym_new_const(_Py_UOpsContext *ctx, PyObject *const_val); -extern _Py_UopsSymbol *_Py_uop_sym_new_null(_Py_UOpsContext *ctx); -extern bool _Py_uop_sym_has_type(_Py_UopsSymbol *sym); -extern bool _Py_uop_sym_matches_type(_Py_UopsSymbol *sym, PyTypeObject *typ); -extern bool _Py_uop_sym_matches_type_version(_Py_UopsSymbol *sym, unsigned int version); -extern void _Py_uop_sym_set_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym); -extern void _Py_uop_sym_set_non_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym); -extern void _Py_uop_sym_set_type(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyTypeObject *typ); -extern bool _Py_uop_sym_set_type_version(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, unsigned int version); -extern void _Py_uop_sym_set_const(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyObject *const_val); -extern bool _Py_uop_sym_is_bottom(_Py_UopsSymbol *sym); -extern int _Py_uop_sym_truthiness(_Py_UopsSymbol *sym); -extern PyTypeObject *_Py_uop_sym_get_type(_Py_UopsSymbol *sym); - - -extern void _Py_uop_abstractcontext_init(_Py_UOpsContext *ctx); -extern void _Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx); + JitOptSymbol **n_consumed; + JitOptSymbol **limit; + JitOptSymbol *locals_and_stack[MAX_ABSTRACT_INTERP_SIZE]; +} JitOptContext; + +extern bool _Py_uop_sym_is_null(JitOptSymbol *sym); +extern bool _Py_uop_sym_is_not_null(JitOptSymbol *sym); +extern bool _Py_uop_sym_is_const(JitOptSymbol *sym); +extern PyObject *_Py_uop_sym_get_const(JitOptSymbol *sym); +extern JitOptSymbol *_Py_uop_sym_new_unknown(JitOptContext *ctx); +extern JitOptSymbol *_Py_uop_sym_new_not_null(JitOptContext *ctx); +extern JitOptSymbol *_Py_uop_sym_new_type( + JitOptContext *ctx, PyTypeObject *typ); +extern JitOptSymbol *_Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val); +extern JitOptSymbol *_Py_uop_sym_new_null(JitOptContext *ctx); +extern bool _Py_uop_sym_has_type(JitOptSymbol *sym); +extern bool _Py_uop_sym_matches_type(JitOptSymbol *sym, PyTypeObject *typ); +extern bool _Py_uop_sym_matches_type_version(JitOptSymbol *sym, unsigned int version); +extern void _Py_uop_sym_set_null(JitOptContext *ctx, JitOptSymbol *sym); +extern void _Py_uop_sym_set_non_null(JitOptContext *ctx, JitOptSymbol *sym); +extern void _Py_uop_sym_set_type(JitOptContext *ctx, JitOptSymbol *sym, PyTypeObject *typ); +extern bool _Py_uop_sym_set_type_version(JitOptContext *ctx, JitOptSymbol *sym, unsigned int version); +extern void _Py_uop_sym_set_const(JitOptContext *ctx, JitOptSymbol *sym, PyObject *const_val); +extern bool _Py_uop_sym_is_bottom(JitOptSymbol *sym); +extern int _Py_uop_sym_truthiness(JitOptSymbol *sym); +extern PyTypeObject *_Py_uop_sym_get_type(JitOptSymbol *sym); +extern bool _Py_uop_sym_is_immortal(JitOptSymbol *sym); +extern JitOptSymbol *_Py_uop_sym_new_tuple(JitOptContext *ctx, int size, JitOptSymbol **args); +extern 
JitOptSymbol *_Py_uop_sym_tuple_getitem(JitOptContext *ctx, JitOptSymbol *sym, int item); +extern int _Py_uop_sym_tuple_length(JitOptSymbol *sym); + +extern void _Py_uop_abstractcontext_init(JitOptContext *ctx); +extern void _Py_uop_abstractcontext_fini(JitOptContext *ctx); extern _Py_UOpsAbstractFrame *_Py_uop_frame_new( - _Py_UOpsContext *ctx, + JitOptContext *ctx, PyCodeObject *co, int curr_stackentries, - _Py_UopsSymbol **args, + JitOptSymbol **args, int arg_len); -extern int _Py_uop_frame_pop(_Py_UOpsContext *ctx); +extern int _Py_uop_frame_pop(JitOptContext *ctx); PyAPI_FUNC(PyObject *) _Py_uop_symbols_test(PyObject *self, PyObject *ignored); diff --git a/Include/internal/pycore_pyerrors.h b/Include/internal/pycore_pyerrors.h index 8dea2d34117430..fa7d9ee36d095d 100644 --- a/Include/internal/pycore_pyerrors.h +++ b/Include/internal/pycore_pyerrors.h @@ -196,9 +196,9 @@ extern int _PyUnicodeError_GetParams( Py_ssize_t *objlen, Py_ssize_t *start, Py_ssize_t *end, + Py_ssize_t *slen, int as_bytes); - #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 1e73e541ef8de0..ff3b222b157810 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -300,6 +300,19 @@ PyAPI_FUNC(const PyConfig*) _Py_GetConfig(void); // See also PyInterpreterState_Get() and _PyInterpreterState_GET(). extern PyInterpreterState* _PyGILState_GetInterpreterStateUnsafe(void); +#ifndef NDEBUG +/* Modern equivalent of assert(PyGILState_Check()) */ +static inline void +_Py_AssertHoldsTstateFunc(const char *func) +{ + PyThreadState *tstate = _PyThreadState_GET(); + _Py_EnsureFuncTstateNotNULL(func, tstate); +} +#define _Py_AssertHoldsTstate() _Py_AssertHoldsTstateFunc(__func__) +#else +#define _Py_AssertHoldsTstate() +#endif + #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_tracemalloc.h b/Include/internal/pycore_tracemalloc.h index 7ddc5bac5d10af..572e8025876319 100644 --- a/Include/internal/pycore_tracemalloc.h +++ b/Include/internal/pycore_tracemalloc.h @@ -11,10 +11,6 @@ extern "C" { #include "pycore_hashtable.h" // _Py_hashtable_t -/* Trace memory blocks allocated by PyMem_RawMalloc() */ -#define TRACE_RAW_MALLOC - - struct _PyTraceMalloc_Config { /* Module initialized? Variable protected by the GIL */ @@ -25,7 +21,7 @@ struct _PyTraceMalloc_Config { } initialized; /* Is tracemalloc tracing memory allocations? - Variable protected by the GIL */ + Variable protected by the TABLES_LOCK(). */ int tracing; /* limit of the number of frames in a traceback, 1 by default. @@ -74,9 +70,7 @@ struct _tracemalloc_runtime_state { PyMemAllocatorEx obj; } allocators; -#if defined(TRACE_RAW_MALLOC) - PyThread_type_lock tables_lock; -#endif + PyMutex tables_lock; /* Size in bytes of currently traced memory. Protected by TABLES_LOCK(). */ size_t traced_memory; @@ -85,14 +79,14 @@ struct _tracemalloc_runtime_state { size_t peak_traced_memory; /* Hash table used as a set to intern filenames: PyObject* => PyObject*. - Protected by the GIL */ + Protected by the TABLES_LOCK(). */ _Py_hashtable_t *filenames; /* Buffer to store a new traceback in traceback_new(). - Protected by the GIL. */ + Protected by the TABLES_LOCK(). */ struct tracemalloc_traceback *traceback; /* Hash table used as a set to intern tracebacks: traceback_t* => traceback_t* - Protected by the GIL */ + Protected by the TABLES_LOCK(). */ _Py_hashtable_t *tracebacks; /* pointer (void*) => trace (trace_t*). Protected by TABLES_LOCK(). 
*/ @@ -144,7 +138,7 @@ extern PyObject* _PyTraceMalloc_GetTraces(void); extern PyObject* _PyTraceMalloc_GetObjectTraceback(PyObject *obj); /* Initialize tracemalloc */ -extern int _PyTraceMalloc_Init(void); +extern PyStatus _PyTraceMalloc_Init(void); /* Start tracemalloc */ extern int _PyTraceMalloc_Start(int max_nframe); diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h index b8bea72baeaaf5..74e1452763e56c 100644 --- a/Include/internal/pycore_tstate.h +++ b/Include/internal/pycore_tstate.h @@ -22,6 +22,7 @@ typedef struct _PyThreadStateImpl { PyThreadState base; PyObject *asyncio_running_loop; // Strong reference + PyObject *asyncio_running_task; // Strong reference struct _qsbr_thread_state *qsbr; // only used by free-threaded build struct llist_node mem_free_queue; // delayed free queue diff --git a/Include/internal/pycore_tuple.h b/Include/internal/pycore_tuple.h index 82b875241f4116..dc68d6648b9ec8 100644 --- a/Include/internal/pycore_tuple.h +++ b/Include/internal/pycore_tuple.h @@ -21,7 +21,7 @@ extern PyStatus _PyTuple_InitGlobalObjects(PyInterpreterState *); #define _PyTuple_ITEMS(op) _Py_RVALUE(_PyTuple_CAST(op)->ob_item) PyAPI_FUNC(PyObject *)_PyTuple_FromArray(PyObject *const *, Py_ssize_t); -PyAPI_FUNC(PyObject *)_PyTuple_FromStackRefSteal(const union _PyStackRef *, Py_ssize_t); +PyAPI_FUNC(PyObject *)_PyTuple_FromStackRefStealOnSuccess(const union _PyStackRef *, Py_ssize_t); PyAPI_FUNC(PyObject *)_PyTuple_FromArraySteal(PyObject *const *, Py_ssize_t); typedef struct { diff --git a/Include/internal/pycore_uniqueid.h b/Include/internal/pycore_uniqueid.h index d3db49ddb78103..9d3c866a704894 100644 --- a/Include/internal/pycore_uniqueid.h +++ b/Include/internal/pycore_uniqueid.h @@ -16,7 +16,7 @@ extern "C" { // Per-thread reference counting is used along with deferred reference // counting to avoid scaling bottlenecks due to reference count contention. // -// An id of -1 is used to indicate that an object doesn't use per-thread +// An id of 0 is used to indicate that an object doesn't use per-thread // refcounting. This value is used when the object is finalized by the GC // and during interpreter shutdown to allow the object to be // deallocated promptly when the object's refcount reaches zero. @@ -45,6 +45,8 @@ struct _Py_unique_id_pool { Py_ssize_t size; }; +#define _Py_INVALID_UNIQUE_ID 0 + // Assigns the next id from the pool of ids. extern Py_ssize_t _PyObject_AssignUniqueId(PyObject *obj); @@ -65,7 +67,7 @@ extern void _PyObject_FinalizePerThreadRefcounts(_PyThreadStateImpl *tstate); extern void _PyObject_FinalizeUniqueIdPool(PyInterpreterState *interp); // Increfs the object, resizing the thread-local refcount array if necessary. 
-PyAPI_FUNC(void) _PyObject_ThreadIncrefSlow(PyObject *obj, Py_ssize_t unique_id); +PyAPI_FUNC(void) _PyObject_ThreadIncrefSlow(PyObject *obj, size_t idx); #endif /* Py_GIL_DISABLED */ diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h index 3841363b411eed..066165a2c810d5 100644 --- a/Include/internal/pycore_uop_ids.h +++ b/Include/internal/pycore_uop_ids.h @@ -15,16 +15,17 @@ extern "C" { #define _BINARY_OP_ADD_FLOAT 303 #define _BINARY_OP_ADD_INT 304 #define _BINARY_OP_ADD_UNICODE 305 -#define _BINARY_OP_INPLACE_ADD_UNICODE 306 -#define _BINARY_OP_MULTIPLY_FLOAT 307 -#define _BINARY_OP_MULTIPLY_INT 308 -#define _BINARY_OP_SUBTRACT_FLOAT 309 -#define _BINARY_OP_SUBTRACT_INT 310 -#define _BINARY_SLICE 311 -#define _BINARY_SUBSCR 312 -#define _BINARY_SUBSCR_CHECK_FUNC 313 +#define _BINARY_OP_EXTEND 306 +#define _BINARY_OP_INPLACE_ADD_UNICODE 307 +#define _BINARY_OP_MULTIPLY_FLOAT 308 +#define _BINARY_OP_MULTIPLY_INT 309 +#define _BINARY_OP_SUBTRACT_FLOAT 310 +#define _BINARY_OP_SUBTRACT_INT 311 +#define _BINARY_SLICE 312 +#define _BINARY_SUBSCR 313 +#define _BINARY_SUBSCR_CHECK_FUNC 314 #define _BINARY_SUBSCR_DICT BINARY_SUBSCR_DICT -#define _BINARY_SUBSCR_INIT_CALL 314 +#define _BINARY_SUBSCR_INIT_CALL 315 #define _BINARY_SUBSCR_LIST_INT BINARY_SUBSCR_LIST_INT #define _BINARY_SUBSCR_STR_INT BINARY_SUBSCR_STR_INT #define _BINARY_SUBSCR_TUPLE_INT BINARY_SUBSCR_TUPLE_INT @@ -34,122 +35,123 @@ extern "C" { #define _BUILD_SLICE BUILD_SLICE #define _BUILD_STRING BUILD_STRING #define _BUILD_TUPLE BUILD_TUPLE -#define _CALL_BUILTIN_CLASS 315 -#define _CALL_BUILTIN_FAST 316 -#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 317 -#define _CALL_BUILTIN_O 318 +#define _CALL_BUILTIN_CLASS 316 +#define _CALL_BUILTIN_FAST 317 +#define _CALL_BUILTIN_FAST_WITH_KEYWORDS 318 +#define _CALL_BUILTIN_O 319 #define _CALL_INTRINSIC_1 CALL_INTRINSIC_1 #define _CALL_INTRINSIC_2 CALL_INTRINSIC_2 #define _CALL_ISINSTANCE CALL_ISINSTANCE -#define _CALL_KW_NON_PY 319 +#define _CALL_KW_NON_PY 320 #define _CALL_LEN CALL_LEN #define _CALL_LIST_APPEND CALL_LIST_APPEND -#define _CALL_METHOD_DESCRIPTOR_FAST 320 -#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 321 -#define _CALL_METHOD_DESCRIPTOR_NOARGS 322 -#define _CALL_METHOD_DESCRIPTOR_O 323 -#define _CALL_NON_PY_GENERAL 324 -#define _CALL_STR_1 325 -#define _CALL_TUPLE_1 326 +#define _CALL_METHOD_DESCRIPTOR_FAST 321 +#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 322 +#define _CALL_METHOD_DESCRIPTOR_NOARGS 323 +#define _CALL_METHOD_DESCRIPTOR_O 324 +#define _CALL_NON_PY_GENERAL 325 +#define _CALL_STR_1 326 +#define _CALL_TUPLE_1 327 #define _CALL_TYPE_1 CALL_TYPE_1 -#define _CHECK_AND_ALLOCATE_OBJECT 327 -#define _CHECK_ATTR_CLASS 328 -#define _CHECK_ATTR_METHOD_LAZY_DICT 329 -#define _CHECK_ATTR_MODULE_PUSH_KEYS 330 -#define _CHECK_ATTR_WITH_HINT 331 -#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 332 +#define _CHECK_AND_ALLOCATE_OBJECT 328 +#define _CHECK_ATTR_CLASS 329 +#define _CHECK_ATTR_METHOD_LAZY_DICT 330 +#define _CHECK_ATTR_MODULE_PUSH_KEYS 331 +#define _CHECK_ATTR_WITH_HINT 332 +#define _CHECK_CALL_BOUND_METHOD_EXACT_ARGS 333 #define _CHECK_EG_MATCH CHECK_EG_MATCH #define _CHECK_EXC_MATCH CHECK_EXC_MATCH -#define _CHECK_FUNCTION 333 -#define _CHECK_FUNCTION_EXACT_ARGS 334 -#define _CHECK_FUNCTION_VERSION 335 -#define _CHECK_FUNCTION_VERSION_INLINE 336 -#define _CHECK_FUNCTION_VERSION_KW 337 -#define _CHECK_IS_NOT_PY_CALLABLE 338 -#define _CHECK_IS_NOT_PY_CALLABLE_KW 339 -#define _CHECK_MANAGED_OBJECT_HAS_VALUES 340 -#define 
_CHECK_METHOD_VERSION 341 -#define _CHECK_METHOD_VERSION_KW 342 -#define _CHECK_PEP_523 343 -#define _CHECK_PERIODIC 344 -#define _CHECK_PERIODIC_IF_NOT_YIELD_FROM 345 -#define _CHECK_STACK_SPACE 346 -#define _CHECK_STACK_SPACE_OPERAND 347 -#define _CHECK_VALIDITY 348 -#define _CHECK_VALIDITY_AND_SET_IP 349 -#define _COMPARE_OP 350 -#define _COMPARE_OP_FLOAT 351 -#define _COMPARE_OP_INT 352 -#define _COMPARE_OP_STR 353 -#define _CONTAINS_OP 354 +#define _CHECK_FUNCTION 334 +#define _CHECK_FUNCTION_EXACT_ARGS 335 +#define _CHECK_FUNCTION_VERSION 336 +#define _CHECK_FUNCTION_VERSION_INLINE 337 +#define _CHECK_FUNCTION_VERSION_KW 338 +#define _CHECK_IS_NOT_PY_CALLABLE 339 +#define _CHECK_IS_NOT_PY_CALLABLE_KW 340 +#define _CHECK_MANAGED_OBJECT_HAS_VALUES 341 +#define _CHECK_METHOD_VERSION 342 +#define _CHECK_METHOD_VERSION_KW 343 +#define _CHECK_PEP_523 344 +#define _CHECK_PERIODIC 345 +#define _CHECK_PERIODIC_IF_NOT_YIELD_FROM 346 +#define _CHECK_STACK_SPACE 347 +#define _CHECK_STACK_SPACE_OPERAND 348 +#define _CHECK_VALIDITY 349 +#define _CHECK_VALIDITY_AND_SET_IP 350 +#define _COMPARE_OP 351 +#define _COMPARE_OP_FLOAT 352 +#define _COMPARE_OP_INT 353 +#define _COMPARE_OP_STR 354 +#define _CONTAINS_OP 355 #define _CONTAINS_OP_DICT CONTAINS_OP_DICT #define _CONTAINS_OP_SET CONTAINS_OP_SET #define _CONVERT_VALUE CONVERT_VALUE #define _COPY COPY #define _COPY_FREE_VARS COPY_FREE_VARS -#define _CREATE_INIT_FRAME 355 +#define _CREATE_INIT_FRAME 356 #define _DELETE_ATTR DELETE_ATTR #define _DELETE_DEREF DELETE_DEREF #define _DELETE_FAST DELETE_FAST #define _DELETE_GLOBAL DELETE_GLOBAL #define _DELETE_NAME DELETE_NAME #define _DELETE_SUBSCR DELETE_SUBSCR -#define _DEOPT 356 +#define _DEOPT 357 #define _DICT_MERGE DICT_MERGE #define _DICT_UPDATE DICT_UPDATE -#define _DO_CALL 357 -#define _DO_CALL_FUNCTION_EX 358 -#define _DO_CALL_KW 359 -#define _DYNAMIC_EXIT 360 +#define _DO_CALL 358 +#define _DO_CALL_FUNCTION_EX 359 +#define _DO_CALL_KW 360 +#define _DYNAMIC_EXIT 361 #define _END_FOR END_FOR #define _END_SEND END_SEND -#define _ERROR_POP_N 361 +#define _ERROR_POP_N 362 #define _EXIT_INIT_CHECK EXIT_INIT_CHECK -#define _EXPAND_METHOD 362 -#define _EXPAND_METHOD_KW 363 -#define _FATAL_ERROR 364 +#define _EXPAND_METHOD 363 +#define _EXPAND_METHOD_KW 364 +#define _FATAL_ERROR 365 #define _FORMAT_SIMPLE FORMAT_SIMPLE #define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC -#define _FOR_ITER 365 -#define _FOR_ITER_GEN_FRAME 366 -#define _FOR_ITER_TIER_TWO 367 +#define _FOR_ITER 366 +#define _FOR_ITER_GEN_FRAME 367 +#define _FOR_ITER_TIER_TWO 368 #define _GET_AITER GET_AITER #define _GET_ANEXT GET_ANEXT #define _GET_AWAITABLE GET_AWAITABLE #define _GET_ITER GET_ITER #define _GET_LEN GET_LEN #define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER -#define _GUARD_BOTH_FLOAT 368 -#define _GUARD_BOTH_INT 369 -#define _GUARD_BOTH_UNICODE 370 -#define _GUARD_BUILTINS_VERSION_PUSH_KEYS 371 -#define _GUARD_DORV_NO_DICT 372 -#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 373 -#define _GUARD_GLOBALS_VERSION 374 -#define _GUARD_GLOBALS_VERSION_PUSH_KEYS 375 -#define _GUARD_IS_FALSE_POP 376 -#define _GUARD_IS_NONE_POP 377 -#define _GUARD_IS_NOT_NONE_POP 378 -#define _GUARD_IS_TRUE_POP 379 -#define _GUARD_KEYS_VERSION 380 -#define _GUARD_NOS_FLOAT 381 -#define _GUARD_NOS_INT 382 -#define _GUARD_NOT_EXHAUSTED_LIST 383 -#define _GUARD_NOT_EXHAUSTED_RANGE 384 -#define _GUARD_NOT_EXHAUSTED_TUPLE 385 -#define _GUARD_TOS_FLOAT 386 -#define _GUARD_TOS_INT 387 -#define _GUARD_TYPE_VERSION 388 -#define _GUARD_TYPE_VERSION_AND_LOCK 389 +#define 
_GUARD_BINARY_OP_EXTEND 369 +#define _GUARD_BOTH_FLOAT 370 +#define _GUARD_BOTH_INT 371 +#define _GUARD_BOTH_UNICODE 372 +#define _GUARD_BUILTINS_VERSION_PUSH_KEYS 373 +#define _GUARD_DORV_NO_DICT 374 +#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 375 +#define _GUARD_GLOBALS_VERSION 376 +#define _GUARD_GLOBALS_VERSION_PUSH_KEYS 377 +#define _GUARD_IS_FALSE_POP 378 +#define _GUARD_IS_NONE_POP 379 +#define _GUARD_IS_NOT_NONE_POP 380 +#define _GUARD_IS_TRUE_POP 381 +#define _GUARD_KEYS_VERSION 382 +#define _GUARD_NOS_FLOAT 383 +#define _GUARD_NOS_INT 384 +#define _GUARD_NOT_EXHAUSTED_LIST 385 +#define _GUARD_NOT_EXHAUSTED_RANGE 386 +#define _GUARD_NOT_EXHAUSTED_TUPLE 387 +#define _GUARD_TOS_FLOAT 388 +#define _GUARD_TOS_INT 389 +#define _GUARD_TYPE_VERSION 390 +#define _GUARD_TYPE_VERSION_AND_LOCK 391 #define _IMPORT_FROM IMPORT_FROM #define _IMPORT_NAME IMPORT_NAME -#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 390 -#define _INIT_CALL_PY_EXACT_ARGS 391 -#define _INIT_CALL_PY_EXACT_ARGS_0 392 -#define _INIT_CALL_PY_EXACT_ARGS_1 393 -#define _INIT_CALL_PY_EXACT_ARGS_2 394 -#define _INIT_CALL_PY_EXACT_ARGS_3 395 -#define _INIT_CALL_PY_EXACT_ARGS_4 396 +#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 392 +#define _INIT_CALL_PY_EXACT_ARGS 393 +#define _INIT_CALL_PY_EXACT_ARGS_0 394 +#define _INIT_CALL_PY_EXACT_ARGS_1 395 +#define _INIT_CALL_PY_EXACT_ARGS_2 396 +#define _INIT_CALL_PY_EXACT_ARGS_3 397 +#define _INIT_CALL_PY_EXACT_ARGS_4 398 #define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX #define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW #define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER @@ -162,144 +164,143 @@ extern "C" { #define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE #define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE #define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE -#define _INTERNAL_INCREMENT_OPT_COUNTER 397 -#define _IS_NONE 398 +#define _IS_NONE 399 #define _IS_OP IS_OP -#define _ITER_CHECK_LIST 399 -#define _ITER_CHECK_RANGE 400 -#define _ITER_CHECK_TUPLE 401 -#define _ITER_JUMP_LIST 402 -#define _ITER_JUMP_RANGE 403 -#define _ITER_JUMP_TUPLE 404 -#define _ITER_NEXT_LIST 405 -#define _ITER_NEXT_RANGE 406 -#define _ITER_NEXT_TUPLE 407 -#define _JUMP_TO_TOP 408 +#define _ITER_CHECK_LIST 400 +#define _ITER_CHECK_RANGE 401 +#define _ITER_CHECK_TUPLE 402 +#define _ITER_JUMP_LIST 403 +#define _ITER_JUMP_RANGE 404 +#define _ITER_JUMP_TUPLE 405 +#define _ITER_NEXT_LIST 406 +#define _ITER_NEXT_RANGE 407 +#define _ITER_NEXT_TUPLE 408 +#define _JUMP_TO_TOP 409 #define _LIST_APPEND LIST_APPEND #define _LIST_EXTEND LIST_EXTEND -#define _LOAD_ATTR 409 -#define _LOAD_ATTR_CLASS 410 -#define _LOAD_ATTR_CLASS_0 411 -#define _LOAD_ATTR_CLASS_1 412 +#define _LOAD_ATTR 410 +#define _LOAD_ATTR_CLASS 411 +#define _LOAD_ATTR_CLASS_0 412 +#define _LOAD_ATTR_CLASS_1 413 #define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN -#define _LOAD_ATTR_INSTANCE_VALUE 413 -#define _LOAD_ATTR_INSTANCE_VALUE_0 414 -#define _LOAD_ATTR_INSTANCE_VALUE_1 415 -#define _LOAD_ATTR_METHOD_LAZY_DICT 416 -#define _LOAD_ATTR_METHOD_NO_DICT 417 -#define _LOAD_ATTR_METHOD_WITH_VALUES 418 -#define _LOAD_ATTR_MODULE 419 -#define _LOAD_ATTR_MODULE_FROM_KEYS 420 -#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 421 -#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 422 -#define _LOAD_ATTR_PROPERTY_FRAME 423 -#define _LOAD_ATTR_SLOT 424 -#define _LOAD_ATTR_SLOT_0 425 -#define _LOAD_ATTR_SLOT_1 426 -#define _LOAD_ATTR_WITH_HINT 427 +#define 
_LOAD_ATTR_INSTANCE_VALUE 414 +#define _LOAD_ATTR_INSTANCE_VALUE_0 415 +#define _LOAD_ATTR_INSTANCE_VALUE_1 416 +#define _LOAD_ATTR_METHOD_LAZY_DICT 417 +#define _LOAD_ATTR_METHOD_NO_DICT 418 +#define _LOAD_ATTR_METHOD_WITH_VALUES 419 +#define _LOAD_ATTR_MODULE 420 +#define _LOAD_ATTR_MODULE_FROM_KEYS 421 +#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 422 +#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 423 +#define _LOAD_ATTR_PROPERTY_FRAME 424 +#define _LOAD_ATTR_SLOT 425 +#define _LOAD_ATTR_SLOT_0 426 +#define _LOAD_ATTR_SLOT_1 427 +#define _LOAD_ATTR_WITH_HINT 428 #define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS -#define _LOAD_BYTECODE 428 +#define _LOAD_BYTECODE 429 #define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT #define _LOAD_CONST LOAD_CONST #define _LOAD_CONST_IMMORTAL LOAD_CONST_IMMORTAL -#define _LOAD_CONST_INLINE 429 -#define _LOAD_CONST_INLINE_BORROW 430 -#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 431 -#define _LOAD_CONST_INLINE_WITH_NULL 432 +#define _LOAD_CONST_INLINE 430 +#define _LOAD_CONST_INLINE_BORROW 431 +#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 432 +#define _LOAD_CONST_INLINE_WITH_NULL 433 #define _LOAD_CONST_MORTAL LOAD_CONST_MORTAL #define _LOAD_DEREF LOAD_DEREF -#define _LOAD_FAST 433 -#define _LOAD_FAST_0 434 -#define _LOAD_FAST_1 435 -#define _LOAD_FAST_2 436 -#define _LOAD_FAST_3 437 -#define _LOAD_FAST_4 438 -#define _LOAD_FAST_5 439 -#define _LOAD_FAST_6 440 -#define _LOAD_FAST_7 441 +#define _LOAD_FAST 434 +#define _LOAD_FAST_0 435 +#define _LOAD_FAST_1 436 +#define _LOAD_FAST_2 437 +#define _LOAD_FAST_3 438 +#define _LOAD_FAST_4 439 +#define _LOAD_FAST_5 440 +#define _LOAD_FAST_6 441 +#define _LOAD_FAST_7 442 #define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR #define _LOAD_FAST_CHECK LOAD_FAST_CHECK #define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST #define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF #define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS -#define _LOAD_GLOBAL 442 -#define _LOAD_GLOBAL_BUILTINS 443 -#define _LOAD_GLOBAL_BUILTINS_FROM_KEYS 444 -#define _LOAD_GLOBAL_MODULE 445 -#define _LOAD_GLOBAL_MODULE_FROM_KEYS 446 +#define _LOAD_GLOBAL 443 +#define _LOAD_GLOBAL_BUILTINS 444 +#define _LOAD_GLOBAL_BUILTINS_FROM_KEYS 445 +#define _LOAD_GLOBAL_MODULE 446 +#define _LOAD_GLOBAL_MODULE_FROM_KEYS 447 #define _LOAD_LOCALS LOAD_LOCALS #define _LOAD_NAME LOAD_NAME -#define _LOAD_SMALL_INT 447 -#define _LOAD_SMALL_INT_0 448 -#define _LOAD_SMALL_INT_1 449 -#define _LOAD_SMALL_INT_2 450 -#define _LOAD_SMALL_INT_3 451 +#define _LOAD_SMALL_INT 448 +#define _LOAD_SMALL_INT_0 449 +#define _LOAD_SMALL_INT_1 450 +#define _LOAD_SMALL_INT_2 451 +#define _LOAD_SMALL_INT_3 452 #define _LOAD_SPECIAL LOAD_SPECIAL #define _LOAD_SUPER_ATTR_ATTR LOAD_SUPER_ATTR_ATTR #define _LOAD_SUPER_ATTR_METHOD LOAD_SUPER_ATTR_METHOD -#define _MAKE_CALLARGS_A_TUPLE 452 +#define _MAKE_CALLARGS_A_TUPLE 453 #define _MAKE_CELL MAKE_CELL #define _MAKE_FUNCTION MAKE_FUNCTION -#define _MAKE_WARM 453 +#define _MAKE_WARM 454 #define _MAP_ADD MAP_ADD #define _MATCH_CLASS MATCH_CLASS #define _MATCH_KEYS MATCH_KEYS #define _MATCH_MAPPING MATCH_MAPPING #define _MATCH_SEQUENCE MATCH_SEQUENCE -#define _MAYBE_EXPAND_METHOD 454 -#define _MAYBE_EXPAND_METHOD_KW 455 -#define _MONITOR_CALL 456 -#define _MONITOR_JUMP_BACKWARD 457 -#define _MONITOR_RESUME 458 +#define _MAYBE_EXPAND_METHOD 455 +#define _MAYBE_EXPAND_METHOD_KW 456 +#define _MONITOR_CALL 457 +#define _MONITOR_JUMP_BACKWARD 458 +#define _MONITOR_RESUME 459 #define _NOP NOP #define _POP_EXCEPT POP_EXCEPT -#define _POP_JUMP_IF_FALSE 459 
-#define _POP_JUMP_IF_TRUE 460 +#define _POP_JUMP_IF_FALSE 460 +#define _POP_JUMP_IF_TRUE 461 #define _POP_TOP POP_TOP -#define _POP_TOP_LOAD_CONST_INLINE_BORROW 461 +#define _POP_TOP_LOAD_CONST_INLINE_BORROW 462 #define _PUSH_EXC_INFO PUSH_EXC_INFO -#define _PUSH_FRAME 462 +#define _PUSH_FRAME 463 #define _PUSH_NULL PUSH_NULL -#define _PY_FRAME_GENERAL 463 -#define _PY_FRAME_KW 464 -#define _QUICKEN_RESUME 465 -#define _REPLACE_WITH_TRUE 466 +#define _PY_FRAME_GENERAL 464 +#define _PY_FRAME_KW 465 +#define _QUICKEN_RESUME 466 +#define _REPLACE_WITH_TRUE 467 #define _RESUME_CHECK RESUME_CHECK #define _RETURN_GENERATOR RETURN_GENERATOR #define _RETURN_VALUE RETURN_VALUE -#define _SAVE_RETURN_OFFSET 467 -#define _SEND 468 -#define _SEND_GEN_FRAME 469 +#define _SAVE_RETURN_OFFSET 468 +#define _SEND 469 +#define _SEND_GEN_FRAME 470 #define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS #define _SET_ADD SET_ADD #define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE #define _SET_UPDATE SET_UPDATE -#define _START_EXECUTOR 470 -#define _STORE_ATTR 471 -#define _STORE_ATTR_INSTANCE_VALUE 472 -#define _STORE_ATTR_SLOT 473 -#define _STORE_ATTR_WITH_HINT 474 +#define _START_EXECUTOR 471 +#define _STORE_ATTR 472 +#define _STORE_ATTR_INSTANCE_VALUE 473 +#define _STORE_ATTR_SLOT 474 +#define _STORE_ATTR_WITH_HINT 475 #define _STORE_DEREF STORE_DEREF -#define _STORE_FAST 475 -#define _STORE_FAST_0 476 -#define _STORE_FAST_1 477 -#define _STORE_FAST_2 478 -#define _STORE_FAST_3 479 -#define _STORE_FAST_4 480 -#define _STORE_FAST_5 481 -#define _STORE_FAST_6 482 -#define _STORE_FAST_7 483 +#define _STORE_FAST 476 +#define _STORE_FAST_0 477 +#define _STORE_FAST_1 478 +#define _STORE_FAST_2 479 +#define _STORE_FAST_3 480 +#define _STORE_FAST_4 481 +#define _STORE_FAST_5 482 +#define _STORE_FAST_6 483 +#define _STORE_FAST_7 484 #define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST #define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST #define _STORE_GLOBAL STORE_GLOBAL #define _STORE_NAME STORE_NAME -#define _STORE_SLICE 484 -#define _STORE_SUBSCR 485 +#define _STORE_SLICE 485 +#define _STORE_SUBSCR 486 #define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT #define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT #define _SWAP SWAP -#define _TIER2_RESUME_CHECK 486 -#define _TO_BOOL 487 +#define _TIER2_RESUME_CHECK 487 +#define _TO_BOOL 488 #define _TO_BOOL_BOOL TO_BOOL_BOOL #define _TO_BOOL_INT TO_BOOL_INT #define _TO_BOOL_LIST TO_BOOL_LIST @@ -309,13 +310,13 @@ extern "C" { #define _UNARY_NEGATIVE UNARY_NEGATIVE #define _UNARY_NOT UNARY_NOT #define _UNPACK_EX UNPACK_EX -#define _UNPACK_SEQUENCE 488 +#define _UNPACK_SEQUENCE 489 #define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST #define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE #define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE #define _WITH_EXCEPT_START WITH_EXCEPT_START #define _YIELD_VALUE YIELD_VALUE -#define MAX_UOP_ID 488 +#define MAX_UOP_ID 489 #ifdef __cplusplus } diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index 5670fe26f72071..c9bef792e5b3e3 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -82,6 +82,8 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG, [_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG, [_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG, + [_GUARD_BINARY_OP_EXTEND] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG, + [_BINARY_OP_EXTEND] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG, [_BINARY_SUBSCR] = 
HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -135,8 +137,8 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_STORE_DEREF] = HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ESCAPES_FLAG, [_COPY_FREE_VARS] = HAS_ARG_FLAG, [_BUILD_STRING] = HAS_ARG_FLAG | HAS_ERROR_FLAG, - [_BUILD_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG, - [_BUILD_LIST] = HAS_ARG_FLAG | HAS_ERROR_FLAG, + [_BUILD_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG, + [_BUILD_LIST] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG, [_LIST_EXTEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_SET_UPDATE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_BUILD_SET] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, @@ -286,7 +288,6 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_LOAD_GLOBAL_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_LOAD_GLOBAL_BUILTINS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, [_LOAD_ATTR_MODULE] = HAS_ARG_FLAG | HAS_DEOPT_FLAG, - [_INTERNAL_INCREMENT_OPT_COUNTER] = 0, [_DYNAMIC_EXIT] = HAS_ESCAPES_FLAG, [_START_EXECUTOR] = HAS_ESCAPES_FLAG, [_MAKE_WARM] = 0, @@ -309,6 +310,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_BINARY_OP_ADD_FLOAT] = "_BINARY_OP_ADD_FLOAT", [_BINARY_OP_ADD_INT] = "_BINARY_OP_ADD_INT", [_BINARY_OP_ADD_UNICODE] = "_BINARY_OP_ADD_UNICODE", + [_BINARY_OP_EXTEND] = "_BINARY_OP_EXTEND", [_BINARY_OP_INPLACE_ADD_UNICODE] = "_BINARY_OP_INPLACE_ADD_UNICODE", [_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT", [_BINARY_OP_MULTIPLY_INT] = "_BINARY_OP_MULTIPLY_INT", @@ -410,6 +412,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_GET_ITER] = "_GET_ITER", [_GET_LEN] = "_GET_LEN", [_GET_YIELD_FROM_ITER] = "_GET_YIELD_FROM_ITER", + [_GUARD_BINARY_OP_EXTEND] = "_GUARD_BINARY_OP_EXTEND", [_GUARD_BOTH_FLOAT] = "_GUARD_BOTH_FLOAT", [_GUARD_BOTH_INT] = "_GUARD_BOTH_INT", [_GUARD_BOTH_UNICODE] = "_GUARD_BOTH_UNICODE", @@ -441,7 +444,6 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = { [_INIT_CALL_PY_EXACT_ARGS_2] = "_INIT_CALL_PY_EXACT_ARGS_2", [_INIT_CALL_PY_EXACT_ARGS_3] = "_INIT_CALL_PY_EXACT_ARGS_3", [_INIT_CALL_PY_EXACT_ARGS_4] = "_INIT_CALL_PY_EXACT_ARGS_4", - [_INTERNAL_INCREMENT_OPT_COUNTER] = "_INTERNAL_INCREMENT_OPT_COUNTER", [_IS_NONE] = "_IS_NONE", [_IS_OP] = "_IS_OP", [_ITER_CHECK_LIST] = "_ITER_CHECK_LIST", @@ -711,6 +713,10 @@ int _PyUop_num_popped(int opcode, int oparg) return 2; case _BINARY_OP_INPLACE_ADD_UNICODE: return 2; + case _GUARD_BINARY_OP_EXTEND: + return 0; + case _BINARY_OP_EXTEND: + return 2; case _BINARY_SUBSCR: return 2; case _BINARY_SLICE: @@ -1119,8 +1125,6 @@ int _PyUop_num_popped(int opcode, int oparg) return 0; case _LOAD_ATTR_MODULE: return 1; - case _INTERNAL_INCREMENT_OPT_COUNTER: - return 1; case _DYNAMIC_EXIT: return 0; case _START_EXECUTOR: diff --git a/Include/opcode_ids.h b/Include/opcode_ids.h index f2d8963a1813c5..06b207b347e504 100644 --- a/Include/opcode_ids.h +++ b/Include/opcode_ids.h @@ -132,82 +132,83 @@ extern "C" { #define BINARY_OP_ADD_FLOAT 150 #define BINARY_OP_ADD_INT 151 #define BINARY_OP_ADD_UNICODE 152 -#define BINARY_OP_MULTIPLY_FLOAT 153 -#define BINARY_OP_MULTIPLY_INT 154 -#define BINARY_OP_SUBTRACT_FLOAT 155 -#define BINARY_OP_SUBTRACT_INT 156 -#define BINARY_SUBSCR_DICT 157 -#define BINARY_SUBSCR_GETITEM 158 -#define BINARY_SUBSCR_LIST_INT 159 -#define BINARY_SUBSCR_STR_INT 160 -#define BINARY_SUBSCR_TUPLE_INT 161 -#define CALL_ALLOC_AND_ENTER_INIT 162 -#define CALL_BOUND_METHOD_EXACT_ARGS 163 
-#define CALL_BOUND_METHOD_GENERAL 164 -#define CALL_BUILTIN_CLASS 165 -#define CALL_BUILTIN_FAST 166 -#define CALL_BUILTIN_FAST_WITH_KEYWORDS 167 -#define CALL_BUILTIN_O 168 -#define CALL_ISINSTANCE 169 -#define CALL_KW_BOUND_METHOD 170 -#define CALL_KW_NON_PY 171 -#define CALL_KW_PY 172 -#define CALL_LEN 173 -#define CALL_LIST_APPEND 174 -#define CALL_METHOD_DESCRIPTOR_FAST 175 -#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 176 -#define CALL_METHOD_DESCRIPTOR_NOARGS 177 -#define CALL_METHOD_DESCRIPTOR_O 178 -#define CALL_NON_PY_GENERAL 179 -#define CALL_PY_EXACT_ARGS 180 -#define CALL_PY_GENERAL 181 -#define CALL_STR_1 182 -#define CALL_TUPLE_1 183 -#define CALL_TYPE_1 184 -#define COMPARE_OP_FLOAT 185 -#define COMPARE_OP_INT 186 -#define COMPARE_OP_STR 187 -#define CONTAINS_OP_DICT 188 -#define CONTAINS_OP_SET 189 -#define FOR_ITER_GEN 190 -#define FOR_ITER_LIST 191 -#define FOR_ITER_RANGE 192 -#define FOR_ITER_TUPLE 193 -#define LOAD_ATTR_CLASS 194 -#define LOAD_ATTR_CLASS_WITH_METACLASS_CHECK 195 -#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 196 -#define LOAD_ATTR_INSTANCE_VALUE 197 -#define LOAD_ATTR_METHOD_LAZY_DICT 198 -#define LOAD_ATTR_METHOD_NO_DICT 199 -#define LOAD_ATTR_METHOD_WITH_VALUES 200 -#define LOAD_ATTR_MODULE 201 -#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 202 -#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 203 -#define LOAD_ATTR_PROPERTY 204 -#define LOAD_ATTR_SLOT 205 -#define LOAD_ATTR_WITH_HINT 206 -#define LOAD_CONST_IMMORTAL 207 -#define LOAD_CONST_MORTAL 208 -#define LOAD_GLOBAL_BUILTIN 209 -#define LOAD_GLOBAL_MODULE 210 -#define LOAD_SUPER_ATTR_ATTR 211 -#define LOAD_SUPER_ATTR_METHOD 212 -#define RESUME_CHECK 213 -#define SEND_GEN 214 -#define STORE_ATTR_INSTANCE_VALUE 215 -#define STORE_ATTR_SLOT 216 -#define STORE_ATTR_WITH_HINT 217 -#define STORE_SUBSCR_DICT 218 -#define STORE_SUBSCR_LIST_INT 219 -#define TO_BOOL_ALWAYS_TRUE 220 -#define TO_BOOL_BOOL 221 -#define TO_BOOL_INT 222 -#define TO_BOOL_LIST 223 -#define TO_BOOL_NONE 224 -#define TO_BOOL_STR 225 -#define UNPACK_SEQUENCE_LIST 226 -#define UNPACK_SEQUENCE_TUPLE 227 -#define UNPACK_SEQUENCE_TWO_TUPLE 228 +#define BINARY_OP_EXTEND 153 +#define BINARY_OP_MULTIPLY_FLOAT 154 +#define BINARY_OP_MULTIPLY_INT 155 +#define BINARY_OP_SUBTRACT_FLOAT 156 +#define BINARY_OP_SUBTRACT_INT 157 +#define BINARY_SUBSCR_DICT 158 +#define BINARY_SUBSCR_GETITEM 159 +#define BINARY_SUBSCR_LIST_INT 160 +#define BINARY_SUBSCR_STR_INT 161 +#define BINARY_SUBSCR_TUPLE_INT 162 +#define CALL_ALLOC_AND_ENTER_INIT 163 +#define CALL_BOUND_METHOD_EXACT_ARGS 164 +#define CALL_BOUND_METHOD_GENERAL 165 +#define CALL_BUILTIN_CLASS 166 +#define CALL_BUILTIN_FAST 167 +#define CALL_BUILTIN_FAST_WITH_KEYWORDS 168 +#define CALL_BUILTIN_O 169 +#define CALL_ISINSTANCE 170 +#define CALL_KW_BOUND_METHOD 171 +#define CALL_KW_NON_PY 172 +#define CALL_KW_PY 173 +#define CALL_LEN 174 +#define CALL_LIST_APPEND 175 +#define CALL_METHOD_DESCRIPTOR_FAST 176 +#define CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS 177 +#define CALL_METHOD_DESCRIPTOR_NOARGS 178 +#define CALL_METHOD_DESCRIPTOR_O 179 +#define CALL_NON_PY_GENERAL 180 +#define CALL_PY_EXACT_ARGS 181 +#define CALL_PY_GENERAL 182 +#define CALL_STR_1 183 +#define CALL_TUPLE_1 184 +#define CALL_TYPE_1 185 +#define COMPARE_OP_FLOAT 186 +#define COMPARE_OP_INT 187 +#define COMPARE_OP_STR 188 +#define CONTAINS_OP_DICT 189 +#define CONTAINS_OP_SET 190 +#define FOR_ITER_GEN 191 +#define FOR_ITER_LIST 192 +#define FOR_ITER_RANGE 193 +#define FOR_ITER_TUPLE 194 +#define LOAD_ATTR_CLASS 195 +#define 
LOAD_ATTR_CLASS_WITH_METACLASS_CHECK 196 +#define LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN 197 +#define LOAD_ATTR_INSTANCE_VALUE 198 +#define LOAD_ATTR_METHOD_LAZY_DICT 199 +#define LOAD_ATTR_METHOD_NO_DICT 200 +#define LOAD_ATTR_METHOD_WITH_VALUES 201 +#define LOAD_ATTR_MODULE 202 +#define LOAD_ATTR_NONDESCRIPTOR_NO_DICT 203 +#define LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 204 +#define LOAD_ATTR_PROPERTY 205 +#define LOAD_ATTR_SLOT 206 +#define LOAD_ATTR_WITH_HINT 207 +#define LOAD_CONST_IMMORTAL 208 +#define LOAD_CONST_MORTAL 209 +#define LOAD_GLOBAL_BUILTIN 210 +#define LOAD_GLOBAL_MODULE 211 +#define LOAD_SUPER_ATTR_ATTR 212 +#define LOAD_SUPER_ATTR_METHOD 213 +#define RESUME_CHECK 214 +#define SEND_GEN 215 +#define STORE_ATTR_INSTANCE_VALUE 216 +#define STORE_ATTR_SLOT 217 +#define STORE_ATTR_WITH_HINT 218 +#define STORE_SUBSCR_DICT 219 +#define STORE_SUBSCR_LIST_INT 220 +#define TO_BOOL_ALWAYS_TRUE 221 +#define TO_BOOL_BOOL 222 +#define TO_BOOL_INT 223 +#define TO_BOOL_LIST 224 +#define TO_BOOL_NONE 225 +#define TO_BOOL_STR 226 +#define UNPACK_SEQUENCE_LIST 227 +#define UNPACK_SEQUENCE_TUPLE 228 +#define UNPACK_SEQUENCE_TWO_TUPLE 229 #define INSTRUMENTED_END_FOR 235 #define INSTRUMENTED_POP_ITER 236 #define INSTRUMENTED_END_SEND 237 diff --git a/Include/pyport.h b/Include/pyport.h index 2b6bd4c21110e5..aabd094df54a74 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -625,8 +625,13 @@ extern "C" { // case 2: code; break; // } // -// __attribute__((fallthrough)) was introduced in GCC 7. -#if _Py__has_attribute(fallthrough) +// __attribute__((fallthrough)) was introduced in GCC 7 and Clang 10 / +// Apple Clang 12.0. Earlier Clang versions support only the C++11 +// style fallthrough attribute, not the GCC extension syntax used here, +// and __has_attribute(fallthrough) evaluates to 1. +#if _Py__has_attribute(fallthrough) && (!defined(__clang__) || \ + (!defined(__apple_build_version__) && __clang_major__ >= 10) || \ + (defined(__apple_build_version__) && __clang_major__ >= 12)) # define _Py_FALLTHROUGH __attribute__((fallthrough)) #else # define _Py_FALLTHROUGH do { } while (0) diff --git a/InternalDocs/README.md b/InternalDocs/README.md index 794b4f3c6aad42..4502902307cd5c 100644 --- a/InternalDocs/README.md +++ b/InternalDocs/README.md @@ -25,7 +25,7 @@ Runtime Objects - [Code Objects](code_objects.md) -- [Generators (coming soon)](generators.md) +- [Generators](generators.md) - [Frames](frames.md) diff --git a/InternalDocs/generators.md b/InternalDocs/generators.md index afa8b8f4bb8040..87fbb91236844b 100644 --- a/InternalDocs/generators.md +++ b/InternalDocs/generators.md @@ -1,8 +1,106 @@ + +Generators and Coroutines +========================= + Generators -========== +---------- + +Generators in CPython are implemented with the struct `PyGenObject`. +They consist of a [`frame`](frames.md) and metadata about the generator's +execution state. + +A generator object resumes execution in its frame when its `send()` +method is called. This is analogous to a function executing in its own +frame when it is called, but a function returns to the calling frame only once, +while a generator "returns" execution to the caller's frame every time +it emits a new item with a +[`yield` expression](https://docs.python.org/dev/reference/expressions.html#yield-expressions). 
+This is implemented by the +[`YIELD_VALUE`](https://docs.python.org/dev/library/dis.html#opcode-YIELD_VALUE) +bytecode, which is similar to +[`RETURN_VALUE`](https://docs.python.org/dev/library/dis.html#opcode-RETURN_VALUE) +in the sense that it puts a value on the stack and returns execution to the +calling frame, but it also needs to perform additional work to leave the generator +frame in a state that allows it to be resumed. In particular, it updates the frame's +instruction pointer and stores the interpreter's exception state on the generator +object. When the generator is resumed, this exception state is copied back to the +interpreter state. + +The `frame` of a generator is embedded in the generator object struct as a +[`_PyInterpreterFrame`](frames.md) (see `_PyGenObject_HEAD` in +[`pycore_genobject.h`](../Include/internal/pycore_genobject.h)). +This means that we can get the frame from the generator or the generator +from the frame (see `_PyGen_GetGeneratorFromFrame` in the same file). +Other fields of the generator struct include metadata (such as the name of +the generator function) and runtime state information (such as whether its +frame is executing, suspended, cleared, etc.). + +Generator Object Creation and Destruction +----------------------------------------- + +The bytecode of a generator function begins with a +[`RETURN_GENERATOR`](https://docs.python.org/dev/library/dis.html#opcode-RETURN_GENERATOR) +instruction, which creates a generator object, including its embedded frame. +The generator's frame is initialized as a copy of the frame in which +`RETURN_GENERATOR` is executing, but its `owner` field is overwritten to indicate +that it is owned by a generator. Finally, `RETURN_GENERATOR` pushes the new generator +object to the stack and returns to the caller of the generator function (at +which time its frame is destroyed). When the generator is next resumed by +[`gen_send_ex2()`](../Objects/genobject.c), `_PyEval_EvalFrame()` is called +to continue executing the generator function, in the frame that is embedded in +the generator object. + +When a generator object is destroyed in [`gen_dealloc`](../Objects/genobject.c), +its embedded `_PyInterpreterFrame` field may need to be preserved, if it is exposed +to Python as part of a [`PyFrameObject`](frames.md#frame-objects). This is detected +in [`_PyFrame_ClearExceptCode`](../Python/frame.c) by the fact that the interpreter +frame's `frame_obj` field is set, and the frame object it points to has refcount +greater than 1. If so, the `take_ownership()` function is called to create a new +copy of the interpreter frame and transfer ownership of it from the generator to +the frame object. + +Iteration +--------- + +The [`FOR_ITER`](https://docs.python.org/dev/library/dis.html#opcode-FOR_ITER) +instruction calls `__next__` on the iterator which is on the top of the stack, +and pushes the result to the stack. It has [`specializations`](adaptive.md) +for a few common iterator types, including `FOR_ITER_GEN`, for iterating over +a generator. `FOR_ITER_GEN` bypasses the call to `__next__`, and instead +directly pushes the generator stack and resumes its execution from the +instruction that follows the last yield. + +Chained Generators +------------------ + +A `yield from` expression creates a generator that efficiently yields the +sequence created by another generator. 
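For example, here is an illustrative sketch of the send/yield protocol and of `yield from` delegation described in this document (plain Python with invented generator names; it is an example, not part of the patch itself):

```python
def subgen():
    # Each `yield` suspends this frame; the value passed to send()
    # becomes the result of the `yield` expression on resumption.
    received = yield "first"
    yield f"subgen got {received!r}"
    return "done"  # surfaces as StopIteration.value to the delegating frame

def delegator():
    # `yield from` forwards send() values down the chain and receives
    # the subgenerator's return value when StopIteration is raised.
    result = yield from subgen()
    yield f"subgen returned {result!r}"

g = delegator()
print(next(g))       # 'first'
print(g.send(42))    # "subgen got 42"
print(next(g))       # "subgen returned 'done'"
```

On current CPython, `import dis; dis.dis(delegator)` should show the `SEND` and `YIELD_VALUE` instructions that drive this loop, as described next.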
This delegation is implemented with the +[`SEND` instruction](https://docs.python.org/dev/library/dis.html#opcode-SEND), +which pushes the value of its arg to the stack of the generator's frame, sets +the exception state on this frame, and resumes execution of the chained generator. +On return from `SEND`, the value at the top of the stack is sent back up +the generator chain with a `YIELD_VALUE`. This sequence of `SEND` followed by +`YIELD_VALUE` is repeated in a loop, until a `StopIteration` exception is +raised to indicate that the generator has no more values to emit. + +The [`CLEANUP_THROW`](https://docs.python.org/dev/library/dis.html#opcode-CLEANUP_THROW) +instruction is used to handle exceptions raised from the send-yield loop. +Exceptions of type `StopIteration` are handled; their `value` field holds the +value to be returned by the generator's `close()` function. Any other +exception is re-raised by `CLEANUP_THROW`. + +Coroutines +---------- -Coming soon. +Coroutines are generators that use the value returned from a `yield` expression, +i.e., the argument that was passed to the `.send()` call that resumed it after +it yielded. This makes it possible for data to flow in both directions: from +the generator to the caller via the argument of the `yield` expression, and +from the caller to the generator via the send argument to the `send()` call. +A `yield from` expression passes the `send` argument to the chained generator, +so this data flow works along the chain (see `gen_send_ex2()` in +[`genobject.c`](../Objects/genobject.c)). - +Recall that a generator's `__next__` function simply calls `self.send(None)`, +so all this works the same in generators and coroutines, but only coroutines +use the value of the argument to `send`. diff --git a/Lib/_colorize.py b/Lib/_colorize.py index f609901887a26b..5e36e45734a5fb 100644 --- a/Lib/_colorize.py +++ b/Lib/_colorize.py @@ -26,30 +26,32 @@ class ANSIColors: setattr(NoColors, attr, "") -def get_colors(colorize: bool = False) -> ANSIColors: - if colorize or can_colorize(): +def get_colors(colorize: bool = False, *, file=None) -> ANSIColors: + if colorize or can_colorize(file=file): return ANSIColors() else: return NoColors -def can_colorize() -> bool: +def can_colorize(*, file=None) -> bool: + if file is None: + file = sys.stdout + if not sys.flags.ignore_environment: if os.environ.get("PYTHON_COLORS") == "0": return False if os.environ.get("PYTHON_COLORS") == "1": return True - if "NO_COLOR" in os.environ: - return False + if "NO_COLOR" in os.environ: + return False if not COLORIZE: return False - if not sys.flags.ignore_environment: - if "FORCE_COLOR" in os.environ: - return True - if os.environ.get("TERM") == "dumb": - return False + if "FORCE_COLOR" in os.environ: + return True + if os.environ.get("TERM") == "dumb": + return False - if not hasattr(sys.stderr, "fileno"): + if not hasattr(file, "fileno"): return False if sys.platform == "win32": @@ -62,6 +64,6 @@ def can_colorize() -> bool: return False try: - return os.isatty(sys.stderr.fileno()) + return os.isatty(file.fileno()) except io.UnsupportedOperation: - return sys.stderr.isatty() + return file.isatty() diff --git a/Lib/_opcode_metadata.py b/Lib/_opcode_metadata.py index d93e8d8df8fe4b..7dd528ef74df33 100644 --- a/Lib/_opcode_metadata.py +++ b/Lib/_opcode_metadata.py @@ -26,6 +26,7 @@ "BINARY_OP_ADD_FLOAT", "BINARY_OP_SUBTRACT_FLOAT", "BINARY_OP_ADD_UNICODE", + "BINARY_OP_EXTEND", "BINARY_OP_INPLACE_ADD_UNICODE", ], "BINARY_SUBSCR": [ @@ -123,83 +124,84 @@ 'BINARY_OP_ADD_FLOAT': 150,
'BINARY_OP_ADD_INT': 151, 'BINARY_OP_ADD_UNICODE': 152, + 'BINARY_OP_EXTEND': 153, 'BINARY_OP_INPLACE_ADD_UNICODE': 3, - 'BINARY_OP_MULTIPLY_FLOAT': 153, - 'BINARY_OP_MULTIPLY_INT': 154, - 'BINARY_OP_SUBTRACT_FLOAT': 155, - 'BINARY_OP_SUBTRACT_INT': 156, - 'BINARY_SUBSCR_DICT': 157, - 'BINARY_SUBSCR_GETITEM': 158, - 'BINARY_SUBSCR_LIST_INT': 159, - 'BINARY_SUBSCR_STR_INT': 160, - 'BINARY_SUBSCR_TUPLE_INT': 161, - 'CALL_ALLOC_AND_ENTER_INIT': 162, - 'CALL_BOUND_METHOD_EXACT_ARGS': 163, - 'CALL_BOUND_METHOD_GENERAL': 164, - 'CALL_BUILTIN_CLASS': 165, - 'CALL_BUILTIN_FAST': 166, - 'CALL_BUILTIN_FAST_WITH_KEYWORDS': 167, - 'CALL_BUILTIN_O': 168, - 'CALL_ISINSTANCE': 169, - 'CALL_KW_BOUND_METHOD': 170, - 'CALL_KW_NON_PY': 171, - 'CALL_KW_PY': 172, - 'CALL_LEN': 173, - 'CALL_LIST_APPEND': 174, - 'CALL_METHOD_DESCRIPTOR_FAST': 175, - 'CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS': 176, - 'CALL_METHOD_DESCRIPTOR_NOARGS': 177, - 'CALL_METHOD_DESCRIPTOR_O': 178, - 'CALL_NON_PY_GENERAL': 179, - 'CALL_PY_EXACT_ARGS': 180, - 'CALL_PY_GENERAL': 181, - 'CALL_STR_1': 182, - 'CALL_TUPLE_1': 183, - 'CALL_TYPE_1': 184, - 'COMPARE_OP_FLOAT': 185, - 'COMPARE_OP_INT': 186, - 'COMPARE_OP_STR': 187, - 'CONTAINS_OP_DICT': 188, - 'CONTAINS_OP_SET': 189, - 'FOR_ITER_GEN': 190, - 'FOR_ITER_LIST': 191, - 'FOR_ITER_RANGE': 192, - 'FOR_ITER_TUPLE': 193, - 'LOAD_ATTR_CLASS': 194, - 'LOAD_ATTR_CLASS_WITH_METACLASS_CHECK': 195, - 'LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN': 196, - 'LOAD_ATTR_INSTANCE_VALUE': 197, - 'LOAD_ATTR_METHOD_LAZY_DICT': 198, - 'LOAD_ATTR_METHOD_NO_DICT': 199, - 'LOAD_ATTR_METHOD_WITH_VALUES': 200, - 'LOAD_ATTR_MODULE': 201, - 'LOAD_ATTR_NONDESCRIPTOR_NO_DICT': 202, - 'LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES': 203, - 'LOAD_ATTR_PROPERTY': 204, - 'LOAD_ATTR_SLOT': 205, - 'LOAD_ATTR_WITH_HINT': 206, - 'LOAD_CONST_IMMORTAL': 207, - 'LOAD_CONST_MORTAL': 208, - 'LOAD_GLOBAL_BUILTIN': 209, - 'LOAD_GLOBAL_MODULE': 210, - 'LOAD_SUPER_ATTR_ATTR': 211, - 'LOAD_SUPER_ATTR_METHOD': 212, - 'RESUME_CHECK': 213, - 'SEND_GEN': 214, - 'STORE_ATTR_INSTANCE_VALUE': 215, - 'STORE_ATTR_SLOT': 216, - 'STORE_ATTR_WITH_HINT': 217, - 'STORE_SUBSCR_DICT': 218, - 'STORE_SUBSCR_LIST_INT': 219, - 'TO_BOOL_ALWAYS_TRUE': 220, - 'TO_BOOL_BOOL': 221, - 'TO_BOOL_INT': 222, - 'TO_BOOL_LIST': 223, - 'TO_BOOL_NONE': 224, - 'TO_BOOL_STR': 225, - 'UNPACK_SEQUENCE_LIST': 226, - 'UNPACK_SEQUENCE_TUPLE': 227, - 'UNPACK_SEQUENCE_TWO_TUPLE': 228, + 'BINARY_OP_MULTIPLY_FLOAT': 154, + 'BINARY_OP_MULTIPLY_INT': 155, + 'BINARY_OP_SUBTRACT_FLOAT': 156, + 'BINARY_OP_SUBTRACT_INT': 157, + 'BINARY_SUBSCR_DICT': 158, + 'BINARY_SUBSCR_GETITEM': 159, + 'BINARY_SUBSCR_LIST_INT': 160, + 'BINARY_SUBSCR_STR_INT': 161, + 'BINARY_SUBSCR_TUPLE_INT': 162, + 'CALL_ALLOC_AND_ENTER_INIT': 163, + 'CALL_BOUND_METHOD_EXACT_ARGS': 164, + 'CALL_BOUND_METHOD_GENERAL': 165, + 'CALL_BUILTIN_CLASS': 166, + 'CALL_BUILTIN_FAST': 167, + 'CALL_BUILTIN_FAST_WITH_KEYWORDS': 168, + 'CALL_BUILTIN_O': 169, + 'CALL_ISINSTANCE': 170, + 'CALL_KW_BOUND_METHOD': 171, + 'CALL_KW_NON_PY': 172, + 'CALL_KW_PY': 173, + 'CALL_LEN': 174, + 'CALL_LIST_APPEND': 175, + 'CALL_METHOD_DESCRIPTOR_FAST': 176, + 'CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS': 177, + 'CALL_METHOD_DESCRIPTOR_NOARGS': 178, + 'CALL_METHOD_DESCRIPTOR_O': 179, + 'CALL_NON_PY_GENERAL': 180, + 'CALL_PY_EXACT_ARGS': 181, + 'CALL_PY_GENERAL': 182, + 'CALL_STR_1': 183, + 'CALL_TUPLE_1': 184, + 'CALL_TYPE_1': 185, + 'COMPARE_OP_FLOAT': 186, + 'COMPARE_OP_INT': 187, + 'COMPARE_OP_STR': 188, + 'CONTAINS_OP_DICT': 189, + 'CONTAINS_OP_SET': 190, + 
'FOR_ITER_GEN': 191, + 'FOR_ITER_LIST': 192, + 'FOR_ITER_RANGE': 193, + 'FOR_ITER_TUPLE': 194, + 'LOAD_ATTR_CLASS': 195, + 'LOAD_ATTR_CLASS_WITH_METACLASS_CHECK': 196, + 'LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN': 197, + 'LOAD_ATTR_INSTANCE_VALUE': 198, + 'LOAD_ATTR_METHOD_LAZY_DICT': 199, + 'LOAD_ATTR_METHOD_NO_DICT': 200, + 'LOAD_ATTR_METHOD_WITH_VALUES': 201, + 'LOAD_ATTR_MODULE': 202, + 'LOAD_ATTR_NONDESCRIPTOR_NO_DICT': 203, + 'LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES': 204, + 'LOAD_ATTR_PROPERTY': 205, + 'LOAD_ATTR_SLOT': 206, + 'LOAD_ATTR_WITH_HINT': 207, + 'LOAD_CONST_IMMORTAL': 208, + 'LOAD_CONST_MORTAL': 209, + 'LOAD_GLOBAL_BUILTIN': 210, + 'LOAD_GLOBAL_MODULE': 211, + 'LOAD_SUPER_ATTR_ATTR': 212, + 'LOAD_SUPER_ATTR_METHOD': 213, + 'RESUME_CHECK': 214, + 'SEND_GEN': 215, + 'STORE_ATTR_INSTANCE_VALUE': 216, + 'STORE_ATTR_SLOT': 217, + 'STORE_ATTR_WITH_HINT': 218, + 'STORE_SUBSCR_DICT': 219, + 'STORE_SUBSCR_LIST_INT': 220, + 'TO_BOOL_ALWAYS_TRUE': 221, + 'TO_BOOL_BOOL': 222, + 'TO_BOOL_INT': 223, + 'TO_BOOL_LIST': 224, + 'TO_BOOL_NONE': 225, + 'TO_BOOL_STR': 226, + 'UNPACK_SEQUENCE_LIST': 227, + 'UNPACK_SEQUENCE_TUPLE': 228, + 'UNPACK_SEQUENCE_TWO_TUPLE': 229, } opmap = { diff --git a/Lib/_pyrepl/commands.py b/Lib/_pyrepl/commands.py index c3fce91013b001..503ca1da329eaa 100644 --- a/Lib/_pyrepl/commands.py +++ b/Lib/_pyrepl/commands.py @@ -282,7 +282,7 @@ def do(self) -> None: x, y = r.pos2xy() new_y = y + 1 - if new_y > r.max_row(): + if r.eol() == len(b): if r.historyi < len(r.history): r.select_item(r.historyi + 1) r.pos = r.eol(0) @@ -309,7 +309,7 @@ def do(self) -> None: class left(MotionCommand): def do(self) -> None: r = self.reader - for i in range(r.get_arg()): + for _ in range(r.get_arg()): p = r.pos - 1 if p >= 0: r.pos = p @@ -321,7 +321,7 @@ class right(MotionCommand): def do(self) -> None: r = self.reader b = r.buffer - for i in range(r.get_arg()): + for _ in range(r.get_arg()): p = r.pos + 1 if p <= len(b): r.pos = p @@ -459,9 +459,15 @@ def do(self) -> None: from site import gethistoryfile # type: ignore[attr-defined] history = os.linesep.join(self.reader.history[:]) - with self.reader.suspend(): - pager = get_pager() - pager(history, gethistoryfile()) + self.reader.console.restore() + pager = get_pager() + pager(history, gethistoryfile()) + self.reader.console.prepare() + + # We need to copy over the state so that it's consistent between + # console and reader, and console does not overwrite/append stuff + self.reader.console.screen = self.reader.screen.copy() + self.reader.console.posxy = self.reader.cxy class paste_mode(Command): diff --git a/Lib/_pyrepl/completing_reader.py b/Lib/_pyrepl/completing_reader.py index e856bb9807c7f6..1cd4b6367ca8b1 100644 --- a/Lib/_pyrepl/completing_reader.py +++ b/Lib/_pyrepl/completing_reader.py @@ -260,10 +260,15 @@ def after_command(self, cmd: Command) -> None: def calc_screen(self) -> list[str]: screen = super().calc_screen() if self.cmpltn_menu_visible: - ly = self.lxy[1] + # We display the completions menu below the current prompt + ly = self.lxy[1] + 1 screen[ly:ly] = self.cmpltn_menu - self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu) - self.cxy = self.cxy[0], self.cxy[1] + len(self.cmpltn_menu) + # If we're not in the middle of multiline edit, don't append to screeninfo + # since that screws up the position calculation in pos2xy function. + # This is a hack to prevent the cursor jumping + # into the completions menu when pressing left or down arrow. 
+ if self.pos != len(self.buffer): + self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu) return screen def finish(self) -> None: diff --git a/Lib/_pyrepl/console.py b/Lib/_pyrepl/console.py index 03266c4dfc2dd8..0d78890b4f45d5 100644 --- a/Lib/_pyrepl/console.py +++ b/Lib/_pyrepl/console.py @@ -45,6 +45,7 @@ class Event: @dataclass class Console(ABC): + posxy: tuple[int, int] screen: list[str] = field(default_factory=list) height: int = 25 width: int = 80 diff --git a/Lib/_pyrepl/fancy_termios.py b/Lib/_pyrepl/fancy_termios.py index 5b85cb0f52521f..0468b9a2670267 100644 --- a/Lib/_pyrepl/fancy_termios.py +++ b/Lib/_pyrepl/fancy_termios.py @@ -40,7 +40,9 @@ def as_list(self): self.lflag, self.ispeed, self.ospeed, - self.cc, + # Always return a copy of the control characters list to ensure + # there are not any additional references to self.cc + self.cc[:], ] def copy(self): diff --git a/Lib/_pyrepl/historical_reader.py b/Lib/_pyrepl/historical_reader.py index 5d416f336ad5d2..c4b95fa2e81ee6 100644 --- a/Lib/_pyrepl/historical_reader.py +++ b/Lib/_pyrepl/historical_reader.py @@ -290,13 +290,17 @@ def get_item(self, i: int) -> str: @contextmanager def suspend(self) -> SimpleContextManager: - with super().suspend(): - try: - old_history = self.history[:] - del self.history[:] - yield - finally: - self.history[:] = old_history + with super().suspend(), self.suspend_history(): + yield + + @contextmanager + def suspend_history(self) -> SimpleContextManager: + try: + old_history = self.history[:] + del self.history[:] + yield + finally: + self.history[:] = old_history def prepare(self) -> None: super().prepare() diff --git a/Lib/_pyrepl/simple_interact.py b/Lib/_pyrepl/simple_interact.py index a5033496712a73..a065174ad42fb6 100644 --- a/Lib/_pyrepl/simple_interact.py +++ b/Lib/_pyrepl/simple_interact.py @@ -77,7 +77,7 @@ def _clear_screen(): "exit": _sitebuiltins.Quitter('exit', ''), "quit": _sitebuiltins.Quitter('quit' ,''), "copyright": _sitebuiltins._Printer('copyright', sys.copyright), - "help": "help", + "help": _sitebuiltins._Helper(), "clear": _clear_screen, "\x1a": _sitebuiltins.Quitter('\x1a', ''), } @@ -124,18 +124,10 @@ def maybe_run_command(statement: str) -> bool: reader.history.pop() # skip internal commands in history command = REPL_COMMANDS[statement] if callable(command): - command() + # Make sure that history does not change because of commands + with reader.suspend_history(): + command() return True - - if isinstance(command, str): - # Internal readline commands require a prepared reader like - # inside multiline_input. 
- reader.prepare() - reader.refresh() - reader.do_cmd((command, [statement])) - reader.restore() - return True - return False while True: diff --git a/Lib/_pyrepl/unix_console.py b/Lib/_pyrepl/unix_console.py index 63e8fc24dd7625..96379bc20f3357 100644 --- a/Lib/_pyrepl/unix_console.py +++ b/Lib/_pyrepl/unix_console.py @@ -240,7 +240,7 @@ def refresh(self, screen, c_xy): self.__hide_cursor() self.__move(0, len(self.screen) - 1) self.__write("\n") - self.__posxy = 0, len(self.screen) + self.posxy = 0, len(self.screen) self.screen.append("") else: while len(self.screen) < len(screen): @@ -250,7 +250,7 @@ def refresh(self, screen, c_xy): self.__gone_tall = 1 self.__move = self.__move_tall - px, py = self.__posxy + px, py = self.posxy old_offset = offset = self.__offset height = self.height @@ -271,7 +271,7 @@ def refresh(self, screen, c_xy): if old_offset > offset and self._ri: self.__hide_cursor() self.__write_code(self._cup, 0, 0) - self.__posxy = 0, old_offset + self.posxy = 0, old_offset for i in range(old_offset - offset): self.__write_code(self._ri) oldscr.pop(-1) @@ -279,7 +279,7 @@ def refresh(self, screen, c_xy): elif old_offset < offset and self._ind: self.__hide_cursor() self.__write_code(self._cup, self.height - 1, 0) - self.__posxy = 0, old_offset + self.height - 1 + self.posxy = 0, old_offset + self.height - 1 for i in range(offset - old_offset): self.__write_code(self._ind) oldscr.pop(0) @@ -299,7 +299,7 @@ def refresh(self, screen, c_xy): while y < len(oldscr): self.__hide_cursor() self.__move(0, y) - self.__posxy = 0, y + self.posxy = 0, y self.__write_code(self._el) y += 1 @@ -321,7 +321,7 @@ def move_cursor(self, x, y): self.event_queue.insert(Event("scroll", None)) else: self.__move(x, y) - self.__posxy = x, y + self.posxy = x, y self.flushoutput() def prepare(self): @@ -350,7 +350,7 @@ def prepare(self): self.__buffer = [] - self.__posxy = 0, 0 + self.posxy = 0, 0 self.__gone_tall = 0 self.__move = self.__move_short self.__offset = 0 @@ -449,10 +449,12 @@ def getheightwidth(self): """ try: return int(os.environ["LINES"]), int(os.environ["COLUMNS"]) - except KeyError: - height, width = struct.unpack( - "hhhh", ioctl(self.input_fd, TIOCGWINSZ, b"\000" * 8) - )[0:2] + except (KeyError, TypeError, ValueError): + try: + size = ioctl(self.input_fd, TIOCGWINSZ, b"\000" * 8) + except OSError: + return 25, 80 + height, width = struct.unpack("hhhh", size)[0:2] if not height: return 25, 80 return height, width @@ -468,7 +470,7 @@ def getheightwidth(self): """ try: return int(os.environ["LINES"]), int(os.environ["COLUMNS"]) - except KeyError: + except (KeyError, TypeError, ValueError): return 25, 80 def forgetinput(self): @@ -559,7 +561,7 @@ def clear(self): self.__write_code(self._clear) self.__gone_tall = 1 self.__move = self.__move_tall - self.__posxy = 0, 0 + self.posxy = 0, 0 self.screen = [] @property @@ -644,8 +646,8 @@ def __write_changed_line(self, y, oldline, newline, px_coord): # if we need to insert a single character right after the first detected change if oldline[x_pos:] == newline[x_pos + 1 :] and self.ich1: if ( - y == self.__posxy[1] - and x_coord > self.__posxy[0] + y == self.posxy[1] + and x_coord > self.posxy[0] and oldline[px_pos:x_pos] == newline[px_pos + 1 : x_pos + 1] ): x_pos = px_pos @@ -654,7 +656,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord): self.__move(x_coord, y) self.__write_code(self.ich1) self.__write(newline[x_pos]) - self.__posxy = x_coord + character_width, y + self.posxy = x_coord + character_width, y # if it's a single 
character change in the middle of the line elif ( @@ -665,7 +667,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord): character_width = wlen(newline[x_pos]) self.__move(x_coord, y) self.__write(newline[x_pos]) - self.__posxy = x_coord + character_width, y + self.posxy = x_coord + character_width, y # if this is the last character to fit in the line and we edit in the middle of the line elif ( @@ -677,14 +679,14 @@ def __write_changed_line(self, y, oldline, newline, px_coord): ): self.__hide_cursor() self.__move(self.width - 2, y) - self.__posxy = self.width - 2, y + self.posxy = self.width - 2, y self.__write_code(self.dch1) character_width = wlen(newline[x_pos]) self.__move(x_coord, y) self.__write_code(self.ich1) self.__write(newline[x_pos]) - self.__posxy = character_width + 1, y + self.posxy = character_width + 1, y else: self.__hide_cursor() @@ -692,7 +694,7 @@ def __write_changed_line(self, y, oldline, newline, px_coord): if wlen(oldline) > wlen(newline): self.__write_code(self._el) self.__write(newline[x_pos:]) - self.__posxy = wlen(newline), y + self.posxy = wlen(newline), y if "\x1b" in newline: # ANSI escape characters are present, so we can't assume @@ -711,32 +713,36 @@ def __maybe_write_code(self, fmt, *args): self.__write_code(fmt, *args) def __move_y_cuu1_cud1(self, y): - dy = y - self.__posxy[1] + assert self._cud1 is not None + assert self._cuu1 is not None + dy = y - self.posxy[1] if dy > 0: self.__write_code(dy * self._cud1) elif dy < 0: self.__write_code((-dy) * self._cuu1) def __move_y_cuu_cud(self, y): - dy = y - self.__posxy[1] + dy = y - self.posxy[1] if dy > 0: self.__write_code(self._cud, dy) elif dy < 0: self.__write_code(self._cuu, -dy) def __move_x_hpa(self, x: int) -> None: - if x != self.__posxy[0]: + if x != self.posxy[0]: self.__write_code(self._hpa, x) def __move_x_cub1_cuf1(self, x: int) -> None: - dx = x - self.__posxy[0] + assert self._cuf1 is not None + assert self._cub1 is not None + dx = x - self.posxy[0] if dx > 0: self.__write_code(self._cuf1 * dx) elif dx < 0: self.__write_code(self._cub1 * (-dx)) def __move_x_cub_cuf(self, x: int) -> None: - dx = x - self.__posxy[0] + dx = x - self.posxy[0] if dx > 0: self.__write_code(self._cuf, dx) elif dx < 0: @@ -766,12 +772,12 @@ def __show_cursor(self): def repaint(self): if not self.__gone_tall: - self.__posxy = 0, self.__posxy[1] + self.posxy = 0, self.posxy[1] self.__write("\r") ns = len(self.screen) * ["\000" * self.width] self.screen = ns else: - self.__posxy = 0, self.__offset + self.posxy = 0, self.__offset self.__move(0, self.__offset) ns = self.height * ["\000" * self.width] self.screen = ns diff --git a/Lib/_pyrepl/utils.py b/Lib/_pyrepl/utils.py index 0f36083b6ffa92..4651717bd7e121 100644 --- a/Lib/_pyrepl/utils.py +++ b/Lib/_pyrepl/utils.py @@ -16,7 +16,7 @@ def str_width(c: str) -> int: def wlen(s: str) -> int: - if len(s) == 1: + if len(s) == 1 and s != '\x1a': return str_width(s) length = sum(str_width(i) for i in s) # remove lengths of any escape sequences diff --git a/Lib/_pyrepl/windows_console.py b/Lib/_pyrepl/windows_console.py index e738fd09c65758..e1ecd9845aefb4 100644 --- a/Lib/_pyrepl/windows_console.py +++ b/Lib/_pyrepl/windows_console.py @@ -152,10 +152,10 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: self._hide_cursor() self._move_relative(0, len(self.screen) - 1) self.__write("\n") - self.__posxy = 0, len(self.screen) + self.posxy = 0, len(self.screen) self.screen.append("") - px, py = self.__posxy + px, py = self.posxy old_offset = offset 
= self.__offset height = self.height @@ -171,7 +171,7 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: # portion of the window. We need to scroll the visible portion and the # entire history self._scroll(scroll_lines, self._getscrollbacksize()) - self.__posxy = self.__posxy[0], self.__posxy[1] + scroll_lines + self.posxy = self.posxy[0], self.posxy[1] + scroll_lines self.__offset += scroll_lines for i in range(scroll_lines): @@ -197,7 +197,7 @@ def refresh(self, screen: list[str], c_xy: tuple[int, int]) -> None: y = len(newscr) while y < len(oldscr): self._move_relative(0, y) - self.__posxy = 0, y + self.posxy = 0, y self._erase_to_end() y += 1 @@ -254,11 +254,11 @@ def __write_changed_line( if wlen(newline) == self.width: # If we wrapped we want to start at the next line self._move_relative(0, y + 1) - self.__posxy = 0, y + 1 + self.posxy = 0, y + 1 else: - self.__posxy = wlen(newline), y + self.posxy = wlen(newline), y - if "\x1b" in newline or y != self.__posxy[1] or '\x1a' in newline: + if "\x1b" in newline or y != self.posxy[1] or '\x1a' in newline: # ANSI escape characters are present, so we can't assume # anything about the position of the cursor. Moving the cursor # to the left margin should work to get to a known position. @@ -320,7 +320,7 @@ def prepare(self) -> None: self.screen = [] self.height, self.width = self.getheightwidth() - self.__posxy = 0, 0 + self.posxy = 0, 0 self.__gone_tall = 0 self.__offset = 0 @@ -328,9 +328,9 @@ def restore(self) -> None: pass def _move_relative(self, x: int, y: int) -> None: - """Moves relative to the current __posxy""" - dx = x - self.__posxy[0] - dy = y - self.__posxy[1] + """Moves relative to the current posxy""" + dx = x - self.posxy[0] + dy = y - self.posxy[1] if dx < 0: self.__write(MOVE_LEFT.format(-dx)) elif dx > 0: @@ -349,7 +349,7 @@ def move_cursor(self, x: int, y: int) -> None: self.event_queue.insert(0, Event("scroll", "")) else: self._move_relative(x, y) - self.__posxy = x, y + self.posxy = x, y def set_cursor_vis(self, visible: bool) -> None: if visible: @@ -455,7 +455,7 @@ def beep(self) -> None: def clear(self) -> None: """Wipe the screen""" self.__write(CLEAR) - self.__posxy = 0, 0 + self.posxy = 0, 0 self.screen = [""] def finish(self) -> None: diff --git a/Lib/asyncio/__init__.py b/Lib/asyncio/__init__.py index edb615b1b6b1c6..4be7112fa017d4 100644 --- a/Lib/asyncio/__init__.py +++ b/Lib/asyncio/__init__.py @@ -10,6 +10,7 @@ from .events import * from .exceptions import * from .futures import * +from .graph import * from .locks import * from .protocols import * from .runners import * @@ -27,6 +28,7 @@ events.__all__ + exceptions.__all__ + futures.__all__ + + graph.__all__ + locks.__all__ + protocols.__all__ + runners.__all__ + diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 6e6e5aaac15caf..ed852421e44212 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -458,25 +458,18 @@ def create_future(self): """Create a Future object attached to the loop.""" return futures.Future(loop=self) - def create_task(self, coro, *, name=None, context=None): + def create_task(self, coro, **kwargs): """Schedule a coroutine object. Return a task object. 
""" self._check_closed() - if self._task_factory is None: - task = tasks.Task(coro, loop=self, name=name, context=context) - if task._source_traceback: - del task._source_traceback[-1] - else: - if context is None: - # Use legacy API if context is not needed - task = self._task_factory(self, coro) - else: - task = self._task_factory(self, coro, context=context) - - task.set_name(name) + if self._task_factory is not None: + return self._task_factory(self, coro, **kwargs) + task = tasks.Task(coro, loop=self, **kwargs) + if task._source_traceback: + del task._source_traceback[-1] try: return task finally: @@ -490,9 +483,10 @@ def set_task_factory(self, factory): If factory is None the default task factory will be set. If factory is a callable, it should have a signature matching - '(loop, coro)', where 'loop' will be a reference to the active - event loop, 'coro' will be a coroutine object. The callable - must return a Future. + '(loop, coro, **kwargs)', where 'loop' will be a reference to the active + event loop, 'coro' will be a coroutine object, and **kwargs will be + arbitrary keyword arguments that should be passed on to Task. + The callable must return a Task. """ if factory is not None and not callable(factory): raise TypeError('task factory must be a callable or None') @@ -1593,7 +1587,9 @@ async def create_server( if reuse_address: sock.setsockopt( socket.SOL_SOCKET, socket.SO_REUSEADDR, True) - if reuse_port: + # Since Linux 6.12.9, SO_REUSEPORT is not allowed + # on other address families than AF_INET/AF_INET6. + if reuse_port and af in (socket.AF_INET, socket.AF_INET6): _set_reuseport(sock) if keep_alive: sock.setsockopt( diff --git a/Lib/asyncio/events.py b/Lib/asyncio/events.py index 2ee9870e80f20b..2e45b4fe6fa2dd 100644 --- a/Lib/asyncio/events.py +++ b/Lib/asyncio/events.py @@ -329,7 +329,7 @@ def create_future(self): # Method scheduling a coroutine object: create a task. - def create_task(self, coro, *, name=None, context=None): + def create_task(self, coro, **kwargs): raise NotImplementedError # Methods for interacting with threads. diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index c95fce035cd548..d1df6707302277 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -2,6 +2,7 @@ __all__ = ( 'Future', 'wrap_future', 'isfuture', + 'future_add_to_awaited_by', 'future_discard_from_awaited_by', ) import concurrent.futures @@ -62,10 +63,13 @@ class Future: # that it is not compatible by setting this to None. # - It is set by __iter__() below so that Task.__step() can tell # the difference between - # `await Future()` or`yield from Future()` (correct) vs. + # `await Future()` or `yield from Future()` (correct) vs. # `yield Future()` (incorrect). _asyncio_future_blocking = False + # Used by the capture_call_stack() API. 
+ __asyncio_awaited_by = None + __log_traceback = False def __init__(self, *, loop=None): @@ -115,6 +119,12 @@ def _log_traceback(self, val): raise ValueError('_log_traceback can only be set to False') self.__log_traceback = False + @property + def _asyncio_awaited_by(self): + if self.__asyncio_awaited_by is None: + return None + return frozenset(self.__asyncio_awaited_by) + def get_loop(self): """Return the event loop the Future is bound to.""" loop = self._loop @@ -415,6 +425,49 @@ def wrap_future(future, *, loop=None): return new_future +def future_add_to_awaited_by(fut, waiter, /): + """Record that `fut` is awaited on by `waiter`.""" + # For the sake of keeping the implementation minimal and assuming + # that most of asyncio users use the built-in Futures and Tasks + # (or their subclasses), we only support native Future objects + # and their subclasses. + # + # Longer version: tracking requires storing the caller-callee + # dependency somewhere. One obvious choice is to store that + # information right in the future itself in a dedicated attribute. + # This means that we'd have to require all duck-type compatible + # futures to implement a specific attribute used by asyncio for + # the book keeping. Another solution would be to store that in + # a global dictionary. The downside here is that that would create + # strong references and any scenario where the "add" call isn't + # followed by a "discard" call would lead to a memory leak. + # Using WeakDict would resolve that issue, but would complicate + # the C code (_asynciomodule.c). The bottom line here is that + # it's not clear that all this work would be worth the effort. + # + # Note that there's an accelerated version of this function + # shadowing this implementation later in this file. + if isinstance(fut, _PyFuture) and isinstance(waiter, _PyFuture): + if fut._Future__asyncio_awaited_by is None: + fut._Future__asyncio_awaited_by = set() + fut._Future__asyncio_awaited_by.add(waiter) + + +def future_discard_from_awaited_by(fut, waiter, /): + """Record that `fut` is no longer awaited on by `waiter`.""" + # See the comment in "future_add_to_awaited_by()" body for + # details on implementation. + # + # Note that there's an accelerated version of this function + # shadowing this implementation later in this file. + if isinstance(fut, _PyFuture) and isinstance(waiter, _PyFuture): + if fut._Future__asyncio_awaited_by is not None: + fut._Future__asyncio_awaited_by.discard(waiter) + + +_py_future_add_to_awaited_by = future_add_to_awaited_by +_py_future_discard_from_awaited_by = future_discard_from_awaited_by + try: import _asyncio except ImportError: @@ -422,3 +475,7 @@ def wrap_future(future, *, loop=None): else: # _CFuture is needed for tests. Future = _CFuture = _asyncio.Future + future_add_to_awaited_by = _asyncio.future_add_to_awaited_by + future_discard_from_awaited_by = _asyncio.future_discard_from_awaited_by + _c_future_add_to_awaited_by = future_add_to_awaited_by + _c_future_discard_from_awaited_by = future_discard_from_awaited_by diff --git a/Lib/asyncio/graph.py b/Lib/asyncio/graph.py new file mode 100644 index 00000000000000..d8df7c9919abbf --- /dev/null +++ b/Lib/asyncio/graph.py @@ -0,0 +1,278 @@ +"""Introspection utils for tasks call graphs.""" + +import dataclasses +import sys +import types + +from . import events +from . import futures +from . 
import tasks + +__all__ = ( + 'capture_call_graph', + 'format_call_graph', + 'print_call_graph', + 'FrameCallGraphEntry', + 'FutureCallGraph', +) + +if False: # for type checkers + from typing import TextIO + +# Sadly, we can't re-use the traceback module's datastructures as those +# are tailored for error reporting, whereas we need to represent an +# async call graph. +# +# Going with pretty verbose names as we'd like to export them to the +# top level asyncio namespace, and want to avoid future name clashes. + + +@dataclasses.dataclass(frozen=True, slots=True) +class FrameCallGraphEntry: + frame: types.FrameType + + +@dataclasses.dataclass(frozen=True, slots=True) +class FutureCallGraph: + future: futures.Future + call_stack: tuple["FrameCallGraphEntry", ...] + awaited_by: tuple["FutureCallGraph", ...] + + +def _build_graph_for_future( + future: futures.Future, + *, + limit: int | None = None, +) -> FutureCallGraph: + if not isinstance(future, futures.Future): + raise TypeError( + f"{future!r} object does not appear to be compatible " + f"with asyncio.Future" + ) + + coro = None + if get_coro := getattr(future, 'get_coro', None): + coro = get_coro() if limit != 0 else None + + st: list[FrameCallGraphEntry] = [] + awaited_by: list[FutureCallGraph] = [] + + while coro is not None: + if hasattr(coro, 'cr_await'): + # A native coroutine or duck-type compatible iterator + st.append(FrameCallGraphEntry(coro.cr_frame)) + coro = coro.cr_await + elif hasattr(coro, 'ag_await'): + # A native async generator or duck-type compatible iterator + st.append(FrameCallGraphEntry(coro.cr_frame)) + coro = coro.ag_await + else: + break + + if future._asyncio_awaited_by: + for parent in future._asyncio_awaited_by: + awaited_by.append(_build_graph_for_future(parent, limit=limit)) + + if limit is not None: + if limit > 0: + st = st[:limit] + elif limit < 0: + st = st[limit:] + st.reverse() + return FutureCallGraph(future, tuple(st), tuple(awaited_by)) + + +def capture_call_graph( + future: futures.Future | None = None, + /, + *, + depth: int = 1, + limit: int | None = None, +) -> FutureCallGraph | None: + """Capture the async call graph for the current task or the provided Future. + + The graph is represented with three data structures: + + * FutureCallGraph(future, call_stack, awaited_by) + + Where 'future' is an instance of asyncio.Future or asyncio.Task. + + 'call_stack' is a tuple of FrameGraphEntry objects. + + 'awaited_by' is a tuple of FutureCallGraph objects. + + * FrameCallGraphEntry(frame) + + Where 'frame' is a frame object of a regular Python function + in the call stack. + + Receives an optional 'future' argument. If not passed, + the current task will be used. If there's no current task, the function + returns None. + + If "capture_call_graph()" is introspecting *the current task*, the + optional keyword-only 'depth' argument can be used to skip the specified + number of frames from top of the stack. + + If the optional keyword-only 'limit' argument is provided, each call stack + in the resulting graph is truncated to include at most ``abs(limit)`` + entries. If 'limit' is positive, the entries left are the closest to + the invocation point. If 'limit' is negative, the topmost entries are + left. If 'limit' is omitted or None, all entries are present. + If 'limit' is 0, the call stack is not captured at all, only + "awaited by" information is present. 
+ """ + + loop = events._get_running_loop() + + if future is not None: + # Check if we're in a context of a running event loop; + # if yes - check if the passed future is the currently + # running task or not. + if loop is None or future is not tasks.current_task(loop=loop): + return _build_graph_for_future(future, limit=limit) + # else: future is the current task, move on. + else: + if loop is None: + raise RuntimeError( + 'capture_call_graph() is called outside of a running ' + 'event loop and no *future* to introspect was provided') + future = tasks.current_task(loop=loop) + + if future is None: + # This isn't a generic call stack introspection utility. If we + # can't determine the current task and none was provided, we + # just return. + return None + + if not isinstance(future, futures.Future): + raise TypeError( + f"{future!r} object does not appear to be compatible " + f"with asyncio.Future" + ) + + call_stack: list[FrameCallGraphEntry] = [] + + f = sys._getframe(depth) if limit != 0 else None + try: + while f is not None: + is_async = f.f_generator is not None + call_stack.append(FrameCallGraphEntry(f)) + + if is_async: + if f.f_back is not None and f.f_back.f_generator is None: + # We've reached the bottom of the coroutine stack, which + # must be the Task that runs it. + break + + f = f.f_back + finally: + del f + + awaited_by = [] + if future._asyncio_awaited_by: + for parent in future._asyncio_awaited_by: + awaited_by.append(_build_graph_for_future(parent, limit=limit)) + + if limit is not None: + limit *= -1 + if limit > 0: + call_stack = call_stack[:limit] + elif limit < 0: + call_stack = call_stack[limit:] + + return FutureCallGraph(future, tuple(call_stack), tuple(awaited_by)) + + +def format_call_graph( + future: futures.Future | None = None, + /, + *, + depth: int = 1, + limit: int | None = None, +) -> str: + """Return the async call graph as a string for `future`. + + If `future` is not provided, format the call graph for the current task. + """ + + def render_level(st: FutureCallGraph, buf: list[str], level: int) -> None: + def add_line(line: str) -> None: + buf.append(level * ' ' + line) + + if isinstance(st.future, tasks.Task): + add_line( + f'* Task(name={st.future.get_name()!r}, id={id(st.future):#x})' + ) + else: + add_line( + f'* Future(id={id(st.future):#x})' + ) + + if st.call_stack: + add_line( + f' + Call stack:' + ) + for ste in st.call_stack: + f = ste.frame + + if f.f_generator is None: + f = ste.frame + add_line( + f' | File {f.f_code.co_filename!r},' + f' line {f.f_lineno}, in' + f' {f.f_code.co_qualname}()' + ) + else: + c = f.f_generator + + try: + f = c.cr_frame + code = c.cr_code + tag = 'async' + except AttributeError: + try: + f = c.ag_frame + code = c.ag_code + tag = 'async generator' + except AttributeError: + f = c.gi_frame + code = c.gi_code + tag = 'generator' + + add_line( + f' | File {f.f_code.co_filename!r},' + f' line {f.f_lineno}, in' + f' {tag} {code.co_qualname}()' + ) + + if st.awaited_by: + add_line( + f' + Awaited by:' + ) + for fut in st.awaited_by: + render_level(fut, buf, level + 1) + + graph = capture_call_graph(future, depth=depth + 1, limit=limit) + if graph is None: + return "" + + buf: list[str] = [] + try: + render_level(graph, buf, 0) + finally: + # 'graph' has references to frames so we should + # make sure it's GC'ed as soon as we don't need it. 
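A short usage sketch for the call-graph API defined above (coroutine and task names are invented; it assumes the `awaited_by` bookkeeping added to `tasks.py`/`taskgroups.py` elsewhere in this patch, so output details may vary):

```python
import asyncio

async def leaf():
    # Printing from inside the running task should show this coroutine's
    # call stack and, under "Awaited by:", the TaskGroup's parent task.
    asyncio.print_call_graph()
    await asyncio.sleep(0)

async def main():
    async with asyncio.TaskGroup() as tg:
        tg.create_task(leaf(), name="leaf-task")

asyncio.run(main())
```

`capture_call_graph()` returns the same information as a `FutureCallGraph` tree for programmatic inspection, and `format_call_graph()` renders it to a string instead of printing.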
+ del graph + return '\n'.join(buf) + +def print_call_graph( + future: futures.Future | None = None, + /, + *, + file: TextIO | None = None, + depth: int = 1, + limit: int | None = None, +) -> None: + """Print the async call graph for the current task or the provided Future.""" + print(format_call_graph(future, depth=depth, limit=limit), file=file) diff --git a/Lib/asyncio/staggered.py b/Lib/asyncio/staggered.py index 0f4df8855a80b9..2ad65d8648e6c5 100644 --- a/Lib/asyncio/staggered.py +++ b/Lib/asyncio/staggered.py @@ -8,6 +8,7 @@ from . import exceptions as exceptions_mod from . import locks from . import tasks +from . import futures async def staggered_race(coro_fns, delay, *, loop=None): @@ -63,11 +64,32 @@ async def staggered_race(coro_fns, delay, *, loop=None): """ # TODO: when we have aiter() and anext(), allow async iterables in coro_fns. loop = loop or events.get_running_loop() + parent_task = tasks.current_task(loop) enum_coro_fns = enumerate(coro_fns) winner_result = None winner_index = None + unhandled_exceptions = [] exceptions = [] - running_tasks = [] + running_tasks = set() + on_completed_fut = None + + def task_done(task): + running_tasks.discard(task) + futures.future_discard_from_awaited_by(task, parent_task) + if ( + on_completed_fut is not None + and not on_completed_fut.done() + and not running_tasks + ): + on_completed_fut.set_result(None) + + if task.cancelled(): + return + + exc = task.exception() + if exc is None: + return + unhandled_exceptions.append(exc) async def run_one_coro(ok_to_start, previous_failed) -> None: # in eager tasks this waits for the calling task to append this task @@ -91,11 +113,12 @@ async def run_one_coro(ok_to_start, previous_failed) -> None: this_failed = locks.Event() next_ok_to_start = locks.Event() next_task = loop.create_task(run_one_coro(next_ok_to_start, this_failed)) - running_tasks.append(next_task) + futures.future_add_to_awaited_by(next_task, parent_task) + running_tasks.add(next_task) + next_task.add_done_callback(task_done) # next_task has been appended to running_tasks so next_task is ok to # start. next_ok_to_start.set() - assert len(running_tasks) == this_index + 2 # Prepare place to put this coroutine's exceptions if not won exceptions.append(None) assert len(exceptions) == this_index + 1 @@ -120,31 +143,37 @@ async def run_one_coro(ok_to_start, previous_failed) -> None: # up as done() == True, cancelled() == False, exception() == # asyncio.CancelledError. This behavior is specified in # https://bugs.python.org/issue30048 - for i, t in enumerate(running_tasks): - if i != this_index: + current_task = tasks.current_task(loop) + for t in running_tasks: + if t is not current_task: t.cancel() - ok_to_start = locks.Event() - first_task = loop.create_task(run_one_coro(ok_to_start, None)) - running_tasks.append(first_task) - # first_task has been appended to running_tasks so first_task is ok to start. - ok_to_start.set() + propagate_cancellation_error = None try: - # Wait for a growing list of tasks to all finish: poor man's version of - # curio's TaskGroup or trio's nursery - done_count = 0 - while done_count != len(running_tasks): - done, _ = await tasks.wait(running_tasks) - done_count = len(done) + ok_to_start = locks.Event() + first_task = loop.create_task(run_one_coro(ok_to_start, None)) + futures.future_add_to_awaited_by(first_task, parent_task) + running_tasks.add(first_task) + first_task.add_done_callback(task_done) + # first_task has been appended to running_tasks so first_task is ok to start. 
+ ok_to_start.set() + propagate_cancellation_error = None + # Make sure no tasks are left running if we leave this function + while running_tasks: + on_completed_fut = loop.create_future() + try: + await on_completed_fut + except exceptions_mod.CancelledError as ex: + propagate_cancellation_error = ex + for task in running_tasks: + task.cancel(*ex.args) + on_completed_fut = None + if __debug__ and unhandled_exceptions: # If run_one_coro raises an unhandled exception, it's probably a # programming error, and I want to see it. - if __debug__: - for d in done: - if d.done() and not d.cancelled() and d.exception(): - raise d.exception() + raise ExceptionGroup("staggered race failed", unhandled_exceptions) + if propagate_cancellation_error is not None: + raise propagate_cancellation_error return winner_result, winner_index, exceptions finally: - del exceptions - # Make sure no tasks are left running if we leave this function - for t in running_tasks: - t.cancel() + del exceptions, propagate_cancellation_error, unhandled_exceptions, parent_task diff --git a/Lib/asyncio/taskgroups.py b/Lib/asyncio/taskgroups.py index 8af199d6dcc41a..1633478d1c87c2 100644 --- a/Lib/asyncio/taskgroups.py +++ b/Lib/asyncio/taskgroups.py @@ -6,6 +6,7 @@ from . import events from . import exceptions +from . import futures from . import tasks @@ -197,14 +198,14 @@ def create_task(self, coro, *, name=None, context=None): else: task = self._loop.create_task(coro, name=name, context=context) - # optimization: Immediately call the done callback if the task is + futures.future_add_to_awaited_by(task, self._parent_task) + + # Always schedule the done callback even if the task is # already done (e.g. if the coro was able to complete eagerly), - # and skip scheduling a done callback - if task.done(): - self._on_task_done(task) - else: - self._tasks.add(task) - task.add_done_callback(self._on_task_done) + # otherwise if the task completes with an exception then it will cancel + # the current task too early. gh-128550, gh-128588 + self._tasks.add(task) + task.add_done_callback(self._on_task_done) try: return task finally: @@ -230,6 +231,8 @@ def _abort(self): def _on_task_done(self, task): self._tasks.discard(task) + futures.future_discard_from_awaited_by(task, self._parent_task) + if self._on_completed_fut is not None and not self._tasks: if not self._on_completed_fut.done(): self._on_completed_fut.set_result(True) diff --git a/Lib/asyncio/tasks.py b/Lib/asyncio/tasks.py index 2112dd4b99d17f..a25854cc4bd69e 100644 --- a/Lib/asyncio/tasks.py +++ b/Lib/asyncio/tasks.py @@ -322,6 +322,7 @@ def __step_run_and_handle_result(self, exc): self._loop.call_soon( self.__step, new_exc, context=self._context) else: + futures.future_add_to_awaited_by(result, self) result._asyncio_future_blocking = False result.add_done_callback( self.__wakeup, context=self._context) @@ -356,6 +357,7 @@ def __step_run_and_handle_result(self, exc): self = None # Needed to break cycles when an exception occurs. 
def __wakeup(self, future): + futures.future_discard_from_awaited_by(future, self) try: future.result() except BaseException as exc: @@ -502,6 +504,7 @@ async def _wait(fs, timeout, return_when, loop): if timeout is not None: timeout_handle = loop.call_later(timeout, _release_waiter, waiter) counter = len(fs) + cur_task = current_task() def _on_completion(f): nonlocal counter @@ -514,9 +517,11 @@ def _on_completion(f): timeout_handle.cancel() if not waiter.done(): waiter.set_result(None) + futures.future_discard_from_awaited_by(f, cur_task) for f in fs: f.add_done_callback(_on_completion) + futures.future_add_to_awaited_by(f, cur_task) try: await waiter @@ -802,10 +807,19 @@ def gather(*coros_or_futures, return_exceptions=False): outer.set_result([]) return outer - def _done_callback(fut): + loop = events._get_running_loop() + if loop is not None: + cur_task = current_task(loop) + else: + cur_task = None + + def _done_callback(fut, cur_task=cur_task): nonlocal nfinished nfinished += 1 + if cur_task is not None: + futures.future_discard_from_awaited_by(fut, cur_task) + if outer is None or outer.done(): if not fut.cancelled(): # Mark exception retrieved. @@ -862,7 +876,6 @@ def _done_callback(fut): nfuts = 0 nfinished = 0 done_futs = [] - loop = None outer = None # bpo-46672 for arg in coros_or_futures: if arg not in arg_to_fut: @@ -875,12 +888,13 @@ def _done_callback(fut): # can't control it, disable the "destroy pending task" # warning. fut._log_destroy_pending = False - nfuts += 1 arg_to_fut[arg] = fut if fut.done(): done_futs.append(fut) else: + if cur_task is not None: + futures.future_add_to_awaited_by(fut, cur_task) fut.add_done_callback(_done_callback) else: @@ -940,7 +954,15 @@ def shield(arg): loop = futures._get_loop(inner) outer = loop.create_future() - def _inner_done_callback(inner): + if loop is not None and (cur_task := current_task(loop)) is not None: + futures.future_add_to_awaited_by(inner, cur_task) + else: + cur_task = None + + def _inner_done_callback(inner, cur_task=cur_task): + if cur_task is not None: + futures.future_discard_from_awaited_by(inner, cur_task) + if outer.cancelled(): if not inner.cancelled(): # Mark inner's result as retrieved. diff --git a/Lib/bdb.py b/Lib/bdb.py index 81bba8a130f97c..a741628e32a981 100644 --- a/Lib/bdb.py +++ b/Lib/bdb.py @@ -4,6 +4,7 @@ import sys import os import weakref +from contextlib import contextmanager from inspect import CO_GENERATOR, CO_COROUTINE, CO_ASYNC_GENERATOR __all__ = ["BdbQuit", "Bdb", "Breakpoint"] @@ -65,6 +66,12 @@ def reset(self): self.botframe = None self._set_stopinfo(None, None) + @contextmanager + def set_enterframe(self, frame): + self.enterframe = frame + yield + self.enterframe = None + def trace_dispatch(self, frame, event, arg): """Dispatch a trace function for debugged frames based on the event. @@ -90,28 +97,27 @@ def trace_dispatch(self, frame, event, arg): The arg parameter depends on the previous event. 
""" - self.enterframe = frame - - if self.quitting: - return # None - if event == 'line': - return self.dispatch_line(frame) - if event == 'call': - return self.dispatch_call(frame, arg) - if event == 'return': - return self.dispatch_return(frame, arg) - if event == 'exception': - return self.dispatch_exception(frame, arg) - if event == 'c_call': - return self.trace_dispatch - if event == 'c_exception': - return self.trace_dispatch - if event == 'c_return': + with self.set_enterframe(frame): + if self.quitting: + return # None + if event == 'line': + return self.dispatch_line(frame) + if event == 'call': + return self.dispatch_call(frame, arg) + if event == 'return': + return self.dispatch_return(frame, arg) + if event == 'exception': + return self.dispatch_exception(frame, arg) + if event == 'c_call': + return self.trace_dispatch + if event == 'c_exception': + return self.trace_dispatch + if event == 'c_return': + return self.trace_dispatch + if event == 'opcode': + return self.dispatch_opcode(frame, arg) + print('bdb.Bdb.dispatch: unknown debugging event:', repr(event)) return self.trace_dispatch - if event == 'opcode': - return self.dispatch_opcode(frame, arg) - print('bdb.Bdb.dispatch: unknown debugging event:', repr(event)) - return self.trace_dispatch def dispatch_line(self, frame): """Invoke user function and return trace function for line event. @@ -395,15 +401,15 @@ def set_trace(self, frame=None): if frame is None: frame = sys._getframe().f_back self.reset() - self.enterframe = frame - while frame: - frame.f_trace = self.trace_dispatch - self.botframe = frame - self.frame_trace_lines_opcodes[frame] = (frame.f_trace_lines, frame.f_trace_opcodes) - # We need f_trace_lines == True for the debugger to work - frame.f_trace_lines = True - frame = frame.f_back - self.set_stepinstr() + with self.set_enterframe(frame): + while frame: + frame.f_trace = self.trace_dispatch + self.botframe = frame + self.frame_trace_lines_opcodes[frame] = (frame.f_trace_lines, frame.f_trace_opcodes) + # We need f_trace_lines == True for the debugger to work + frame.f_trace_lines = True + frame = frame.f_back + self.set_stepinstr() sys.settrace(self.trace_dispatch) def set_continue(self): diff --git a/Lib/configparser.py b/Lib/configparser.py index 420dce77c234e1..9dc4fa515cfcbe 100644 --- a/Lib/configparser.py +++ b/Lib/configparser.py @@ -1105,11 +1105,7 @@ def _handle_continuation_line(self, st, line, fpname): def _handle_rest(self, st, line, fpname): # a section header or option header? if self._allow_unnamed_section and st.cursect is None: - st.sectname = UNNAMED_SECTION - st.cursect = self._dict() - self._sections[st.sectname] = st.cursect - self._proxies[st.sectname] = SectionProxy(self, st.sectname) - st.elements_added.add(st.sectname) + self._handle_header(st, UNNAMED_SECTION, fpname) st.indent_level = st.cur_indent_level # is it a section header? 
@@ -1118,10 +1114,10 @@ def _handle_rest(self, st, line, fpname):
 if not mo and st.cursect is None:
 raise MissingSectionHeaderError(fpname, st.lineno, line)
- self._handle_header(st, mo, fpname) if mo else self._handle_option(st, line, fpname)
+ self._handle_header(st, mo.group('header'), fpname) if mo else self._handle_option(st, line, fpname)
- def _handle_header(self, st, mo, fpname):
- st.sectname = mo.group('header')
+ def _handle_header(self, st, sectname, fpname):
+ st.sectname = sectname
 if st.sectname in self._sections:
 if self._strict and st.sectname in st.elements_added:
 raise DuplicateSectionError(st.sectname, fpname,
diff --git a/Lib/csv.py b/Lib/csv.py
index cd202659873811..0a627ba7a512fa 100644
--- a/Lib/csv.py
+++ b/Lib/csv.py
@@ -63,7 +63,6 @@ class excel:
 written as two quotes
 """
-import re
 import types
 from _csv import Error, writer, reader, register_dialect, \
 unregister_dialect, get_dialect, list_dialects, \
@@ -281,6 +280,7 @@ def _guess_quote_and_delimiter(self, data, delimiters):
 If there is no quotechar the delimiter can't be determined
 this way.
 """
+ import re
 matches = []
 for restr in (r'(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
diff --git a/Lib/doctest.py b/Lib/doctest.py
index bb281fc483c41c..e02e73ed722f7e 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -1558,7 +1558,7 @@ def out(s):
 save_displayhook = sys.displayhook
 sys.displayhook = sys.__displayhook__
 saved_can_colorize = _colorize.can_colorize
- _colorize.can_colorize = lambda: False
+ _colorize.can_colorize = lambda *args, **kwargs: False
 color_variables = {"PYTHON_COLORS": None, "FORCE_COLOR": None}
 for key in color_variables:
 color_variables[key] = os.environ.pop(key, None)
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index ec2215a5e5f33c..3d845c09d415f6 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -95,8 +95,16 @@
 NLSET = {'\n', '\r'}
 SPECIALSNL = SPECIALS | NLSET
+
+def make_quoted_pairs(value):
+    """Escape dquote and backslash for use within a quoted-string."""
+    return str(value).replace('\\', '\\\\').replace('"', '\\"')
+
+
 def quote_string(value):
-    return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
+    escaped = make_quoted_pairs(value)
+    return f'"{escaped}"'
+
 # Match a RFC 2047 word, looks like =?utf-8?q?someword?=
 rfc2047_matcher = re.compile(r'''
@@ -2905,6 +2913,15 @@ def _refold_parse_tree(parse_tree, *, policy):
 if not hasattr(part, 'encode'):
 # It's not a terminal, try folding the subparts.
 newparts = list(part)
+ if part.token_type == 'bare-quoted-string':
+ # To fold a quoted string we need to create a list of terminal
+ # tokens that will render the leading and trailing quotes
+ # and use quoted pairs in the value as appropriate.
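The configparser refactor above routes the unnamed-section case through _handle_header() instead of duplicating its bookkeeping. The public behaviour it supports looks like this (hedged sketch; requires a Python version that has allow_unnamed_section, 3.13+):

    import configparser

    cp = configparser.ConfigParser(allow_unnamed_section=True)
    cp.read_string("top_level_key = 1\n\n[named]\nkey = 2\n")
    print(cp.get(configparser.UNNAMED_SECTION, "top_level_key"))  # -> 1
    print(cp.get("named", "key"))                                 # -> 2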
+ newparts = (
+ [ValueTerminal('"', 'ptext')] +
+ [ValueTerminal(make_quoted_pairs(p), 'ptext')
+ for p in newparts] +
+ [ValueTerminal('"', 'ptext')])
 if not part.as_ew_allowed:
 wrap_as_ew_blocked += 1
 newparts.append(end_ew_not_allowed)
diff --git a/Lib/gettext.py b/Lib/gettext.py
index a0d81cf846a05c..4c1f9427459b14 100644
--- a/Lib/gettext.py
+++ b/Lib/gettext.py
@@ -48,7 +48,6 @@
 import operator
 import os
-import re
 import sys
@@ -70,22 +69,26 @@
 # https://www.gnu.org/software/gettext/manual/gettext.html#Plural-forms
 # http://git.savannah.gnu.org/cgit/gettext.git/tree/gettext-runtime/intl/plural.y
-_token_pattern = re.compile(r"""
- (?P<WHITESPACES>[ \t]+) | # spaces and horizontal tabs
- (?P<NUMBER>[0-9]+\b) | # decimal integer
- (?P<NAME>n\b) | # only n is allowed
- (?P<PARENTHESIS>[()]) |
- (?P<OPERATOR>[-*/%+?:]|[><!]=?|==|&&|\|\|) | # !, *, /, %, +, -, <, >,
- # <=, >=, ==, !=, &&, ||,
- # ? :
- # unary and bitwise ops
- # not allowed
- (?P<INVALID>\w+|.) # invalid token
- """, re.VERBOSE|re.DOTALL)
-
+_token_pattern = None
 def _tokenize(plural):
- for mo in re.finditer(_token_pattern, plural):
+ global _token_pattern
+ if _token_pattern is None:
+ import re
+ _token_pattern = re.compile(r"""
+ (?P<WHITESPACES>[ \t]+) | # spaces and horizontal tabs
+ (?P<NUMBER>[0-9]+\b) | # decimal integer
+ (?P<NAME>n\b) | # only n is allowed
+ (?P<PARENTHESIS>[()]) |
+ (?P<OPERATOR>[-*/%+?:]|[><!]=?|==|&&|\|\|) | # !, *, /, %, +, -, <, >,
+ # <=, >=, ==, !=, &&, ||,
+ # ? :
+ # unary and bitwise ops
+ # not allowed
+ (?P<INVALID>\w+|.) # invalid token
+ """, re.VERBOSE|re.DOTALL)
+
+ for mo in _token_pattern.finditer(plural):
 kind = mo.lastgroup
 if kind == 'WHITESPACES':
 continue
diff --git a/Lib/http/cookies.py b/Lib/http/cookies.py
index 23d5461f86fc23..694b1b09a0567c 100644
--- a/Lib/http/cookies.py
+++ b/Lib/http/cookies.py
@@ -264,11 +264,12 @@ class Morsel(dict):
 "httponly" : "HttpOnly",
 "version" : "Version",
 "samesite" : "SameSite",
+ "partitioned": "Partitioned",
 }
 _reserved_defaults = dict.fromkeys(_reserved, "")
- _flags = {'secure', 'httponly'}
+ _flags = {'secure', 'httponly', 'partitioned'}
 def __init__(self):
 # Set defaults
diff --git a/Lib/idlelib/idle_test/test_configdialog.py b/Lib/idlelib/idle_test/test_configdialog.py
index 5099d093382445..2773ed7ce614b5 100644
--- a/Lib/idlelib/idle_test/test_configdialog.py
+++ b/Lib/idlelib/idle_test/test_configdialog.py
@@ -98,8 +98,8 @@ def test_click_help(self):
 dialog.buttons['Help'].invoke()
 title, contents = view.kwds['title'], view.kwds['contents']
 self.assertEqual(title, 'Help for IDLE preferences')
- self.assertTrue(contents.startswith('When you click') and
- contents.endswith('a different name.\n'))
+ self.assertStartsWith(contents, 'When you click')
+ self.assertEndsWith(contents,'a different name.\n')
 class FontPageTest(unittest.TestCase):
diff --git a/Lib/idlelib/idle_test/test_debugger.py b/Lib/idlelib/idle_test/test_debugger.py
index d1c9638dd5d711..9ca3b332648b31 100644
--- a/Lib/idlelib/idle_test/test_debugger.py
+++ b/Lib/idlelib/idle_test/test_debugger.py
@@ -256,7 +256,7 @@ def test_init(self):
 flist = None
 master_window = self.root
 sv = debugger.StackViewer(master_window, flist, gui)
- self.assertTrue(hasattr(sv, 'stack'))
+ self.assertHasAttr(sv, 'stack')
 def test_load_stack(self):
 # Test the .load_stack() method against a fixed test stack.
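The http.cookies hunk above registers Partitioned as a flag attribute alongside Secure and HttpOnly. A small usage sketch (requires a build that includes this change; the cookie name and value are made up):

    from http.cookies import SimpleCookie

    c = SimpleCookie()
    c["session"] = "abc123"
    c["session"]["secure"] = True
    c["session"]["partitioned"] = True
    print(c["session"].OutputString())
    # expected along the lines of: session=abc123; Partitioned; Secure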
diff --git a/Lib/idlelib/idle_test/test_grep.py b/Lib/idlelib/idle_test/test_grep.py
index a0b5b69171879c..d67dba76911fcf 100644
--- a/Lib/idlelib/idle_test/test_grep.py
+++ b/Lib/idlelib/idle_test/test_grep.py
@@ -143,7 +143,7 @@ def test_found(self):
 self.assertIn(pat, lines[0])
 self.assertIn('py: 1:', lines[1]) # line number 1
 self.assertIn('2', lines[3]) # hits found 2
- self.assertTrue(lines[4].startswith('(Hint:'))
+ self.assertStartsWith(lines[4], '(Hint:')
 class Default_commandTest(unittest.TestCase):
diff --git a/Lib/idlelib/idle_test/test_multicall.py b/Lib/idlelib/idle_test/test_multicall.py
index b3a3bfb88f9c31..67f28db6b0875c 100644
--- a/Lib/idlelib/idle_test/test_multicall.py
+++ b/Lib/idlelib/idle_test/test_multicall.py
@@ -27,7 +27,7 @@ def tearDownClass(cls):
 def test_creator(self):
 mc = self.mc
 self.assertIs(multicall._multicall_dict[Text], mc)
- self.assertTrue(issubclass(mc, Text))
+ self.assertIsSubclass(mc, Text)
 mc2 = multicall.MultiCallCreator(Text)
 self.assertIs(mc, mc2)
diff --git a/Lib/idlelib/idle_test/test_query.py b/Lib/idlelib/idle_test/test_query.py
index bb12b2b08652d5..a6ef858a8c954a 100644
--- a/Lib/idlelib/idle_test/test_query.py
+++ b/Lib/idlelib/idle_test/test_query.py
@@ -134,10 +134,10 @@ def test_c_source_name(self):
 def test_good_module_name(self):
 dialog = self.Dummy_ModuleName('idlelib')
- self.assertTrue(dialog.entry_ok().endswith('__init__.py'))
+ self.assertEndsWith(dialog.entry_ok(), '__init__.py')
 self.assertEqual(dialog.entry_error['text'], '')
 dialog = self.Dummy_ModuleName('idlelib.idle')
- self.assertTrue(dialog.entry_ok().endswith('idle.py'))
+ self.assertEndsWith(dialog.entry_ok(), 'idle.py')
 self.assertEqual(dialog.entry_error['text'], '')
@@ -389,7 +389,7 @@ def test_click_module_name(self):
 self.assertEqual(dialog.text0, 'idlelib')
 self.assertEqual(dialog.entry.get(), 'idlelib')
 dialog.button_ok.invoke()
- self.assertTrue(dialog.result.endswith('__init__.py'))
+ self.assertEndsWith(dialog.result, '__init__.py')
 root.destroy()
diff --git a/Lib/idlelib/idle_test/test_redirector.py b/Lib/idlelib/idle_test/test_redirector.py
index a97b3002afcf12..bd486d7da66010 100644
--- a/Lib/idlelib/idle_test/test_redirector.py
+++ b/Lib/idlelib/idle_test/test_redirector.py
@@ -34,7 +34,7 @@ def test_close(self):
 redir.register('insert', Func)
 redir.close()
 self.assertEqual(redir._operations, {})
- self.assertFalse(hasattr(self.text, 'widget'))
+ self.assertNotHasAttr(self.text, 'widget')
 class WidgetRedirectorTest(unittest.TestCase):
diff --git a/Lib/idlelib/idle_test/test_sidebar.py b/Lib/idlelib/idle_test/test_sidebar.py
index 605e7a892570d7..4157a4b4dcdd2a 100644
--- a/Lib/idlelib/idle_test/test_sidebar.py
+++ b/Lib/idlelib/idle_test/test_sidebar.py
@@ -725,7 +725,7 @@ def test_copy(self):
 text.tag_add('sel', f'{first_line}.0', 'end-1c')
 selected_text = text.get('sel.first', 'sel.last')
- self.assertTrue(selected_text.startswith('if True:\n'))
+ self.assertStartsWith(selected_text, 'if True:\n')
 self.assertIn('\n1\n', selected_text)
 text.event_generate('<<copy>>')
@@ -749,7 +749,7 @@ def test_copy_with_prompts(self):
 text.tag_add('sel', f'{first_line}.3', 'end-1c')
 selected_text = text.get('sel.first', 'sel.last')
- self.assertTrue(selected_text.startswith('True:\n'))
+ self.assertStartsWith(selected_text, 'True:\n')
 selected_lines_text = text.get('sel.first linestart', 'sel.last')
 selected_lines = selected_lines_text.split('\n')
diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index
697f7c55218a8f..8bcd741c446bd2 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -1244,7 +1244,7 @@ def _path_importer_cache(cls, path): if path == '': try: path = _os.getcwd() - except FileNotFoundError: + except (FileNotFoundError, PermissionError): # Don't cache the failure as the cwd can easily change to # a valid directory later on. return None diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index bb2837d38d83f1..29f01f77eff4a0 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -74,7 +74,7 @@ def __init__(self): import warnings warnings.warn('importlib.abc.ResourceLoader is deprecated in ' 'favour of supporting resource loading through ' - 'importlib.resources.abc.ResourceReader.', + 'importlib.resources.abc.TraversableResources.', DeprecationWarning, stacklevel=2) super().__init__() diff --git a/Lib/importlib/resources/__init__.py b/Lib/importlib/resources/__init__.py index ec4441c9116118..723c9f9eb33ce1 100644 --- a/Lib/importlib/resources/__init__.py +++ b/Lib/importlib/resources/__init__.py @@ -1,4 +1,11 @@ -"""Read resources contained within a package.""" +""" +Read resources contained within a package. + +This codebase is shared between importlib.resources in the stdlib +and importlib_resources in PyPI. See +https://github.com/python/importlib_metadata/wiki/Development-Methodology +for more detail. +""" from ._common import ( as_file, diff --git a/Lib/importlib/resources/_common.py b/Lib/importlib/resources/_common.py index c2c92254370f71..4e9014c45a056e 100644 --- a/Lib/importlib/resources/_common.py +++ b/Lib/importlib/resources/_common.py @@ -66,10 +66,10 @@ def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]: # zipimport.zipimporter does not support weak references, resulting in a # TypeError. That seems terrible. spec = package.__spec__ - reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore + reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore[union-attr] if reader is None: return None - return reader(spec.name) # type: ignore + return reader(spec.name) # type: ignore[union-attr] @functools.singledispatch diff --git a/Lib/importlib/resources/readers.py b/Lib/importlib/resources/readers.py index ccc5abbeb4e56e..70fc7e2b9c0145 100644 --- a/Lib/importlib/resources/readers.py +++ b/Lib/importlib/resources/readers.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import collections import contextlib import itertools @@ -6,6 +8,7 @@ import re import warnings import zipfile +from collections.abc import Iterator from . import abc @@ -135,27 +138,31 @@ class NamespaceReader(abc.TraversableResources): def __init__(self, namespace_path): if 'NamespacePath' not in str(namespace_path): raise ValueError('Invalid path') - self.path = MultiplexedPath(*map(self._resolve, namespace_path)) + self.path = MultiplexedPath(*filter(bool, map(self._resolve, namespace_path))) @classmethod - def _resolve(cls, path_str) -> abc.Traversable: + def _resolve(cls, path_str) -> abc.Traversable | None: r""" Given an item from a namespace path, resolve it to a Traversable. path_str might be a directory on the filesystem or a path to a zipfile plus the path within the zipfile, e.g. ``/foo/bar`` or ``/foo/baz.zip/inner_dir`` or ``foo\baz.zip\inner_dir\sub``. + + path_str might also be a sentinel used by editable packages to + trigger other behaviors (see python/importlib_resources#311). + In that case, return None. 
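The importlib.resources hunk above narrows two blanket "# type: ignore" comments to "# type: ignore[union-attr]". The difference, in a hedged standalone sketch (class and function names are illustrative): a bare ignore hides every checker diagnostic on the line, while the bracketed form silences only the named mypy error code.

    from typing import Optional

    class Loader:
        def get_resource_reader(self, fullname: str) -> object: ...

    def reader_for(loader: Optional[Loader], fullname: str) -> object:
        # mypy would report: Item "None" of "Optional[Loader]" has no
        # attribute "get_resource_reader"  [union-attr]
        return loader.get_resource_reader(fullname)  # type: ignore[union-attr]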
""" - (dir,) = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir()) - return dir + dirs = (cand for cand in cls._candidate_paths(path_str) if cand.is_dir()) + return next(dirs, None) @classmethod - def _candidate_paths(cls, path_str): + def _candidate_paths(cls, path_str: str) -> Iterator[abc.Traversable]: yield pathlib.Path(path_str) yield from cls._resolve_zip_path(path_str) @staticmethod - def _resolve_zip_path(path_str): + def _resolve_zip_path(path_str: str): for match in reversed(list(re.finditer(r'[\\/]', path_str))): with contextlib.suppress( FileNotFoundError, diff --git a/Lib/importlib/resources/simple.py b/Lib/importlib/resources/simple.py index 96f117fec62c10..2e75299b13aabf 100644 --- a/Lib/importlib/resources/simple.py +++ b/Lib/importlib/resources/simple.py @@ -77,7 +77,7 @@ class ResourceHandle(Traversable): def __init__(self, parent: ResourceContainer, name: str): self.parent = parent - self.name = name # type: ignore + self.name = name # type: ignore[misc] def is_file(self): return True diff --git a/Lib/opcode.py b/Lib/opcode.py index 974f4d35e2a524..4ee0d64026bd0a 100644 --- a/Lib/opcode.py +++ b/Lib/opcode.py @@ -17,8 +17,9 @@ EXTENDED_ARG = opmap['EXTENDED_ARG'] opname = ['<%r>' % (op,) for op in range(max(opmap.values()) + 1)] -for op, i in opmap.items(): - opname[i] = op +for m in (opmap, _specialized_opmap): + for op, i in m.items(): + opname[i] = op cmp_op = ('<', '<=', '==', '!=', '>', '>=') @@ -51,6 +52,7 @@ }, "BINARY_OP": { "counter": 1, + "descr": 4, }, "UNPACK_SEQUENCE": { "counter": 1, diff --git a/Lib/optparse.py b/Lib/optparse.py index cbe3451ced8bc3..38cf16d21efffa 100644 --- a/Lib/optparse.py +++ b/Lib/optparse.py @@ -74,7 +74,6 @@ """ import sys, os -import textwrap from gettext import gettext as _, ngettext @@ -252,6 +251,7 @@ def _format_text(self, text): Format a paragraph of free-form text for inclusion in the help output at the current indentation level. """ + import textwrap text_width = max(self.width - self.current_indent, 11) indent = " "*self.current_indent return textwrap.fill(text, @@ -308,6 +308,7 @@ def format_option(self, option): indent_first = 0 result.append(opts) if option.help: + import textwrap help_text = self.expand_default(option) help_lines = textwrap.wrap(help_text, self.help_width) result.append("%*s%s\n" % (indent_first, "", help_lines[0])) diff --git a/Lib/pathlib/_abc.py b/Lib/pathlib/_abc.py index 38bc660e0aeb30..d55cc6f243cf2b 100644 --- a/Lib/pathlib/_abc.py +++ b/Lib/pathlib/_abc.py @@ -12,6 +12,7 @@ """ import functools +import io import operator import posixpath from errno import EINVAL @@ -41,6 +42,40 @@ def _explode_path(path): return path, names +def magic_open(path, mode='r', buffering=-1, encoding=None, errors=None, + newline=None): + """ + Open the file pointed to by this path and return a file object, as + the built-in open() function does. 
+ """ + try: + return io.open(path, mode, buffering, encoding, errors, newline) + except TypeError: + pass + cls = type(path) + text = 'b' not in mode + mode = ''.join(sorted(c for c in mode if c not in 'bt')) + if text: + try: + attr = getattr(cls, f'__open_{mode}__') + except AttributeError: + pass + else: + return attr(path, buffering, encoding, errors, newline) + + try: + attr = getattr(cls, f'__open_{mode}b__') + except AttributeError: + pass + else: + stream = attr(path, buffering) + if text: + stream = io.TextIOWrapper(stream, encoding, errors, newline) + return stream + + raise TypeError(f"{cls.__name__} can't be opened with mode {mode!r}") + + class PathGlobber(_GlobberBase): """ Class providing shell-style globbing for path objects. @@ -58,35 +93,15 @@ def concat_path(path, text): class CopyReader: """ - Class that implements copying between path objects. An instance of this - class is available from the ReadablePath.copy property; it's made callable - so that ReadablePath.copy() can be treated as a method. - - The target path's CopyWriter drives the process from its _create() method. - Files and directories are exchanged by calling methods on the source and - target paths, and metadata is exchanged by calling - source.copy._read_metadata() and target.copy._write_metadata(). + Class that implements the "read" part of copying between path objects. + An instance of this class is available from the ReadablePath._copy_reader + property. """ __slots__ = ('_path',) def __init__(self, path): self._path = path - def __call__(self, target, follow_symlinks=True, dirs_exist_ok=False, - preserve_metadata=False): - """ - Recursively copy this file or directory tree to the given destination. - """ - if not isinstance(target, ReadablePath): - target = self._path.with_segments(target) - - # Delegate to the target path's CopyWriter object. - try: - create = target.copy._create - except AttributeError: - raise TypeError(f"Target is not writable: {target}") from None - return create(self._path, follow_symlinks, dirs_exist_ok, preserve_metadata) - _readable_metakeys = frozenset() def _read_metadata(self, metakeys, *, follow_symlinks=True): @@ -96,8 +111,16 @@ def _read_metadata(self, metakeys, *, follow_symlinks=True): raise NotImplementedError -class CopyWriter(CopyReader): - __slots__ = () +class CopyWriter: + """ + Class that implements the "write" part of copying between path objects. An + instance of this class is available from the WritablePath._copy_writer + property. 
+ """ + __slots__ = ('_path',) + + def __init__(self, path): + self._path = path _writable_metakeys = frozenset() @@ -110,7 +133,7 @@ def _write_metadata(self, metadata, *, follow_symlinks=True): def _create(self, source, follow_symlinks, dirs_exist_ok, preserve_metadata): self._ensure_distinct_path(source) if preserve_metadata: - metakeys = self._writable_metakeys & source.copy._readable_metakeys + metakeys = self._writable_metakeys & source._copy_reader._readable_metakeys else: metakeys = None if not follow_symlinks and source.is_symlink(): @@ -128,22 +151,22 @@ def _create_dir(self, source, metakeys, follow_symlinks, dirs_exist_ok): for src in children: dst = self._path.joinpath(src.name) if not follow_symlinks and src.is_symlink(): - dst.copy._create_symlink(src, metakeys) + dst._copy_writer._create_symlink(src, metakeys) elif src.is_dir(): - dst.copy._create_dir(src, metakeys, follow_symlinks, dirs_exist_ok) + dst._copy_writer._create_dir(src, metakeys, follow_symlinks, dirs_exist_ok) else: - dst.copy._create_file(src, metakeys) + dst._copy_writer._create_file(src, metakeys) if metakeys: - metadata = source.copy._read_metadata(metakeys) + metadata = source._copy_reader._read_metadata(metakeys) if metadata: self._write_metadata(metadata) def _create_file(self, source, metakeys): """Copy the given file to our path.""" self._ensure_different_file(source) - with source.open('rb') as source_f: + with magic_open(source, 'rb') as source_f: try: - with self._path.open('wb') as target_f: + with magic_open(self._path, 'wb') as target_f: copyfileobj(source_f, target_f) except IsADirectoryError as e: if not self._path.exists(): @@ -152,7 +175,7 @@ def _create_file(self, source, metakeys): f'Directory does not exist: {self._path}') from e raise if metakeys: - metadata = source.copy._read_metadata(metakeys) + metadata = source._copy_reader._read_metadata(metakeys) if metadata: self._write_metadata(metadata) @@ -160,7 +183,7 @@ def _create_symlink(self, source, metakeys): """Copy the given symbolic link to our path.""" self._path.symlink_to(source.readlink()) if metakeys: - metadata = source.copy._read_metadata(metakeys, follow_symlinks=False) + metadata = source._copy_reader._read_metadata(metakeys, follow_symlinks=False) if metadata: self._write_metadata(metadata, follow_symlinks=False) @@ -420,11 +443,10 @@ def is_symlink(self): """ raise NotImplementedError - def open(self, mode='r', buffering=-1, encoding=None, - errors=None, newline=None): + def __open_rb__(self, buffering=-1): """ - Open the file pointed to by this path and return a file object, as - the built-in open() function does. + Open the file pointed to by this path for reading in binary mode and + return a file object, like open(mode='rb'). """ raise NotImplementedError @@ -432,14 +454,14 @@ def read_bytes(self): """ Open the file in bytes mode, read it, and close the file. """ - with self.open(mode='rb', buffering=0) as f: + with magic_open(self, mode='rb', buffering=0) as f: return f.read() def read_text(self, encoding=None, errors=None, newline=None): """ Open the file in text mode, read it, and close the file. 
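The magic_open() helper introduced above lets a virtual path type participate in reading without implementing a full open(): it falls back to __open_rb__()/__open_wb__() hooks and adds a TextIOWrapper for text modes. A hedged sketch of a class that would satisfy the read side (InMemoryPath is an illustrative name, not part of the patch):

    import io

    class InMemoryPath:
        """Read-only 'path' backed by an in-memory bytes buffer."""

        def __init__(self, data: bytes):
            self._data = data

        def __open_rb__(self, buffering=-1):
            # magic_open(p, 'rb') returns this stream directly;
            # magic_open(p, 'r') wraps it in io.TextIOWrapper.
            return io.BytesIO(self._data)

Under the patched pathlib._abc, magic_open(InMemoryPath(b"hello"), "r").read() would then yield "hello".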
""" - with self.open(mode='r', encoding=encoding, errors=errors, newline=newline) as f: + with magic_open(self, mode='r', encoding=encoding, errors=errors, newline=newline) as f: return f.read() def _scandir(self): @@ -532,7 +554,22 @@ def readlink(self): """ raise NotImplementedError - copy = property(CopyReader, doc=CopyReader.__call__.__doc__) + _copy_reader = property(CopyReader) + + def copy(self, target, follow_symlinks=True, dirs_exist_ok=False, + preserve_metadata=False): + """ + Recursively copy this file or directory tree to the given destination. + """ + if not hasattr(target, '_copy_writer'): + target = self.with_segments(target) + + # Delegate to the target path's CopyWriter object. + try: + create = target._copy_writer._create + except AttributeError: + raise TypeError(f"Target is not writable: {target}") from None + return create(self, follow_symlinks, dirs_exist_ok, preserve_metadata) def copy_into(self, target_dir, *, follow_symlinks=True, dirs_exist_ok=False, preserve_metadata=False): @@ -542,7 +579,7 @@ def copy_into(self, target_dir, *, follow_symlinks=True, name = self.name if not name: raise ValueError(f"{self!r} has an empty name") - elif isinstance(target_dir, ReadablePath): + elif hasattr(target_dir, '_copy_writer'): target = target_dir / name else: target = self.with_segments(target_dir, name) @@ -551,7 +588,7 @@ def copy_into(self, target_dir, *, follow_symlinks=True, preserve_metadata=preserve_metadata) -class WritablePath(ReadablePath): +class WritablePath(JoinablePath): __slots__ = () def symlink_to(self, target, target_is_directory=False): @@ -567,13 +604,20 @@ def mkdir(self, mode=0o777, parents=False, exist_ok=False): """ raise NotImplementedError + def __open_wb__(self, buffering=-1): + """ + Open the file pointed to by this path for writing in binary mode and + return a file object, like open(mode='wb'). + """ + raise NotImplementedError + def write_bytes(self, data): """ Open the file in bytes mode, write to it, and close the file. """ # type-check for the buffer interface before truncating the file view = memoryview(data) - with self.open(mode='wb') as f: + with magic_open(self, mode='wb') as f: return f.write(view) def write_text(self, data, encoding=None, errors=None, newline=None): @@ -583,7 +627,7 @@ def write_text(self, data, encoding=None, errors=None, newline=None): if not isinstance(data, str): raise TypeError('data must be str, not %s' % data.__class__.__name__) - with self.open(mode='w', encoding=encoding, errors=errors, newline=newline) as f: + with magic_open(self, mode='w', encoding=encoding, errors=errors, newline=newline) as f: return f.write(data) - copy = property(CopyWriter, doc=CopyWriter.__call__.__doc__) + _copy_writer = property(CopyWriter) diff --git a/Lib/pathlib/_local.py b/Lib/pathlib/_local.py index d6afb31424265c..2b42f3c22254b8 100644 --- a/Lib/pathlib/_local.py +++ b/Lib/pathlib/_local.py @@ -20,7 +20,7 @@ grp = None from pathlib._os import copyfile -from pathlib._abc import CopyWriter, JoinablePath, WritablePath +from pathlib._abc import CopyReader, CopyWriter, JoinablePath, ReadablePath, WritablePath __all__ = [ @@ -65,9 +65,10 @@ def __repr__(self): return "<{}.parents>".format(type(self._path).__name__) -class _LocalCopyWriter(CopyWriter): - """This object implements the Path.copy callable. Don't try to construct - it yourself.""" +class _LocalCopyReader(CopyReader): + """This object implements the "read" part of copying local paths. Don't + try to construct it yourself. 
+ """ __slots__ = () _readable_metakeys = {'mode', 'times_ns'} @@ -75,7 +76,7 @@ class _LocalCopyWriter(CopyWriter): _readable_metakeys.add('flags') if hasattr(os, 'listxattr'): _readable_metakeys.add('xattrs') - _readable_metakeys = _writable_metakeys = frozenset(_readable_metakeys) + _readable_metakeys = frozenset(_readable_metakeys) def _read_metadata(self, metakeys, *, follow_symlinks=True): metadata = {} @@ -97,6 +98,15 @@ def _read_metadata(self, metakeys, *, follow_symlinks=True): raise return metadata + +class _LocalCopyWriter(CopyWriter): + """This object implements the "write" part of copying local paths. Don't + try to construct it yourself. + """ + __slots__ = () + + _writable_metakeys = _LocalCopyReader._readable_metakeys + def _write_metadata(self, metadata, *, follow_symlinks=True): def _nop(*args, ns=None, follow_symlinks=None): pass @@ -171,7 +181,7 @@ def _create_symlink(self, source, metakeys): """Copy the given symlink to the given target.""" self._path.symlink_to(source.readlink(), source.is_dir()) if metakeys: - metadata = source.copy._read_metadata(metakeys, follow_symlinks=False) + metadata = source._copy_reader._read_metadata(metakeys, follow_symlinks=False) if metadata: self._write_metadata(metadata, follow_symlinks=False) @@ -683,7 +693,7 @@ class PureWindowsPath(PurePath): __slots__ = () -class Path(WritablePath, PurePath): +class Path(WritablePath, ReadablePath, PurePath): """PurePath subclass that can make system calls. Path represents a filesystem path but unlike PurePath, also offers @@ -823,6 +833,13 @@ def open(self, mode='r', buffering=-1, encoding=None, encoding = io.text_encoding(encoding) return io.open(self, mode, buffering, encoding, errors, newline) + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode='rb', buffering=0) as f: + return f.read() + def read_text(self, encoding=None, errors=None, newline=None): """ Open the file in text mode, read it, and close the file. @@ -830,7 +847,17 @@ def read_text(self, encoding=None, errors=None, newline=None): # Call io.text_encoding() here to ensure any warning is raised at an # appropriate stack level. encoding = io.text_encoding(encoding) - return super().read_text(encoding, errors, newline) + with self.open(mode='r', encoding=encoding, errors=errors, newline=newline) as f: + return f.read() + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. + """ + # type-check for the buffer interface before truncating the file + view = memoryview(data) + with self.open(mode='wb') as f: + return f.write(view) def write_text(self, data, encoding=None, errors=None, newline=None): """ @@ -839,7 +866,11 @@ def write_text(self, data, encoding=None, errors=None, newline=None): # Call io.text_encoding() here to ensure any warning is raised at an # appropriate stack level. 
encoding = io.text_encoding(encoding) - return super().write_text(data, encoding, errors, newline) + if not isinstance(data, str): + raise TypeError('data must be str, not %s' % + data.__class__.__name__) + with self.open(mode='w', encoding=encoding, errors=errors, newline=newline) as f: + return f.write(data) _remove_leading_dot = operator.itemgetter(slice(2, None)) _remove_trailing_slash = operator.itemgetter(slice(-1)) @@ -1122,7 +1153,8 @@ def replace(self, target): os.replace(self, target) return self.with_segments(target) - copy = property(_LocalCopyWriter, doc=_LocalCopyWriter.__call__.__doc__) + _copy_reader = property(_LocalCopyReader) + _copy_writer = property(_LocalCopyWriter) def move(self, target): """ @@ -1134,9 +1166,9 @@ def move(self, target): except TypeError: pass else: - if not isinstance(target, WritablePath): + if not hasattr(target, '_copy_writer'): target = self.with_segments(target_str) - target.copy._ensure_different_file(self) + target._copy_writer._ensure_different_file(self) try: os.replace(self, target_str) return target @@ -1155,7 +1187,7 @@ def move_into(self, target_dir): name = self.name if not name: raise ValueError(f"{self!r} has an empty name") - elif isinstance(target_dir, WritablePath): + elif hasattr(target_dir, '_copy_writer'): target = target_dir / name else: target = self.with_segments(target_dir, name) diff --git a/Lib/pdb.py b/Lib/pdb.py index 10d1923cdad2d6..beef74d792250b 100644 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -1725,6 +1725,19 @@ def do_quit(self, arg): Quit from the debugger. The program being executed is aborted. """ + if self.mode == 'inline': + while True: + try: + reply = input('Quitting pdb will kill the process. Quit anyway? [y/n] ') + reply = reply.lower().strip() + except EOFError: + reply = 'y' + self.message('') + if reply == 'y' or reply == '': + sys.exit(0) + elif reply.lower() == 'n': + return + self._user_requested_quit = True self.set_quit() return 1 @@ -1738,9 +1751,7 @@ def do_EOF(self, arg): Handles the receipt of EOF as a command. 
""" self.message('') - self._user_requested_quit = True - self.set_quit() - return 1 + return self.do_quit(arg) def do_args(self, arg): """a(rgs) diff --git a/Lib/pstats.py b/Lib/pstats.py index 46e18fb7592a77..becaf35580eaee 100644 --- a/Lib/pstats.py +++ b/Lib/pstats.py @@ -29,7 +29,6 @@ from enum import StrEnum, _simple_enum from functools import cmp_to_key from dataclasses import dataclass -from typing import Dict __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] @@ -69,7 +68,7 @@ class FunctionProfile: class StatsProfile: '''Class for keeping track of an item in inventory.''' total_tt: float - func_profiles: Dict[str, FunctionProfile] + func_profiles: dict[str, FunctionProfile] class Stats: """This class is used for creating reports from data generated by the diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 9e84292aaf825f..922946e5fa7ddb 100644 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1435,7 +1435,8 @@ def makename(c, m=object.__module__): # List the built-in subclasses, if any: subclasses = sorted( (str(cls.__name__) for cls in type.__subclasses__(object) - if not cls.__name__.startswith("_") and cls.__module__ == "builtins"), + if (not cls.__name__.startswith("_") and + getattr(cls, '__module__', '') == "builtins")), key=str.lower ) no_of_subclasses = len(subclasses) diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 97fed45d5a998c..3e8c7ce321edc2 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,17485 +1,12781 @@ -# -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Tue Jan 14 13:41:56 2025 +# Autogenerated by Sphinx on Tue Jan 21 03:33:33 2025 # as part of the release process. -topics = {'assert': 'The "assert" statement\n' - '**********************\n' - '\n' - 'Assert statements are a convenient way to insert debugging ' - 'assertions\n' - 'into a program:\n' - '\n' - ' assert_stmt ::= "assert" expression ["," expression]\n' - '\n' - 'The simple form, "assert expression", is equivalent to\n' - '\n' - ' if __debug__:\n' - ' if not expression: raise AssertionError\n' - '\n' - 'The extended form, "assert expression1, expression2", is ' - 'equivalent to\n' - '\n' - ' if __debug__:\n' - ' if not expression1: raise AssertionError(expression2)\n' - '\n' - 'These equivalences assume that "__debug__" and "AssertionError" ' - 'refer\n' - 'to the built-in variables with those names. In the current\n' - 'implementation, the built-in variable "__debug__" is "True" under\n' - 'normal circumstances, "False" when optimization is requested ' - '(command\n' - 'line option "-O"). The current code generator emits no code for ' - 'an\n' - '"assert" statement when optimization is requested at compile ' - 'time.\n' - 'Note that it is unnecessary to include the source code for the\n' - 'expression that failed in the error message; it will be displayed ' - 'as\n' - 'part of the stack trace.\n' - '\n' - 'Assignments to "__debug__" are illegal. 
The value for the ' - 'built-in\n' - 'variable is determined when the interpreter starts.\n', - 'assignment': 'Assignment statements\n' - '*********************\n' - '\n' - 'Assignment statements are used to (re)bind names to values and ' - 'to\n' - 'modify attributes or items of mutable objects:\n' - '\n' - ' assignment_stmt ::= (target_list "=")+ (starred_expression ' - '| yield_expression)\n' - ' target_list ::= target ("," target)* [","]\n' - ' target ::= identifier\n' - ' | "(" [target_list] ")"\n' - ' | "[" [target_list] "]"\n' - ' | attributeref\n' - ' | subscription\n' - ' | slicing\n' - ' | "*" target\n' - '\n' - '(See section Primaries for the syntax definitions for ' - '*attributeref*,\n' - '*subscription*, and *slicing*.)\n' - '\n' - 'An assignment statement evaluates the expression list ' - '(remember that\n' - 'this can be a single expression or a comma-separated list, the ' - 'latter\n' - 'yielding a tuple) and assigns the single resulting object to ' - 'each of\n' - 'the target lists, from left to right.\n' - '\n' - 'Assignment is defined recursively depending on the form of the ' - 'target\n' - '(list). When a target is part of a mutable object (an ' - 'attribute\n' - 'reference, subscription or slicing), the mutable object must\n' - 'ultimately perform the assignment and decide about its ' - 'validity, and\n' - 'may raise an exception if the assignment is unacceptable. The ' - 'rules\n' - 'observed by various types and the exceptions raised are given ' - 'with the\n' - 'definition of the object types (see section The standard type\n' - 'hierarchy).\n' - '\n' - 'Assignment of an object to a target list, optionally enclosed ' - 'in\n' - 'parentheses or square brackets, is recursively defined as ' - 'follows.\n' - '\n' - '* If the target list is a single target with no trailing ' - 'comma,\n' - ' optionally in parentheses, the object is assigned to that ' - 'target.\n' - '\n' - '* Else:\n' - '\n' - ' * If the target list contains one target prefixed with an ' - 'asterisk,\n' - ' called a “starred” target: The object must be an iterable ' - 'with at\n' - ' least as many items as there are targets in the target ' - 'list, minus\n' - ' one. The first items of the iterable are assigned, from ' - 'left to\n' - ' right, to the targets before the starred target. The ' - 'final items\n' - ' of the iterable are assigned to the targets after the ' - 'starred\n' - ' target. A list of the remaining items in the iterable is ' - 'then\n' - ' assigned to the starred target (the list can be empty).\n' - '\n' - ' * Else: The object must be an iterable with the same number ' - 'of items\n' - ' as there are targets in the target list, and the items ' - 'are\n' - ' assigned, from left to right, to the corresponding ' - 'targets.\n' - '\n' - 'Assignment of an object to a single target is recursively ' - 'defined as\n' - 'follows.\n' - '\n' - '* If the target is an identifier (name):\n' - '\n' - ' * If the name does not occur in a "global" or "nonlocal" ' - 'statement\n' - ' in the current code block: the name is bound to the object ' - 'in the\n' - ' current local namespace.\n' - '\n' - ' * Otherwise: the name is bound to the object in the global ' - 'namespace\n' - ' or the outer namespace determined by "nonlocal", ' - 'respectively.\n' - '\n' - ' The name is rebound if it was already bound. 
This may cause ' - 'the\n' - ' reference count for the object previously bound to the name ' - 'to reach\n' - ' zero, causing the object to be deallocated and its ' - 'destructor (if it\n' - ' has one) to be called.\n' - '\n' - '* If the target is an attribute reference: The primary ' - 'expression in\n' - ' the reference is evaluated. It should yield an object with\n' - ' assignable attributes; if this is not the case, "TypeError" ' - 'is\n' - ' raised. That object is then asked to assign the assigned ' - 'object to\n' - ' the given attribute; if it cannot perform the assignment, it ' - 'raises\n' - ' an exception (usually but not necessarily ' - '"AttributeError").\n' - '\n' - ' Note: If the object is a class instance and the attribute ' - 'reference\n' - ' occurs on both sides of the assignment operator, the ' - 'right-hand side\n' - ' expression, "a.x" can access either an instance attribute or ' - '(if no\n' - ' instance attribute exists) a class attribute. The left-hand ' - 'side\n' - ' target "a.x" is always set as an instance attribute, ' - 'creating it if\n' - ' necessary. Thus, the two occurrences of "a.x" do not ' - 'necessarily\n' - ' refer to the same attribute: if the right-hand side ' - 'expression\n' - ' refers to a class attribute, the left-hand side creates a ' - 'new\n' - ' instance attribute as the target of the assignment:\n' - '\n' - ' class Cls:\n' - ' x = 3 # class variable\n' - ' inst = Cls()\n' - ' inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x ' - 'as 3\n' - '\n' - ' This description does not necessarily apply to descriptor\n' - ' attributes, such as properties created with "property()".\n' - '\n' - '* If the target is a subscription: The primary expression in ' - 'the\n' - ' reference is evaluated. It should yield either a mutable ' - 'sequence\n' - ' object (such as a list) or a mapping object (such as a ' - 'dictionary).\n' - ' Next, the subscript expression is evaluated.\n' - '\n' - ' If the primary is a mutable sequence object (such as a ' - 'list), the\n' - ' subscript must yield an integer. If it is negative, the ' - 'sequence’s\n' - ' length is added to it. The resulting value must be a ' - 'nonnegative\n' - ' integer less than the sequence’s length, and the sequence is ' - 'asked\n' - ' to assign the assigned object to its item with that index. ' - 'If the\n' - ' index is out of range, "IndexError" is raised (assignment to ' - 'a\n' - ' subscripted sequence cannot add new items to a list).\n' - '\n' - ' If the primary is a mapping object (such as a dictionary), ' - 'the\n' - ' subscript must have a type compatible with the mapping’s key ' - 'type,\n' - ' and the mapping is then asked to create a key/value pair ' - 'which maps\n' - ' the subscript to the assigned object. This can either ' - 'replace an\n' - ' existing key/value pair with the same key value, or insert a ' - 'new\n' - ' key/value pair (if no key with the same value existed).\n' - '\n' - ' For user-defined objects, the "__setitem__()" method is ' - 'called with\n' - ' appropriate arguments.\n' - '\n' - '* If the target is a slicing: The primary expression in the ' - 'reference\n' - ' is evaluated. It should yield a mutable sequence object ' - '(such as a\n' - ' list). The assigned object should be a sequence object of ' - 'the same\n' - ' type. Next, the lower and upper bound expressions are ' - 'evaluated,\n' - ' insofar they are present; defaults are zero and the ' - 'sequence’s\n' - ' length. The bounds should evaluate to integers. 
If either ' - 'bound is\n' - ' negative, the sequence’s length is added to it. The ' - 'resulting\n' - ' bounds are clipped to lie between zero and the sequence’s ' - 'length,\n' - ' inclusive. Finally, the sequence object is asked to replace ' - 'the\n' - ' slice with the items of the assigned sequence. The length ' - 'of the\n' - ' slice may be different from the length of the assigned ' - 'sequence,\n' - ' thus changing the length of the target sequence, if the ' - 'target\n' - ' sequence allows it.\n' - '\n' - '**CPython implementation detail:** In the current ' - 'implementation, the\n' - 'syntax for targets is taken to be the same as for expressions, ' - 'and\n' - 'invalid syntax is rejected during the code generation phase, ' - 'causing\n' - 'less detailed error messages.\n' - '\n' - 'Although the definition of assignment implies that overlaps ' - 'between\n' - 'the left-hand side and the right-hand side are ‘simultaneous’ ' - '(for\n' - 'example "a, b = b, a" swaps two variables), overlaps *within* ' - 'the\n' - 'collection of assigned-to variables occur left-to-right, ' - 'sometimes\n' - 'resulting in confusion. For instance, the following program ' - 'prints\n' - '"[0, 2]":\n' - '\n' - ' x = [0, 1]\n' - ' i = 0\n' - ' i, x[i] = 1, 2 # i is updated, then x[i] is ' - 'updated\n' - ' print(x)\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3132** - Extended Iterable Unpacking\n' - ' The specification for the "*target" feature.\n' - '\n' - '\n' - 'Augmented assignment statements\n' - '===============================\n' - '\n' - 'Augmented assignment is the combination, in a single ' - 'statement, of a\n' - 'binary operation and an assignment statement:\n' - '\n' - ' augmented_assignment_stmt ::= augtarget augop ' - '(expression_list | yield_expression)\n' - ' augtarget ::= identifier | attributeref | ' - 'subscription | slicing\n' - ' augop ::= "+=" | "-=" | "*=" | "@=" | ' - '"/=" | "//=" | "%=" | "**="\n' - ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' - '\n' - '(See section Primaries for the syntax definitions of the last ' - 'three\n' - 'symbols.)\n' - '\n' - 'An augmented assignment evaluates the target (which, unlike ' - 'normal\n' - 'assignment statements, cannot be an unpacking) and the ' - 'expression\n' - 'list, performs the binary operation specific to the type of ' - 'assignment\n' - 'on the two operands, and assigns the result to the original ' - 'target.\n' - 'The target is only evaluated once.\n' - '\n' - 'An augmented assignment statement like "x += 1" can be ' - 'rewritten as "x\n' - '= x + 1" to achieve a similar, but not exactly equal effect. ' - 'In the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' - 'rather than\n' - 'creating a new object and assigning that to the target, the ' - 'old object\n' - 'is modified instead.\n' - '\n' - 'Unlike normal assignments, augmented assignments evaluate the ' - 'left-\n' - 'hand side *before* evaluating the right-hand side. For ' - 'example, "a[i]\n' - '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' - 'performs\n' - 'the addition, and lastly, it writes the result back to ' - '"a[i]".\n' - '\n' - 'With the exception of assigning to tuples and multiple targets ' - 'in a\n' - 'single statement, the assignment done by augmented assignment\n' - 'statements is handled the same way as normal assignments. 
' - 'Similarly,\n' - 'with the exception of the possible *in-place* behavior, the ' - 'binary\n' - 'operation performed by augmented assignment is the same as the ' - 'normal\n' - 'binary operations.\n' - '\n' - 'For targets which are attribute references, the same caveat ' - 'about\n' - 'class and instance attributes applies as for regular ' - 'assignments.\n' - '\n' - '\n' - 'Annotated assignment statements\n' - '===============================\n' - '\n' - '*Annotation* assignment is the combination, in a single ' - 'statement, of\n' - 'a variable or attribute annotation and an optional assignment\n' - 'statement:\n' - '\n' - ' annotated_assignment_stmt ::= augtarget ":" expression\n' - ' ["=" (starred_expression | ' - 'yield_expression)]\n' - '\n' - 'The difference from normal Assignment statements is that only ' - 'a single\n' - 'target is allowed.\n' - '\n' - 'The assignment target is considered “simple” if it consists of ' - 'a\n' - 'single name that is not enclosed in parentheses. For simple ' - 'assignment\n' - 'targets, if in class or module scope, the annotations are ' - 'gathered in\n' - 'a lazily evaluated annotation scope. The annotations can be ' - 'evaluated\n' - 'using the "__annotations__" attribute of a class or module, or ' - 'using\n' - 'the facilities in the "annotationlib" module.\n' - '\n' - 'If the assignment target is not simple (an attribute, ' - 'subscript node,\n' - 'or parenthesized name), the annotation is never evaluated.\n' - '\n' - 'If a name is annotated in a function scope, then this name is ' - 'local\n' - 'for that scope. Annotations are never evaluated and stored in ' - 'function\n' - 'scopes.\n' - '\n' - 'If the right hand side is present, an annotated assignment ' - 'performs\n' - 'the actual assignment as if there was no annotation present. ' - 'If the\n' - 'right hand side is not present for an expression target, then ' - 'the\n' - 'interpreter evaluates the target except for the last ' - '"__setitem__()"\n' - 'or "__setattr__()" call.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' The proposal that added syntax for annotating the types ' - 'of\n' - ' variables (including class variables and instance ' - 'variables),\n' - ' instead of expressing them through comments.\n' - '\n' - ' **PEP 484** - Type hints\n' - ' The proposal that added the "typing" module to provide a ' - 'standard\n' - ' syntax for type annotations that can be used in static ' - 'analysis\n' - ' tools and IDEs.\n' - '\n' - 'Changed in version 3.8: Now annotated assignments allow the ' - 'same\n' - 'expressions in the right hand side as regular assignments. ' - 'Previously,\n' - 'some expressions (like un-parenthesized tuple expressions) ' - 'caused a\n' - 'syntax error.\n' - '\n' - 'Changed in version 3.14: Annotations are now lazily evaluated ' - 'in a\n' - 'separate annotation scope. 
If the assignment target is not ' - 'simple,\n' - 'annotations are never evaluated.\n', - 'assignment-expressions': 'Assignment expressions\n' - '**********************\n' - '\n' - ' assignment_expression ::= [identifier ":="] ' - 'expression\n' - '\n' - 'An assignment expression (sometimes also called a ' - '“named expression”\n' - 'or “walrus”) assigns an "expression" to an ' - '"identifier", while also\n' - 'returning the value of the "expression".\n' - '\n' - 'One common use case is when handling matched ' - 'regular expressions:\n' - '\n' - ' if matching := pattern.search(data):\n' - ' do_something(matching)\n' - '\n' - 'Or, when processing a file stream in chunks:\n' - '\n' - ' while chunk := file.read(9000):\n' - ' process(chunk)\n' - '\n' - 'Assignment expressions must be surrounded by ' - 'parentheses when used as\n' - 'expression statements and when used as ' - 'sub-expressions in slicing,\n' - 'conditional, lambda, keyword-argument, and ' - 'comprehension-if\n' - 'expressions and in "assert", "with", and ' - '"assignment" statements. In\n' - 'all other places where they can be used, ' - 'parentheses are not required,\n' - 'including in "if" and "while" statements.\n' - '\n' - 'Added in version 3.8: See **PEP 572** for more ' - 'details about\n' - 'assignment expressions.\n', - 'async': 'Coroutines\n' - '**********\n' - '\n' - 'Added in version 3.5.\n' - '\n' - '\n' - 'Coroutine function definition\n' - '=============================\n' - '\n' - ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - '\n' - 'Execution of Python coroutines can be suspended and resumed at ' - 'many\n' - 'points (see *coroutine*). "await" expressions, "async for" and ' - '"async\n' - 'with" can only be used in the body of a coroutine function.\n' - '\n' - 'Functions defined with "async def" syntax are always coroutine\n' - 'functions, even if they do not contain "await" or "async" ' - 'keywords.\n' - '\n' - 'It is a "SyntaxError" to use a "yield from" expression inside the ' - 'body\n' - 'of a coroutine function.\n' - '\n' - 'An example of a coroutine function:\n' - '\n' - ' async def func(param1, param2):\n' - ' do_stuff()\n' - ' await some_coroutine()\n' - '\n' - 'Changed in version 3.7: "await" and "async" are now keywords;\n' - 'previously they were only treated as such inside the body of a\n' - 'coroutine function.\n' - '\n' - '\n' - 'The "async for" statement\n' - '=========================\n' - '\n' - ' async_for_stmt ::= "async" for_stmt\n' - '\n' - 'An *asynchronous iterable* provides an "__aiter__" method that\n' - 'directly returns an *asynchronous iterator*, which can call\n' - 'asynchronous code in its "__anext__" method.\n' - '\n' - 'The "async for" statement allows convenient iteration over\n' - 'asynchronous iterables.\n' - '\n' - 'The following code:\n' - '\n' - ' async for TARGET in ITER:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'Is semantically equivalent to:\n' - '\n' - ' iter = (ITER)\n' - ' iter = type(iter).__aiter__(iter)\n' - ' running = True\n' - '\n' - ' while running:\n' - ' try:\n' - ' TARGET = await type(iter).__anext__(iter)\n' - ' except StopAsyncIteration:\n' - ' running = False\n' - ' else:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'See also "__aiter__()" and "__anext__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async for" statement outside the ' - 'body\n' - 'of a coroutine function.\n' - '\n' - '\n' - 'The "async with" statement\n' - 
'==========================\n' - '\n' - ' async_with_stmt ::= "async" with_stmt\n' - '\n' - 'An *asynchronous context manager* is a *context manager* that is ' - 'able\n' - 'to suspend execution in its *enter* and *exit* methods.\n' - '\n' - 'The following code:\n' - '\n' - ' async with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' aenter = type(manager).__aenter__\n' - ' aexit = type(manager).__aexit__\n' - ' value = await aenter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not await aexit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' await aexit(manager, None, None, None)\n' - '\n' - 'See also "__aenter__()" and "__aexit__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async with" statement outside the\n' - 'body of a coroutine function.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 492** - Coroutines with async and await syntax\n' - ' The proposal that made coroutines a proper standalone concept ' - 'in\n' - ' Python, and added supporting syntax.\n', - 'atom-identifiers': 'Identifiers (Names)\n' - '*******************\n' - '\n' - 'An identifier occurring as an atom is a name. See ' - 'section Identifiers\n' - 'and keywords for lexical definition and section Naming ' - 'and binding for\n' - 'documentation of naming and binding.\n' - '\n' - 'When the name is bound to an object, evaluation of the ' - 'atom yields\n' - 'that object. When a name is not bound, an attempt to ' - 'evaluate it\n' - 'raises a "NameError" exception.\n' - '\n' - '\n' - 'Private name mangling\n' - '=====================\n' - '\n' - 'When an identifier that textually occurs in a class ' - 'definition begins\n' - 'with two or more underscore characters and does not end ' - 'in two or more\n' - 'underscores, it is considered a *private name* of that ' - 'class.\n' - '\n' - 'See also: The class specifications.\n' - '\n' - 'More precisely, private names are transformed to a ' - 'longer form before\n' - 'code is generated for them. If the transformed name is ' - 'longer than\n' - '255 characters, implementation-defined truncation may ' - 'happen.\n' - '\n' - 'The transformation is independent of the syntactical ' - 'context in which\n' - 'the identifier is used but only the following private ' - 'identifiers are\n' - 'mangled:\n' - '\n' - '* Any name used as the name of a variable that is ' - 'assigned or read or\n' - ' any name of an attribute being accessed.\n' - '\n' - ' The "__name__" attribute of nested functions, classes, ' - 'and type\n' - ' aliases is however not mangled.\n' - '\n' - '* The name of imported modules, e.g., "__spam" in ' - '"import __spam". 
If\n' - ' the module is part of a package (i.e., its name ' - 'contains a dot), the\n' - ' name is *not* mangled, e.g., the "__foo" in "import ' - '__foo.bar" is\n' - ' not mangled.\n' - '\n' - '* The name of an imported member, e.g., "__f" in "from ' - 'spam import\n' - ' __f".\n' - '\n' - 'The transformation rule is defined as follows:\n' - '\n' - '* The class name, with leading underscores removed and a ' - 'single\n' - ' leading underscore inserted, is inserted in front of ' - 'the identifier,\n' - ' e.g., the identifier "__spam" occurring in a class ' - 'named "Foo",\n' - ' "_Foo" or "__Foo" is transformed to "_Foo__spam".\n' - '\n' - '* If the class name consists only of underscores, the ' - 'transformation\n' - ' is the identity, e.g., the identifier "__spam" ' - 'occurring in a class\n' - ' named "_" or "__" is left as is.\n', - 'atom-literals': 'Literals\n' - '********\n' - '\n' - 'Python supports string and bytes literals and various ' - 'numeric\n' - 'literals:\n' - '\n' - ' literal ::= stringliteral | bytesliteral\n' - ' | integer | floatnumber | imagnumber\n' - '\n' - 'Evaluation of a literal yields an object of the given type ' - '(string,\n' - 'bytes, integer, floating-point number, complex number) with ' - 'the given\n' - 'value. The value may be approximated in the case of ' - 'floating-point\n' - 'and imaginary (complex) literals. See section Literals for ' - 'details.\n' - '\n' - 'All literals correspond to immutable data types, and hence ' - 'the\n' - 'object’s identity is less important than its value. ' - 'Multiple\n' - 'evaluations of literals with the same value (either the ' - 'same\n' - 'occurrence in the program text or a different occurrence) ' - 'may obtain\n' - 'the same object or a different object with the same ' - 'value.\n', - 'attribute-access': 'Customizing attribute access\n' - '****************************\n' - '\n' - 'The following methods can be defined to customize the ' - 'meaning of\n' - 'attribute access (use of, assignment to, or deletion of ' - '"x.name") for\n' - 'class instances.\n' - '\n' - 'object.__getattr__(self, name)\n' - '\n' - ' Called when the default attribute access fails with ' - 'an\n' - ' "AttributeError" (either "__getattribute__()" raises ' - 'an\n' - ' "AttributeError" because *name* is not an instance ' - 'attribute or an\n' - ' attribute in the class tree for "self"; or ' - '"__get__()" of a *name*\n' - ' property raises "AttributeError"). This method ' - 'should either\n' - ' return the (computed) attribute value or raise an ' - '"AttributeError"\n' - ' exception. The "object" class itself does not provide ' - 'this method.\n' - '\n' - ' Note that if the attribute is found through the ' - 'normal mechanism,\n' - ' "__getattr__()" is not called. (This is an ' - 'intentional asymmetry\n' - ' between "__getattr__()" and "__setattr__()".) This is ' - 'done both for\n' - ' efficiency reasons and because otherwise ' - '"__getattr__()" would have\n' - ' no way to access other attributes of the instance. ' - 'Note that at\n' - ' least for instance variables, you can take total ' - 'control by not\n' - ' inserting any values in the instance attribute ' - 'dictionary (but\n' - ' instead inserting them in another object). See the\n' - ' "__getattribute__()" method below for a way to ' - 'actually get total\n' - ' control over attribute access.\n' - '\n' - 'object.__getattribute__(self, name)\n' - '\n' - ' Called unconditionally to implement attribute ' - 'accesses for\n' - ' instances of the class. 
If the class also defines ' - '"__getattr__()",\n' - ' the latter will not be called unless ' - '"__getattribute__()" either\n' - ' calls it explicitly or raises an "AttributeError". ' - 'This method\n' - ' should return the (computed) attribute value or raise ' - 'an\n' - ' "AttributeError" exception. In order to avoid ' - 'infinite recursion in\n' - ' this method, its implementation should always call ' - 'the base class\n' - ' method with the same name to access any attributes it ' - 'needs, for\n' - ' example, "object.__getattribute__(self, name)".\n' - '\n' - ' Note:\n' - '\n' - ' This method may still be bypassed when looking up ' - 'special methods\n' - ' as the result of implicit invocation via language ' - 'syntax or\n' - ' built-in functions. See Special method lookup.\n' - '\n' - ' For certain sensitive attribute accesses, raises an ' - 'auditing event\n' - ' "object.__getattr__" with arguments "obj" and ' - '"name".\n' - '\n' - 'object.__setattr__(self, name, value)\n' - '\n' - ' Called when an attribute assignment is attempted. ' - 'This is called\n' - ' instead of the normal mechanism (i.e. store the value ' - 'in the\n' - ' instance dictionary). *name* is the attribute name, ' - '*value* is the\n' - ' value to be assigned to it.\n' - '\n' - ' If "__setattr__()" wants to assign to an instance ' - 'attribute, it\n' - ' should call the base class method with the same name, ' - 'for example,\n' - ' "object.__setattr__(self, name, value)".\n' - '\n' - ' For certain sensitive attribute assignments, raises ' - 'an auditing\n' - ' event "object.__setattr__" with arguments "obj", ' - '"name", "value".\n' - '\n' - 'object.__delattr__(self, name)\n' - '\n' - ' Like "__setattr__()" but for attribute deletion ' - 'instead of\n' - ' assignment. This should only be implemented if "del ' - 'obj.name" is\n' - ' meaningful for the object.\n' - '\n' - ' For certain sensitive attribute deletions, raises an ' - 'auditing event\n' - ' "object.__delattr__" with arguments "obj" and ' - '"name".\n' - '\n' - 'object.__dir__(self)\n' - '\n' - ' Called when "dir()" is called on the object. An ' - 'iterable must be\n' - ' returned. "dir()" converts the returned iterable to a ' - 'list and\n' - ' sorts it.\n' - '\n' - '\n' - 'Customizing module attribute access\n' - '===================================\n' - '\n' - 'Special names "__getattr__" and "__dir__" can be also ' - 'used to\n' - 'customize access to module attributes. The "__getattr__" ' - 'function at\n' - 'the module level should accept one argument which is the ' - 'name of an\n' - 'attribute and return the computed value or raise an ' - '"AttributeError".\n' - 'If an attribute is not found on a module object through ' - 'the normal\n' - 'lookup, i.e. "object.__getattribute__()", then ' - '"__getattr__" is\n' - 'searched in the module "__dict__" before raising an ' - '"AttributeError".\n' - 'If found, it is called with the attribute name and the ' - 'result is\n' - 'returned.\n' - '\n' - 'The "__dir__" function should accept no arguments, and ' - 'return an\n' - 'iterable of strings that represents the names accessible ' - 'on module. If\n' - 'present, this function overrides the standard "dir()" ' - 'search on a\n' - 'module.\n' - '\n' - 'For a more fine grained customization of the module ' - 'behavior (setting\n' - 'attributes, properties, etc.), one can set the ' - '"__class__" attribute\n' - 'of a module object to a subclass of "types.ModuleType". 
' - 'For example:\n' - '\n' - ' import sys\n' - ' from types import ModuleType\n' - '\n' - ' class VerboseModule(ModuleType):\n' - ' def __repr__(self):\n' - " return f'Verbose {self.__name__}'\n" - '\n' - ' def __setattr__(self, attr, value):\n' - " print(f'Setting {attr}...')\n" - ' super().__setattr__(attr, value)\n' - '\n' - ' sys.modules[__name__].__class__ = VerboseModule\n' - '\n' - 'Note:\n' - '\n' - ' Defining module "__getattr__" and setting module ' - '"__class__" only\n' - ' affect lookups made using the attribute access syntax ' - '– directly\n' - ' accessing the module globals (whether by code within ' - 'the module, or\n' - ' via a reference to the module’s globals dictionary) is ' - 'unaffected.\n' - '\n' - 'Changed in version 3.5: "__class__" module attribute is ' - 'now writable.\n' - '\n' - 'Added in version 3.7: "__getattr__" and "__dir__" module ' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 562** - Module __getattr__ and __dir__\n' - ' Describes the "__getattr__" and "__dir__" functions ' - 'on modules.\n' - '\n' - '\n' - 'Implementing Descriptors\n' - '========================\n' - '\n' - 'The following methods only apply when an instance of the ' - 'class\n' - 'containing the method (a so-called *descriptor* class) ' - 'appears in an\n' - '*owner* class (the descriptor must be in either the ' - 'owner’s class\n' - 'dictionary or in the class dictionary for one of its ' - 'parents). In the\n' - 'examples below, “the attribute” refers to the attribute ' - 'whose name is\n' - 'the key of the property in the owner class’ "__dict__". ' - 'The "object"\n' - 'class itself does not implement any of these protocols.\n' - '\n' - 'object.__get__(self, instance, owner=None)\n' - '\n' - ' Called to get the attribute of the owner class (class ' - 'attribute\n' - ' access) or of an instance of that class (instance ' - 'attribute\n' - ' access). The optional *owner* argument is the owner ' - 'class, while\n' - ' *instance* is the instance that the attribute was ' - 'accessed through,\n' - ' or "None" when the attribute is accessed through the ' - '*owner*.\n' - '\n' - ' This method should return the computed attribute ' - 'value or raise an\n' - ' "AttributeError" exception.\n' - '\n' - ' **PEP 252** specifies that "__get__()" is callable ' - 'with one or two\n' - ' arguments. Python’s own built-in descriptors support ' - 'this\n' - ' specification; however, it is likely that some ' - 'third-party tools\n' - ' have descriptors that require both arguments. ' - 'Python’s own\n' - ' "__getattribute__()" implementation always passes in ' - 'both arguments\n' - ' whether they are required or not.\n' - '\n' - 'object.__set__(self, instance, value)\n' - '\n' - ' Called to set the attribute on an instance *instance* ' - 'of the owner\n' - ' class to a new value, *value*.\n' - '\n' - ' Note, adding "__set__()" or "__delete__()" changes ' - 'the kind of\n' - ' descriptor to a “data descriptor”. 
See Invoking ' - 'Descriptors for\n' - ' more details.\n' - '\n' - 'object.__delete__(self, instance)\n' - '\n' - ' Called to delete the attribute on an instance ' - '*instance* of the\n' - ' owner class.\n' - '\n' - 'Instances of descriptors may also have the ' - '"__objclass__" attribute\n' - 'present:\n' - '\n' - 'object.__objclass__\n' - '\n' - ' The attribute "__objclass__" is interpreted by the ' - '"inspect" module\n' - ' as specifying the class where this object was defined ' - '(setting this\n' - ' appropriately can assist in runtime introspection of ' - 'dynamic class\n' - ' attributes). For callables, it may indicate that an ' - 'instance of the\n' - ' given type (or a subclass) is expected or required as ' - 'the first\n' - ' positional argument (for example, CPython sets this ' - 'attribute for\n' - ' unbound methods that are implemented in C).\n' - '\n' - '\n' - 'Invoking Descriptors\n' - '====================\n' - '\n' - 'In general, a descriptor is an object attribute with ' - '“binding\n' - 'behavior”, one whose attribute access has been ' - 'overridden by methods\n' - 'in the descriptor protocol: "__get__()", "__set__()", ' - 'and\n' - '"__delete__()". If any of those methods are defined for ' - 'an object, it\n' - 'is said to be a descriptor.\n' - '\n' - 'The default behavior for attribute access is to get, ' - 'set, or delete\n' - 'the attribute from an object’s dictionary. For instance, ' - '"a.x" has a\n' - 'lookup chain starting with "a.__dict__[\'x\']", then\n' - '"type(a).__dict__[\'x\']", and continuing through the ' - 'base classes of\n' - '"type(a)" excluding metaclasses.\n' - '\n' - 'However, if the looked-up value is an object defining ' - 'one of the\n' - 'descriptor methods, then Python may override the default ' - 'behavior and\n' - 'invoke the descriptor method instead. Where this occurs ' - 'in the\n' - 'precedence chain depends on which descriptor methods ' - 'were defined and\n' - 'how they were called.\n' - '\n' - 'The starting point for descriptor invocation is a ' - 'binding, "a.x". How\n' - 'the arguments are assembled depends on "a":\n' - '\n' - 'Direct Call\n' - ' The simplest and least common call is when user code ' - 'directly\n' - ' invokes a descriptor method: "x.__get__(a)".\n' - '\n' - 'Instance Binding\n' - ' If binding to an object instance, "a.x" is ' - 'transformed into the\n' - ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' - '\n' - 'Class Binding\n' - ' If binding to a class, "A.x" is transformed into the ' - 'call:\n' - ' "A.__dict__[\'x\'].__get__(None, A)".\n' - '\n' - 'Super Binding\n' - ' A dotted lookup such as "super(A, a).x" searches\n' - ' "a.__class__.__mro__" for a base class "B" following ' - '"A" and then\n' - ' returns "B.__dict__[\'x\'].__get__(a, A)". If not a ' - 'descriptor, "x"\n' - ' is returned unchanged.\n' - '\n' - 'For instance bindings, the precedence of descriptor ' - 'invocation depends\n' - 'on which descriptor methods are defined. A descriptor ' - 'can define any\n' - 'combination of "__get__()", "__set__()" and ' - '"__delete__()". If it\n' - 'does not define "__get__()", then accessing the ' - 'attribute will return\n' - 'the descriptor object itself unless there is a value in ' - 'the object’s\n' - 'instance dictionary. If the descriptor defines ' - '"__set__()" and/or\n' - '"__delete__()", it is a data descriptor; if it defines ' - 'neither, it is\n' - 'a non-data descriptor. 
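A rough sketch of the data/non-data distinction just described (illustrative names, not part of the reference text):

    >>> class Ten:                      # data descriptor: defines __get__ and __set__
    ...     def __get__(self, instance, owner=None):
    ...         return 10
    ...     def __set__(self, instance, value):
    ...         raise AttributeError("read-only")
    ...
    >>> class A:
    ...     x = Ten()
    ...
    >>> a = A()
    >>> a.__dict__['x'] = 99            # entry in the instance dictionary
    >>> a.x                             # the data descriptor still wins
    10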
Normally, data descriptors ' - 'define both\n' - '"__get__()" and "__set__()", while non-data descriptors ' - 'have just the\n' - '"__get__()" method. Data descriptors with "__get__()" ' - 'and "__set__()"\n' - '(and/or "__delete__()") defined always override a ' - 'redefinition in an\n' - 'instance dictionary. In contrast, non-data descriptors ' - 'can be\n' - 'overridden by instances.\n' - '\n' - 'Python methods (including those decorated with ' - '"@staticmethod" and\n' - '"@classmethod") are implemented as non-data ' - 'descriptors. Accordingly,\n' - 'instances can redefine and override methods. This ' - 'allows individual\n' - 'instances to acquire behaviors that differ from other ' - 'instances of the\n' - 'same class.\n' - '\n' - 'The "property()" function is implemented as a data ' - 'descriptor.\n' - 'Accordingly, instances cannot override the behavior of a ' - 'property.\n' - '\n' - '\n' - '__slots__\n' - '=========\n' - '\n' - '*__slots__* allow us to explicitly declare data members ' - '(like\n' - 'properties) and deny the creation of "__dict__" and ' - '*__weakref__*\n' - '(unless explicitly declared in *__slots__* or available ' - 'in a parent.)\n' - '\n' - 'The space saved over using "__dict__" can be ' - 'significant. Attribute\n' - 'lookup speed can be significantly improved as well.\n' - '\n' - 'object.__slots__\n' - '\n' - ' This class variable can be assigned a string, ' - 'iterable, or sequence\n' - ' of strings with variable names used by instances. ' - '*__slots__*\n' - ' reserves space for the declared variables and ' - 'prevents the\n' - ' automatic creation of "__dict__" and *__weakref__* ' - 'for each\n' - ' instance.\n' - '\n' - 'Notes on using *__slots__*:\n' - '\n' - '* When inheriting from a class without *__slots__*, the ' - '"__dict__" and\n' - ' *__weakref__* attribute of the instances will always ' - 'be accessible.\n' - '\n' - '* Without a "__dict__" variable, instances cannot be ' - 'assigned new\n' - ' variables not listed in the *__slots__* definition. ' - 'Attempts to\n' - ' assign to an unlisted variable name raises ' - '"AttributeError". If\n' - ' dynamic assignment of new variables is desired, then ' - 'add\n' - ' "\'__dict__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' - '\n' - '* Without a *__weakref__* variable for each instance, ' - 'classes defining\n' - ' *__slots__* do not support "weak references" to its ' - 'instances. If\n' - ' weak reference support is needed, then add ' - '"\'__weakref__\'" to the\n' - ' sequence of strings in the *__slots__* declaration.\n' - '\n' - '* *__slots__* are implemented at the class level by ' - 'creating\n' - ' descriptors for each variable name. As a result, ' - 'class attributes\n' - ' cannot be used to set default values for instance ' - 'variables defined\n' - ' by *__slots__*; otherwise, the class attribute would ' - 'overwrite the\n' - ' descriptor assignment.\n' - '\n' - '* The action of a *__slots__* declaration is not limited ' - 'to the class\n' - ' where it is defined. *__slots__* declared in parents ' - 'are available\n' - ' in child classes. 
However, instances of a child ' - 'subclass will get a\n' - ' "__dict__" and *__weakref__* unless the subclass also ' - 'defines\n' - ' *__slots__* (which should only contain names of any ' - '*additional*\n' - ' slots).\n' - '\n' - '* If a class defines a slot also defined in a base ' - 'class, the instance\n' - ' variable defined by the base class slot is ' - 'inaccessible (except by\n' - ' retrieving its descriptor directly from the base ' - 'class). This\n' - ' renders the meaning of the program undefined. In the ' - 'future, a\n' - ' check may be added to prevent this.\n' - '\n' - '* "TypeError" will be raised if nonempty *__slots__* are ' - 'defined for a\n' - ' class derived from a ""variable-length" built-in type" ' - 'such as\n' - ' "int", "bytes", and "tuple".\n' - '\n' - '* Any non-string *iterable* may be assigned to ' - '*__slots__*.\n' - '\n' - '* If a "dictionary" is used to assign *__slots__*, the ' - 'dictionary keys\n' - ' will be used as the slot names. The values of the ' - 'dictionary can be\n' - ' used to provide per-attribute docstrings that will be ' - 'recognised by\n' - ' "inspect.getdoc()" and displayed in the output of ' - '"help()".\n' - '\n' - '* "__class__" assignment works only if both classes have ' - 'the same\n' - ' *__slots__*.\n' - '\n' - '* Multiple inheritance with multiple slotted parent ' - 'classes can be\n' - ' used, but only one parent is allowed to have ' - 'attributes created by\n' - ' slots (the other bases must have empty slot layouts) - ' - 'violations\n' - ' raise "TypeError".\n' - '\n' - '* If an *iterator* is used for *__slots__* then a ' - '*descriptor* is\n' - ' created for each of the iterator’s values. However, ' - 'the *__slots__*\n' - ' attribute will be an empty iterator.\n', - 'attribute-references': 'Attribute references\n' - '********************\n' - '\n' - 'An attribute reference is a primary followed by a ' - 'period and a name:\n' - '\n' - ' attributeref ::= primary "." identifier\n' - '\n' - 'The primary must evaluate to an object of a type ' - 'that supports\n' - 'attribute references, which most objects do. This ' - 'object is then\n' - 'asked to produce the attribute whose name is the ' - 'identifier. The type\n' - 'and value produced is determined by the object. ' - 'Multiple evaluations\n' - 'of the same attribute reference may yield different ' - 'objects.\n' - '\n' - 'This production can be customized by overriding the\n' - '"__getattribute__()" method or the "__getattr__()" ' - 'method. 
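Returning to the *__slots__* notes above, a minimal sketch (illustrative class name; the exact wording of the error message varies between Python versions):

    >>> class Point:
    ...     __slots__ = ('x', 'y')
    ...     def __init__(self, x, y):
    ...         self.x, self.y = x, y
    ...
    >>> p = Point(1, 2)
    >>> hasattr(p, '__dict__')       # no per-instance dictionary is created
    False
    >>> p.z = 3                      # not declared in __slots__
    Traceback (most recent call last):
      ...
    AttributeError: 'Point' object has no attribute 'z'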
The\n' - '"__getattribute__()" method is called first and ' - 'either returns a value\n' - 'or raises "AttributeError" if the attribute is not ' - 'available.\n' - '\n' - 'If an "AttributeError" is raised and the object has ' - 'a "__getattr__()"\n' - 'method, that method is called as a fallback.\n', - 'augassign': 'Augmented assignment statements\n' - '*******************************\n' - '\n' - 'Augmented assignment is the combination, in a single statement, ' - 'of a\n' - 'binary operation and an assignment statement:\n' - '\n' - ' augmented_assignment_stmt ::= augtarget augop ' - '(expression_list | yield_expression)\n' - ' augtarget ::= identifier | attributeref | ' - 'subscription | slicing\n' - ' augop ::= "+=" | "-=" | "*=" | "@=" | ' - '"/=" | "//=" | "%=" | "**="\n' - ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' - '\n' - '(See section Primaries for the syntax definitions of the last ' - 'three\n' - 'symbols.)\n' - '\n' - 'An augmented assignment evaluates the target (which, unlike ' - 'normal\n' - 'assignment statements, cannot be an unpacking) and the ' - 'expression\n' - 'list, performs the binary operation specific to the type of ' - 'assignment\n' - 'on the two operands, and assigns the result to the original ' - 'target.\n' - 'The target is only evaluated once.\n' - '\n' - 'An augmented assignment statement like "x += 1" can be ' - 'rewritten as "x\n' - '= x + 1" to achieve a similar, but not exactly equal effect. In ' - 'the\n' - 'augmented version, "x" is only evaluated once. Also, when ' - 'possible,\n' - 'the actual operation is performed *in-place*, meaning that ' - 'rather than\n' - 'creating a new object and assigning that to the target, the old ' - 'object\n' - 'is modified instead.\n' - '\n' - 'Unlike normal assignments, augmented assignments evaluate the ' - 'left-\n' - 'hand side *before* evaluating the right-hand side. For ' - 'example, "a[i]\n' - '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' - 'performs\n' - 'the addition, and lastly, it writes the result back to "a[i]".\n' - '\n' - 'With the exception of assigning to tuples and multiple targets ' - 'in a\n' - 'single statement, the assignment done by augmented assignment\n' - 'statements is handled the same way as normal assignments. ' - 'Similarly,\n' - 'with the exception of the possible *in-place* behavior, the ' - 'binary\n' - 'operation performed by augmented assignment is the same as the ' - 'normal\n' - 'binary operations.\n' - '\n' - 'For targets which are attribute references, the same caveat ' - 'about\n' - 'class and instance attributes applies as for regular ' - 'assignments.\n', - 'await': 'Await expression\n' - '****************\n' - '\n' - 'Suspend the execution of *coroutine* on an *awaitable* object. Can\n' - 'only be used inside a *coroutine function*.\n' - '\n' - ' await_expr ::= "await" primary\n' - '\n' - 'Added in version 3.5.\n', - 'binary': 'Binary arithmetic operations\n' - '****************************\n' - '\n' - 'The binary arithmetic operations have the conventional priority\n' - 'levels. Note that some of these operations also apply to certain ' - 'non-\n' - 'numeric types. 
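Referring back to the augmented-assignment behaviour above, a short sketch of the in-place case (illustrative values only):

    >>> a = b = [1, 2]
    >>> a += [3]          # in-place: list.__iadd__ mutates the existing object
    >>> a is b            # both names still refer to the same list
    True
    >>> a = b = (1, 2)
    >>> a += (3,)         # tuples are immutable, so a new object is created
    >>> a is b
    False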
Apart from the power operator, there are only two\n' - 'levels, one for multiplicative operators and one for additive\n' - 'operators:\n' - '\n' - ' m_expr ::= u_expr | m_expr "*" u_expr | m_expr "@" m_expr |\n' - ' m_expr "//" u_expr | m_expr "/" u_expr |\n' - ' m_expr "%" u_expr\n' - ' a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr\n' - '\n' - 'The "*" (multiplication) operator yields the product of its ' - 'arguments.\n' - 'The arguments must either both be numbers, or one argument must be ' - 'an\n' - 'integer and the other must be a sequence. In the former case, the\n' - 'numbers are converted to a common real type and then multiplied\n' - 'together. In the latter case, sequence repetition is performed; ' - 'a\n' - 'negative repetition factor yields an empty sequence.\n' - '\n' - 'This operation can be customized using the special "__mul__()" ' - 'and\n' - '"__rmul__()" methods.\n' - '\n' - 'Changed in version 3.14: If only one operand is a complex number, ' - 'the\n' - 'other operand is converted to a floating-point number.\n' - '\n' - 'The "@" (at) operator is intended to be used for matrix\n' - 'multiplication. No builtin Python types implement this operator.\n' - '\n' - 'This operation can be customized using the special "__matmul__()" ' - 'and\n' - '"__rmatmul__()" methods.\n' - '\n' - 'Added in version 3.5.\n' - '\n' - 'The "/" (division) and "//" (floor division) operators yield the\n' - 'quotient of their arguments. The numeric arguments are first\n' - 'converted to a common type. Division of integers yields a float, ' - 'while\n' - 'floor division of integers results in an integer; the result is ' - 'that\n' - 'of mathematical division with the ‘floor’ function applied to the\n' - 'result. Division by zero raises the "ZeroDivisionError" ' - 'exception.\n' - '\n' - 'The division operation can be customized using the special\n' - '"__truediv__()" and "__rtruediv__()" methods. The floor division\n' - 'operation can be customized using the special "__floordiv__()" ' - 'and\n' - '"__rfloordiv__()" methods.\n' - '\n' - 'The "%" (modulo) operator yields the remainder from the division ' - 'of\n' - 'the first argument by the second. The numeric arguments are ' - 'first\n' - 'converted to a common type. A zero right argument raises the\n' - '"ZeroDivisionError" exception. The arguments may be ' - 'floating-point\n' - 'numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals ' - '"4*0.7 +\n' - '0.34".) The modulo operator always yields a result with the same ' - 'sign\n' - 'as its second operand (or zero); the absolute value of the result ' - 'is\n' - 'strictly smaller than the absolute value of the second operand ' - '[1].\n' - '\n' - 'The floor division and modulo operators are connected by the ' - 'following\n' - 'identity: "x == (x//y)*y + (x%y)". Floor division and modulo are ' - 'also\n' - 'connected with the built-in function "divmod()": "divmod(x, y) ==\n' - '(x//y, x%y)". [2].\n' - '\n' - 'In addition to performing the modulo operation on numbers, the ' - '"%"\n' - 'operator is also overloaded by string objects to perform ' - 'old-style\n' - 'string formatting (also known as interpolation). 
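To illustrate the floor-division/modulo identity and the sign rule described above (illustrative values only):

    >>> 7 // -2, 7 % -2              # the result of % takes the sign of the second operand
    (-4, -1)
    >>> (7 // -2) * -2 + (7 % -2)    # x == (x//y)*y + (x%y)
    7
    >>> divmod(7, -2)
    (-4, -1)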
The syntax for\n' - 'string formatting is described in the Python Library Reference,\n' - 'section printf-style String Formatting.\n' - '\n' - 'The *modulo* operation can be customized using the special ' - '"__mod__()"\n' - 'and "__rmod__()" methods.\n' - '\n' - 'The floor division operator, the modulo operator, and the ' - '"divmod()"\n' - 'function are not defined for complex numbers. Instead, convert to ' - 'a\n' - 'floating-point number using the "abs()" function if appropriate.\n' - '\n' - 'The "+" (addition) operator yields the sum of its arguments. The\n' - 'arguments must either both be numbers or both be sequences of the ' - 'same\n' - 'type. In the former case, the numbers are converted to a common ' - 'real\n' - 'type and then added together. In the latter case, the sequences ' - 'are\n' - 'concatenated.\n' - '\n' - 'This operation can be customized using the special "__add__()" ' - 'and\n' - '"__radd__()" methods.\n' - '\n' - 'Changed in version 3.14: If only one operand is a complex number, ' - 'the\n' - 'other operand is converted to a floating-point number.\n' - '\n' - 'The "-" (subtraction) operator yields the difference of its ' - 'arguments.\n' - 'The numeric arguments are first converted to a common real type.\n' - '\n' - 'This operation can be customized using the special "__sub__()" ' - 'and\n' - '"__rsub__()" methods.\n' - '\n' - 'Changed in version 3.14: If only one operand is a complex number, ' - 'the\n' - 'other operand is converted to a floating-point number.\n', - 'bitwise': 'Binary bitwise operations\n' - '*************************\n' - '\n' - 'Each of the three bitwise operations has a different priority ' - 'level:\n' - '\n' - ' and_expr ::= shift_expr | and_expr "&" shift_expr\n' - ' xor_expr ::= and_expr | xor_expr "^" and_expr\n' - ' or_expr ::= xor_expr | or_expr "|" xor_expr\n' - '\n' - 'The "&" operator yields the bitwise AND of its arguments, which ' - 'must\n' - 'be integers or one of them must be a custom object overriding\n' - '"__and__()" or "__rand__()" special methods.\n' - '\n' - 'The "^" operator yields the bitwise XOR (exclusive OR) of its\n' - 'arguments, which must be integers or one of them must be a ' - 'custom\n' - 'object overriding "__xor__()" or "__rxor__()" special methods.\n' - '\n' - 'The "|" operator yields the bitwise (inclusive) OR of its ' - 'arguments,\n' - 'which must be integers or one of them must be a custom object\n' - 'overriding "__or__()" or "__ror__()" special methods.\n', - 'bltin-code-objects': 'Code Objects\n' - '************\n' - '\n' - 'Code objects are used by the implementation to ' - 'represent “pseudo-\n' - 'compiled” executable Python code such as a function ' - 'body. They differ\n' - 'from function objects because they don’t contain a ' - 'reference to their\n' - 'global execution environment. Code objects are ' - 'returned by the built-\n' - 'in "compile()" function and can be extracted from ' - 'function objects\n' - 'through their "__code__" attribute. See also the ' - '"code" module.\n' - '\n' - 'Accessing "__code__" raises an auditing event ' - '"object.__getattr__"\n' - 'with arguments "obj" and ""__code__"".\n' - '\n' - 'A code object can be executed or evaluated by passing ' - 'it (instead of a\n' - 'source string) to the "exec()" or "eval()" built-in ' - 'functions.\n' - '\n' - 'See The standard type hierarchy for more ' - 'information.\n', - 'bltin-ellipsis-object': 'The Ellipsis Object\n' - '*******************\n' - '\n' - 'This object is commonly used by slicing (see ' - 'Slicings). 
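A small sketch of the code-object behaviour described above (names are illustrative only):

    >>> code = compile('40 + 2', '<example>', 'eval')
    >>> eval(code)                   # a code object may be passed instead of a source string
    42
    >>> def greet():
    ...     return 'hello'
    ...
    >>> eval(greet.__code__)         # code objects extracted from functions work too
    'hello'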
It supports\n' - 'no special operations. There is exactly one ' - 'ellipsis object, named\n' - '"Ellipsis" (a built-in name). "type(Ellipsis)()" ' - 'produces the\n' - '"Ellipsis" singleton.\n' - '\n' - 'It is written as "Ellipsis" or "...".\n', - 'bltin-null-object': 'The Null Object\n' - '***************\n' - '\n' - 'This object is returned by functions that don’t ' - 'explicitly return a\n' - 'value. It supports no special operations. There is ' - 'exactly one null\n' - 'object, named "None" (a built-in name). "type(None)()" ' - 'produces the\n' - 'same singleton.\n' - '\n' - 'It is written as "None".\n', - 'bltin-type-objects': 'Type Objects\n' - '************\n' - '\n' - 'Type objects represent the various object types. An ' - 'object’s type is\n' - 'accessed by the built-in function "type()". There are ' - 'no special\n' - 'operations on types. The standard module "types" ' - 'defines names for\n' - 'all standard built-in types.\n' - '\n' - 'Types are written like this: "".\n', - 'booleans': 'Boolean operations\n' - '******************\n' - '\n' - ' or_test ::= and_test | or_test "or" and_test\n' - ' and_test ::= not_test | and_test "and" not_test\n' - ' not_test ::= comparison | "not" not_test\n' - '\n' - 'In the context of Boolean operations, and also when expressions ' - 'are\n' - 'used by control flow statements, the following values are ' - 'interpreted\n' - 'as false: "False", "None", numeric zero of all types, and empty\n' - 'strings and containers (including strings, tuples, lists,\n' - 'dictionaries, sets and frozensets). All other values are ' - 'interpreted\n' - 'as true. User-defined objects can customize their truth value ' - 'by\n' - 'providing a "__bool__()" method.\n' - '\n' - 'The operator "not" yields "True" if its argument is false, ' - '"False"\n' - 'otherwise.\n' - '\n' - 'The expression "x and y" first evaluates *x*; if *x* is false, ' - 'its\n' - 'value is returned; otherwise, *y* is evaluated and the resulting ' - 'value\n' - 'is returned.\n' - '\n' - 'The expression "x or y" first evaluates *x*; if *x* is true, its ' - 'value\n' - 'is returned; otherwise, *y* is evaluated and the resulting value ' - 'is\n' - 'returned.\n' - '\n' - 'Note that neither "and" nor "or" restrict the value and type ' - 'they\n' - 'return to "False" and "True", but rather return the last ' - 'evaluated\n' - 'argument. This is sometimes useful, e.g., if "s" is a string ' - 'that\n' - 'should be replaced by a default value if it is empty, the ' - 'expression\n' - '"s or \'foo\'" yields the desired value. 
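For example (an illustrative session):

    >>> s = ''
    >>> s or 'foo'        # an empty string is false, so the default is returned
    'foo'
    >>> s = 'bar'
    >>> s or 'foo'        # a non-empty string is true and is returned unchanged
    'bar'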
Because "not" has to ' - 'create a\n' - 'new value, it returns a boolean value regardless of the type of ' - 'its\n' - 'argument (for example, "not \'foo\'" produces "False" rather ' - 'than "\'\'".)\n', - 'break': 'The "break" statement\n' - '*********************\n' - '\n' - ' break_stmt ::= "break"\n' - '\n' - '"break" may only occur syntactically nested in a "for" or "while"\n' - 'loop, but not nested in a function or class definition within that\n' - 'loop.\n' - '\n' - 'It terminates the nearest enclosing loop, skipping the optional ' - '"else"\n' - 'clause if the loop has one.\n' - '\n' - 'If a "for" loop is terminated by "break", the loop control target\n' - 'keeps its current value.\n' - '\n' - 'When "break" passes control out of a "try" statement with a ' - '"finally"\n' - 'clause, that "finally" clause is executed before really leaving ' - 'the\n' - 'loop.\n', - 'callable-types': 'Emulating callable objects\n' - '**************************\n' - '\n' - 'object.__call__(self[, args...])\n' - '\n' - ' Called when the instance is “called” as a function; if ' - 'this method\n' - ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)". The "object" class ' - 'itself does\n' - ' not provide this method.\n', - 'calls': 'Calls\n' - '*****\n' - '\n' - 'A call calls a callable object (e.g., a *function*) with a ' - 'possibly\n' - 'empty series of *arguments*:\n' - '\n' - ' call ::= primary "(" [argument_list [","] | ' - 'comprehension] ")"\n' - ' argument_list ::= positional_arguments ["," ' - 'starred_and_keywords]\n' - ' ["," keywords_arguments]\n' - ' | starred_and_keywords ["," ' - 'keywords_arguments]\n' - ' | keywords_arguments\n' - ' positional_arguments ::= positional_item ("," positional_item)*\n' - ' positional_item ::= assignment_expression | "*" expression\n' - ' starred_and_keywords ::= ("*" expression | keyword_item)\n' - ' ("," "*" expression | "," ' - 'keyword_item)*\n' - ' keywords_arguments ::= (keyword_item | "**" expression)\n' - ' ("," keyword_item | "," "**" ' - 'expression)*\n' - ' keyword_item ::= identifier "=" expression\n' - '\n' - 'An optional trailing comma may be present after the positional and\n' - 'keyword arguments but does not affect the semantics.\n' - '\n' - 'The primary must evaluate to a callable object (user-defined\n' - 'functions, built-in functions, methods of built-in objects, class\n' - 'objects, methods of class instances, and all objects having a\n' - '"__call__()" method are callable). All argument expressions are\n' - 'evaluated before the call is attempted. Please refer to section\n' - 'Function definitions for the syntax of formal *parameter* lists.\n' - '\n' - 'If keyword arguments are present, they are first converted to\n' - 'positional arguments, as follows. First, a list of unfilled slots ' - 'is\n' - 'created for the formal parameters. If there are N positional\n' - 'arguments, they are placed in the first N slots. Next, for each\n' - 'keyword argument, the identifier is used to determine the\n' - 'corresponding slot (if the identifier is the same as the first ' - 'formal\n' - 'parameter name, the first slot is used, and so on). If the slot ' - 'is\n' - 'already filled, a "TypeError" exception is raised. Otherwise, the\n' - 'argument is placed in the slot, filling it (even if the expression ' - 'is\n' - '"None", it fills the slot). 
When all arguments have been ' - 'processed,\n' - 'the slots that are still unfilled are filled with the ' - 'corresponding\n' - 'default value from the function definition. (Default values are\n' - 'calculated, once, when the function is defined; thus, a mutable ' - 'object\n' - 'such as a list or dictionary used as default value will be shared ' - 'by\n' - 'all calls that don’t specify an argument value for the ' - 'corresponding\n' - 'slot; this should usually be avoided.) If there are any unfilled\n' - 'slots for which no default value is specified, a "TypeError" ' - 'exception\n' - 'is raised. Otherwise, the list of filled slots is used as the\n' - 'argument list for the call.\n' - '\n' - '**CPython implementation detail:** An implementation may provide\n' - 'built-in functions whose positional parameters do not have names, ' - 'even\n' - 'if they are ‘named’ for the purpose of documentation, and which\n' - 'therefore cannot be supplied by keyword. In CPython, this is the ' - 'case\n' - 'for functions implemented in C that use "PyArg_ParseTuple()" to ' - 'parse\n' - 'their arguments.\n' - '\n' - 'If there are more positional arguments than there are formal ' - 'parameter\n' - 'slots, a "TypeError" exception is raised, unless a formal ' - 'parameter\n' - 'using the syntax "*identifier" is present; in this case, that ' - 'formal\n' - 'parameter receives a tuple containing the excess positional ' - 'arguments\n' - '(or an empty tuple if there were no excess positional arguments).\n' - '\n' - 'If any keyword argument does not correspond to a formal parameter\n' - 'name, a "TypeError" exception is raised, unless a formal parameter\n' - 'using the syntax "**identifier" is present; in this case, that ' - 'formal\n' - 'parameter receives a dictionary containing the excess keyword\n' - 'arguments (using the keywords as keys and the argument values as\n' - 'corresponding values), or a (new) empty dictionary if there were ' - 'no\n' - 'excess keyword arguments.\n' - '\n' - 'If the syntax "*expression" appears in the function call, ' - '"expression"\n' - 'must evaluate to an *iterable*. Elements from these iterables are\n' - 'treated as if they were additional positional arguments. For the ' - 'call\n' - '"f(x1, x2, *y, x3, x4)", if *y* evaluates to a sequence *y1*, …, ' - '*yM*,\n' - 'this is equivalent to a call with M+4 positional arguments *x1*, ' - '*x2*,\n' - '*y1*, …, *yM*, *x3*, *x4*.\n' - '\n' - 'A consequence of this is that although the "*expression" syntax ' - 'may\n' - 'appear *after* explicit keyword arguments, it is processed ' - '*before*\n' - 'the keyword arguments (and any "**expression" arguments – see ' - 'below).\n' - 'So:\n' - '\n' - ' >>> def f(a, b):\n' - ' ... print(a, b)\n' - ' ...\n' - ' >>> f(b=1, *(2,))\n' - ' 2 1\n' - ' >>> f(a=1, *(2,))\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: f() got multiple values for keyword argument 'a'\n" - ' >>> f(1, *(2,))\n' - ' 1 2\n' - '\n' - 'It is unusual for both keyword arguments and the "*expression" ' - 'syntax\n' - 'to be used in the same call, so in practice this confusion does ' - 'not\n' - 'often arise.\n' - '\n' - 'If the syntax "**expression" appears in the function call,\n' - '"expression" must evaluate to a *mapping*, the contents of which ' - 'are\n' - 'treated as additional keyword arguments. 
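To illustrate the note above that default values are evaluated only once (a common pitfall; names are illustrative only):

    >>> def append_to(item, bucket=[]):   # the same list is reused on every call
    ...     bucket.append(item)
    ...     return bucket
    ...
    >>> append_to(1)
    [1]
    >>> append_to(2)                      # the previous item is still there
    [1, 2]
    >>> def append_to(item, bucket=None): # usual idiom to avoid the sharing
    ...     if bucket is None:
    ...         bucket = []
    ...     bucket.append(item)
    ...     return bucket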
If a parameter matching a ' - 'key\n' - 'has already been given a value (by an explicit keyword argument, ' - 'or\n' - 'from another unpacking), a "TypeError" exception is raised.\n' - '\n' - 'When "**expression" is used, each key in this mapping must be a\n' - 'string. Each value from the mapping is assigned to the first ' - 'formal\n' - 'parameter eligible for keyword assignment whose name is equal to ' - 'the\n' - 'key. A key need not be a Python identifier (e.g. ""max-temp °F"" ' - 'is\n' - 'acceptable, although it will not match any formal parameter that ' - 'could\n' - 'be declared). If there is no match to a formal parameter the ' - 'key-value\n' - 'pair is collected by the "**" parameter, if there is one, or if ' - 'there\n' - 'is not, a "TypeError" exception is raised.\n' - '\n' - 'Formal parameters using the syntax "*identifier" or "**identifier"\n' - 'cannot be used as positional argument slots or as keyword argument\n' - 'names.\n' - '\n' - 'Changed in version 3.5: Function calls accept any number of "*" ' - 'and\n' - '"**" unpackings, positional arguments may follow iterable ' - 'unpackings\n' - '("*"), and keyword arguments may follow dictionary unpackings ' - '("**").\n' - 'Originally proposed by **PEP 448**.\n' - '\n' - 'A call always returns some value, possibly "None", unless it raises ' - 'an\n' - 'exception. How this value is computed depends on the type of the\n' - 'callable object.\n' - '\n' - 'If it is—\n' - '\n' - 'a user-defined function:\n' - ' The code block for the function is executed, passing it the\n' - ' argument list. The first thing the code block will do is bind ' - 'the\n' - ' formal parameters to the arguments; this is described in ' - 'section\n' - ' Function definitions. When the code block executes a "return"\n' - ' statement, this specifies the return value of the function ' - 'call.\n' - ' If execution reaches the end of the code block without executing ' - 'a\n' - ' "return" statement, the return value is "None".\n' - '\n' - 'a built-in function or method:\n' - ' The result is up to the interpreter; see Built-in Functions for ' - 'the\n' - ' descriptions of built-in functions and methods.\n' - '\n' - 'a class object:\n' - ' A new instance of that class is returned.\n' - '\n' - 'a class instance method:\n' - ' The corresponding user-defined function is called, with an ' - 'argument\n' - ' list that is one longer than the argument list of the call: the\n' - ' instance becomes the first argument.\n' - '\n' - 'a class instance:\n' - ' The class must define a "__call__()" method; the effect is then ' - 'the\n' - ' same as if that method was called.\n', - 'class': 'Class definitions\n' - '*****************\n' - '\n' - 'A class definition defines a class object (see section The ' - 'standard\n' - 'type hierarchy):\n' - '\n' - ' classdef ::= [decorators] "class" classname [type_params] ' - '[inheritance] ":" suite\n' - ' inheritance ::= "(" [argument_list] ")"\n' - ' classname ::= identifier\n' - '\n' - 'A class definition is an executable statement. The inheritance ' - 'list\n' - 'usually gives a list of base classes (see Metaclasses for more\n' - 'advanced uses), so each item in the list should evaluate to a ' - 'class\n' - 'object which allows subclassing. 
Classes without an inheritance ' - 'list\n' - 'inherit, by default, from the base class "object"; hence,\n' - '\n' - ' class Foo:\n' - ' pass\n' - '\n' - 'is equivalent to\n' - '\n' - ' class Foo(object):\n' - ' pass\n' - '\n' - 'The class’s suite is then executed in a new execution frame (see\n' - 'Naming and binding), using a newly created local namespace and the\n' - 'original global namespace. (Usually, the suite contains mostly\n' - 'function definitions.) When the class’s suite finishes execution, ' - 'its\n' - 'execution frame is discarded but its local namespace is saved. [5] ' - 'A\n' - 'class object is then created using the inheritance list for the ' - 'base\n' - 'classes and the saved local namespace for the attribute ' - 'dictionary.\n' - 'The class name is bound to this class object in the original local\n' - 'namespace.\n' - '\n' - 'The order in which attributes are defined in the class body is\n' - 'preserved in the new class’s "__dict__". Note that this is ' - 'reliable\n' - 'only right after the class is created and only for classes that ' - 'were\n' - 'defined using the definition syntax.\n' - '\n' - 'Class creation can be customized heavily using metaclasses.\n' - '\n' - 'Classes can also be decorated: just like when decorating ' - 'functions,\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' class Foo: pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' class Foo: pass\n' - ' Foo = f1(arg)(f2(Foo))\n' - '\n' - 'The evaluation rules for the decorator expressions are the same as ' - 'for\n' - 'function decorators. The result is then bound to the class name.\n' - '\n' - 'Changed in version 3.9: Classes may be decorated with any valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'immediately\n' - 'after the class’s name. This indicates to static type checkers ' - 'that\n' - 'the class is generic. At runtime, the type parameters can be ' - 'retrieved\n' - 'from the class’s "__type_params__" attribute. See Generic classes ' - 'for\n' - 'more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - '**Programmer’s note:** Variables defined in the class definition ' - 'are\n' - 'class attributes; they are shared by instances. Instance ' - 'attributes\n' - 'can be set in a method with "self.name = value". Both class and\n' - 'instance attributes are accessible through the notation ' - '“"self.name"”,\n' - 'and an instance attribute hides a class attribute with the same ' - 'name\n' - 'when accessed in this way. Class attributes can be used as ' - 'defaults\n' - 'for instance attributes, but using mutable values there can lead ' - 'to\n' - 'unexpected results. Descriptors can be used to create instance\n' - 'variables with different implementation details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' The proposal that changed the declaration of metaclasses to ' - 'the\n' - ' current syntax, and the semantics for how classes with\n' - ' metaclasses are constructed.\n' - '\n' - ' **PEP 3129** - Class Decorators\n' - ' The proposal that added class decorators. 
Function and ' - 'method\n' - ' decorators were introduced in **PEP 318**.\n', - 'comparisons': 'Comparisons\n' - '***********\n' - '\n' - 'Unlike C, all comparison operations in Python have the same ' - 'priority,\n' - 'which is lower than that of any arithmetic, shifting or ' - 'bitwise\n' - 'operation. Also unlike C, expressions like "a < b < c" have ' - 'the\n' - 'interpretation that is conventional in mathematics:\n' - '\n' - ' comparison ::= or_expr (comp_operator or_expr)*\n' - ' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n' - ' | "is" ["not"] | ["not"] "in"\n' - '\n' - 'Comparisons yield boolean values: "True" or "False". Custom ' - '*rich\n' - 'comparison methods* may return non-boolean values. In this ' - 'case Python\n' - 'will call "bool()" on such value in boolean contexts.\n' - '\n' - 'Comparisons can be chained arbitrarily, e.g., "x < y <= z" ' - 'is\n' - 'equivalent to "x < y and y <= z", except that "y" is ' - 'evaluated only\n' - 'once (but in both cases "z" is not evaluated at all when "x < ' - 'y" is\n' - 'found to be false).\n' - '\n' - 'Formally, if *a*, *b*, *c*, …, *y*, *z* are expressions and ' - '*op1*,\n' - '*op2*, …, *opN* are comparison operators, then "a op1 b op2 c ' - '... y\n' - 'opN z" is equivalent to "a op1 b and b op2 c and ... y opN ' - 'z", except\n' - 'that each expression is evaluated at most once.\n' - '\n' - 'Note that "a op1 b op2 c" doesn’t imply any kind of ' - 'comparison between\n' - '*a* and *c*, so that, e.g., "x < y > z" is perfectly legal ' - '(though\n' - 'perhaps not pretty).\n' - '\n' - '\n' - 'Value comparisons\n' - '=================\n' - '\n' - 'The operators "<", ">", "==", ">=", "<=", and "!=" compare ' - 'the values\n' - 'of two objects. The objects do not need to have the same ' - 'type.\n' - '\n' - 'Chapter Objects, values and types states that objects have a ' - 'value (in\n' - 'addition to type and identity). The value of an object is a ' - 'rather\n' - 'abstract notion in Python: For example, there is no canonical ' - 'access\n' - 'method for an object’s value. Also, there is no requirement ' - 'that the\n' - 'value of an object should be constructed in a particular way, ' - 'e.g.\n' - 'comprised of all its data attributes. Comparison operators ' - 'implement a\n' - 'particular notion of what the value of an object is. One can ' - 'think of\n' - 'them as defining the value of an object indirectly, by means ' - 'of their\n' - 'comparison implementation.\n' - '\n' - 'Because all types are (direct or indirect) subtypes of ' - '"object", they\n' - 'inherit the default comparison behavior from "object". Types ' - 'can\n' - 'customize their comparison behavior by implementing *rich ' - 'comparison\n' - 'methods* like "__lt__()", described in Basic customization.\n' - '\n' - 'The default behavior for equality comparison ("==" and "!=") ' - 'is based\n' - 'on the identity of the objects. Hence, equality comparison ' - 'of\n' - 'instances with the same identity results in equality, and ' - 'equality\n' - 'comparison of instances with different identities results in\n' - 'inequality. A motivation for this default behavior is the ' - 'desire that\n' - 'all objects should be reflexive (i.e. "x is y" implies "x == ' - 'y").\n' - '\n' - 'A default order comparison ("<", ">", "<=", and ">=") is not ' - 'provided;\n' - 'an attempt raises "TypeError". 
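For example, an illustrative session with a plain user-defined class:

    >>> class Thing:
    ...     pass
    ...
    >>> a, b = Thing(), Thing()
    >>> a == b            # default equality is based on identity
    False
    >>> a == a
    True
    >>> a < b             # no default order comparison is provided
    Traceback (most recent call last):
      ...
    TypeError: '<' not supported between instances of 'Thing' and 'Thing'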
A motivation for this default ' - 'behavior\n' - 'is the lack of a similar invariant as for equality.\n' - '\n' - 'The behavior of the default equality comparison, that ' - 'instances with\n' - 'different identities are always unequal, may be in contrast ' - 'to what\n' - 'types will need that have a sensible definition of object ' - 'value and\n' - 'value-based equality. Such types will need to customize ' - 'their\n' - 'comparison behavior, and in fact, a number of built-in types ' - 'have done\n' - 'that.\n' - '\n' - 'The following list describes the comparison behavior of the ' - 'most\n' - 'important built-in types.\n' - '\n' - '* Numbers of built-in numeric types (Numeric Types — int, ' - 'float,\n' - ' complex) and of the standard library types ' - '"fractions.Fraction" and\n' - ' "decimal.Decimal" can be compared within and across their ' - 'types,\n' - ' with the restriction that complex numbers do not support ' - 'order\n' - ' comparison. Within the limits of the types involved, they ' - 'compare\n' - ' mathematically (algorithmically) correct without loss of ' - 'precision.\n' - '\n' - ' The not-a-number values "float(\'NaN\')" and ' - '"decimal.Decimal(\'NaN\')"\n' - ' are special. Any ordered comparison of a number to a ' - 'not-a-number\n' - ' value is false. A counter-intuitive implication is that ' - 'not-a-number\n' - ' values are not equal to themselves. For example, if "x =\n' - ' float(\'NaN\')", "3 < x", "x < 3" and "x == x" are all ' - 'false, while "x\n' - ' != x" is true. This behavior is compliant with IEEE 754.\n' - '\n' - '* "None" and "NotImplemented" are singletons. **PEP 8** ' - 'advises that\n' - ' comparisons for singletons should always be done with "is" ' - 'or "is\n' - ' not", never the equality operators.\n' - '\n' - '* Binary sequences (instances of "bytes" or "bytearray") can ' - 'be\n' - ' compared within and across their types. They compare\n' - ' lexicographically using the numeric values of their ' - 'elements.\n' - '\n' - '* Strings (instances of "str") compare lexicographically ' - 'using the\n' - ' numerical Unicode code points (the result of the built-in ' - 'function\n' - ' "ord()") of their characters. [3]\n' - '\n' - ' Strings and binary sequences cannot be directly compared.\n' - '\n' - '* Sequences (instances of "tuple", "list", or "range") can be ' - 'compared\n' - ' only within each of their types, with the restriction that ' - 'ranges do\n' - ' not support order comparison. Equality comparison across ' - 'these\n' - ' types results in inequality, and ordering comparison across ' - 'these\n' - ' types raises "TypeError".\n' - '\n' - ' Sequences compare lexicographically using comparison of\n' - ' corresponding elements. The built-in containers typically ' - 'assume\n' - ' identical objects are equal to themselves. That lets them ' - 'bypass\n' - ' equality tests for identical objects to improve performance ' - 'and to\n' - ' maintain their internal invariants.\n' - '\n' - ' Lexicographical comparison between built-in collections ' - 'works as\n' - ' follows:\n' - '\n' - ' * For two collections to compare equal, they must be of the ' - 'same\n' - ' type, have the same length, and each pair of ' - 'corresponding\n' - ' elements must compare equal (for example, "[1,2] == ' - '(1,2)" is\n' - ' false because the type is not the same).\n' - '\n' - ' * Collections that support order comparison are ordered the ' - 'same as\n' - ' their first unequal elements (for example, "[1,2,x] <= ' - '[1,2,y]"\n' - ' has the same value as "x <= y"). 
If a corresponding ' - 'element does\n' - ' not exist, the shorter collection is ordered first (for ' - 'example,\n' - ' "[1,2] < [1,2,3]" is true).\n' - '\n' - '* Mappings (instances of "dict") compare equal if and only if ' - 'they\n' - ' have equal "(key, value)" pairs. Equality comparison of the ' - 'keys and\n' - ' values enforces reflexivity.\n' - '\n' - ' Order comparisons ("<", ">", "<=", and ">=") raise ' - '"TypeError".\n' - '\n' - '* Sets (instances of "set" or "frozenset") can be compared ' - 'within and\n' - ' across their types.\n' - '\n' - ' They define order comparison operators to mean subset and ' - 'superset\n' - ' tests. Those relations do not define total orderings (for ' - 'example,\n' - ' the two sets "{1,2}" and "{2,3}" are not equal, nor subsets ' - 'of one\n' - ' another, nor supersets of one another). Accordingly, sets ' - 'are not\n' - ' appropriate arguments for functions which depend on total ' - 'ordering\n' - ' (for example, "min()", "max()", and "sorted()" produce ' - 'undefined\n' - ' results given a list of sets as inputs).\n' - '\n' - ' Comparison of sets enforces reflexivity of its elements.\n' - '\n' - '* Most other built-in types have no comparison methods ' - 'implemented, so\n' - ' they inherit the default comparison behavior.\n' - '\n' - 'User-defined classes that customize their comparison behavior ' - 'should\n' - 'follow some consistency rules, if possible:\n' - '\n' - '* Equality comparison should be reflexive. In other words, ' - 'identical\n' - ' objects should compare equal:\n' - '\n' - ' "x is y" implies "x == y"\n' - '\n' - '* Comparison should be symmetric. In other words, the ' - 'following\n' - ' expressions should have the same result:\n' - '\n' - ' "x == y" and "y == x"\n' - '\n' - ' "x != y" and "y != x"\n' - '\n' - ' "x < y" and "y > x"\n' - '\n' - ' "x <= y" and "y >= x"\n' - '\n' - '* Comparison should be transitive. The following ' - '(non-exhaustive)\n' - ' examples illustrate that:\n' - '\n' - ' "x > y and y > z" implies "x > z"\n' - '\n' - ' "x < y and y <= z" implies "x < z"\n' - '\n' - '* Inverse comparison should result in the boolean negation. ' - 'In other\n' - ' words, the following expressions should have the same ' - 'result:\n' - '\n' - ' "x == y" and "not x != y"\n' - '\n' - ' "x < y" and "not x >= y" (for total ordering)\n' - '\n' - ' "x > y" and "not x <= y" (for total ordering)\n' - '\n' - ' The last two expressions apply to totally ordered ' - 'collections (e.g.\n' - ' to sequences, but not to sets or mappings). See also the\n' - ' "total_ordering()" decorator.\n' - '\n' - '* The "hash()" result should be consistent with equality. ' - 'Objects that\n' - ' are equal should either have the same hash value, or be ' - 'marked as\n' - ' unhashable.\n' - '\n' - 'Python does not enforce these consistency rules. In fact, ' - 'the\n' - 'not-a-number values are an example for not following these ' - 'rules.\n' - '\n' - '\n' - 'Membership test operations\n' - '==========================\n' - '\n' - 'The operators "in" and "not in" test for membership. "x in ' - 's"\n' - 'evaluates to "True" if *x* is a member of *s*, and "False" ' - 'otherwise.\n' - '"x not in s" returns the negation of "x in s". All built-in ' - 'sequences\n' - 'and set types support this as well as dictionary, for which ' - '"in" tests\n' - 'whether the dictionary has a given key. 
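For example (illustrative values only):

    >>> 2 in [1, 2, 3]
    True
    >>> 'key' in {'key': 1}     # for dictionaries, membership tests the keys
    True
    >>> 1 not in {'key': 1}     # values are not considered
    True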
For container types ' - 'such as\n' - 'list, tuple, set, frozenset, dict, or collections.deque, the\n' - 'expression "x in y" is equivalent to "any(x is e or x == e ' - 'for e in\n' - 'y)".\n' - '\n' - 'For the string and bytes types, "x in y" is "True" if and ' - 'only if *x*\n' - 'is a substring of *y*. An equivalent test is "y.find(x) != ' - '-1".\n' - 'Empty strings are always considered to be a substring of any ' - 'other\n' - 'string, so """ in "abc"" will return "True".\n' - '\n' - 'For user-defined classes which define the "__contains__()" ' - 'method, "x\n' - 'in y" returns "True" if "y.__contains__(x)" returns a true ' - 'value, and\n' - '"False" otherwise.\n' - '\n' - 'For user-defined classes which do not define "__contains__()" ' - 'but do\n' - 'define "__iter__()", "x in y" is "True" if some value "z", ' - 'for which\n' - 'the expression "x is z or x == z" is true, is produced while ' - 'iterating\n' - 'over "y". If an exception is raised during the iteration, it ' - 'is as if\n' - '"in" raised that exception.\n' - '\n' - 'Lastly, the old-style iteration protocol is tried: if a class ' - 'defines\n' - '"__getitem__()", "x in y" is "True" if and only if there is a ' - 'non-\n' - 'negative integer index *i* such that "x is y[i] or x == ' - 'y[i]", and no\n' - 'lower integer index raises the "IndexError" exception. (If ' - 'any other\n' - 'exception is raised, it is as if "in" raised that ' - 'exception).\n' - '\n' - 'The operator "not in" is defined to have the inverse truth ' - 'value of\n' - '"in".\n' - '\n' - '\n' - 'Identity comparisons\n' - '====================\n' - '\n' - 'The operators "is" and "is not" test for an object’s ' - 'identity: "x is\n' - 'y" is true if and only if *x* and *y* are the same object. ' - 'An\n' - 'Object’s identity is determined using the "id()" function. ' - '"x is not\n' - 'y" yields the inverse truth value. [4]\n', - 'compound': 'Compound statements\n' - '*******************\n' - '\n' - 'Compound statements contain (groups of) other statements; they ' - 'affect\n' - 'or control the execution of those other statements in some way. ' - 'In\n' - 'general, compound statements span multiple lines, although in ' - 'simple\n' - 'incarnations a whole compound statement may be contained in one ' - 'line.\n' - '\n' - 'The "if", "while" and "for" statements implement traditional ' - 'control\n' - 'flow constructs. "try" specifies exception handlers and/or ' - 'cleanup\n' - 'code for a group of statements, while the "with" statement ' - 'allows the\n' - 'execution of initialization and finalization code around a block ' - 'of\n' - 'code. Function and class definitions are also syntactically ' - 'compound\n' - 'statements.\n' - '\n' - 'A compound statement consists of one or more ‘clauses.’ A ' - 'clause\n' - 'consists of a header and a ‘suite.’ The clause headers of a\n' - 'particular compound statement are all at the same indentation ' - 'level.\n' - 'Each clause header begins with a uniquely identifying keyword ' - 'and ends\n' - 'with a colon. A suite is a group of statements controlled by a\n' - 'clause. A suite can be one or more semicolon-separated simple\n' - 'statements on the same line as the header, following the ' - 'header’s\n' - 'colon, or it can be one or more indented statements on ' - 'subsequent\n' - 'lines. 
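Referring back to the "__contains__()" customization described earlier, a minimal sketch (illustrative class name):

    >>> class EvenNumbers:
    ...     def __contains__(self, x):
    ...         return isinstance(x, int) and x % 2 == 0
    ...
    >>> 4 in EvenNumbers()
    True
    >>> 3 in EvenNumbers()
    False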
Only the latter form of a suite can contain nested ' - 'compound\n' - 'statements; the following is illegal, mostly because it wouldn’t ' - 'be\n' - 'clear to which "if" clause a following "else" clause would ' - 'belong:\n' - '\n' - ' if test1: if test2: print(x)\n' - '\n' - 'Also note that the semicolon binds tighter than the colon in ' - 'this\n' - 'context, so that in the following example, either all or none of ' - 'the\n' - '"print()" calls are executed:\n' - '\n' - ' if x < y < z: print(x); print(y); print(z)\n' - '\n' - 'Summarizing:\n' - '\n' - ' compound_stmt ::= if_stmt\n' - ' | while_stmt\n' - ' | for_stmt\n' - ' | try_stmt\n' - ' | with_stmt\n' - ' | match_stmt\n' - ' | funcdef\n' - ' | classdef\n' - ' | async_with_stmt\n' - ' | async_for_stmt\n' - ' | async_funcdef\n' - ' suite ::= stmt_list NEWLINE | NEWLINE INDENT ' - 'statement+ DEDENT\n' - ' statement ::= stmt_list NEWLINE | compound_stmt\n' - ' stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n' - '\n' - 'Note that statements always end in a "NEWLINE" possibly followed ' - 'by a\n' - '"DEDENT". Also note that optional continuation clauses always ' - 'begin\n' - 'with a keyword that cannot start a statement, thus there are no\n' - 'ambiguities (the ‘dangling "else"’ problem is solved in Python ' - 'by\n' - 'requiring nested "if" statements to be indented).\n' - '\n' - 'The formatting of the grammar rules in the following sections ' - 'places\n' - 'each clause on a separate line for clarity.\n' - '\n' - '\n' - 'The "if" statement\n' - '==================\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the ' - 'expressions one\n' - 'by one until one is found to be true (see section Boolean ' - 'operations\n' - 'for the definition of true and false); then that suite is ' - 'executed\n' - '(and no other part of the "if" statement is executed or ' - 'evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, ' - 'if\n' - 'present, is executed.\n' - '\n' - '\n' - 'The "while" statement\n' - '=====================\n' - '\n' - 'The "while" statement is used for repeated execution as long as ' - 'an\n' - 'expression is true:\n' - '\n' - ' while_stmt ::= "while" assignment_expression ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'This repeatedly tests the expression and, if it is true, ' - 'executes the\n' - 'first suite; if the expression is false (which may be the first ' - 'time\n' - 'it is tested) the suite of the "else" clause, if present, is ' - 'executed\n' - 'and the loop terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and goes ' - 'back\n' - 'to testing the expression.\n' - '\n' - '\n' - 'The "for" statement\n' - '===================\n' - '\n' - 'The "for" statement is used to iterate over the elements of a ' - 'sequence\n' - '(such as a string, tuple or list) or other iterable object:\n' - '\n' - ' for_stmt ::= "for" target_list "in" starred_list ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'The "starred_list" expression is evaluated once; it should yield ' - 'an\n' - '*iterable* object. An *iterator* is created for that iterable. 
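Referring back to the "while" statement above, a small sketch of the "else" clause (illustrative values only):

    >>> n = 3
    >>> while n:
    ...     n -= 1
    ... else:
    ...     print('loop ended without break')
    ...
    loop ended without break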
' - 'The\n' - 'first item provided by the iterator is then assigned to the ' - 'target\n' - 'list using the standard rules for assignments (see Assignment\n' - 'statements), and the suite is executed. This repeats for each ' - 'item\n' - 'provided by the iterator. When the iterator is exhausted, the ' - 'suite\n' - 'in the "else" clause, if present, is executed, and the loop\n' - 'terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and ' - 'continues\n' - 'with the next item, or with the "else" clause if there is no ' - 'next\n' - 'item.\n' - '\n' - 'The for-loop makes assignments to the variables in the target ' - 'list.\n' - 'This overwrites all previous assignments to those variables ' - 'including\n' - 'those made in the suite of the for-loop:\n' - '\n' - ' for i in range(10):\n' - ' print(i)\n' - ' i = 5 # this will not affect the for-loop\n' - ' # because i will be overwritten with ' - 'the next\n' - ' # index in the range\n' - '\n' - 'Names in the target list are not deleted when the loop is ' - 'finished,\n' - 'but if the sequence is empty, they will not have been assigned ' - 'to at\n' - 'all by the loop. Hint: the built-in type "range()" represents\n' - 'immutable arithmetic sequences of integers. For instance, ' - 'iterating\n' - '"range(3)" successively yields 0, 1, and then 2.\n' - '\n' - 'Changed in version 3.11: Starred elements are now allowed in ' - 'the\n' - 'expression list.\n' - '\n' - '\n' - 'The "try" statement\n' - '===================\n' - '\n' - 'The "try" statement specifies exception handlers and/or cleanup ' - 'code\n' - 'for a group of statements:\n' - '\n' - ' try_stmt ::= try1_stmt | try2_stmt | try3_stmt\n' - ' try1_stmt ::= "try" ":" suite\n' - ' ("except" [expression ["as" identifier]] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try2_stmt ::= "try" ":" suite\n' - ' ("except" "*" expression ["as" identifier] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try3_stmt ::= "try" ":" suite\n' - ' "finally" ":" suite\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' - '\n' - '"except" clause\n' - '---------------\n' - '\n' - 'The "except" clause(s) specify one or more exception handlers. ' - 'When no\n' - 'exception occurs in the "try" clause, no exception handler is\n' - 'executed. When an exception occurs in the "try" suite, a search ' - 'for an\n' - 'exception handler is started. This search inspects the "except"\n' - 'clauses in turn until one is found that matches the exception. ' - 'An\n' - 'expression-less "except" clause, if present, must be last; it ' - 'matches\n' - 'any exception.\n' - '\n' - 'For an "except" clause with an expression, the expression must\n' - 'evaluate to an exception type or a tuple of exception types. ' - 'The\n' - 'raised exception matches an "except" clause whose expression ' - 'evaluates\n' - 'to the class or a *non-virtual base class* of the exception ' - 'object, or\n' - 'to a tuple that contains such a class.\n' - '\n' - 'If no "except" clause matches the exception, the search for an\n' - 'exception handler continues in the surrounding code and on the\n' - 'invocation stack. 
[1]\n' - '\n' - 'If the evaluation of an expression in the header of an "except" ' - 'clause\n' - 'raises an exception, the original search for a handler is ' - 'canceled and\n' - 'a search starts for the new exception in the surrounding code ' - 'and on\n' - 'the call stack (it is treated as if the entire "try" statement ' - 'raised\n' - 'the exception).\n' - '\n' - 'When a matching "except" clause is found, the exception is ' - 'assigned to\n' - 'the target specified after the "as" keyword in that "except" ' - 'clause,\n' - 'if present, and the "except" clause’s suite is executed. All ' - '"except"\n' - 'clauses must have an executable block. When the end of this ' - 'block is\n' - 'reached, execution continues normally after the entire "try"\n' - 'statement. (This means that if two nested handlers exist for the ' - 'same\n' - 'exception, and the exception occurs in the "try" clause of the ' - 'inner\n' - 'handler, the outer handler will not handle the exception.)\n' - '\n' - 'When an exception has been assigned using "as target", it is ' - 'cleared\n' - 'at the end of the "except" clause. This is as if\n' - '\n' - ' except E as N:\n' - ' foo\n' - '\n' - 'was translated to\n' - '\n' - ' except E as N:\n' - ' try:\n' - ' foo\n' - ' finally:\n' - ' del N\n' - '\n' - 'This means the exception must be assigned to a different name to ' - 'be\n' - 'able to refer to it after the "except" clause. Exceptions are ' - 'cleared\n' - 'because with the traceback attached to them, they form a ' - 'reference\n' - 'cycle with the stack frame, keeping all locals in that frame ' - 'alive\n' - 'until the next garbage collection occurs.\n' - '\n' - 'Before an "except" clause’s suite is executed, the exception is ' - 'stored\n' - 'in the "sys" module, where it can be accessed from within the ' - 'body of\n' - 'the "except" clause by calling "sys.exception()". When leaving ' - 'an\n' - 'exception handler, the exception stored in the "sys" module is ' - 'reset\n' - 'to its previous value:\n' - '\n' - ' >>> print(sys.exception())\n' - ' None\n' - ' >>> try:\n' - ' ... raise TypeError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... try:\n' - ' ... raise ValueError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... print(repr(sys.exception()))\n' - ' ...\n' - ' TypeError()\n' - ' ValueError()\n' - ' TypeError()\n' - ' >>> print(sys.exception())\n' - ' None\n' - '\n' - '\n' - '"except*" clause\n' - '----------------\n' - '\n' - 'The "except*" clause(s) are used for handling "ExceptionGroup"s. ' - 'The\n' - 'exception type for matching is interpreted as in the case of ' - '"except",\n' - 'but in the case of exception groups we can have partial matches ' - 'when\n' - 'the type matches some of the exceptions in the group. This means ' - 'that\n' - 'multiple "except*" clauses can execute, each handling part of ' - 'the\n' - 'exception group. Each clause executes at most once and handles ' - 'an\n' - 'exception group of all matching exceptions. Each exception in ' - 'the\n' - 'group is handled by at most one "except*" clause, the first ' - 'that\n' - 'matches it.\n' - '\n' - ' >>> try:\n' - ' ... raise ExceptionGroup("eg",\n' - ' ... [ValueError(1), TypeError(2), OSError(3), ' - 'OSError(4)])\n' - ' ... except* TypeError as e:\n' - " ... print(f'caught {type(e)} with nested " - "{e.exceptions}')\n" - ' ... except* OSError as e:\n' - " ... 
print(f'caught {type(e)} with nested " - "{e.exceptions}')\n" - ' ...\n' - " caught with nested (TypeError(2),)\n" - " caught with nested (OSError(3), " - 'OSError(4))\n' - ' + Exception Group Traceback (most recent call last):\n' - ' | File "", line 2, in \n' - ' | ExceptionGroup: eg\n' - ' +-+---------------- 1 ----------------\n' - ' | ValueError: 1\n' - ' +------------------------------------\n' - '\n' - 'Any remaining exceptions that were not handled by any "except*" ' - 'clause\n' - 'are re-raised at the end, along with all exceptions that were ' - 'raised\n' - 'from within the "except*" clauses. If this list contains more ' - 'than one\n' - 'exception to reraise, they are combined into an exception ' - 'group.\n' - '\n' - 'If the raised exception is not an exception group and its type ' - 'matches\n' - 'one of the "except*" clauses, it is caught and wrapped by an ' - 'exception\n' - 'group with an empty message string.\n' - '\n' - ' >>> try:\n' - ' ... raise BlockingIOError\n' - ' ... except* BlockingIOError as e:\n' - ' ... print(repr(e))\n' - ' ...\n' - " ExceptionGroup('', (BlockingIOError()))\n" - '\n' - 'An "except*" clause must have a matching expression; it cannot ' - 'be\n' - '"except*:". Furthermore, this expression cannot contain ' - 'exception\n' - 'group types, because that would have ambiguous semantics.\n' - '\n' - 'It is not possible to mix "except" and "except*" in the same ' - '"try".\n' - '"break", "continue" and "return" cannot appear in an "except*" ' - 'clause.\n' - '\n' - '\n' - '"else" clause\n' - '-------------\n' - '\n' - 'The optional "else" clause is executed if the control flow ' - 'leaves the\n' - '"try" suite, no exception was raised, and no "return", ' - '"continue", or\n' - '"break" statement was executed. Exceptions in the "else" clause ' - 'are\n' - 'not handled by the preceding "except" clauses.\n' - '\n' - '\n' - '"finally" clause\n' - '----------------\n' - '\n' - 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' - '"try"\n' - 'clause is executed, including any "except" and "else" clauses. ' - 'If an\n' - 'exception occurs in any of the clauses and is not handled, the\n' - 'exception is temporarily saved. The "finally" clause is ' - 'executed. If\n' - 'there is a saved exception it is re-raised at the end of the ' - '"finally"\n' - 'clause. If the "finally" clause raises another exception, the ' - 'saved\n' - 'exception is set as the context of the new exception. If the ' - '"finally"\n' - 'clause executes a "return", "break" or "continue" statement, the ' - 'saved\n' - 'exception is discarded:\n' - '\n' - ' >>> def f():\n' - ' ... try:\n' - ' ... 1/0\n' - ' ... finally:\n' - ' ... return 42\n' - ' ...\n' - ' >>> f()\n' - ' 42\n' - '\n' - 'The exception information is not available to the program ' - 'during\n' - 'execution of the "finally" clause.\n' - '\n' - 'When a "return", "break" or "continue" statement is executed in ' - 'the\n' - '"try" suite of a "try"…"finally" statement, the "finally" clause ' - 'is\n' - 'also executed ‘on the way out.’\n' - '\n' - 'The return value of a function is determined by the last ' - '"return"\n' - 'statement executed. Since the "finally" clause always executes, ' - 'a\n' - '"return" statement executed in the "finally" clause will always ' - 'be the\n' - 'last one executed:\n' - '\n' - ' >>> def foo():\n' - ' ... try:\n' - " ... return 'try'\n" - ' ... finally:\n' - " ... 
return 'finally'\n" - ' ...\n' - ' >>> foo()\n' - " 'finally'\n" - '\n' - 'Changed in version 3.8: Prior to Python 3.8, a "continue" ' - 'statement\n' - 'was illegal in the "finally" clause due to a problem with the\n' - 'implementation.\n' - '\n' - '\n' - 'The "with" statement\n' - '====================\n' - '\n' - 'The "with" statement is used to wrap the execution of a block ' - 'with\n' - 'methods defined by a context manager (see section With ' - 'Statement\n' - 'Context Managers). This allows common "try"…"except"…"finally" ' - 'usage\n' - 'patterns to be encapsulated for convenient reuse.\n' - '\n' - ' with_stmt ::= "with" ( "(" with_stmt_contents ","? ' - '")" | with_stmt_contents ) ":" suite\n' - ' with_stmt_contents ::= with_item ("," with_item)*\n' - ' with_item ::= expression ["as" target]\n' - '\n' - 'The execution of the "with" statement with one “item” proceeds ' - 'as\n' - 'follows:\n' - '\n' - '1. The context expression (the expression given in the ' - '"with_item") is\n' - ' evaluated to obtain a context manager.\n' - '\n' - '2. The context manager’s "__enter__()" is loaded for later use.\n' - '\n' - '3. The context manager’s "__exit__()" is loaded for later use.\n' - '\n' - '4. The context manager’s "__enter__()" method is invoked.\n' - '\n' - '5. If a target was included in the "with" statement, the return ' - 'value\n' - ' from "__enter__()" is assigned to it.\n' - '\n' - ' Note:\n' - '\n' - ' The "with" statement guarantees that if the "__enter__()" ' - 'method\n' - ' returns without an error, then "__exit__()" will always be\n' - ' called. Thus, if an error occurs during the assignment to ' - 'the\n' - ' target list, it will be treated the same as an error ' - 'occurring\n' - ' within the suite would be. See step 7 below.\n' - '\n' - '6. The suite is executed.\n' - '\n' - '7. The context manager’s "__exit__()" method is invoked. If an\n' - ' exception caused the suite to be exited, its type, value, ' - 'and\n' - ' traceback are passed as arguments to "__exit__()". 
Otherwise, ' - 'three\n' - ' "None" arguments are supplied.\n' - '\n' - ' If the suite was exited due to an exception, and the return ' - 'value\n' - ' from the "__exit__()" method was false, the exception is ' - 'reraised.\n' - ' If the return value was true, the exception is suppressed, ' - 'and\n' - ' execution continues with the statement following the "with"\n' - ' statement.\n' - '\n' - ' If the suite was exited for any reason other than an ' - 'exception, the\n' - ' return value from "__exit__()" is ignored, and execution ' - 'proceeds\n' - ' at the normal location for the kind of exit that was taken.\n' - '\n' - 'The following code:\n' - '\n' - ' with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' enter = type(manager).__enter__\n' - ' exit = type(manager).__exit__\n' - ' value = enter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not exit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' - '\n' - 'With more than one item, the context managers are processed as ' - 'if\n' - 'multiple "with" statements were nested:\n' - '\n' - ' with A() as a, B() as b:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' with A() as a:\n' - ' with B() as b:\n' - ' SUITE\n' - '\n' - 'You can also write multi-item context managers in multiple lines ' - 'if\n' - 'the items are surrounded by parentheses. For example:\n' - '\n' - ' with (\n' - ' A() as a,\n' - ' B() as b,\n' - ' ):\n' - ' SUITE\n' - '\n' - 'Changed in version 3.1: Support for multiple context ' - 'expressions.\n' - '\n' - 'Changed in version 3.10: Support for using grouping parentheses ' - 'to\n' - 'break the statement in multiple lines.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the Python ' - '"with"\n' - ' statement.\n' - '\n' - '\n' - 'The "match" statement\n' - '=====================\n' - '\n' - 'Added in version 3.10.\n' - '\n' - 'The match statement is used for pattern matching. Syntax:\n' - '\n' - ' match_stmt ::= \'match\' subject_expr ":" NEWLINE INDENT ' - 'case_block+ DEDENT\n' - ' subject_expr ::= star_named_expression "," ' - 'star_named_expressions?\n' - ' | named_expression\n' - ' case_block ::= \'case\' patterns [guard] ":" block\n' - '\n' - 'Note:\n' - '\n' - ' This section uses single quotes to denote soft keywords.\n' - '\n' - 'Pattern matching takes a pattern as input (following "case") and ' - 'a\n' - 'subject value (following "match"). The pattern (which may ' - 'contain\n' - 'subpatterns) is matched against the subject value. The outcomes ' - 'are:\n' - '\n' - '* A match success or failure (also termed a pattern success or\n' - ' failure).\n' - '\n' - '* Possible binding of matched values to a name. The ' - 'prerequisites for\n' - ' this are further discussed below.\n' - '\n' - 'The "match" and "case" keywords are soft keywords.\n' - '\n' - 'See also:\n' - '\n' - ' * **PEP 634** – Structural Pattern Matching: Specification\n' - '\n' - ' * **PEP 636** – Structural Pattern Matching: Tutorial\n' - '\n' - '\n' - 'Overview\n' - '--------\n' - '\n' - 'Here’s an overview of the logical flow of a match statement:\n' - '\n' - '1. The subject expression "subject_expr" is evaluated and a ' - 'resulting\n' - ' subject value obtained. 
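Looking back at the "with" statement semantics above, a minimal hand-written context manager (the class name is hypothetical) whose "__exit__()" returns a true value, and therefore suppresses a matching exception, could look roughly like this:

   class suppress_zero_division:
       def __enter__(self):
           return self                      # bound to the "as" target, if any

       def __exit__(self, exc_type, exc_value, traceback):
           # A true return value suppresses the exception raised in the suite.
           return exc_type is ZeroDivisionError

   with suppress_zero_division():
       1 / 0
   print("execution continues after the with statement")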
If the subject expression contains a ' - 'comma,\n' - ' a tuple is constructed using the standard rules.\n' - '\n' - '2. Each pattern in a "case_block" is attempted to match with ' - 'the\n' - ' subject value. The specific rules for success or failure are\n' - ' described below. The match attempt can also bind some or all ' - 'of the\n' - ' standalone names within the pattern. The precise pattern ' - 'binding\n' - ' rules vary per pattern type and are specified below. **Name\n' - ' bindings made during a successful pattern match outlive the\n' - ' executed block and can be used after the match statement**.\n' - '\n' - ' Note:\n' - '\n' - ' During failed pattern matches, some subpatterns may ' - 'succeed. Do\n' - ' not rely on bindings being made for a failed match. ' - 'Conversely,\n' - ' do not rely on variables remaining unchanged after a ' - 'failed\n' - ' match. The exact behavior is dependent on implementation ' - 'and may\n' - ' vary. This is an intentional decision made to allow ' - 'different\n' - ' implementations to add optimizations.\n' - '\n' - '3. If the pattern succeeds, the corresponding guard (if present) ' - 'is\n' - ' evaluated. In this case all name bindings are guaranteed to ' - 'have\n' - ' happened.\n' - '\n' - ' * If the guard evaluates as true or is missing, the "block" ' - 'inside\n' - ' "case_block" is executed.\n' - '\n' - ' * Otherwise, the next "case_block" is attempted as described ' - 'above.\n' - '\n' - ' * If there are no further case blocks, the match statement ' - 'is\n' - ' completed.\n' - '\n' - 'Note:\n' - '\n' - ' Users should generally never rely on a pattern being ' - 'evaluated.\n' - ' Depending on implementation, the interpreter may cache values ' - 'or use\n' - ' other optimizations which skip repeated evaluations.\n' - '\n' - 'A sample match statement:\n' - '\n' - ' >>> flag = False\n' - ' >>> match (100, 200):\n' - ' ... case (100, 300): # Mismatch: 200 != 300\n' - " ... print('Case 1')\n" - ' ... case (100, 200) if flag: # Successful match, but ' - 'guard fails\n' - " ... print('Case 2')\n" - ' ... case (100, y): # Matches and binds y to 200\n' - " ... print(f'Case 3, y: {y}')\n" - ' ... case _: # Pattern not attempted\n' - " ... print('Case 4, I match anything!')\n" - ' ...\n' - ' Case 3, y: 200\n' - '\n' - 'In this case, "if flag" is a guard. Read more about that in the ' - 'next\n' - 'section.\n' - '\n' - '\n' - 'Guards\n' - '------\n' - '\n' - ' guard ::= "if" named_expression\n' - '\n' - 'A "guard" (which is part of the "case") must succeed for code ' - 'inside\n' - 'the "case" block to execute. It takes the form: "if" followed ' - 'by an\n' - 'expression.\n' - '\n' - 'The logical flow of a "case" block with a "guard" follows:\n' - '\n' - '1. Check that the pattern in the "case" block succeeded. If ' - 'the\n' - ' pattern failed, the "guard" is not evaluated and the next ' - '"case"\n' - ' block is checked.\n' - '\n' - '2. If the pattern succeeded, evaluate the "guard".\n' - '\n' - ' * If the "guard" condition evaluates as true, the case block ' - 'is\n' - ' selected.\n' - '\n' - ' * If the "guard" condition evaluates as false, the case block ' - 'is\n' - ' not selected.\n' - '\n' - ' * If the "guard" raises an exception during evaluation, the\n' - ' exception bubbles up.\n' - '\n' - 'Guards are allowed to have side effects as they are ' - 'expressions.\n' - 'Guard evaluation must proceed from the first to the last case ' - 'block,\n' - 'one at a time, skipping case blocks whose pattern(s) don’t all\n' - 'succeed. 
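A small illustrative sketch of guards (the function and values are made up): each guard is evaluated only after its pattern has matched, and evaluation stops at the first selected case.

   def describe(point):
       match point:
           case (x, y) if x == y:            # guard, evaluated only on a match
               return "on the diagonal"
           case (x, y) if x == 0 or y == 0:
               return "on an axis"
           case (x, y):
               return "somewhere else"
           case _:
               return "not a pair"

   print(describe((2, 2)))   # on the diagonal
   print(describe((0, 5)))   # on an axis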
(I.e., guard evaluation must happen in order.) Guard\n' - 'evaluation must stop once a case block is selected.\n' - '\n' - '\n' - 'Irrefutable Case Blocks\n' - '-----------------------\n' - '\n' - 'An irrefutable case block is a match-all case block. A match\n' - 'statement may have at most one irrefutable case block, and it ' - 'must be\n' - 'last.\n' - '\n' - 'A case block is considered irrefutable if it has no guard and ' - 'its\n' - 'pattern is irrefutable. A pattern is considered irrefutable if ' - 'we can\n' - 'prove from its syntax alone that it will always succeed. Only ' - 'the\n' - 'following patterns are irrefutable:\n' - '\n' - '* AS Patterns whose left-hand side is irrefutable\n' - '\n' - '* OR Patterns containing at least one irrefutable pattern\n' - '\n' - '* Capture Patterns\n' - '\n' - '* Wildcard Patterns\n' - '\n' - '* parenthesized irrefutable patterns\n' - '\n' - '\n' - 'Patterns\n' - '--------\n' - '\n' - 'Note:\n' - '\n' - ' This section uses grammar notations beyond standard EBNF:\n' - '\n' - ' * the notation "SEP.RULE+" is shorthand for "RULE (SEP ' - 'RULE)*"\n' - '\n' - ' * the notation "!RULE" is shorthand for a negative lookahead\n' - ' assertion\n' - '\n' - 'The top-level syntax for "patterns" is:\n' - '\n' - ' patterns ::= open_sequence_pattern | pattern\n' - ' pattern ::= as_pattern | or_pattern\n' - ' closed_pattern ::= | literal_pattern\n' - ' | capture_pattern\n' - ' | wildcard_pattern\n' - ' | value_pattern\n' - ' | group_pattern\n' - ' | sequence_pattern\n' - ' | mapping_pattern\n' - ' | class_pattern\n' - '\n' - 'The descriptions below will include a description “in simple ' - 'terms” of\n' - 'what a pattern does for illustration purposes (credits to ' - 'Raymond\n' - 'Hettinger for a document that inspired most of the ' - 'descriptions). Note\n' - 'that these descriptions are purely for illustration purposes and ' - '**may\n' - 'not** reflect the underlying implementation. Furthermore, they ' - 'do not\n' - 'cover all valid forms.\n' - '\n' - '\n' - 'OR Patterns\n' - '~~~~~~~~~~~\n' - '\n' - 'An OR pattern is two or more patterns separated by vertical bars ' - '"|".\n' - 'Syntax:\n' - '\n' - ' or_pattern ::= "|".closed_pattern+\n' - '\n' - 'Only the final subpattern may be irrefutable, and each ' - 'subpattern must\n' - 'bind the same set of names to avoid ambiguity.\n' - '\n' - 'An OR pattern matches each of its subpatterns in turn to the ' - 'subject\n' - 'value, until one succeeds. The OR pattern is then considered\n' - 'successful. Otherwise, if none of the subpatterns succeed, the ' - 'OR\n' - 'pattern fails.\n' - '\n' - 'In simple terms, "P1 | P2 | ..." will try to match "P1", if it ' - 'fails\n' - 'it will try to match "P2", succeeding immediately if any ' - 'succeeds,\n' - 'failing otherwise.\n' - '\n' - '\n' - 'AS Patterns\n' - '~~~~~~~~~~~\n' - '\n' - 'An AS pattern matches an OR pattern on the left of the "as" ' - 'keyword\n' - 'against a subject. Syntax:\n' - '\n' - ' as_pattern ::= or_pattern "as" capture_pattern\n' - '\n' - 'If the OR pattern fails, the AS pattern fails. Otherwise, the ' - 'AS\n' - 'pattern binds the subject to the name on the right of the as ' - 'keyword\n' - 'and succeeds. "capture_pattern" cannot be a "_".\n' - '\n' - 'In simple terms "P as NAME" will match with "P", and on success ' - 'it\n' - 'will set "NAME = ".\n' - '\n' - '\n' - 'Literal Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A literal pattern corresponds to most literals in Python. 
' - 'Syntax:\n' - '\n' - ' literal_pattern ::= signed_number\n' - ' | signed_number "+" NUMBER\n' - ' | signed_number "-" NUMBER\n' - ' | strings\n' - ' | "None"\n' - ' | "True"\n' - ' | "False"\n' - ' signed_number ::= ["-"] NUMBER\n' - '\n' - 'The rule "strings" and the token "NUMBER" are defined in the ' - 'standard\n' - 'Python grammar. Triple-quoted strings are supported. Raw ' - 'strings and\n' - 'byte strings are supported. f-strings are not supported.\n' - '\n' - 'The forms "signed_number \'+\' NUMBER" and "signed_number \'-\' ' - 'NUMBER"\n' - 'are for expressing complex numbers; they require a real number ' - 'on the\n' - 'left and an imaginary number on the right. E.g. "3 + 4j".\n' - '\n' - 'In simple terms, "LITERAL" will succeed only if " ==\n' - 'LITERAL". For the singletons "None", "True" and "False", the ' - '"is"\n' - 'operator is used.\n' - '\n' - '\n' - 'Capture Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A capture pattern binds the subject value to a name. Syntax:\n' - '\n' - " capture_pattern ::= !'_' NAME\n" - '\n' - 'A single underscore "_" is not a capture pattern (this is what ' - '"!\'_\'"\n' - 'expresses). It is instead treated as a "wildcard_pattern".\n' - '\n' - 'In a given pattern, a given name can only be bound once. E.g. ' - '"case\n' - 'x, x: ..." is invalid while "case [x] | x: ..." is allowed.\n' - '\n' - 'Capture patterns always succeed. The binding follows scoping ' - 'rules\n' - 'established by the assignment expression operator in **PEP ' - '572**; the\n' - 'name becomes a local variable in the closest containing function ' - 'scope\n' - 'unless there’s an applicable "global" or "nonlocal" statement.\n' - '\n' - 'In simple terms "NAME" will always succeed and it will set "NAME ' - '=\n' - '".\n' - '\n' - '\n' - 'Wildcard Patterns\n' - '~~~~~~~~~~~~~~~~~\n' - '\n' - 'A wildcard pattern always succeeds (matches anything) and binds ' - 'no\n' - 'name. Syntax:\n' - '\n' - " wildcard_pattern ::= '_'\n" - '\n' - '"_" is a soft keyword within any pattern, but only within ' - 'patterns.\n' - 'It is an identifier, as usual, even within "match" subject\n' - 'expressions, "guard"s, and "case" blocks.\n' - '\n' - 'In simple terms, "_" will always succeed.\n' - '\n' - '\n' - 'Value Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A value pattern represents a named value in Python. Syntax:\n' - '\n' - ' value_pattern ::= attr\n' - ' attr ::= name_or_attr "." NAME\n' - ' name_or_attr ::= attr | NAME\n' - '\n' - 'The dotted name in the pattern is looked up using standard ' - 'Python name\n' - 'resolution rules. The pattern succeeds if the value found ' - 'compares\n' - 'equal to the subject value (using the "==" equality operator).\n' - '\n' - 'In simple terms "NAME1.NAME2" will succeed only if " ' - '==\n' - 'NAME1.NAME2"\n' - '\n' - 'Note:\n' - '\n' - ' If the same value occurs multiple times in the same match ' - 'statement,\n' - ' the interpreter may cache the first value found and reuse it ' - 'rather\n' - ' than repeat the same lookup. This cache is strictly tied to a ' - 'given\n' - ' execution of a given match statement.\n' - '\n' - '\n' - 'Group Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A group pattern allows users to add parentheses around patterns ' - 'to\n' - 'emphasize the intended grouping. Otherwise, it has no ' - 'additional\n' - 'syntax. 
Syntax:\n' - '\n' - ' group_pattern ::= "(" pattern ")"\n' - '\n' - 'In simple terms "(P)" has the same effect as "P".\n' - '\n' - '\n' - 'Sequence Patterns\n' - '~~~~~~~~~~~~~~~~~\n' - '\n' - 'A sequence pattern contains several subpatterns to be matched ' - 'against\n' - 'sequence elements. The syntax is similar to the unpacking of a ' - 'list or\n' - 'tuple.\n' - '\n' - ' sequence_pattern ::= "[" [maybe_sequence_pattern] "]"\n' - ' | "(" [open_sequence_pattern] ")"\n' - ' open_sequence_pattern ::= maybe_star_pattern "," ' - '[maybe_sequence_pattern]\n' - ' maybe_sequence_pattern ::= ",".maybe_star_pattern+ ","?\n' - ' maybe_star_pattern ::= star_pattern | pattern\n' - ' star_pattern ::= "*" (capture_pattern | ' - 'wildcard_pattern)\n' - '\n' - 'There is no difference if parentheses or square brackets are ' - 'used for\n' - 'sequence patterns (i.e. "(...)" vs "[...]" ).\n' - '\n' - 'Note:\n' - '\n' - ' A single pattern enclosed in parentheses without a trailing ' - 'comma\n' - ' (e.g. "(3 | 4)") is a group pattern. While a single pattern ' - 'enclosed\n' - ' in square brackets (e.g. "[3 | 4]") is still a sequence ' - 'pattern.\n' - '\n' - 'At most one star subpattern may be in a sequence pattern. The ' - 'star\n' - 'subpattern may occur in any position. If no star subpattern is\n' - 'present, the sequence pattern is a fixed-length sequence ' - 'pattern;\n' - 'otherwise it is a variable-length sequence pattern.\n' - '\n' - 'The following is the logical flow for matching a sequence ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If the subject value is not a sequence [2], the sequence ' - 'pattern\n' - ' fails.\n' - '\n' - '2. If the subject value is an instance of "str", "bytes" or\n' - ' "bytearray" the sequence pattern fails.\n' - '\n' - '3. The subsequent steps depend on whether the sequence pattern ' - 'is\n' - ' fixed or variable-length.\n' - '\n' - ' If the sequence pattern is fixed-length:\n' - '\n' - ' 1. If the length of the subject sequence is not equal to the ' - 'number\n' - ' of subpatterns, the sequence pattern fails\n' - '\n' - ' 2. Subpatterns in the sequence pattern are matched to their\n' - ' corresponding items in the subject sequence from left to ' - 'right.\n' - ' Matching stops as soon as a subpattern fails. If all\n' - ' subpatterns succeed in matching their corresponding item, ' - 'the\n' - ' sequence pattern succeeds.\n' - '\n' - ' Otherwise, if the sequence pattern is variable-length:\n' - '\n' - ' 1. If the length of the subject sequence is less than the ' - 'number of\n' - ' non-star subpatterns, the sequence pattern fails.\n' - '\n' - ' 2. The leading non-star subpatterns are matched to their\n' - ' corresponding items as for fixed-length sequences.\n' - '\n' - ' 3. If the previous step succeeds, the star subpattern matches ' - 'a\n' - ' list formed of the remaining subject items, excluding the\n' - ' remaining items corresponding to non-star subpatterns ' - 'following\n' - ' the star subpattern.\n' - '\n' - ' 4. Remaining non-star subpatterns are matched to their\n' - ' corresponding subject items, as for a fixed-length ' - 'sequence.\n' - '\n' - ' Note:\n' - '\n' - ' The length of the subject sequence is obtained via "len()" ' - '(i.e.\n' - ' via the "__len__()" protocol). 
This length may be cached ' - 'by the\n' - '  interpreter in a similar manner as value patterns.\n' - '\n' - 'In simple terms "[P1, P2, P3," … ", P<N>]" matches only if all ' - 'the\n' - 'following happens:\n' - '\n' - '* check "<subject>" is a sequence\n' - '\n' - '* "len(subject) == <number of patterns>"\n' - '\n' - '* "P1" matches "<subject>[0]" (note that this match can also ' - 'bind\n' - '  names)\n' - '\n' - '* "P2" matches "<subject>[1]" (note that this match can also ' - 'bind\n' - '  names)\n' - '\n' - '* … and so on for the corresponding pattern/element.\n' - '\n' - '\n' - 'Mapping Patterns\n' - '~~~~~~~~~~~~~~~~\n' - '\n' - 'A mapping pattern contains one or more key-value patterns. The ' - 'syntax\n' - 'is similar to the construction of a dictionary. Syntax:\n' - '\n' - '   mapping_pattern     ::= "{" [items_pattern] "}"\n' - '   items_pattern       ::= ",".key_value_pattern+ ","?\n' - '   key_value_pattern   ::= (literal_pattern | value_pattern) ":" ' - 'pattern\n' - '                         | double_star_pattern\n' - '   double_star_pattern ::= "**" capture_pattern\n' - '\n' - 'At most one double star pattern may be in a mapping pattern. ' - 'The\n' - 'double star pattern must be the last subpattern in the mapping\n' - 'pattern.\n' - '\n' - 'Duplicate keys in mapping patterns are disallowed. Duplicate ' - 'literal\n' - 'keys will raise a "SyntaxError". Two keys that otherwise have ' - 'the same\n' - 'value will raise a "ValueError" at runtime.\n' - '\n' - 'The following is the logical flow for matching a mapping ' - 'pattern\n' - 'against a subject value:\n' - '\n' - '1. If the subject value is not a mapping [3], the mapping ' - 'pattern\n' - '   fails.\n' - '\n' - '2. If every key given in the mapping pattern is present in the ' - 'subject\n' - '   mapping, and the pattern for each key matches the ' - 'corresponding\n' - '   item of the subject mapping, the mapping pattern succeeds.\n' - '\n' - '3. If duplicate keys are detected in the mapping pattern, the ' - 'pattern\n' - '   is considered invalid. A "SyntaxError" is raised for ' - 'duplicate\n' - '   literal values; or a "ValueError" for named keys of the same ' - 'value.\n' - '\n' - 'Note:\n' - '\n' - '  Key-value pairs are matched using the two-argument form of ' - 'the\n' - '  mapping subject’s "get()" method. Matched key-value pairs ' - 'must\n' - '  already be present in the mapping, and not created on-the-fly ' - 'via\n' - '  "__missing__()" or "__getitem__()".\n' - '\n' - 'In simple terms "{KEY1: P1, KEY2: P2, ... }" matches only if all ' - 'the\n' - 'following happens:\n' - '\n' - '* check "<subject>" is a mapping\n' - '\n' - '* "KEY1 in <subject>"\n' - '\n' - '* "P1" matches "<subject>[KEY1]"\n' - '\n' - '* … and so on for the corresponding KEY/pattern pair.\n' - '\n' - '\n' - 'Class Patterns\n' - '~~~~~~~~~~~~~~\n' - '\n' - 'A class pattern represents a class and its positional and ' - 'keyword\n' - 'arguments (if any). Syntax:\n' - '\n' - '   class_pattern       ::= name_or_attr "(" [pattern_arguments ' - '","?] ")"\n' - '   pattern_arguments   ::= positional_patterns ["," ' - 'keyword_patterns]\n' - '                         | keyword_patterns\n' - '   positional_patterns ::= ",".pattern+\n' - '   keyword_patterns    ::= ",".keyword_pattern+\n' - '   keyword_pattern     ::= NAME "=" pattern\n' - '\n' - 'The same keyword should not be repeated in class patterns.\n' - '\n' - 'The following is the logical flow for matching a class pattern ' - 'against\n' - 'a subject value:\n' - '\n' - '1. If "name_or_attr" is not an instance of the builtin "type", ' - 'raise\n' - '   "TypeError".\n' - '\n' - '2. 
If the subject value is not an instance of "name_or_attr" ' - '(tested\n' - ' via "isinstance()"), the class pattern fails.\n' - '\n' - '3. If no pattern arguments are present, the pattern succeeds.\n' - ' Otherwise, the subsequent steps depend on whether keyword or\n' - ' positional argument patterns are present.\n' - '\n' - ' For a number of built-in types (specified below), a single\n' - ' positional subpattern is accepted which will match the ' - 'entire\n' - ' subject; for these types keyword patterns also work as for ' - 'other\n' - ' types.\n' - '\n' - ' If only keyword patterns are present, they are processed as\n' - ' follows, one by one:\n' - '\n' - ' I. The keyword is looked up as an attribute on the subject.\n' - '\n' - ' * If this raises an exception other than "AttributeError", ' - 'the\n' - ' exception bubbles up.\n' - '\n' - ' * If this raises "AttributeError", the class pattern has ' - 'failed.\n' - '\n' - ' * Else, the subpattern associated with the keyword pattern ' - 'is\n' - ' matched against the subject’s attribute value. If this ' - 'fails,\n' - ' the class pattern fails; if this succeeds, the match ' - 'proceeds\n' - ' to the next keyword.\n' - '\n' - ' II. If all keyword patterns succeed, the class pattern ' - 'succeeds.\n' - '\n' - ' If any positional patterns are present, they are converted ' - 'to\n' - ' keyword patterns using the "__match_args__" attribute on the ' - 'class\n' - ' "name_or_attr" before matching:\n' - '\n' - ' I. The equivalent of "getattr(cls, "__match_args__", ())" is\n' - ' called.\n' - '\n' - ' * If this raises an exception, the exception bubbles up.\n' - '\n' - ' * If the returned value is not a tuple, the conversion ' - 'fails and\n' - ' "TypeError" is raised.\n' - '\n' - ' * If there are more positional patterns than\n' - ' "len(cls.__match_args__)", "TypeError" is raised.\n' - '\n' - ' * Otherwise, positional pattern "i" is converted to a ' - 'keyword\n' - ' pattern using "__match_args__[i]" as the keyword.\n' - ' "__match_args__[i]" must be a string; if not "TypeError" ' - 'is\n' - ' raised.\n' - '\n' - ' * If there are duplicate keywords, "TypeError" is raised.\n' - '\n' - ' See also:\n' - '\n' - ' Customizing positional arguments in class pattern ' - 'matching\n' - '\n' - ' II. 
Once all positional patterns have been converted to ' - 'keyword\n' - '      patterns,\n' - '      the match proceeds as if there were only keyword ' - 'patterns.\n' - '\n' - '   For the following built-in types the handling of positional\n' - '   subpatterns is different:\n' - '\n' - '   * "bool"\n' - '\n' - '   * "bytearray"\n' - '\n' - '   * "bytes"\n' - '\n' - '   * "dict"\n' - '\n' - '   * "float"\n' - '\n' - '   * "frozenset"\n' - '\n' - '   * "int"\n' - '\n' - '   * "list"\n' - '\n' - '   * "set"\n' - '\n' - '   * "str"\n' - '\n' - '   * "tuple"\n' - '\n' - '   These classes accept a single positional argument, and the ' - 'pattern\n' - '   there is matched against the whole object rather than an ' - 'attribute.\n' - '   For example "int(0|1)" matches the value "0", but not the ' - 'value\n' - '   "0.0".\n' - '\n' - 'In simple terms "CLS(P1, attr=P2)" matches only if the ' - 'following\n' - 'happens:\n' - '\n' - '* "isinstance(<subject>, CLS)"\n' - '\n' - '* convert "P1" to a keyword pattern using "CLS.__match_args__"\n' - '\n' - '* For each keyword argument "attr=P2":\n' - '\n' - '  * "hasattr(<subject>, "attr")"\n' - '\n' - '  * "P2" matches "<subject>.attr"\n' - '\n' - '* … and so on for the corresponding keyword argument/pattern ' - 'pair.\n' - '\n' - 'See also:\n' - '\n' - '  * **PEP 634** – Structural Pattern Matching: Specification\n' - '\n' - '  * **PEP 636** – Structural Pattern Matching: Tutorial\n' - '\n' - '\n' - 'Function definitions\n' - '====================\n' - '\n' - 'A function definition defines a user-defined function object ' - '(see\n' - 'section The standard type hierarchy):\n' - '\n' - '   funcdef                   ::= [decorators] "def" funcname ' - '[type_params] "(" [parameter_list] ")"\n' - '                                 ["->" expression] ":" suite\n' - '   decorators                ::= decorator+\n' - '   decorator                 ::= "@" assignment_expression ' - 'NEWLINE\n' - '   parameter_list            ::= defparameter ("," ' - 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n' - '                                 | parameter_list_no_posonly\n' - '   parameter_list_no_posonly ::= defparameter ("," ' - 'defparameter)* ["," [parameter_list_starargs]]\n' - '                                 | parameter_list_starargs\n' - '   parameter_list_starargs   ::= "*" [star_parameter] ("," ' - 'defparameter)* ["," ["**" parameter [","]]]\n' - '                                 | "**" parameter [","]\n' - '   parameter                 ::= identifier [":" expression]\n' - '   star_parameter            ::= identifier [":" ["*"] ' - 'expression]\n' - '   defparameter              ::= parameter ["=" expression]\n' - '   funcname                  ::= identifier\n' - '\n' - 'A function definition is an executable statement. Its execution ' - 'binds\n' - 'the function name in the current local namespace to a function ' - 'object\n' - '(a wrapper around the executable code for the function). This\n' - 'function object contains a reference to the current global ' - 'namespace\n' - 'as the global namespace to be used when the function is called.\n' - '\n' - 'The function definition does not execute the function body; this ' - 'gets\n' - 'executed only when the function is called. [4]\n' - '\n' - 'A function definition may be wrapped by one or more *decorator*\n' - 'expressions. Decorator expressions are evaluated when the ' - 'function is\n' - 'defined, in the scope that contains the function definition. ' - 'The\n' - 'result must be a callable, which is invoked with the function ' - 'object\n' - 'as the only argument. The returned value is bound to the ' - 'function name\n' - 'instead of the function object. Multiple decorators are applied ' - 'in\n' - 'nested fashion. 
For example, the following code\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' def func(): pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' def func(): pass\n' - ' func = f1(arg)(f2(func))\n' - '\n' - 'except that the original function is not temporarily bound to ' - 'the name\n' - '"func".\n' - '\n' - 'Changed in version 3.9: Functions may be decorated with any ' - 'valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'between the\n' - 'function’s name and the opening parenthesis for its parameter ' - 'list.\n' - 'This indicates to static type checkers that the function is ' - 'generic.\n' - 'At runtime, the type parameters can be retrieved from the ' - 'function’s\n' - '"__type_params__" attribute. See Generic functions for more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - 'When one or more *parameters* have the form *parameter* "="\n' - '*expression*, the function is said to have “default parameter ' - 'values.”\n' - 'For a parameter with a default value, the corresponding ' - '*argument* may\n' - 'be omitted from a call, in which case the parameter’s default ' - 'value is\n' - 'substituted. If a parameter has a default value, all following\n' - 'parameters up until the “"*"” must also have a default value — ' - 'this is\n' - 'a syntactic restriction that is not expressed by the grammar.\n' - '\n' - '**Default parameter values are evaluated from left to right when ' - 'the\n' - 'function definition is executed.** This means that the ' - 'expression is\n' - 'evaluated once, when the function is defined, and that the same ' - '“pre-\n' - 'computed” value is used for each call. This is especially ' - 'important\n' - 'to understand when a default parameter value is a mutable ' - 'object, such\n' - 'as a list or a dictionary: if the function modifies the object ' - '(e.g.\n' - 'by appending an item to a list), the default parameter value is ' - 'in\n' - 'effect modified. This is generally not what was intended. A ' - 'way\n' - 'around this is to use "None" as the default, and explicitly test ' - 'for\n' - 'it in the body of the function, e.g.:\n' - '\n' - ' def whats_on_the_telly(penguin=None):\n' - ' if penguin is None:\n' - ' penguin = []\n' - ' penguin.append("property of the zoo")\n' - ' return penguin\n' - '\n' - 'Function call semantics are described in more detail in section ' - 'Calls.\n' - 'A function call always assigns values to all parameters ' - 'mentioned in\n' - 'the parameter list, either from positional arguments, from ' - 'keyword\n' - 'arguments, or from default values. If the form “"*identifier"” ' - 'is\n' - 'present, it is initialized to a tuple receiving any excess ' - 'positional\n' - 'parameters, defaulting to the empty tuple. If the form\n' - '“"**identifier"” is present, it is initialized to a new ordered\n' - 'mapping receiving any excess keyword arguments, defaulting to a ' - 'new\n' - 'empty mapping of the same type. Parameters after “"*"” or\n' - '“"*identifier"” are keyword-only parameters and may only be ' - 'passed by\n' - 'keyword arguments. Parameters before “"/"” are positional-only\n' - 'parameters and may only be passed by positional arguments.\n' - '\n' - 'Changed in version 3.8: The "/" function parameter syntax may be ' - 'used\n' - 'to indicate positional-only parameters. 
See **PEP 570** for ' - 'details.\n' - '\n' - 'Parameters may have an *annotation* of the form “": ' - 'expression"”\n' - 'following the parameter name. Any parameter may have an ' - 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". (As a ' - 'special\n' - 'case, parameters of the form "*identifier" may have an ' - 'annotation “":\n' - '*expression"”.) Functions may have “return” annotation of the ' - 'form\n' - '“"-> expression"” after the parameter list. These annotations ' - 'can be\n' - 'any valid Python expression. The presence of annotations does ' - 'not\n' - 'change the semantics of a function. See Annotations for more\n' - 'information on annotations.\n' - '\n' - 'Changed in version 3.11: Parameters of the form “"*identifier"” ' - 'may\n' - 'have an annotation “": *expression"”. See **PEP 646**.\n' - '\n' - 'It is also possible to create anonymous functions (functions not ' - 'bound\n' - 'to a name), for immediate use in expressions. This uses lambda\n' - 'expressions, described in section Lambdas. Note that the ' - 'lambda\n' - 'expression is merely a shorthand for a simplified function ' - 'definition;\n' - 'a function defined in a “"def"” statement can be passed around ' - 'or\n' - 'assigned to another name just like a function defined by a ' - 'lambda\n' - 'expression. The “"def"” form is actually more powerful since ' - 'it\n' - 'allows the execution of multiple statements and annotations.\n' - '\n' - '**Programmer’s note:** Functions are first-class objects. A ' - '“"def"”\n' - 'statement executed inside a function definition defines a local\n' - 'function that can be returned or passed around. Free variables ' - 'used\n' - 'in the nested function can access the local variables of the ' - 'function\n' - 'containing the def. See section Naming and binding for ' - 'details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3107** - Function Annotations\n' - ' The original specification for function annotations.\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Definition of a standard meaning for annotations: type ' - 'hints.\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' Ability to type hint variable declarations, including ' - 'class\n' - ' variables and instance variables.\n' - '\n' - ' **PEP 563** - Postponed Evaluation of Annotations\n' - ' Support for forward references within annotations by ' - 'preserving\n' - ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n' - '\n' - ' **PEP 318** - Decorators for Functions and Methods\n' - ' Function and method decorators were introduced. Class ' - 'decorators\n' - ' were introduced in **PEP 3129**.\n' - '\n' - '\n' - 'Class definitions\n' - '=================\n' - '\n' - 'A class definition defines a class object (see section The ' - 'standard\n' - 'type hierarchy):\n' - '\n' - ' classdef ::= [decorators] "class" classname [type_params] ' - '[inheritance] ":" suite\n' - ' inheritance ::= "(" [argument_list] ")"\n' - ' classname ::= identifier\n' - '\n' - 'A class definition is an executable statement. The inheritance ' - 'list\n' - 'usually gives a list of base classes (see Metaclasses for more\n' - 'advanced uses), so each item in the list should evaluate to a ' - 'class\n' - 'object which allows subclassing. 
Classes without an inheritance ' - 'list\n' - 'inherit, by default, from the base class "object"; hence,\n' - '\n' - ' class Foo:\n' - ' pass\n' - '\n' - 'is equivalent to\n' - '\n' - ' class Foo(object):\n' - ' pass\n' - '\n' - 'The class’s suite is then executed in a new execution frame ' - '(see\n' - 'Naming and binding), using a newly created local namespace and ' - 'the\n' - 'original global namespace. (Usually, the suite contains mostly\n' - 'function definitions.) When the class’s suite finishes ' - 'execution, its\n' - 'execution frame is discarded but its local namespace is saved. ' - '[5] A\n' - 'class object is then created using the inheritance list for the ' - 'base\n' - 'classes and the saved local namespace for the attribute ' - 'dictionary.\n' - 'The class name is bound to this class object in the original ' - 'local\n' - 'namespace.\n' - '\n' - 'The order in which attributes are defined in the class body is\n' - 'preserved in the new class’s "__dict__". Note that this is ' - 'reliable\n' - 'only right after the class is created and only for classes that ' - 'were\n' - 'defined using the definition syntax.\n' - '\n' - 'Class creation can be customized heavily using metaclasses.\n' - '\n' - 'Classes can also be decorated: just like when decorating ' - 'functions,\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' class Foo: pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' class Foo: pass\n' - ' Foo = f1(arg)(f2(Foo))\n' - '\n' - 'The evaluation rules for the decorator expressions are the same ' - 'as for\n' - 'function decorators. The result is then bound to the class ' - 'name.\n' - '\n' - 'Changed in version 3.9: Classes may be decorated with any valid\n' - '"assignment_expression". Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'immediately\n' - 'after the class’s name. This indicates to static type checkers ' - 'that\n' - 'the class is generic. At runtime, the type parameters can be ' - 'retrieved\n' - 'from the class’s "__type_params__" attribute. See Generic ' - 'classes for\n' - 'more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - '**Programmer’s note:** Variables defined in the class definition ' - 'are\n' - 'class attributes; they are shared by instances. Instance ' - 'attributes\n' - 'can be set in a method with "self.name = value". Both class ' - 'and\n' - 'instance attributes are accessible through the notation ' - '“"self.name"”,\n' - 'and an instance attribute hides a class attribute with the same ' - 'name\n' - 'when accessed in this way. Class attributes can be used as ' - 'defaults\n' - 'for instance attributes, but using mutable values there can lead ' - 'to\n' - 'unexpected results. Descriptors can be used to create instance\n' - 'variables with different implementation details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' The proposal that changed the declaration of metaclasses to ' - 'the\n' - ' current syntax, and the semantics for how classes with\n' - ' metaclasses are constructed.\n' - '\n' - ' **PEP 3129** - Class Decorators\n' - ' The proposal that added class decorators. 
Function and ' - 'method\n' - ' decorators were introduced in **PEP 318**.\n' - '\n' - '\n' - 'Coroutines\n' - '==========\n' - '\n' - 'Added in version 3.5.\n' - '\n' - '\n' - 'Coroutine function definition\n' - '-----------------------------\n' - '\n' - ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' - '[parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - '\n' - 'Execution of Python coroutines can be suspended and resumed at ' - 'many\n' - 'points (see *coroutine*). "await" expressions, "async for" and ' - '"async\n' - 'with" can only be used in the body of a coroutine function.\n' - '\n' - 'Functions defined with "async def" syntax are always coroutine\n' - 'functions, even if they do not contain "await" or "async" ' - 'keywords.\n' - '\n' - 'It is a "SyntaxError" to use a "yield from" expression inside ' - 'the body\n' - 'of a coroutine function.\n' - '\n' - 'An example of a coroutine function:\n' - '\n' - ' async def func(param1, param2):\n' - ' do_stuff()\n' - ' await some_coroutine()\n' - '\n' - 'Changed in version 3.7: "await" and "async" are now keywords;\n' - 'previously they were only treated as such inside the body of a\n' - 'coroutine function.\n' - '\n' - '\n' - 'The "async for" statement\n' - '-------------------------\n' - '\n' - ' async_for_stmt ::= "async" for_stmt\n' - '\n' - 'An *asynchronous iterable* provides an "__aiter__" method that\n' - 'directly returns an *asynchronous iterator*, which can call\n' - 'asynchronous code in its "__anext__" method.\n' - '\n' - 'The "async for" statement allows convenient iteration over\n' - 'asynchronous iterables.\n' - '\n' - 'The following code:\n' - '\n' - ' async for TARGET in ITER:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'Is semantically equivalent to:\n' - '\n' - ' iter = (ITER)\n' - ' iter = type(iter).__aiter__(iter)\n' - ' running = True\n' - '\n' - ' while running:\n' - ' try:\n' - ' TARGET = await type(iter).__anext__(iter)\n' - ' except StopAsyncIteration:\n' - ' running = False\n' - ' else:\n' - ' SUITE\n' - ' else:\n' - ' SUITE2\n' - '\n' - 'See also "__aiter__()" and "__anext__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async for" statement outside ' - 'the body\n' - 'of a coroutine function.\n' - '\n' - '\n' - 'The "async with" statement\n' - '--------------------------\n' - '\n' - ' async_with_stmt ::= "async" with_stmt\n' - '\n' - 'An *asynchronous context manager* is a *context manager* that is ' - 'able\n' - 'to suspend execution in its *enter* and *exit* methods.\n' - '\n' - 'The following code:\n' - '\n' - ' async with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' aenter = type(manager).__aenter__\n' - ' aexit = type(manager).__aexit__\n' - ' value = await aenter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not await aexit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' await aexit(manager, None, None, None)\n' - '\n' - 'See also "__aenter__()" and "__aexit__()" for details.\n' - '\n' - 'It is a "SyntaxError" to use an "async with" statement outside ' - 'the\n' - 'body of a coroutine function.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 492** - Coroutines with async and await syntax\n' - ' The proposal that made coroutines a proper standalone ' - 'concept in\n' - ' Python, and added supporting syntax.\n' - '\n' - '\n' - 'Type parameter lists\n' - 
'====================\n' - '\n' - 'Added in version 3.12.\n' - '\n' - 'Changed in version 3.13: Support for default values was added ' - '(see\n' - '**PEP 696**).\n' - '\n' - ' type_params ::= "[" type_param ("," type_param)* "]"\n' - ' type_param ::= typevar | typevartuple | paramspec\n' - ' typevar ::= identifier (":" expression)? ("=" ' - 'expression)?\n' - ' typevartuple ::= "*" identifier ("=" expression)?\n' - ' paramspec ::= "**" identifier ("=" expression)?\n' - '\n' - 'Functions (including coroutines), classes and type aliases may ' - 'contain\n' - 'a type parameter list:\n' - '\n' - ' def max[T](args: list[T]) -> T:\n' - ' ...\n' - '\n' - ' async def amax[T](args: list[T]) -> T:\n' - ' ...\n' - '\n' - ' class Bag[T]:\n' - ' def __iter__(self) -> Iterator[T]:\n' - ' ...\n' - '\n' - ' def add(self, arg: T) -> None:\n' - ' ...\n' - '\n' - ' type ListOrSet[T] = list[T] | set[T]\n' - '\n' - 'Semantically, this indicates that the function, class, or type ' - 'alias\n' - 'is generic over a type variable. This information is primarily ' - 'used by\n' - 'static type checkers, and at runtime, generic objects behave ' - 'much like\n' - 'their non-generic counterparts.\n' - '\n' - 'Type parameters are declared in square brackets ("[]") ' - 'immediately\n' - 'after the name of the function, class, or type alias. The type\n' - 'parameters are accessible within the scope of the generic ' - 'object, but\n' - 'not elsewhere. Thus, after a declaration "def func[T](): pass", ' - 'the\n' - 'name "T" is not available in the module scope. Below, the ' - 'semantics of\n' - 'generic objects are described with more precision. The scope of ' - 'type\n' - 'parameters is modeled with a special function (technically, an\n' - 'annotation scope) that wraps the creation of the generic ' - 'object.\n' - '\n' - 'Generic functions, classes, and type aliases have a ' - '"__type_params__"\n' - 'attribute listing their type parameters.\n' - '\n' - 'Type parameters come in three kinds:\n' - '\n' - '* "typing.TypeVar", introduced by a plain name (e.g., "T").\n' - ' Semantically, this represents a single type to a type ' - 'checker.\n' - '\n' - '* "typing.TypeVarTuple", introduced by a name prefixed with a ' - 'single\n' - ' asterisk (e.g., "*Ts"). Semantically, this stands for a tuple ' - 'of any\n' - ' number of types.\n' - '\n' - '* "typing.ParamSpec", introduced by a name prefixed with two ' - 'asterisks\n' - ' (e.g., "**P"). Semantically, this stands for the parameters of ' - 'a\n' - ' callable.\n' - '\n' - '"typing.TypeVar" declarations can define *bounds* and ' - '*constraints*\n' - 'with a colon (":") followed by an expression. A single ' - 'expression\n' - 'after the colon indicates a bound (e.g. "T: int"). Semantically, ' - 'this\n' - 'means that the "typing.TypeVar" can only represent types that ' - 'are a\n' - 'subtype of this bound. A parenthesized tuple of expressions ' - 'after the\n' - 'colon indicates a set of constraints (e.g. "T: (str, bytes)"). ' - 'Each\n' - 'member of the tuple should be a type (again, this is not ' - 'enforced at\n' - 'runtime). Constrained type variables can only take on one of the ' - 'types\n' - 'in the list of constraints.\n' - '\n' - 'For "typing.TypeVar"s declared using the type parameter list ' - 'syntax,\n' - 'the bound and constraints are not evaluated when the generic ' - 'object is\n' - 'created, but only when the value is explicitly accessed through ' - 'the\n' - 'attributes "__bound__" and "__constraints__". 
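A brief sketch of this lazy evaluation (requires Python 3.12 or later; the names are illustrative only):

   def clamp[T: float](value: T) -> T:    # the bound "float" is not evaluated here
       return value

   type_var = clamp.__type_params__[0]
   print(type_var)                        # T
   print(type_var.__bound__)              # <class 'float'>, evaluated on access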
To accomplish ' - 'this, the\n' - 'bounds or constraints are evaluated in a separate annotation ' - 'scope.\n' - '\n' - '"typing.TypeVarTuple"s and "typing.ParamSpec"s cannot have ' - 'bounds or\n' - 'constraints.\n' - '\n' - 'All three flavors of type parameters can also have a *default ' - 'value*,\n' - 'which is used when the type parameter is not explicitly ' - 'provided. This\n' - 'is added by appending a single equals sign ("=") followed by an\n' - 'expression. Like the bounds and constraints of type variables, ' - 'the\n' - 'default value is not evaluated when the object is created, but ' - 'only\n' - 'when the type parameter’s "__default__" attribute is accessed. ' - 'To this\n' - 'end, the default value is evaluated in a separate annotation ' - 'scope. If\n' - 'no default value is specified for a type parameter, the ' - '"__default__"\n' - 'attribute is set to the special sentinel object ' - '"typing.NoDefault".\n' - '\n' - 'The following example indicates the full set of allowed type ' - 'parameter\n' - 'declarations:\n' - '\n' - ' def overly_generic[\n' - ' SimpleTypeVar,\n' - ' TypeVarWithDefault = int,\n' - ' TypeVarWithBound: int,\n' - ' TypeVarWithConstraints: (str, bytes),\n' - ' *SimpleTypeVarTuple = (int, float),\n' - ' **SimpleParamSpec = (str, bytearray),\n' - ' ](\n' - ' a: SimpleTypeVar,\n' - ' b: TypeVarWithDefault,\n' - ' c: TypeVarWithBound,\n' - ' d: Callable[SimpleParamSpec, TypeVarWithConstraints],\n' - ' *e: SimpleTypeVarTuple,\n' - ' ): ...\n' - '\n' - '\n' - 'Generic functions\n' - '-----------------\n' - '\n' - 'Generic functions are declared as follows:\n' - '\n' - ' def func[T](arg: T): ...\n' - '\n' - 'This syntax is equivalent to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_func():\n' - ' T = typing.TypeVar("T")\n' - ' def func(arg: T): ...\n' - ' func.__type_params__ = (T,)\n' - ' return func\n' - ' func = TYPE_PARAMS_OF_func()\n' - '\n' - 'Here "annotation-def" indicates an annotation scope, which is ' - 'not\n' - 'actually bound to any name at runtime. 
(One other liberty is ' - 'taken in\n' - 'the translation: the syntax does not go through attribute access ' - 'on\n' - 'the "typing" module, but creates an instance of ' - '"typing.TypeVar"\n' - 'directly.)\n' - '\n' - 'The annotations of generic functions are evaluated within the\n' - 'annotation scope used for declaring the type parameters, but ' - 'the\n' - 'function’s defaults and decorators are not.\n' - '\n' - 'The following example illustrates the scoping rules for these ' - 'cases,\n' - 'as well as for additional flavors of type parameters:\n' - '\n' - ' @decorator\n' - ' def func[T: int, *Ts, **P](*args: *Ts, arg: Callable[P, T] = ' - 'some_default):\n' - ' ...\n' - '\n' - 'Except for the lazy evaluation of the "TypeVar" bound, this is\n' - 'equivalent to:\n' - '\n' - ' DEFAULT_OF_arg = some_default\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_func():\n' - '\n' - ' annotation-def BOUND_OF_T():\n' - ' return int\n' - ' # In reality, BOUND_OF_T() is evaluated only on demand.\n' - ' T = typing.TypeVar("T", bound=BOUND_OF_T())\n' - '\n' - ' Ts = typing.TypeVarTuple("Ts")\n' - ' P = typing.ParamSpec("P")\n' - '\n' - ' def func(*args: *Ts, arg: Callable[P, T] = ' - 'DEFAULT_OF_arg):\n' - ' ...\n' - '\n' - ' func.__type_params__ = (T, Ts, P)\n' - ' return func\n' - ' func = decorator(TYPE_PARAMS_OF_func())\n' - '\n' - 'The capitalized names like "DEFAULT_OF_arg" are not actually ' - 'bound at\n' - 'runtime.\n' - '\n' - '\n' - 'Generic classes\n' - '---------------\n' - '\n' - 'Generic classes are declared as follows:\n' - '\n' - ' class Bag[T]: ...\n' - '\n' - 'This syntax is equivalent to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_Bag():\n' - ' T = typing.TypeVar("T")\n' - ' class Bag(typing.Generic[T]):\n' - ' __type_params__ = (T,)\n' - ' ...\n' - ' return Bag\n' - ' Bag = TYPE_PARAMS_OF_Bag()\n' - '\n' - 'Here again "annotation-def" (not a real keyword) indicates an\n' - 'annotation scope, and the name "TYPE_PARAMS_OF_Bag" is not ' - 'actually\n' - 'bound at runtime.\n' - '\n' - 'Generic classes implicitly inherit from "typing.Generic". The ' - 'base\n' - 'classes and keyword arguments of generic classes are evaluated ' - 'within\n' - 'the type scope for the type parameters, and decorators are ' - 'evaluated\n' - 'outside that scope. This is illustrated by this example:\n' - '\n' - ' @decorator\n' - ' class Bag(Base[T], arg=T): ...\n' - '\n' - 'This is equivalent to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_Bag():\n' - ' T = typing.TypeVar("T")\n' - ' class Bag(Base[T], typing.Generic[T], arg=T):\n' - ' __type_params__ = (T,)\n' - ' ...\n' - ' return Bag\n' - ' Bag = decorator(TYPE_PARAMS_OF_Bag())\n' - '\n' - '\n' - 'Generic type aliases\n' - '--------------------\n' - '\n' - 'The "type" statement can also be used to create a generic type ' - 'alias:\n' - '\n' - ' type ListOrSet[T] = list[T] | set[T]\n' - '\n' - 'Except for the lazy evaluation of the value, this is equivalent ' - 'to:\n' - '\n' - ' annotation-def TYPE_PARAMS_OF_ListOrSet():\n' - ' T = typing.TypeVar("T")\n' - '\n' - ' annotation-def VALUE_OF_ListOrSet():\n' - ' return list[T] | set[T]\n' - ' # In reality, the value is lazily evaluated\n' - ' return typing.TypeAliasType("ListOrSet", ' - 'VALUE_OF_ListOrSet(), type_params=(T,))\n' - ' ListOrSet = TYPE_PARAMS_OF_ListOrSet()\n' - '\n' - 'Here, "annotation-def" (not a real keyword) indicates an ' - 'annotation\n' - 'scope. 
The capitalized names like "TYPE_PARAMS_OF_ListOrSet" are ' - 'not\n' - 'actually bound at runtime.\n' - '\n' - '\n' - 'Annotations\n' - '===========\n' - '\n' - 'Changed in version 3.14: Annotations are now lazily evaluated ' - 'by\n' - 'default.\n' - '\n' - 'Variables and function parameters may carry *annotations*, ' - 'created by\n' - 'adding a colon after the name, followed by an expression:\n' - '\n' - ' x: annotation = 1\n' - ' def f(param: annotation): ...\n' - '\n' - 'Functions may also carry a return annotation following an ' - 'arrow:\n' - '\n' - ' def f() -> annotation: ...\n' - '\n' - 'Annotations are conventionally used for *type hints*, but this ' - 'is not\n' - 'enforced by the language, and in general annotations may ' - 'contain\n' - 'arbitrary expressions. The presence of annotations does not ' - 'change the\n' - 'runtime semantics of the code, except if some mechanism is used ' - 'that\n' - 'introspects and uses the annotations (such as "dataclasses" or\n' - '"functools.singledispatch()").\n' - '\n' - 'By default, annotations are lazily evaluated in a annotation ' - 'scope.\n' - 'This means that they are not evaluated when the code containing ' - 'the\n' - 'annotation is evaluated. Instead, the interpreter saves ' - 'information\n' - 'that can be used to evaluate the annotation later if requested. ' - 'The\n' - '"annotationlib" module provides tools for evaluating ' - 'annotations.\n' - '\n' - 'If the future statement "from __future__ import annotations" is\n' - 'present, all annotations are instead stored as strings:\n' - '\n' - ' >>> from __future__ import annotations\n' - ' >>> def f(param: annotation): ...\n' - ' >>> f.__annotations__\n' - " {'param': 'annotation'}\n" - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] The exception is propagated to the invocation stack unless ' - 'there\n' - ' is a "finally" clause which happens to raise another ' - 'exception.\n' - ' That new exception causes the old one to be lost.\n' - '\n' - '[2] In pattern matching, a sequence is defined as one of the\n' - ' following:\n' - '\n' - ' * a class that inherits from "collections.abc.Sequence"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Sequence"\n' - '\n' - ' * a builtin class that has its (CPython) ' - '"Py_TPFLAGS_SEQUENCE" bit\n' - ' set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The following standard library classes are sequences:\n' - '\n' - ' * "array.array"\n' - '\n' - ' * "collections.deque"\n' - '\n' - ' * "list"\n' - '\n' - ' * "memoryview"\n' - '\n' - ' * "range"\n' - '\n' - ' * "tuple"\n' - '\n' - ' Note:\n' - '\n' - ' Subject values of type "str", "bytes", and "bytearray" do ' - 'not\n' - ' match sequence patterns.\n' - '\n' - '[3] In pattern matching, a mapping is defined as one of the ' - 'following:\n' - '\n' - ' * a class that inherits from "collections.abc.Mapping"\n' - '\n' - ' * a Python class that has been registered as\n' - ' "collections.abc.Mapping"\n' - '\n' - ' * a builtin class that has its (CPython) ' - '"Py_TPFLAGS_MAPPING" bit\n' - ' set\n' - '\n' - ' * a class that inherits from any of the above\n' - '\n' - ' The standard library classes "dict" and ' - '"types.MappingProxyType"\n' - ' are mappings.\n' - '\n' - '[4] A string literal appearing as the first statement in the ' - 'function\n' - ' body is transformed into the function’s "__doc__" attribute ' - 'and\n' - ' therefore the function’s *docstring*.\n' - '\n' - '[5] A string literal appearing as the first statement in the ' - 'class\n' 
- ' body is transformed into the namespace’s "__doc__" item and\n' - ' therefore the class’s *docstring*.\n', - 'context-managers': 'With Statement Context Managers\n' - '*******************************\n' - '\n' - 'A *context manager* is an object that defines the ' - 'runtime context to\n' - 'be established when executing a "with" statement. The ' - 'context manager\n' - 'handles the entry into, and the exit from, the desired ' - 'runtime context\n' - 'for the execution of the block of code. Context ' - 'managers are normally\n' - 'invoked using the "with" statement (described in section ' - 'The with\n' - 'statement), but can also be used by directly invoking ' - 'their methods.\n' - '\n' - 'Typical uses of context managers include saving and ' - 'restoring various\n' - 'kinds of global state, locking and unlocking resources, ' - 'closing opened\n' - 'files, etc.\n' - '\n' - 'For more information on context managers, see Context ' - 'Manager Types.\n' - 'The "object" class itself does not provide the context ' - 'manager\n' - 'methods.\n' - '\n' - 'object.__enter__(self)\n' - '\n' - ' Enter the runtime context related to this object. The ' - '"with"\n' - ' statement will bind this method’s return value to the ' - 'target(s)\n' - ' specified in the "as" clause of the statement, if ' - 'any.\n' - '\n' - 'object.__exit__(self, exc_type, exc_value, traceback)\n' - '\n' - ' Exit the runtime context related to this object. The ' - 'parameters\n' - ' describe the exception that caused the context to be ' - 'exited. If the\n' - ' context was exited without an exception, all three ' - 'arguments will\n' - ' be "None".\n' - '\n' - ' If an exception is supplied, and the method wishes to ' - 'suppress the\n' - ' exception (i.e., prevent it from being propagated), ' - 'it should\n' - ' return a true value. Otherwise, the exception will be ' - 'processed\n' - ' normally upon exit from this method.\n' - '\n' - ' Note that "__exit__()" methods should not reraise the ' - 'passed-in\n' - ' exception; this is the caller’s responsibility.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the ' - 'Python "with"\n' - ' statement.\n', - 'continue': 'The "continue" statement\n' - '************************\n' - '\n' - ' continue_stmt ::= "continue"\n' - '\n' - '"continue" may only occur syntactically nested in a "for" or ' - '"while"\n' - 'loop, but not nested in a function or class definition within ' - 'that\n' - 'loop. It continues with the next cycle of the nearest enclosing ' - 'loop.\n' - '\n' - 'When "continue" passes control out of a "try" statement with a\n' - '"finally" clause, that "finally" clause is executed before ' - 'really\n' - 'starting the next loop cycle.\n', - 'conversions': 'Arithmetic conversions\n' - '**********************\n' - '\n' - 'When a description of an arithmetic operator below uses the ' - 'phrase\n' - '“the numeric arguments are converted to a common real type”, ' - 'this\n' - 'means that the operator implementation for built-in types ' - 'works as\n' - 'follows:\n' - '\n' - '* If both arguments are complex numbers, no conversion is ' - 'performed;\n' - '\n' - '* if either argument is a complex or a floating-point number, ' - 'the\n' - ' other is converted to a floating-point number;\n' - '\n' - '* otherwise, both must be integers and no conversion is ' - 'necessary.\n' - '\n' - 'Some additional rules apply for certain operators (e.g., a ' - 'string as a\n' - 'left argument to the ‘%’ operator). 
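As a brief aside, the "__enter__()"/"__exit__()" protocol described in the context-manager section above can be sketched as follows (the class is invented for illustration and is not part of the reference text):

    class ManagedResource:
        def __enter__(self):
            print("acquiring")
            return self                  # bound to the "as" target, if any

        def __exit__(self, exc_type, exc_value, traceback):
            print("releasing")
            return False                 # do not suppress exceptions

    with ManagedResource() as res:
        print("inside the block")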
Extensions must define ' - 'their own\n' - 'conversion behavior.\n', - 'customization': 'Basic customization\n' - '*******************\n' - '\n' - 'object.__new__(cls[, ...])\n' - '\n' - ' Called to create a new instance of class *cls*. ' - '"__new__()" is a\n' - ' static method (special-cased so you need not declare it ' - 'as such)\n' - ' that takes the class of which an instance was requested ' - 'as its\n' - ' first argument. The remaining arguments are those ' - 'passed to the\n' - ' object constructor expression (the call to the class). ' - 'The return\n' - ' value of "__new__()" should be the new object instance ' - '(usually an\n' - ' instance of *cls*).\n' - '\n' - ' Typical implementations create a new instance of the ' - 'class by\n' - ' invoking the superclass’s "__new__()" method using\n' - ' "super().__new__(cls[, ...])" with appropriate arguments ' - 'and then\n' - ' modifying the newly created instance as necessary before ' - 'returning\n' - ' it.\n' - '\n' - ' If "__new__()" is invoked during object construction and ' - 'it returns\n' - ' an instance of *cls*, then the new instance’s ' - '"__init__()" method\n' - ' will be invoked like "__init__(self[, ...])", where ' - '*self* is the\n' - ' new instance and the remaining arguments are the same as ' - 'were\n' - ' passed to the object constructor.\n' - '\n' - ' If "__new__()" does not return an instance of *cls*, ' - 'then the new\n' - ' instance’s "__init__()" method will not be invoked.\n' - '\n' - ' "__new__()" is intended mainly to allow subclasses of ' - 'immutable\n' - ' types (like int, str, or tuple) to customize instance ' - 'creation. It\n' - ' is also commonly overridden in custom metaclasses in ' - 'order to\n' - ' customize class creation.\n' - '\n' - 'object.__init__(self[, ...])\n' - '\n' - ' Called after the instance has been created (by ' - '"__new__()"), but\n' - ' before it is returned to the caller. The arguments are ' - 'those\n' - ' passed to the class constructor expression. If a base ' - 'class has an\n' - ' "__init__()" method, the derived class’s "__init__()" ' - 'method, if\n' - ' any, must explicitly call it to ensure proper ' - 'initialization of the\n' - ' base class part of the instance; for example:\n' - ' "super().__init__([args...])".\n' - '\n' - ' Because "__new__()" and "__init__()" work together in ' - 'constructing\n' - ' objects ("__new__()" to create it, and "__init__()" to ' - 'customize\n' - ' it), no non-"None" value may be returned by ' - '"__init__()"; doing so\n' - ' will cause a "TypeError" to be raised at runtime.\n' - '\n' - 'object.__del__(self)\n' - '\n' - ' Called when the instance is about to be destroyed. This ' - 'is also\n' - ' called a finalizer or (improperly) a destructor. If a ' - 'base class\n' - ' has a "__del__()" method, the derived class’s ' - '"__del__()" method,\n' - ' if any, must explicitly call it to ensure proper ' - 'deletion of the\n' - ' base class part of the instance.\n' - '\n' - ' It is possible (though not recommended!) for the ' - '"__del__()" method\n' - ' to postpone destruction of the instance by creating a ' - 'new reference\n' - ' to it. This is called object *resurrection*. 
It is\n' - ' implementation-dependent whether "__del__()" is called a ' - 'second\n' - ' time when a resurrected object is about to be destroyed; ' - 'the\n' - ' current *CPython* implementation only calls it once.\n' - '\n' - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' - ' "weakref.finalize" provides a straightforward way to ' - 'register a\n' - ' cleanup function to be called when an object is garbage ' - 'collected.\n' - '\n' - ' Note:\n' - '\n' - ' "del x" doesn’t directly call "x.__del__()" — the ' - 'former\n' - ' decrements the reference count for "x" by one, and the ' - 'latter is\n' - ' only called when "x"’s reference count reaches zero.\n' - '\n' - ' **CPython implementation detail:** It is possible for a ' - 'reference\n' - ' cycle to prevent the reference count of an object from ' - 'going to\n' - ' zero. In this case, the cycle will be later detected ' - 'and deleted\n' - ' by the *cyclic garbage collector*. A common cause of ' - 'reference\n' - ' cycles is when an exception has been caught in a local ' - 'variable.\n' - ' The frame’s locals then reference the exception, which ' - 'references\n' - ' its own traceback, which references the locals of all ' - 'frames caught\n' - ' in the traceback.\n' - '\n' - ' See also: Documentation for the "gc" module.\n' - '\n' - ' Warning:\n' - '\n' - ' Due to the precarious circumstances under which ' - '"__del__()"\n' - ' methods are invoked, exceptions that occur during ' - 'their execution\n' - ' are ignored, and a warning is printed to "sys.stderr" ' - 'instead.\n' - ' In particular:\n' - '\n' - ' * "__del__()" can be invoked when arbitrary code is ' - 'being\n' - ' executed, including from any arbitrary thread. If ' - '"__del__()"\n' - ' needs to take a lock or invoke any other blocking ' - 'resource, it\n' - ' may deadlock as the resource may already be taken by ' - 'the code\n' - ' that gets interrupted to execute "__del__()".\n' - '\n' - ' * "__del__()" can be executed during interpreter ' - 'shutdown. As a\n' - ' consequence, the global variables it needs to access ' - '(including\n' - ' other modules) may already have been deleted or set ' - 'to "None".\n' - ' Python guarantees that globals whose name begins ' - 'with a single\n' - ' underscore are deleted from their module before ' - 'other globals\n' - ' are deleted; if no other references to such globals ' - 'exist, this\n' - ' may help in assuring that imported modules are still ' - 'available\n' - ' at the time when the "__del__()" method is called.\n' - '\n' - 'object.__repr__(self)\n' - '\n' - ' Called by the "repr()" built-in function to compute the ' - '“official”\n' - ' string representation of an object. If at all possible, ' - 'this\n' - ' should look like a valid Python expression that could be ' - 'used to\n' - ' recreate an object with the same value (given an ' - 'appropriate\n' - ' environment). If this is not possible, a string of the ' - 'form\n' - ' "<...some useful description...>" should be returned. ' - 'The return\n' - ' value must be a string object. If a class defines ' - '"__repr__()" but\n' - ' not "__str__()", then "__repr__()" is also used when an ' - '“informal”\n' - ' string representation of instances of that class is ' - 'required.\n' - '\n' - ' This is typically used for debugging, so it is important ' - 'that the\n' - ' representation is information-rich and unambiguous. 
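A short sketch of the "__repr__()" guidance above, including the fallback that "__str__()" inherits from "object"; the class is invented for illustration:

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __repr__(self):
            # Reads like an expression that recreates an equal object.
            return f"Point(x={self.x!r}, y={self.y!r})"

    p = Point(1, 2)
    print(repr(p))   # Point(x=1, y=2)
    print(str(p))    # Point(x=1, y=2), because object.__str__() calls __repr__()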
A ' - 'default\n' - ' implementation is provided by the "object" class ' - 'itself.\n' - '\n' - 'object.__str__(self)\n' - '\n' - ' Called by "str(object)", the default "__format__()" ' - 'implementation,\n' - ' and the built-in function "print()", to compute the ' - '“informal” or\n' - ' nicely printable string representation of an object. ' - 'The return\n' - ' value must be a str object.\n' - '\n' - ' This method differs from "object.__repr__()" in that ' - 'there is no\n' - ' expectation that "__str__()" return a valid Python ' - 'expression: a\n' - ' more convenient or concise representation can be used.\n' - '\n' - ' The default implementation defined by the built-in type ' - '"object"\n' - ' calls "object.__repr__()".\n' - '\n' - 'object.__bytes__(self)\n' - '\n' - ' Called by bytes to compute a byte-string representation ' - 'of an\n' - ' object. This should return a "bytes" object. The ' - '"object" class\n' - ' itself does not provide this method.\n' - '\n' - 'object.__format__(self, format_spec)\n' - '\n' - ' Called by the "format()" built-in function, and by ' - 'extension,\n' - ' evaluation of formatted string literals and the ' - '"str.format()"\n' - ' method, to produce a “formatted” string representation ' - 'of an\n' - ' object. The *format_spec* argument is a string that ' - 'contains a\n' - ' description of the formatting options desired. The ' - 'interpretation\n' - ' of the *format_spec* argument is up to the type ' - 'implementing\n' - ' "__format__()", however most classes will either ' - 'delegate\n' - ' formatting to one of the built-in types, or use a ' - 'similar\n' - ' formatting option syntax.\n' - '\n' - ' See Format Specification Mini-Language for a description ' - 'of the\n' - ' standard formatting syntax.\n' - '\n' - ' The return value must be a string object.\n' - '\n' - ' The default implementation by the "object" class should ' - 'be given an\n' - ' empty *format_spec* string. It delegates to ' - '"__str__()".\n' - '\n' - ' Changed in version 3.4: The __format__ method of ' - '"object" itself\n' - ' raises a "TypeError" if passed any non-empty string.\n' - '\n' - ' Changed in version 3.7: "object.__format__(x, \'\')" is ' - 'now\n' - ' equivalent to "str(x)" rather than "format(str(x), ' - '\'\')".\n' - '\n' - 'object.__lt__(self, other)\n' - 'object.__le__(self, other)\n' - 'object.__eq__(self, other)\n' - 'object.__ne__(self, other)\n' - 'object.__gt__(self, other)\n' - 'object.__ge__(self, other)\n' - '\n' - ' These are the so-called “rich comparison” methods. The\n' - ' correspondence between operator symbols and method names ' - 'is as\n' - ' follows: "x<y" calls "x.__lt__(y)", "x<=y" calls ' - '"x.__le__(y)",\n' - ' "x==y" calls "x.__eq__(y)", "x!=y" calls "x.__ne__(y)", ' - '"x>y" calls\n' - ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' - '\n' - ' A rich comparison method may return the singleton ' - '"NotImplemented"\n' - ' if it does not implement the operation for a given pair ' - 'of\n' - ' arguments. By convention, "False" and "True" are ' - 'returned for a\n' - ' successful comparison. However, these methods can return ' - 'any value,\n' - ' so if the comparison operator is used in a Boolean ' - 'context (e.g.,\n' - ' in the condition of an "if" statement), Python will call ' - '"bool()"\n' - ' on the value to determine if the result is true or ' - 'false.\n' - '\n' - ' By default, "object" implements "__eq__()" by using ' - '"is", returning\n' - ' "NotImplemented" in the case of a false comparison: ' - '"True if x is y\n' - ' else NotImplemented". 
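To illustrate the "NotImplemented" convention described above, a minimal sketch; "functools.total_ordering" is used here only as one convenient way to derive the remaining operators, and the class is invented for illustration:

    import functools

    @functools.total_ordering
    class Version:
        def __init__(self, major, minor):
            self.major, self.minor = major, minor

        def __eq__(self, other):
            if not isinstance(other, Version):
                return NotImplemented    # let the other operand try its reflected method
            return (self.major, self.minor) == (other.major, other.minor)

        def __lt__(self, other):
            if not isinstance(other, Version):
                return NotImplemented
            return (self.major, self.minor) < (other.major, other.minor)

    print(Version(1, 2) < Version(1, 10))   # True
    print(Version(1, 2) == "1.2")           # False: both sides return NotImplemented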
For "__ne__()", by default it ' - 'delegates to\n' - ' "__eq__()" and inverts the result unless it is ' - '"NotImplemented".\n' - ' There are no other implied relationships among the ' - 'comparison\n' - ' operators or default implementations; for example, the ' - 'truth of\n' - ' "(x.__hash__".\n' - '\n' - ' If a class that does not override "__eq__()" wishes to ' - 'suppress\n' - ' hash support, it should include "__hash__ = None" in the ' - 'class\n' - ' definition. A class which defines its own "__hash__()" ' - 'that\n' - ' explicitly raises a "TypeError" would be incorrectly ' - 'identified as\n' - ' hashable by an "isinstance(obj, ' - 'collections.abc.Hashable)" call.\n' - '\n' - ' Note:\n' - '\n' - ' By default, the "__hash__()" values of str and bytes ' - 'objects are\n' - ' “salted” with an unpredictable random value. Although ' - 'they\n' - ' remain constant within an individual Python process, ' - 'they are not\n' - ' predictable between repeated invocations of ' - 'Python.This is\n' - ' intended to provide protection against a ' - 'denial-of-service caused\n' - ' by carefully chosen inputs that exploit the worst ' - 'case\n' - ' performance of a dict insertion, *O*(*n*^2) ' - 'complexity. See\n' - ' http://ocert.org/advisories/ocert-2011-003.html for\n' - ' details.Changing hash values affects the iteration ' - 'order of sets.\n' - ' Python has never made guarantees about this ordering ' - '(and it\n' - ' typically varies between 32-bit and 64-bit builds).See ' - 'also\n' - ' "PYTHONHASHSEED".\n' - '\n' - ' Changed in version 3.3: Hash randomization is enabled by ' - 'default.\n' - '\n' - 'object.__bool__(self)\n' - '\n' - ' Called to implement truth value testing and the built-in ' - 'operation\n' - ' "bool()"; should return "False" or "True". When this ' - 'method is not\n' - ' defined, "__len__()" is called, if it is defined, and ' - 'the object is\n' - ' considered true if its result is nonzero. If a class ' - 'defines\n' - ' neither "__len__()" nor "__bool__()" (which is true of ' - 'the "object"\n' - ' class itself), all its instances are considered true.\n', - 'debugger': '"pdb" — The Python Debugger\n' - '***************************\n' - '\n' - '**Source code:** Lib/pdb.py\n' - '\n' - '======================================================================\n' - '\n' - 'The module "pdb" defines an interactive source code debugger ' - 'for\n' - 'Python programs. It supports setting (conditional) breakpoints ' - 'and\n' - 'single stepping at the source line level, inspection of stack ' - 'frames,\n' - 'source code listing, and evaluation of arbitrary Python code in ' - 'the\n' - 'context of any stack frame. It also supports post-mortem ' - 'debugging\n' - 'and can be called under program control.\n' - '\n' - 'The debugger is extensible – it is actually defined as the ' - 'class\n' - '"Pdb". This is currently undocumented but easily understood by ' - 'reading\n' - 'the source. 
The extension interface uses the modules "bdb" and ' - '"cmd".\n' - '\n' - 'See also:\n' - '\n' - ' Module "faulthandler"\n' - ' Used to dump Python tracebacks explicitly, on a fault, ' - 'after a\n' - ' timeout, or on a user signal.\n' - '\n' - ' Module "traceback"\n' - ' Standard interface to extract, format and print stack ' - 'traces of\n' - ' Python programs.\n' - '\n' - 'The typical usage to break into the debugger is to insert:\n' - '\n' - ' import pdb; pdb.set_trace()\n' - '\n' - 'Or:\n' - '\n' - ' breakpoint()\n' - '\n' - 'at the location you want to break into the debugger, and then ' - 'run the\n' - 'program. You can then step through the code following this ' - 'statement,\n' - 'and continue running without the debugger using the "continue"\n' - 'command.\n' - '\n' - 'Changed in version 3.7: The built-in "breakpoint()", when called ' - 'with\n' - 'defaults, can be used instead of "import pdb; pdb.set_trace()".\n' - '\n' - ' def double(x):\n' - ' breakpoint()\n' - ' return x * 2\n' - ' val = 3\n' - ' print(f"{val} * 2 is {double(val)}")\n' - '\n' - 'The debugger’s prompt is "(Pdb)", which is the indicator that ' - 'you are\n' - 'in debug mode:\n' - '\n' - ' > ...(2)double()\n' - ' -> breakpoint()\n' - ' (Pdb) p x\n' - ' 3\n' - ' (Pdb) continue\n' - ' 3 * 2 is 6\n' - '\n' - 'Changed in version 3.3: Tab-completion via the "readline" module ' - 'is\n' - 'available for commands and command arguments, e.g. the current ' - 'global\n' - 'and local names are offered as arguments of the "p" command.\n' - '\n' - 'You can also invoke "pdb" from the command line to debug other\n' - 'scripts. For example:\n' - '\n' - ' python -m pdb myscript.py\n' - '\n' - 'When invoked as a module, pdb will automatically enter ' - 'post-mortem\n' - 'debugging if the program being debugged exits abnormally. After ' - 'post-\n' - 'mortem debugging (or after normal exit of the program), pdb ' - 'will\n' - 'restart the program. Automatic restarting preserves pdb’s state ' - '(such\n' - 'as breakpoints) and in most cases is more useful than quitting ' - 'the\n' - 'debugger upon program’s exit.\n' - '\n' - 'Changed in version 3.2: Added the "-c" option to execute ' - 'commands as\n' - 'if given in a ".pdbrc" file; see Debugger Commands.\n' - '\n' - 'Changed in version 3.7: Added the "-m" option to execute ' - 'modules\n' - 'similar to the way "python -m" does. As with a script, the ' - 'debugger\n' - 'will pause execution just before the first line of the module.\n' - '\n' - 'Typical usage to execute a statement under control of the ' - 'debugger is:\n' - '\n' - ' >>> import pdb\n' - ' >>> def f(x):\n' - ' ... print(1 / x)\n' - ' >>> pdb.run("f(2)")\n' - ' > (1)()\n' - ' (Pdb) continue\n' - ' 0.5\n' - ' >>>\n' - '\n' - 'The typical usage to inspect a crashed program is:\n' - '\n' - ' >>> import pdb\n' - ' >>> def f(x):\n' - ' ... 
print(1 / x)\n' - ' ...\n' - ' >>> f(0)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - ' File "", line 2, in f\n' - ' ZeroDivisionError: division by zero\n' - ' >>> pdb.pm()\n' - ' > (2)f()\n' - ' (Pdb) p x\n' - ' 0\n' - ' (Pdb)\n' - '\n' - 'Changed in version 3.13: The implementation of **PEP 667** means ' - 'that\n' - 'name assignments made via "pdb" will immediately affect the ' - 'active\n' - 'scope, even when running inside an *optimized scope*.\n' - '\n' - 'The module defines the following functions; each enters the ' - 'debugger\n' - 'in a slightly different way:\n' - '\n' - 'pdb.run(statement, globals=None, locals=None)\n' - '\n' - ' Execute the *statement* (given as a string or a code object) ' - 'under\n' - ' debugger control. The debugger prompt appears before any ' - 'code is\n' - ' executed; you can set breakpoints and type "continue", or you ' - 'can\n' - ' step through the statement using "step" or "next" (all these\n' - ' commands are explained below). The optional *globals* and ' - '*locals*\n' - ' arguments specify the environment in which the code is ' - 'executed; by\n' - ' default the dictionary of the module "__main__" is used. ' - '(See the\n' - ' explanation of the built-in "exec()" or "eval()" functions.)\n' - '\n' - 'pdb.runeval(expression, globals=None, locals=None)\n' - '\n' - ' Evaluate the *expression* (given as a string or a code ' - 'object)\n' - ' under debugger control. When "runeval()" returns, it returns ' - 'the\n' - ' value of the *expression*. Otherwise this function is ' - 'similar to\n' - ' "run()".\n' - '\n' - 'pdb.runcall(function, *args, **kwds)\n' - '\n' - ' Call the *function* (a function or method object, not a ' - 'string)\n' - ' with the given arguments. When "runcall()" returns, it ' - 'returns\n' - ' whatever the function call returned. The debugger prompt ' - 'appears\n' - ' as soon as the function is entered.\n' - '\n' - 'pdb.set_trace(*, header=None, commands=None)\n' - '\n' - ' Enter the debugger at the calling stack frame. This is ' - 'useful to\n' - ' hard-code a breakpoint at a given point in a program, even if ' - 'the\n' - ' code is not otherwise being debugged (e.g. when an assertion\n' - ' fails). If given, *header* is printed to the console just ' - 'before\n' - ' debugging begins. The *commands* argument, if given, is a ' - 'list of\n' - ' commands to execute when the debugger starts.\n' - '\n' - ' Changed in version 3.7: The keyword-only argument *header*.\n' - '\n' - ' Changed in version 3.13: "set_trace()" will enter the ' - 'debugger\n' - ' immediately, rather than on the next line of code to be ' - 'executed.\n' - '\n' - ' Added in version 3.14: The *commands* argument.\n' - '\n' - 'pdb.post_mortem(t=None)\n' - '\n' - ' Enter post-mortem debugging of the given exception or ' - 'traceback\n' - ' object. If no value is given, it uses the exception that is\n' - ' currently being handled, or raises "ValueError" if there ' - 'isn’t one.\n' - '\n' - ' Changed in version 3.13: Support for exception objects was ' - 'added.\n' - '\n' - 'pdb.pm()\n' - '\n' - ' Enter post-mortem debugging of the exception found in\n' - ' "sys.last_exc".\n' - '\n' - 'The "run*" functions and "set_trace()" are aliases for ' - 'instantiating\n' - 'the "Pdb" class and calling the method of the same name. 
If you ' - 'want\n' - 'to access further features, you have to do this yourself:\n' - '\n' - "class pdb.Pdb(completekey='tab', stdin=None, stdout=None, " - 'skip=None, nosigint=False, readrc=True, mode=None)\n' - '\n' - ' "Pdb" is the debugger class.\n' - '\n' - ' The *completekey*, *stdin* and *stdout* arguments are passed ' - 'to the\n' - ' underlying "cmd.Cmd" class; see the description there.\n' - '\n' - ' The *skip* argument, if given, must be an iterable of ' - 'glob-style\n' - ' module name patterns. The debugger will not step into frames ' - 'that\n' - ' originate in a module that matches one of these patterns. ' - '[1]\n' - '\n' - ' By default, Pdb sets a handler for the SIGINT signal (which ' - 'is sent\n' - ' when the user presses "Ctrl-C" on the console) when you give ' - 'a\n' - ' "continue" command. This allows you to break into the ' - 'debugger\n' - ' again by pressing "Ctrl-C". If you want Pdb not to touch ' - 'the\n' - ' SIGINT handler, set *nosigint* to true.\n' - '\n' - ' The *readrc* argument defaults to true and controls whether ' - 'Pdb\n' - ' will load .pdbrc files from the filesystem.\n' - '\n' - ' The *mode* argument specifies how the debugger was invoked. ' - 'It\n' - ' impacts the workings of some debugger commands. Valid values ' - 'are\n' - ' "\'inline\'" (used by the breakpoint() builtin), "\'cli\'" ' - '(used by the\n' - ' command line invocation) or "None" (for backwards compatible\n' - ' behaviour, as before the *mode* argument was added).\n' - '\n' - ' Example call to enable tracing with *skip*:\n' - '\n' - " import pdb; pdb.Pdb(skip=['django.*']).set_trace()\n" - '\n' - ' Raises an auditing event "pdb.Pdb" with no arguments.\n' - '\n' - ' Changed in version 3.1: Added the *skip* parameter.\n' - '\n' - ' Changed in version 3.2: Added the *nosigint* parameter. ' - 'Previously,\n' - ' a SIGINT handler was never set by Pdb.\n' - '\n' - ' Changed in version 3.6: The *readrc* argument.\n' - '\n' - ' Added in version 3.14: Added the *mode* argument.\n' - '\n' - ' run(statement, globals=None, locals=None)\n' - ' runeval(expression, globals=None, locals=None)\n' - ' runcall(function, *args, **kwds)\n' - ' set_trace()\n' - '\n' - ' See the documentation for the functions explained above.\n' - '\n' - '\n' - 'Debugger Commands\n' - '=================\n' - '\n' - 'The commands recognized by the debugger are listed below. Most\n' - 'commands can be abbreviated to one or two letters as indicated; ' - 'e.g.\n' - '"h(elp)" means that either "h" or "help" can be used to enter ' - 'the help\n' - 'command (but not "he" or "hel", nor "H" or "Help" or "HELP").\n' - 'Arguments to commands must be separated by whitespace (spaces ' - 'or\n' - 'tabs). Optional arguments are enclosed in square brackets ' - '("[]") in\n' - 'the command syntax; the square brackets must not be typed.\n' - 'Alternatives in the command syntax are separated by a vertical ' - 'bar\n' - '("|").\n' - '\n' - 'Entering a blank line repeats the last command entered. ' - 'Exception: if\n' - 'the last command was a "list" command, the next 11 lines are ' - 'listed.\n' - '\n' - 'Commands that the debugger doesn’t recognize are assumed to be ' - 'Python\n' - 'statements and are executed in the context of the program being\n' - 'debugged. Python statements can also be prefixed with an ' - 'exclamation\n' - 'point ("!"). 
This is a powerful way to inspect the program ' - 'being\n' - 'debugged; it is even possible to change a variable or call a ' - 'function.\n' - 'When an exception occurs in such a statement, the exception name ' - 'is\n' - 'printed but the debugger’s state is not changed.\n' - '\n' - 'Changed in version 3.13: Expressions/Statements whose prefix is ' - 'a pdb\n' - 'command are now correctly identified and executed.\n' - '\n' - 'The debugger supports aliases. Aliases can have parameters ' - 'which\n' - 'allows one a certain level of adaptability to the context under\n' - 'examination.\n' - '\n' - 'Multiple commands may be entered on a single line, separated by ' - '";;".\n' - '(A single ";" is not used as it is the separator for multiple ' - 'commands\n' - 'in a line that is passed to the Python parser.) No intelligence ' - 'is\n' - 'applied to separating the commands; the input is split at the ' - 'first\n' - '";;" pair, even if it is in the middle of a quoted string. A\n' - 'workaround for strings with double semicolons is to use ' - 'implicit\n' - 'string concatenation "\';\'\';\'" or "";"";"".\n' - '\n' - 'To set a temporary global variable, use a *convenience ' - 'variable*. A\n' - '*convenience variable* is a variable whose name starts with ' - '"$". For\n' - 'example, "$foo = 1" sets a global variable "$foo" which you can ' - 'use in\n' - 'the debugger session. The *convenience variables* are cleared ' - 'when\n' - 'the program resumes execution so it’s less likely to interfere ' - 'with\n' - 'your program compared to using normal variables like "foo = 1".\n' - '\n' - 'There are three preset *convenience variables*:\n' - '\n' - '* "$_frame": the current frame you are debugging\n' - '\n' - '* "$_retval": the return value if the frame is returning\n' - '\n' - '* "$_exception": the exception if the frame is raising an ' - 'exception\n' - '\n' - 'Added in version 3.12: Added the *convenience variable* ' - 'feature.\n' - '\n' - 'If a file ".pdbrc" exists in the user’s home directory or in ' - 'the\n' - 'current directory, it is read with "\'utf-8\'" encoding and ' - 'executed as\n' - 'if it had been typed at the debugger prompt, with the exception ' - 'that\n' - 'empty lines and lines starting with "#" are ignored. This is\n' - 'particularly useful for aliases. If both files exist, the one ' - 'in the\n' - 'home directory is read first and aliases defined there can be\n' - 'overridden by the local file.\n' - '\n' - 'Changed in version 3.2: ".pdbrc" can now contain commands that\n' - 'continue debugging, such as "continue" or "next". Previously, ' - 'these\n' - 'commands had no effect.\n' - '\n' - 'Changed in version 3.11: ".pdbrc" is now read with "\'utf-8\'" ' - 'encoding.\n' - 'Previously, it was read with the system locale encoding.\n' - '\n' - 'h(elp) [command]\n' - '\n' - ' Without argument, print the list of available commands. With ' - 'a\n' - ' *command* as argument, print help about that command. "help ' - 'pdb"\n' - ' displays the full documentation (the docstring of the "pdb"\n' - ' module). Since the *command* argument must be an identifier, ' - '"help\n' - ' exec" must be entered to get help on the "!" command.\n' - '\n' - 'w(here) [count]\n' - '\n' - ' Print a stack trace, with the most recent frame at the ' - 'bottom. if\n' - ' *count* is 0, print the current frame entry. If *count* is\n' - ' negative, print the least recent - *count* frames. If *count* ' - 'is\n' - ' positive, print the most recent *count* frames. 
An arrow ' - '(">")\n' - ' indicates the current frame, which determines the context of ' - 'most\n' - ' commands.\n' - '\n' - ' Changed in version 3.14: *count* argument is added.\n' - '\n' - 'd(own) [count]\n' - '\n' - ' Move the current frame *count* (default one) levels down in ' - 'the\n' - ' stack trace (to a newer frame).\n' - '\n' - 'u(p) [count]\n' - '\n' - ' Move the current frame *count* (default one) levels up in the ' - 'stack\n' - ' trace (to an older frame).\n' - '\n' - 'b(reak) [([filename:]lineno | function) [, condition]]\n' - '\n' - ' With a *lineno* argument, set a break at line *lineno* in ' - 'the\n' - ' current file. The line number may be prefixed with a ' - '*filename* and\n' - ' a colon, to specify a breakpoint in another file (possibly ' - 'one that\n' - ' hasn’t been loaded yet). The file is searched on ' - '"sys.path".\n' - ' Acceptable forms of *filename* are "/abspath/to/file.py",\n' - ' "relpath/file.py", "module" and "package.module".\n' - '\n' - ' With a *function* argument, set a break at the first ' - 'executable\n' - ' statement within that function. *function* can be any ' - 'expression\n' - ' that evaluates to a function in the current namespace.\n' - '\n' - ' If a second argument is present, it is an expression which ' - 'must\n' - ' evaluate to true before the breakpoint is honored.\n' - '\n' - ' Without argument, list all breaks, including for each ' - 'breakpoint,\n' - ' the number of times that breakpoint has been hit, the ' - 'current\n' - ' ignore count, and the associated condition if any.\n' - '\n' - ' Each breakpoint is assigned a number to which all the other\n' - ' breakpoint commands refer.\n' - '\n' - 'tbreak [([filename:]lineno | function) [, condition]]\n' - '\n' - ' Temporary breakpoint, which is removed automatically when it ' - 'is\n' - ' first hit. The arguments are the same as for "break".\n' - '\n' - 'cl(ear) [filename:lineno | bpnumber ...]\n' - '\n' - ' With a *filename:lineno* argument, clear all the breakpoints ' - 'at\n' - ' this line. With a space separated list of breakpoint numbers, ' - 'clear\n' - ' those breakpoints. Without argument, clear all breaks (but ' - 'first\n' - ' ask confirmation).\n' - '\n' - 'disable bpnumber [bpnumber ...]\n' - '\n' - ' Disable the breakpoints given as a space separated list of\n' - ' breakpoint numbers. Disabling a breakpoint means it cannot ' - 'cause\n' - ' the program to stop execution, but unlike clearing a ' - 'breakpoint, it\n' - ' remains in the list of breakpoints and can be (re-)enabled.\n' - '\n' - 'enable bpnumber [bpnumber ...]\n' - '\n' - ' Enable the breakpoints specified.\n' - '\n' - 'ignore bpnumber [count]\n' - '\n' - ' Set the ignore count for the given breakpoint number. If ' - '*count*\n' - ' is omitted, the ignore count is set to 0. A breakpoint ' - 'becomes\n' - ' active when the ignore count is zero. When non-zero, the ' - '*count*\n' - ' is decremented each time the breakpoint is reached and the\n' - ' breakpoint is not disabled and any associated condition ' - 'evaluates\n' - ' to true.\n' - '\n' - 'condition bpnumber [condition]\n' - '\n' - ' Set a new *condition* for the breakpoint, an expression which ' - 'must\n' - ' evaluate to true before the breakpoint is honored. If ' - '*condition*\n' - ' is absent, any existing condition is removed; i.e., the ' - 'breakpoint\n' - ' is made unconditional.\n' - '\n' - 'commands [bpnumber]\n' - '\n' - ' Specify a list of commands for breakpoint number *bpnumber*. 
' - 'The\n' - ' commands themselves appear on the following lines. Type a ' - 'line\n' - ' containing just "end" to terminate the commands. An example:\n' - '\n' - ' (Pdb) commands 1\n' - ' (com) p some_variable\n' - ' (com) end\n' - ' (Pdb)\n' - '\n' - ' To remove all commands from a breakpoint, type "commands" ' - 'and\n' - ' follow it immediately with "end"; that is, give no commands.\n' - '\n' - ' With no *bpnumber* argument, "commands" refers to the last\n' - ' breakpoint set.\n' - '\n' - ' You can use breakpoint commands to start your program up ' - 'again.\n' - ' Simply use the "continue" command, or "step", or any other ' - 'command\n' - ' that resumes execution.\n' - '\n' - ' Specifying any command resuming execution (currently ' - '"continue",\n' - ' "step", "next", "return", "until", "jump", "quit" and their\n' - ' abbreviations) terminates the command list (as if that ' - 'command was\n' - ' immediately followed by end). This is because any time you ' - 'resume\n' - ' execution (even with a simple next or step), you may ' - 'encounter\n' - ' another breakpoint—which could have its own command list, ' - 'leading\n' - ' to ambiguities about which list to execute.\n' - '\n' - ' If the list of commands contains the "silent" command, or a ' - 'command\n' - ' that resumes execution, then the breakpoint message ' - 'containing\n' - ' information about the frame is not displayed.\n' - '\n' - ' Changed in version 3.14: Frame information will not be ' - 'displayed if\n' - ' a command that resumes execution is present in the command ' - 'list.\n' - '\n' - 's(tep)\n' - '\n' - ' Execute the current line, stop at the first possible ' - 'occasion\n' - ' (either in a function that is called or on the next line in ' - 'the\n' - ' current function).\n' - '\n' - 'n(ext)\n' - '\n' - ' Continue execution until the next line in the current ' - 'function is\n' - ' reached or it returns. (The difference between "next" and ' - '"step"\n' - ' is that "step" stops inside a called function, while "next"\n' - ' executes called functions at (nearly) full speed, only ' - 'stopping at\n' - ' the next line in the current function.)\n' - '\n' - 'unt(il) [lineno]\n' - '\n' - ' Without argument, continue execution until the line with a ' - 'number\n' - ' greater than the current one is reached.\n' - '\n' - ' With *lineno*, continue execution until a line with a number\n' - ' greater or equal to *lineno* is reached. In both cases, also ' - 'stop\n' - ' when the current frame returns.\n' - '\n' - ' Changed in version 3.2: Allow giving an explicit line ' - 'number.\n' - '\n' - 'r(eturn)\n' - '\n' - ' Continue execution until the current function returns.\n' - '\n' - 'c(ont(inue))\n' - '\n' - ' Continue execution, only stop when a breakpoint is ' - 'encountered.\n' - '\n' - 'j(ump) lineno\n' - '\n' - ' Set the next line that will be executed. Only available in ' - 'the\n' - ' bottom-most frame. This lets you jump back and execute code ' - 'again,\n' - ' or jump forward to skip code that you don’t want to run.\n' - '\n' - ' It should be noted that not all jumps are allowed – for ' - 'instance it\n' - ' is not possible to jump into the middle of a "for" loop or ' - 'out of a\n' - ' "finally" clause.\n' - '\n' - 'l(ist) [first[, last]]\n' - '\n' - ' List source code for the current file. Without arguments, ' - 'list 11\n' - ' lines around the current line or continue the previous ' - 'listing.\n' - ' With "." as argument, list 11 lines around the current line. 
' - 'With\n' - ' one argument, list 11 lines around at that line. With two\n' - ' arguments, list the given range; if the second argument is ' - 'less\n' - ' than the first, it is interpreted as a count.\n' - '\n' - ' The current line in the current frame is indicated by "->". ' - 'If an\n' - ' exception is being debugged, the line where the exception ' - 'was\n' - ' originally raised or propagated is indicated by ">>", if it ' - 'differs\n' - ' from the current line.\n' - '\n' - ' Changed in version 3.2: Added the ">>" marker.\n' - '\n' - 'll | longlist\n' - '\n' - ' List all source code for the current function or frame.\n' - ' Interesting lines are marked as for "list".\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'a(rgs)\n' - '\n' - ' Print the arguments of the current function and their ' - 'current\n' - ' values.\n' - '\n' - 'p expression\n' - '\n' - ' Evaluate *expression* in the current context and print its ' - 'value.\n' - '\n' - ' Note:\n' - '\n' - ' "print()" can also be used, but is not a debugger command — ' - 'this\n' - ' executes the Python "print()" function.\n' - '\n' - 'pp expression\n' - '\n' - ' Like the "p" command, except the value of *expression* is ' - 'pretty-\n' - ' printed using the "pprint" module.\n' - '\n' - 'whatis expression\n' - '\n' - ' Print the type of *expression*.\n' - '\n' - 'source expression\n' - '\n' - ' Try to get source code of *expression* and display it.\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'display [expression]\n' - '\n' - ' Display the value of *expression* if it changed, each time\n' - ' execution stops in the current frame.\n' - '\n' - ' Without *expression*, list all display expressions for the ' - 'current\n' - ' frame.\n' - '\n' - ' Note:\n' - '\n' - ' Display evaluates *expression* and compares to the result ' - 'of the\n' - ' previous evaluation of *expression*, so when the result is\n' - ' mutable, display may not be able to pick up the changes.\n' - '\n' - ' Example:\n' - '\n' - ' lst = []\n' - ' breakpoint()\n' - ' pass\n' - ' lst.append(1)\n' - ' print(lst)\n' - '\n' - ' Display won’t realize "lst" has been changed because the ' - 'result of\n' - ' evaluation is modified in place by "lst.append(1)" before ' - 'being\n' - ' compared:\n' - '\n' - ' > example.py(3)()\n' - ' -> pass\n' - ' (Pdb) display lst\n' - ' display lst: []\n' - ' (Pdb) n\n' - ' > example.py(4)()\n' - ' -> lst.append(1)\n' - ' (Pdb) n\n' - ' > example.py(5)()\n' - ' -> print(lst)\n' - ' (Pdb)\n' - '\n' - ' You can do some tricks with copy mechanism to make it work:\n' - '\n' - ' > example.py(3)()\n' - ' -> pass\n' - ' (Pdb) display lst[:]\n' - ' display lst[:]: []\n' - ' (Pdb) n\n' - ' > example.py(4)()\n' - ' -> lst.append(1)\n' - ' (Pdb) n\n' - ' > example.py(5)()\n' - ' -> print(lst)\n' - ' display lst[:]: [1] [old: []]\n' - ' (Pdb)\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'undisplay [expression]\n' - '\n' - ' Do not display *expression* anymore in the current frame. ' - 'Without\n' - ' *expression*, clear all display expressions for the current ' - 'frame.\n' - '\n' - ' Added in version 3.2.\n' - '\n' - 'interact\n' - '\n' - ' Start an interactive interpreter (using the "code" module) in ' - 'a new\n' - ' global namespace initialised from the local and global ' - 'namespaces\n' - ' for the current scope. 
Use "exit()" or "quit()" to exit the\n' - ' interpreter and return to the debugger.\n' - '\n' - ' Note:\n' - '\n' - ' As "interact" creates a new dedicated namespace for code\n' - ' execution, assignments to variables will not affect the ' - 'original\n' - ' namespaces. However, modifications to any referenced ' - 'mutable\n' - ' objects will be reflected in the original namespaces as ' - 'usual.\n' - '\n' - ' Added in version 3.2.\n' - '\n' - ' Changed in version 3.13: "exit()" and "quit()" can be used to ' - 'exit\n' - ' the "interact" command.\n' - '\n' - ' Changed in version 3.13: "interact" directs its output to ' - 'the\n' - ' debugger’s output channel rather than "sys.stderr".\n' - '\n' - 'alias [name [command]]\n' - '\n' - ' Create an alias called *name* that executes *command*. The\n' - ' *command* must *not* be enclosed in quotes. Replaceable ' - 'parameters\n' - ' can be indicated by "%1", "%2", … and "%9", while "%*" is ' - 'replaced\n' - ' by all the parameters. If *command* is omitted, the current ' - 'alias\n' - ' for *name* is shown. If no arguments are given, all aliases ' - 'are\n' - ' listed.\n' - '\n' - ' Aliases may be nested and can contain anything that can be ' - 'legally\n' - ' typed at the pdb prompt. Note that internal pdb commands ' - '*can* be\n' - ' overridden by aliases. Such a command is then hidden until ' - 'the\n' - ' alias is removed. Aliasing is recursively applied to the ' - 'first\n' - ' word of the command line; all other words in the line are ' - 'left\n' - ' alone.\n' - '\n' - ' As an example, here are two useful aliases (especially when ' - 'placed\n' - ' in the ".pdbrc" file):\n' - '\n' - ' # Print instance variables (usage "pi classInst")\n' - ' alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = ' - '{%1.__dict__[k]}")\n' - ' # Print instance variables in self\n' - ' alias ps pi self\n' - '\n' - 'unalias name\n' - '\n' - ' Delete the specified alias *name*.\n' - '\n' - '! statement\n' - '\n' - ' Execute the (one-line) *statement* in the context of the ' - 'current\n' - ' stack frame. The exclamation point can be omitted unless the ' - 'first\n' - ' word of the statement resembles a debugger command, e.g.:\n' - '\n' - ' (Pdb) ! n=42\n' - ' (Pdb)\n' - '\n' - ' To set a global variable, you can prefix the assignment ' - 'command\n' - ' with a "global" statement on the same line, e.g.:\n' - '\n' - " (Pdb) global list_options; list_options = ['-l']\n" - ' (Pdb)\n' - '\n' - 'run [args ...]\n' - 'restart [args ...]\n' - '\n' - ' Restart the debugged Python program. If *args* is supplied, ' - 'it is\n' - ' split with "shlex" and the result is used as the new ' - '"sys.argv".\n' - ' History, breakpoints, actions and debugger options are ' - 'preserved.\n' - ' "restart" is an alias for "run".\n' - '\n' - ' Changed in version 3.14: "run" and "restart" commands are ' - 'disabled\n' - ' when the debugger is invoked in "\'inline\'" mode.\n' - '\n' - 'q(uit)\n' - '\n' - ' Quit from the debugger. 
The program being executed is ' - 'aborted.\n' - '\n' - 'debug code\n' - '\n' - ' Enter a recursive debugger that steps through *code* (which ' - 'is an\n' - ' arbitrary expression or statement to be executed in the ' - 'current\n' - ' environment).\n' - '\n' - 'retval\n' - '\n' - ' Print the return value for the last return of the current ' - 'function.\n' - '\n' - 'exceptions [excnumber]\n' - '\n' - ' List or jump between chained exceptions.\n' - '\n' - ' When using "pdb.pm()" or "Pdb.post_mortem(...)" with a ' - 'chained\n' - ' exception instead of a traceback, it allows the user to move\n' - ' between the chained exceptions using "exceptions" command to ' - 'list\n' - ' exceptions, and "exception " to switch to that ' - 'exception.\n' - '\n' - ' Example:\n' - '\n' - ' def out():\n' - ' try:\n' - ' middle()\n' - ' except Exception as e:\n' - ' raise ValueError("reraise middle() error") from e\n' - '\n' - ' def middle():\n' - ' try:\n' - ' return inner(0)\n' - ' except Exception as e:\n' - ' raise ValueError("Middle fail")\n' - '\n' - ' def inner(x):\n' - ' 1 / x\n' - '\n' - ' out()\n' - '\n' - ' calling "pdb.pm()" will allow to move between exceptions:\n' - '\n' - ' > example.py(5)out()\n' - ' -> raise ValueError("reraise middle() error") from e\n' - '\n' - ' (Pdb) exceptions\n' - " 0 ZeroDivisionError('division by zero')\n" - " 1 ValueError('Middle fail')\n" - " > 2 ValueError('reraise middle() error')\n" - '\n' - ' (Pdb) exceptions 0\n' - ' > example.py(16)inner()\n' - ' -> 1 / x\n' - '\n' - ' (Pdb) up\n' - ' > example.py(10)middle()\n' - ' -> return inner(0)\n' - '\n' - ' Added in version 3.13.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] Whether a frame is considered to originate in a certain ' - 'module is\n' - ' determined by the "__name__" in the frame globals.\n', - 'del': 'The "del" statement\n' - '*******************\n' - '\n' - ' del_stmt ::= "del" target_list\n' - '\n' - 'Deletion is recursively defined very similar to the way assignment ' - 'is\n' - 'defined. Rather than spelling it out in full details, here are some\n' - 'hints.\n' - '\n' - 'Deletion of a target list recursively deletes each target, from left\n' - 'to right.\n' - '\n' - 'Deletion of a name removes the binding of that name from the local ' - 'or\n' - 'global namespace, depending on whether the name occurs in a "global"\n' - 'statement in the same code block. 
If the name is unbound, a\n' - '"NameError" exception will be raised.\n' - '\n' - 'Deletion of attribute references, subscriptions and slicings is ' - 'passed\n' - 'to the primary object involved; deletion of a slicing is in general\n' - 'equivalent to assignment of an empty slice of the right type (but ' - 'even\n' - 'this is determined by the sliced object).\n' - '\n' - 'Changed in version 3.2: Previously it was illegal to delete a name\n' - 'from the local namespace if it occurs as a free variable in a nested\n' - 'block.\n', - 'dict': 'Dictionary displays\n' - '*******************\n' - '\n' - 'A dictionary display is a possibly empty series of dict items\n' - '(key/value pairs) enclosed in curly braces:\n' - '\n' - ' dict_display ::= "{" [dict_item_list | dict_comprehension] ' - '"}"\n' - ' dict_item_list ::= dict_item ("," dict_item)* [","]\n' - ' dict_item ::= expression ":" expression | "**" or_expr\n' - ' dict_comprehension ::= expression ":" expression comp_for\n' - '\n' - 'A dictionary display yields a new dictionary object.\n' - '\n' - 'If a comma-separated sequence of dict items is given, they are\n' - 'evaluated from left to right to define the entries of the ' - 'dictionary:\n' - 'each key object is used as a key into the dictionary to store the\n' - 'corresponding value. This means that you can specify the same key\n' - 'multiple times in the dict item list, and the final dictionary’s ' - 'value\n' - 'for that key will be the last one given.\n' - '\n' - 'A double asterisk "**" denotes *dictionary unpacking*. Its operand\n' - 'must be a *mapping*. Each mapping item is added to the new\n' - 'dictionary. Later values replace values already set by earlier ' - 'dict\n' - 'items and earlier dictionary unpackings.\n' - '\n' - 'Added in version 3.5: Unpacking into dictionary displays, ' - 'originally\n' - 'proposed by **PEP 448**.\n' - '\n' - 'A dict comprehension, in contrast to list and set comprehensions,\n' - 'needs two expressions separated with a colon followed by the usual\n' - '“for” and “if” clauses. When the comprehension is run, the ' - 'resulting\n' - 'key and value elements are inserted in the new dictionary in the ' - 'order\n' - 'they are produced.\n' - '\n' - 'Restrictions on the types of the key values are listed earlier in\n' - 'section The standard type hierarchy. (To summarize, the key type\n' - 'should be *hashable*, which excludes all mutable objects.) Clashes\n' - 'between duplicate keys are not detected; the last value (textually\n' - 'rightmost in the display) stored for a given key value prevails.\n' - '\n' - 'Changed in version 3.8: Prior to Python 3.8, in dict ' - 'comprehensions,\n' - 'the evaluation order of key and value was not well-defined. In\n' - 'CPython, the value was evaluated before the key. Starting with ' - '3.8,\n' - 'the key is evaluated before the value, as proposed by **PEP 572**.\n', - 'dynamic-features': 'Interaction with dynamic features\n' - '*********************************\n' - '\n' - 'Name resolution of free variables occurs at runtime, not ' - 'at compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access ' - 'to the full\n' - 'environment for resolving names. Names may be resolved ' - 'in the local\n' - 'and global namespaces of the caller. Free variables are ' - 'not resolved\n' - 'in the nearest enclosing namespace, but in the global ' - 'namespace. 
[1]\n' - 'The "exec()" and "eval()" functions have optional ' - 'arguments to\n' - 'override the global and local namespace. If only one ' - 'namespace is\n' - 'specified, it is used for both.\n', - 'else': 'The "if" statement\n' - '******************\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the expressions ' - 'one\n' - 'by one until one is found to be true (see section Boolean ' - 'operations\n' - 'for the definition of true and false); then that suite is executed\n' - '(and no other part of the "if" statement is executed or evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, if\n' - 'present, is executed.\n', - 'exceptions': 'Exceptions\n' - '**********\n' - '\n' - 'Exceptions are a means of breaking out of the normal flow of ' - 'control\n' - 'of a code block in order to handle errors or other ' - 'exceptional\n' - 'conditions. An exception is *raised* at the point where the ' - 'error is\n' - 'detected; it may be *handled* by the surrounding code block or ' - 'by any\n' - 'code block that directly or indirectly invoked the code block ' - 'where\n' - 'the error occurred.\n' - '\n' - 'The Python interpreter raises an exception when it detects a ' - 'run-time\n' - 'error (such as division by zero). A Python program can also\n' - 'explicitly raise an exception with the "raise" statement. ' - 'Exception\n' - 'handlers are specified with the "try" … "except" statement. ' - 'The\n' - '"finally" clause of such a statement can be used to specify ' - 'cleanup\n' - 'code which does not handle the exception, but is executed ' - 'whether an\n' - 'exception occurred or not in the preceding code.\n' - '\n' - 'Python uses the “termination” model of error handling: an ' - 'exception\n' - 'handler can find out what happened and continue execution at ' - 'an outer\n' - 'level, but it cannot repair the cause of the error and retry ' - 'the\n' - 'failing operation (except by re-entering the offending piece ' - 'of code\n' - 'from the top).\n' - '\n' - 'When an exception is not handled at all, the interpreter ' - 'terminates\n' - 'execution of the program, or returns to its interactive main ' - 'loop. In\n' - 'either case, it prints a stack traceback, except when the ' - 'exception is\n' - '"SystemExit".\n' - '\n' - 'Exceptions are identified by class instances. The "except" ' - 'clause is\n' - 'selected depending on the class of the instance: it must ' - 'reference the\n' - 'class of the instance or a *non-virtual base class* thereof. ' - 'The\n' - 'instance can be received by the handler and can carry ' - 'additional\n' - 'information about the exceptional condition.\n' - '\n' - 'Note:\n' - '\n' - ' Exception messages are not part of the Python API. 
Their ' - 'contents\n' - ' may change from one version of Python to the next without ' - 'warning\n' - ' and should not be relied on by code which will run under ' - 'multiple\n' - ' versions of the interpreter.\n' - '\n' - 'See also the description of the "try" statement in section The ' - 'try\n' - 'statement and "raise" statement in section The raise ' - 'statement.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] This limitation occurs because the code that is executed ' - 'by these\n' - ' operations is not available at the time the module is ' - 'compiled.\n', - 'execmodel': 'Execution model\n' - '***************\n' - '\n' - '\n' - 'Structure of a program\n' - '======================\n' - '\n' - 'A Python program is constructed from code blocks. A *block* is ' - 'a piece\n' - 'of Python program text that is executed as a unit. The ' - 'following are\n' - 'blocks: a module, a function body, and a class definition. ' - 'Each\n' - 'command typed interactively is a block. A script file (a file ' - 'given\n' - 'as standard input to the interpreter or specified as a command ' - 'line\n' - 'argument to the interpreter) is a code block. A script command ' - '(a\n' - 'command specified on the interpreter command line with the ' - '"-c"\n' - 'option) is a code block. A module run as a top level script (as ' - 'module\n' - '"__main__") from the command line using a "-m" argument is also ' - 'a code\n' - 'block. The string argument passed to the built-in functions ' - '"eval()"\n' - 'and "exec()" is a code block.\n' - '\n' - 'A code block is executed in an *execution frame*. A frame ' - 'contains\n' - 'some administrative information (used for debugging) and ' - 'determines\n' - 'where and how execution continues after the code block’s ' - 'execution has\n' - 'completed.\n' - '\n' - '\n' - 'Naming and binding\n' - '==================\n' - '\n' - '\n' - 'Binding of names\n' - '----------------\n' - '\n' - '*Names* refer to objects. Names are introduced by name ' - 'binding\n' - 'operations.\n' - '\n' - 'The following constructs bind names:\n' - '\n' - '* formal parameters to functions,\n' - '\n' - '* class definitions,\n' - '\n' - '* function definitions,\n' - '\n' - '* assignment expressions,\n' - '\n' - '* targets that are identifiers if occurring in an assignment:\n' - '\n' - ' * "for" loop header,\n' - '\n' - ' * after "as" in a "with" statement, "except" clause, ' - '"except*"\n' - ' clause, or in the as-pattern in structural pattern ' - 'matching,\n' - '\n' - ' * in a capture pattern in structural pattern matching\n' - '\n' - '* "import" statements.\n' - '\n' - '* "type" statements.\n' - '\n' - '* type parameter lists.\n' - '\n' - 'The "import" statement of the form "from ... import *" binds ' - 'all names\n' - 'defined in the imported module, except those beginning with an\n' - 'underscore. This form may only be used at the module level.\n' - '\n' - 'A target occurring in a "del" statement is also considered ' - 'bound for\n' - 'this purpose (though the actual semantics are to unbind the ' - 'name).\n' - '\n' - 'Each assignment or import statement occurs within a block ' - 'defined by a\n' - 'class or function definition or at the module level (the ' - 'top-level\n' - 'code block).\n' - '\n' - 'If a name is bound in a block, it is a local variable of that ' - 'block,\n' - 'unless declared as "nonlocal" or "global". If a name is bound ' - 'at the\n' - 'module level, it is a global variable. (The variables of the ' - 'module\n' - 'code block are local and global.) 
If a variable is used in a ' - 'code\n' - 'block but not defined there, it is a *free variable*.\n' - '\n' - 'Each occurrence of a name in the program text refers to the ' - '*binding*\n' - 'of that name established by the following name resolution ' - 'rules.\n' - '\n' - '\n' - 'Resolution of names\n' - '-------------------\n' - '\n' - 'A *scope* defines the visibility of a name within a block. If ' - 'a local\n' - 'variable is defined in a block, its scope includes that block. ' - 'If the\n' - 'definition occurs in a function block, the scope extends to any ' - 'blocks\n' - 'contained within the defining one, unless a contained block ' - 'introduces\n' - 'a different binding for the name.\n' - '\n' - 'When a name is used in a code block, it is resolved using the ' - 'nearest\n' - 'enclosing scope. The set of all such scopes visible to a code ' - 'block\n' - 'is called the block’s *environment*.\n' - '\n' - 'When a name is not found at all, a "NameError" exception is ' - 'raised. If\n' - 'the current scope is a function scope, and the name refers to a ' - 'local\n' - 'variable that has not yet been bound to a value at the point ' - 'where the\n' - 'name is used, an "UnboundLocalError" exception is raised.\n' - '"UnboundLocalError" is a subclass of "NameError".\n' - '\n' - 'If a name binding operation occurs anywhere within a code ' - 'block, all\n' - 'uses of the name within the block are treated as references to ' - 'the\n' - 'current block. This can lead to errors when a name is used ' - 'within a\n' - 'block before it is bound. This rule is subtle. Python lacks\n' - 'declarations and allows name binding operations to occur ' - 'anywhere\n' - 'within a code block. The local variables of a code block can ' - 'be\n' - 'determined by scanning the entire text of the block for name ' - 'binding\n' - 'operations. See the FAQ entry on UnboundLocalError for ' - 'examples.\n' - '\n' - 'If the "global" statement occurs within a block, all uses of ' - 'the names\n' - 'specified in the statement refer to the bindings of those names ' - 'in the\n' - 'top-level namespace. Names are resolved in the top-level ' - 'namespace by\n' - 'searching the global namespace, i.e. the namespace of the ' - 'module\n' - 'containing the code block, and the builtins namespace, the ' - 'namespace\n' - 'of the module "builtins". The global namespace is searched ' - 'first. If\n' - 'the names are not found there, the builtins namespace is ' - 'searched\n' - 'next. If the names are also not found in the builtins ' - 'namespace, new\n' - 'variables are created in the global namespace. The global ' - 'statement\n' - 'must precede all uses of the listed names.\n' - '\n' - 'The "global" statement has the same scope as a name binding ' - 'operation\n' - 'in the same block. If the nearest enclosing scope for a free ' - 'variable\n' - 'contains a global statement, the free variable is treated as a ' - 'global.\n' - '\n' - 'The "nonlocal" statement causes corresponding names to refer ' - 'to\n' - 'previously bound variables in the nearest enclosing function ' - 'scope.\n' - '"SyntaxError" is raised at compile time if the given name does ' - 'not\n' - 'exist in any enclosing function scope. Type parameters cannot ' - 'be\n' - 'rebound with the "nonlocal" statement.\n' - '\n' - 'The namespace for a module is automatically created the first ' - 'time a\n' - 'module is imported. 
The main module for a script is always ' - 'called\n' - '"__main__".\n' - '\n' - 'Class definition blocks and arguments to "exec()" and "eval()" ' - 'are\n' - 'special in the context of name resolution. A class definition ' - 'is an\n' - 'executable statement that may use and define names. These ' - 'references\n' - 'follow the normal rules for name resolution with an exception ' - 'that\n' - 'unbound local variables are looked up in the global namespace. ' - 'The\n' - 'namespace of the class definition becomes the attribute ' - 'dictionary of\n' - 'the class. The scope of names defined in a class block is ' - 'limited to\n' - 'the class block; it does not extend to the code blocks of ' - 'methods.\n' - 'This includes comprehensions and generator expressions, but it ' - 'does\n' - 'not include annotation scopes, which have access to their ' - 'enclosing\n' - 'class scopes. This means that the following will fail:\n' - '\n' - ' class A:\n' - ' a = 42\n' - ' b = list(a + i for i in range(10))\n' - '\n' - 'However, the following will succeed:\n' - '\n' - ' class A:\n' - ' type Alias = Nested\n' - ' class Nested: pass\n' - '\n' - " print(A.Alias.__value__) # \n" - '\n' - '\n' - 'Annotation scopes\n' - '-----------------\n' - '\n' - '*Annotations*, type parameter lists and "type" statements ' - 'introduce\n' - '*annotation scopes*, which behave mostly like function scopes, ' - 'but\n' - 'with some exceptions discussed below.\n' - '\n' - 'Annotation scopes are used in the following contexts:\n' - '\n' - '* *Function annotations*.\n' - '\n' - '* *Variable annotations*.\n' - '\n' - '* Type parameter lists for generic type aliases.\n' - '\n' - '* Type parameter lists for generic functions. A generic ' - 'function’s\n' - ' annotations are executed within the annotation scope, but ' - 'its\n' - ' defaults and decorators are not.\n' - '\n' - '* Type parameter lists for generic classes. A generic class’s ' - 'base\n' - ' classes and keyword arguments are executed within the ' - 'annotation\n' - ' scope, but its decorators are not.\n' - '\n' - '* The bounds, constraints, and default values for type ' - 'parameters\n' - ' (lazily evaluated).\n' - '\n' - '* The value of type aliases (lazily evaluated).\n' - '\n' - 'Annotation scopes differ from function scopes in the following ' - 'ways:\n' - '\n' - '* Annotation scopes have access to their enclosing class ' - 'namespace. If\n' - ' an annotation scope is immediately within a class scope, or ' - 'within\n' - ' another annotation scope that is immediately within a class ' - 'scope,\n' - ' the code in the annotation scope can use names defined in the ' - 'class\n' - ' scope as if it were executed directly within the class body. ' - 'This\n' - ' contrasts with regular functions defined within classes, ' - 'which\n' - ' cannot access names defined in the class scope.\n' - '\n' - '* Expressions in annotation scopes cannot contain "yield", ' - '"yield\n' - ' from", "await", or ":=" expressions. (These expressions are ' - 'allowed\n' - ' in other scopes contained within the annotation scope.)\n' - '\n' - '* Names defined in annotation scopes cannot be rebound with ' - '"nonlocal"\n' - ' statements in inner scopes. This includes only type ' - 'parameters, as\n' - ' no other syntactic elements that can appear within annotation ' - 'scopes\n' - ' can introduce new names.\n' - '\n' - '* While annotation scopes have an internal name, that name is ' - 'not\n' - ' reflected in the *qualified name* of objects defined within ' - 'the\n' - ' scope. 
Instead, the "__qualname__" of such objects is as if ' - 'the\n' - ' object were defined in the enclosing scope.\n' - '\n' - 'Added in version 3.12: Annotation scopes were introduced in ' - 'Python\n' - '3.12 as part of **PEP 695**.\n' - '\n' - 'Changed in version 3.13: Annotation scopes are also used for ' - 'type\n' - 'parameter defaults, as introduced by **PEP 696**.\n' - '\n' - 'Changed in version 3.14: Annotation scopes are now also used ' - 'for\n' - 'annotations, as specified in **PEP 649** and **PEP 749**.\n' - '\n' - '\n' - 'Lazy evaluation\n' - '---------------\n' - '\n' - 'Most annotation scopes are *lazily evaluated*. This includes\n' - 'annotations, the values of type aliases created through the ' - '"type"\n' - 'statement, and the bounds, constraints, and default values of ' - 'type\n' - 'variables created through the type parameter syntax. This means ' - 'that\n' - 'they are not evaluated when the type alias or type variable is\n' - 'created, or when the object carrying annotations is created. ' - 'Instead,\n' - 'they are only evaluated when necessary, for example when the\n' - '"__value__" attribute on a type alias is accessed.\n' - '\n' - 'Example:\n' - '\n' - ' >>> type Alias = 1/0\n' - ' >>> Alias.__value__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - ' >>> def func[T: 1/0](): pass\n' - ' >>> T = func.__type_params__[0]\n' - ' >>> T.__bound__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - '\n' - 'Here the exception is raised only when the "__value__" ' - 'attribute of\n' - 'the type alias or the "__bound__" attribute of the type ' - 'variable is\n' - 'accessed.\n' - '\n' - 'This behavior is primarily useful for references to types that ' - 'have\n' - 'not yet been defined when the type alias or type variable is ' - 'created.\n' - 'For example, lazy evaluation enables creation of mutually ' - 'recursive\n' - 'type aliases:\n' - '\n' - ' from typing import Literal\n' - '\n' - ' type SimpleExpr = int | Parenthesized\n' - ' type Parenthesized = tuple[Literal["("], Expr, ' - 'Literal[")"]]\n' - ' type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", ' - '"-"], Expr]\n' - '\n' - 'Lazily evaluated values are evaluated in annotation scope, ' - 'which means\n' - 'that names that appear inside the lazily evaluated value are ' - 'looked up\n' - 'as if they were used in the immediately enclosing scope.\n' - '\n' - 'Added in version 3.12.\n' - '\n' - '\n' - 'Builtins and restricted execution\n' - '---------------------------------\n' - '\n' - '**CPython implementation detail:** Users should not touch\n' - '"__builtins__"; it is strictly an implementation detail. ' - 'Users\n' - 'wanting to override values in the builtins namespace should ' - '"import"\n' - 'the "builtins" module and modify its attributes appropriately.\n' - '\n' - 'The builtins namespace associated with the execution of a code ' - 'block\n' - 'is actually found by looking up the name "__builtins__" in its ' - 'global\n' - 'namespace; this should be a dictionary or a module (in the ' - 'latter case\n' - 'the module’s dictionary is used). 
By default, when in the ' - '"__main__"\n' - 'module, "__builtins__" is the built-in module "builtins"; when ' - 'in any\n' - 'other module, "__builtins__" is an alias for the dictionary of ' - 'the\n' - '"builtins" module itself.\n' - '\n' - '\n' - 'Interaction with dynamic features\n' - '---------------------------------\n' - '\n' - 'Name resolution of free variables occurs at runtime, not at ' - 'compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access to the ' - 'full\n' - 'environment for resolving names. Names may be resolved in the ' - 'local\n' - 'and global namespaces of the caller. Free variables are not ' - 'resolved\n' - 'in the nearest enclosing namespace, but in the global ' - 'namespace. [1]\n' - 'The "exec()" and "eval()" functions have optional arguments to\n' - 'override the global and local namespace. If only one namespace ' - 'is\n' - 'specified, it is used for both.\n' - '\n' - '\n' - 'Exceptions\n' - '==========\n' - '\n' - 'Exceptions are a means of breaking out of the normal flow of ' - 'control\n' - 'of a code block in order to handle errors or other exceptional\n' - 'conditions. An exception is *raised* at the point where the ' - 'error is\n' - 'detected; it may be *handled* by the surrounding code block or ' - 'by any\n' - 'code block that directly or indirectly invoked the code block ' - 'where\n' - 'the error occurred.\n' - '\n' - 'The Python interpreter raises an exception when it detects a ' - 'run-time\n' - 'error (such as division by zero). A Python program can also\n' - 'explicitly raise an exception with the "raise" statement. ' - 'Exception\n' - 'handlers are specified with the "try" … "except" statement. ' - 'The\n' - '"finally" clause of such a statement can be used to specify ' - 'cleanup\n' - 'code which does not handle the exception, but is executed ' - 'whether an\n' - 'exception occurred or not in the preceding code.\n' - '\n' - 'Python uses the “termination” model of error handling: an ' - 'exception\n' - 'handler can find out what happened and continue execution at an ' - 'outer\n' - 'level, but it cannot repair the cause of the error and retry ' - 'the\n' - 'failing operation (except by re-entering the offending piece of ' - 'code\n' - 'from the top).\n' - '\n' - 'When an exception is not handled at all, the interpreter ' - 'terminates\n' - 'execution of the program, or returns to its interactive main ' - 'loop. In\n' - 'either case, it prints a stack traceback, except when the ' - 'exception is\n' - '"SystemExit".\n' - '\n' - 'Exceptions are identified by class instances. The "except" ' - 'clause is\n' - 'selected depending on the class of the instance: it must ' - 'reference the\n' - 'class of the instance or a *non-virtual base class* thereof. ' - 'The\n' - 'instance can be received by the handler and can carry ' - 'additional\n' - 'information about the exceptional condition.\n' - '\n' - 'Note:\n' - '\n' - ' Exception messages are not part of the Python API. 
Their ' - 'contents\n' - ' may change from one version of Python to the next without ' - 'warning\n' - ' and should not be relied on by code which will run under ' - 'multiple\n' - ' versions of the interpreter.\n' - '\n' - 'See also the description of the "try" statement in section The ' - 'try\n' - 'statement and "raise" statement in section The raise ' - 'statement.\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] This limitation occurs because the code that is executed by ' - 'these\n' - ' operations is not available at the time the module is ' - 'compiled.\n', - 'exprlists': 'Expression lists\n' - '****************\n' - '\n' - ' starred_expression ::= ["*"] or_expr\n' - ' flexible_expression ::= assignment_expression | ' - 'starred_expression\n' - ' flexible_expression_list ::= flexible_expression ("," ' - 'flexible_expression)* [","]\n' - ' starred_expression_list ::= starred_expression ("," ' - 'starred_expression)* [","]\n' - ' expression_list ::= expression ("," expression)* ' - '[","]\n' - ' yield_list ::= expression_list | ' - 'starred_expression "," [starred_expression_list]\n' - '\n' - 'Except when part of a list or set display, an expression list\n' - 'containing at least one comma yields a tuple. The length of ' - 'the tuple\n' - 'is the number of expressions in the list. The expressions are\n' - 'evaluated from left to right.\n' - '\n' - 'An asterisk "*" denotes *iterable unpacking*. Its operand must ' - 'be an\n' - '*iterable*. The iterable is expanded into a sequence of items, ' - 'which\n' - 'are included in the new tuple, list, or set, at the site of ' - 'the\n' - 'unpacking.\n' - '\n' - 'Added in version 3.5: Iterable unpacking in expression lists,\n' - 'originally proposed by **PEP 448**.\n' - '\n' - 'Added in version 3.11: Any item in an expression list may be ' - 'starred.\n' - 'See **PEP 646**.\n' - '\n' - 'A trailing comma is required only to create a one-item tuple, ' - 'such as\n' - '"1,"; it is optional in all other cases. A single expression ' - 'without a\n' - 'trailing comma doesn’t create a tuple, but rather yields the ' - 'value of\n' - 'that expression. (To create an empty tuple, use an empty pair ' - 'of\n' - 'parentheses: "()".)\n', - 'floating': 'Floating-point literals\n' - '***********************\n' - '\n' - 'Floating-point literals are described by the following lexical\n' - 'definitions:\n' - '\n' - ' floatnumber ::= pointfloat | exponentfloat\n' - ' pointfloat ::= [digitpart] fraction | digitpart "."\n' - ' exponentfloat ::= (digitpart | pointfloat) exponent\n' - ' digitpart ::= digit (["_"] digit)*\n' - ' fraction ::= "." digitpart\n' - ' exponent ::= ("e" | "E") ["+" | "-"] digitpart\n' - '\n' - 'Note that the integer and exponent parts are always interpreted ' - 'using\n' - 'radix 10. For example, "077e010" is legal, and denotes the same ' - 'number\n' - 'as "77e10". The allowed range of floating-point literals is\n' - 'implementation-dependent. As in integer literals, underscores ' - 'are\n' - 'supported for digit grouping.\n' - '\n' - 'Some examples of floating-point literals:\n' - '\n' - ' 3.14 10. 
.001 1e100 3.14e-10 0e0 ' - '3.14_15_93\n' - '\n' - 'Changed in version 3.6: Underscores are now allowed for ' - 'grouping\n' - 'purposes in literals.\n', - 'for': 'The "for" statement\n' - '*******************\n' - '\n' - 'The "for" statement is used to iterate over the elements of a ' - 'sequence\n' - '(such as a string, tuple or list) or other iterable object:\n' - '\n' - ' for_stmt ::= "for" target_list "in" starred_list ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'The "starred_list" expression is evaluated once; it should yield an\n' - '*iterable* object. An *iterator* is created for that iterable. The\n' - 'first item provided by the iterator is then assigned to the target\n' - 'list using the standard rules for assignments (see Assignment\n' - 'statements), and the suite is executed. This repeats for each item\n' - 'provided by the iterator. When the iterator is exhausted, the suite\n' - 'in the "else" clause, if present, is executed, and the loop\n' - 'terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the loop\n' - 'without executing the "else" clause’s suite. A "continue" statement\n' - 'executed in the first suite skips the rest of the suite and ' - 'continues\n' - 'with the next item, or with the "else" clause if there is no next\n' - 'item.\n' - '\n' - 'The for-loop makes assignments to the variables in the target list.\n' - 'This overwrites all previous assignments to those variables ' - 'including\n' - 'those made in the suite of the for-loop:\n' - '\n' - ' for i in range(10):\n' - ' print(i)\n' - ' i = 5 # this will not affect the for-loop\n' - ' # because i will be overwritten with the ' - 'next\n' - ' # index in the range\n' - '\n' - 'Names in the target list are not deleted when the loop is finished,\n' - 'but if the sequence is empty, they will not have been assigned to at\n' - 'all by the loop. Hint: the built-in type "range()" represents\n' - 'immutable arithmetic sequences of integers. For instance, iterating\n' - '"range(3)" successively yields 0, 1, and then 2.\n' - '\n' - 'Changed in version 3.11: Starred elements are now allowed in the\n' - 'expression list.\n', - 'formatstrings': 'Format String Syntax\n' - '********************\n' - '\n' - 'The "str.format()" method and the "Formatter" class share ' - 'the same\n' - 'syntax for format strings (although in the case of ' - '"Formatter",\n' - 'subclasses can define their own format string syntax). The ' - 'syntax is\n' - 'related to that of formatted string literals, but it is ' - 'less\n' - 'sophisticated and, in particular, does not support ' - 'arbitrary\n' - 'expressions.\n' - '\n' - 'Format strings contain “replacement fields” surrounded by ' - 'curly braces\n' - '"{}". Anything that is not contained in braces is ' - 'considered literal\n' - 'text, which is copied unchanged to the output. If you need ' - 'to include\n' - 'a brace character in the literal text, it can be escaped by ' - 'doubling:\n' - '"{{" and "}}".\n' - '\n' - 'The grammar for a replacement field is as follows:\n' - '\n' - ' replacement_field ::= "{" [field_name] ["!" conversion] ' - '[":" format_spec] "}"\n' - ' field_name ::= arg_name ("." 
attribute_name | "[" ' - 'element_index "]")*\n' - ' arg_name ::= [identifier | digit+]\n' - ' attribute_name ::= identifier\n' - ' element_index ::= digit+ | index_string\n' - ' index_string ::= ' - '+\n' - ' conversion ::= "r" | "s" | "a"\n' - ' format_spec ::= format-spec:format_spec\n' - '\n' - 'In less formal terms, the replacement field can start with ' - 'a\n' - '*field_name* that specifies the object whose value is to be ' - 'formatted\n' - 'and inserted into the output instead of the replacement ' - 'field. The\n' - '*field_name* is optionally followed by a *conversion* ' - 'field, which is\n' - 'preceded by an exclamation point "\'!\'", and a ' - '*format_spec*, which is\n' - 'preceded by a colon "\':\'". These specify a non-default ' - 'format for the\n' - 'replacement value.\n' - '\n' - 'See also the Format Specification Mini-Language section.\n' - '\n' - 'The *field_name* itself begins with an *arg_name* that is ' - 'either a\n' - 'number or a keyword. If it’s a number, it refers to a ' - 'positional\n' - 'argument, and if it’s a keyword, it refers to a named ' - 'keyword\n' - 'argument. An *arg_name* is treated as a number if a call ' - 'to\n' - '"str.isdecimal()" on the string would return true. If the ' - 'numerical\n' - 'arg_names in a format string are 0, 1, 2, … in sequence, ' - 'they can all\n' - 'be omitted (not just some) and the numbers 0, 1, 2, … will ' - 'be\n' - 'automatically inserted in that order. Because *arg_name* is ' - 'not quote-\n' - 'delimited, it is not possible to specify arbitrary ' - 'dictionary keys\n' - '(e.g., the strings "\'10\'" or "\':-]\'") within a format ' - 'string. The\n' - '*arg_name* can be followed by any number of index or ' - 'attribute\n' - 'expressions. An expression of the form "\'.name\'" selects ' - 'the named\n' - 'attribute using "getattr()", while an expression of the ' - 'form\n' - '"\'[index]\'" does an index lookup using "__getitem__()".\n' - '\n' - 'Changed in version 3.1: The positional argument specifiers ' - 'can be\n' - 'omitted for "str.format()", so "\'{} {}\'.format(a, b)" is ' - 'equivalent to\n' - '"\'{0} {1}\'.format(a, b)".\n' - '\n' - 'Changed in version 3.4: The positional argument specifiers ' - 'can be\n' - 'omitted for "Formatter".\n' - '\n' - 'Some simple format string examples:\n' - '\n' - ' "First, thou shalt count to {0}" # References first ' - 'positional argument\n' - ' "Bring me a {}" # Implicitly ' - 'references the first positional argument\n' - ' "From {} to {}" # Same as "From {0} to ' - '{1}"\n' - ' "My quest is {name}" # References keyword ' - "argument 'name'\n" - ' "Weight in tons {0.weight}" # \'weight\' attribute ' - 'of first positional arg\n' - ' "Units destroyed: {players[0]}" # First element of ' - "keyword argument 'players'.\n" - '\n' - 'The *conversion* field causes a type coercion before ' - 'formatting.\n' - 'Normally, the job of formatting a value is done by the ' - '"__format__()"\n' - 'method of the value itself. However, in some cases it is ' - 'desirable to\n' - 'force a type to be formatted as a string, overriding its ' - 'own\n' - 'definition of formatting. 
By converting the value to a ' - 'string before\n' - 'calling "__format__()", the normal formatting logic is ' - 'bypassed.\n' - '\n' - 'Three conversion flags are currently supported: "\'!s\'" ' - 'which calls\n' - '"str()" on the value, "\'!r\'" which calls "repr()" and ' - '"\'!a\'" which\n' - 'calls "ascii()".\n' - '\n' - 'Some examples:\n' - '\n' - ' "Harold\'s a clever {0!s}" # Calls str() on the ' - 'argument first\n' - ' "Bring out the holy {name!r}" # Calls repr() on the ' - 'argument first\n' - ' "More {!a}" # Calls ascii() on the ' - 'argument first\n' - '\n' - 'The *format_spec* field contains a specification of how the ' - 'value\n' - 'should be presented, including such details as field width, ' - 'alignment,\n' - 'padding, decimal precision and so on. Each value type can ' - 'define its\n' - 'own “formatting mini-language” or interpretation of the ' - '*format_spec*.\n' - '\n' - 'Most built-in types support a common formatting ' - 'mini-language, which\n' - 'is described in the next section.\n' - '\n' - 'A *format_spec* field can also include nested replacement ' - 'fields\n' - 'within it. These nested replacement fields may contain a ' - 'field name,\n' - 'conversion flag and format specification, but deeper ' - 'nesting is not\n' - 'allowed. The replacement fields within the format_spec ' - 'are\n' - 'substituted before the *format_spec* string is interpreted. ' - 'This\n' - 'allows the formatting of a value to be dynamically ' - 'specified.\n' - '\n' - 'See the Format examples section for some examples.\n' - '\n' - '\n' - 'Format Specification Mini-Language\n' - '==================================\n' - '\n' - '“Format specifications” are used within replacement fields ' - 'contained\n' - 'within a format string to define how individual values are ' - 'presented\n' - '(see Format String Syntax and f-strings). They can also be ' - 'passed\n' - 'directly to the built-in "format()" function. Each ' - 'formattable type\n' - 'may define how the format specification is to be ' - 'interpreted.\n' - '\n' - 'Most built-in types implement the following options for ' - 'format\n' - 'specifications, although some of the formatting options are ' - 'only\n' - 'supported by the numeric types.\n' - '\n' - 'A general convention is that an empty format specification ' - 'produces\n' - 'the same result as if you had called "str()" on the value. ' - 'A non-empty\n' - 'format specification typically modifies the result.\n' - '\n' - 'The general form of a *standard format specifier* is:\n' - '\n' - ' format_spec ::= ' - '[[fill]align][sign]["z"]["#"]["0"][width][grouping_option]["." ' - 'precision][type]\n' - ' fill ::= \n' - ' align ::= "<" | ">" | "=" | "^"\n' - ' sign ::= "+" | "-" | " "\n' - ' width ::= digit+\n' - ' grouping_option ::= "_" | ","\n' - ' precision ::= digit+\n' - ' type ::= "b" | "c" | "d" | "e" | "E" | "f" | ' - '"F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n' - '\n' - 'If a valid *align* value is specified, it can be preceded ' - 'by a *fill*\n' - 'character that can be any character and defaults to a space ' - 'if\n' - 'omitted. It is not possible to use a literal curly brace ' - '(”"{"” or\n' - '“"}"”) as the *fill* character in a formatted string ' - 'literal or when\n' - 'using the "str.format()" method. However, it is possible ' - 'to insert a\n' - 'curly brace with a nested replacement field. 
This ' - 'limitation doesn’t\n' - 'affect the "format()" function.\n' - '\n' - 'The meaning of the various alignment options is as ' - 'follows:\n' - '\n' - '+-----------+------------------------------------------------------------+\n' - '| Option | ' - 'Meaning ' - '|\n' - '|===========|============================================================|\n' - '| "\'<\'" | Forces the field to be left-aligned within ' - 'the available |\n' - '| | space (this is the default for most ' - 'objects). |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'>\'" | Forces the field to be right-aligned within ' - 'the available |\n' - '| | space (this is the default for ' - 'numbers). |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'=\'" | Forces the padding to be placed after the ' - 'sign (if any) |\n' - '| | but before the digits. This is used for ' - 'printing fields |\n' - '| | in the form ‘+000000120’. This alignment ' - 'option is only |\n' - '| | valid for numeric types, excluding "complex". ' - 'It becomes |\n' - '| | the default for numbers when ‘0’ immediately ' - 'precedes the |\n' - '| | field ' - 'width. |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'^\'" | Forces the field to be centered within the ' - 'available |\n' - '| | ' - 'space. ' - '|\n' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'Note that unless a minimum field width is defined, the ' - 'field width\n' - 'will always be the same size as the data to fill it, so ' - 'that the\n' - 'alignment option has no meaning in this case.\n' - '\n' - 'The *sign* option is only valid for number types, and can ' - 'be one of\n' - 'the following:\n' - '\n' - '+-----------+------------------------------------------------------------+\n' - '| Option | ' - 'Meaning ' - '|\n' - '|===========|============================================================|\n' - '| "\'+\'" | indicates that a sign should be used for ' - 'both positive as |\n' - '| | well as negative ' - 'numbers. |\n' - '+-----------+------------------------------------------------------------+\n' - '| "\'-\'" | indicates that a sign should be used only ' - 'for negative |\n' - '| | numbers (this is the default ' - 'behavior). |\n' - '+-----------+------------------------------------------------------------+\n' - '| space | indicates that a leading space should be used ' - 'on positive |\n' - '| | numbers, and a minus sign on negative ' - 'numbers. |\n' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The "\'z\'" option coerces negative zero floating-point ' - 'values to\n' - 'positive zero after rounding to the format precision. This ' - 'option is\n' - 'only valid for floating-point presentation types.\n' - '\n' - 'Changed in version 3.11: Added the "\'z\'" option (see also ' - '**PEP\n' - '682**).\n' - '\n' - 'The "\'#\'" option causes the “alternate form” to be used ' - 'for the\n' - 'conversion. The alternate form is defined differently for ' - 'different\n' - 'types. This option is only valid for integer, float and ' - 'complex\n' - 'types. For integers, when binary, octal, or hexadecimal ' - 'output is\n' - 'used, this option adds the respective prefix "\'0b\'", ' - '"\'0o\'", "\'0x\'",\n' - 'or "\'0X\'" to the output value. 
For float and complex the ' - 'alternate\n' - 'form causes the result of the conversion to always contain ' - 'a decimal-\n' - 'point character, even if no digits follow it. Normally, a ' - 'decimal-\n' - 'point character appears in the result of these conversions ' - 'only if a\n' - 'digit follows it. In addition, for "\'g\'" and "\'G\'" ' - 'conversions,\n' - 'trailing zeros are not removed from the result.\n' - '\n' - 'The "\',\'" option signals the use of a comma for a ' - 'thousands separator\n' - 'for floating-point presentation types and for integer ' - 'presentation\n' - 'type "\'d\'". For other presentation types, this option is ' - 'an error. For\n' - 'a locale aware separator, use the "\'n\'" integer ' - 'presentation type\n' - 'instead.\n' - '\n' - 'Changed in version 3.1: Added the "\',\'" option (see also ' - '**PEP 378**).\n' - '\n' - 'The "\'_\'" option signals the use of an underscore for a ' - 'thousands\n' - 'separator for floating-point presentation types and for ' - 'integer\n' - 'presentation type "\'d\'". For integer presentation types ' - '"\'b\'", "\'o\'",\n' - '"\'x\'", and "\'X\'", underscores will be inserted every 4 ' - 'digits. For\n' - 'other presentation types, specifying this option is an ' - 'error.\n' - '\n' - 'Changed in version 3.6: Added the "\'_\'" option (see also ' - '**PEP 515**).\n' - '\n' - '*width* is a decimal integer defining the minimum total ' - 'field width,\n' - 'including any prefixes, separators, and other formatting ' - 'characters.\n' - 'If not specified, then the field width will be determined ' - 'by the\n' - 'content.\n' - '\n' - 'When no explicit alignment is given, preceding the *width* ' - 'field by a\n' - 'zero ("\'0\'") character enables sign-aware zero-padding ' - 'for numeric\n' - 'types, excluding "complex". This is equivalent to a *fill* ' - 'character\n' - 'of "\'0\'" with an *alignment* type of "\'=\'".\n' - '\n' - 'Changed in version 3.10: Preceding the *width* field by ' - '"\'0\'" no\n' - 'longer affects the default alignment for strings.\n' - '\n' - 'The *precision* is a decimal integer indicating how many ' - 'digits should\n' - 'be displayed after the decimal point for presentation types ' - '"\'f\'" and\n' - '"\'F\'", or before and after the decimal point for ' - 'presentation types\n' - '"\'g\'" or "\'G\'". For string presentation types the ' - 'field indicates the\n' - 'maximum field size - in other words, how many characters ' - 'will be used\n' - 'from the field content. The *precision* is not allowed for ' - 'integer\n' - 'presentation types.\n' - '\n' - 'Finally, the *type* determines how the data should be ' - 'presented.\n' - '\n' - 'The available string presentation types are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'s\'" | String format. This is the default type ' - 'for strings and |\n' - ' | | may be ' - 'omitted. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | The same as ' - '"\'s\'". 
|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The available integer presentation types are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'b\'" | Binary format. Outputs the number in ' - 'base 2. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'c\'" | Character. Converts the integer to the ' - 'corresponding |\n' - ' | | unicode character before ' - 'printing. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'d\'" | Decimal Integer. Outputs the number in ' - 'base 10. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'o\'" | Octal format. Outputs the number in base ' - '8. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'x\'" | Hex format. Outputs the number in base ' - '16, using lower- |\n' - ' | | case letters for the digits above ' - '9. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'X\'" | Hex format. Outputs the number in base ' - '16, using upper- |\n' - ' | | case letters for the digits above 9. In ' - 'case "\'#\'" is |\n' - ' | | specified, the prefix "\'0x\'" will be ' - 'upper-cased to "\'0X\'" |\n' - ' | | as ' - 'well. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'n\'" | Number. This is the same as "\'d\'", ' - 'except that it uses the |\n' - ' | | current locale setting to insert the ' - 'appropriate number |\n' - ' | | separator ' - 'characters. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | The same as ' - '"\'d\'". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'In addition to the above presentation types, integers can ' - 'be formatted\n' - 'with the floating-point presentation types listed below ' - '(except "\'n\'"\n' - 'and "None"). When doing so, "float()" is used to convert ' - 'the integer\n' - 'to a floating-point number before formatting.\n' - '\n' - 'The available presentation types for "float" and "Decimal" ' - 'values are:\n' - '\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | Type | ' - 'Meaning ' - '|\n' - ' ' - '|===========|============================================================|\n' - ' | "\'e\'" | Scientific notation. For a given ' - 'precision "p", formats |\n' - ' | | the number in scientific notation with the ' - 'letter ‘e’ |\n' - ' | | separating the coefficient from the ' - 'exponent. The |\n' - ' | | coefficient has one digit before and "p" ' - 'digits after the |\n' - ' | | decimal point, for a total of "p + 1" ' - 'significant digits. |\n' - ' | | With no precision given, uses a precision ' - 'of "6" digits |\n' - ' | | after the decimal point for "float", and ' - 'shows all |\n' - ' | | coefficient digits for "Decimal". If ' - '"p=0", the decimal |\n' - ' | | point is omitted unless the "#" option is ' - 'used. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'E\'" | Scientific notation. Same as "\'e\'" ' - 'except it uses an upper |\n' - ' | | case ‘E’ as the separator ' - 'character. 
|\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'f\'" | Fixed-point notation. For a given ' - 'precision "p", formats |\n' - ' | | the number as a decimal number with ' - 'exactly "p" digits |\n' - ' | | following the decimal point. With no ' - 'precision given, uses |\n' - ' | | a precision of "6" digits after the ' - 'decimal point for |\n' - ' | | "float", and uses a precision large enough ' - 'to show all |\n' - ' | | coefficient digits for "Decimal". If ' - '"p=0", the decimal |\n' - ' | | point is omitted unless the "#" option is ' - 'used. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'F\'" | Fixed-point notation. Same as "\'f\'", ' - 'but converts "nan" to |\n' - ' | | "NAN" and "inf" to ' - '"INF". |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'g\'" | General format. For a given precision ' - '"p >= 1", this |\n' - ' | | rounds the number to "p" significant ' - 'digits and then |\n' - ' | | formats the result in either fixed-point ' - 'format or in |\n' - ' | | scientific notation, depending on its ' - 'magnitude. A |\n' - ' | | precision of "0" is treated as equivalent ' - 'to a precision |\n' - ' | | of "1". The precise rules are as follows: ' - 'suppose that |\n' - ' | | the result formatted with presentation ' - 'type "\'e\'" and |\n' - ' | | precision "p-1" would have exponent ' - '"exp". Then, if "m <= |\n' - ' | | exp < p", where "m" is -4 for floats and ' - '-6 for |\n' - ' | | "Decimals", the number is formatted with ' - 'presentation type |\n' - ' | | "\'f\'" and precision "p-1-exp". ' - 'Otherwise, the number is |\n' - ' | | formatted with presentation type "\'e\'" ' - 'and precision |\n' - ' | | "p-1". In both cases insignificant ' - 'trailing zeros are |\n' - ' | | removed from the significand, and the ' - 'decimal point is |\n' - ' | | also removed if there are no remaining ' - 'digits following |\n' - ' | | it, unless the "\'#\'" option is used. ' - 'With no precision |\n' - ' | | given, uses a precision of "6" significant ' - 'digits for |\n' - ' | | "float". For "Decimal", the coefficient of ' - 'the result is |\n' - ' | | formed from the coefficient digits of the ' - 'value; |\n' - ' | | scientific notation is used for values ' - 'smaller than "1e-6" |\n' - ' | | in absolute value and values where the ' - 'place value of the |\n' - ' | | least significant digit is larger than 1, ' - 'and fixed-point |\n' - ' | | notation is used otherwise. Positive and ' - 'negative |\n' - ' | | infinity, positive and negative zero, and ' - 'nans, are |\n' - ' | | formatted as "inf", "-inf", "0", "-0" and ' - '"nan" |\n' - ' | | respectively, regardless of the ' - 'precision. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'G\'" | General format. Same as "\'g\'" except ' - 'switches to "\'E\'" if |\n' - ' | | the number gets too large. The ' - 'representations of infinity |\n' - ' | | and NaN are uppercased, ' - 'too. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'n\'" | Number. This is the same as "\'g\'", ' - 'except that it uses the |\n' - ' | | current locale setting to insert the ' - 'appropriate number |\n' - ' | | separator ' - 'characters. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | "\'%\'" | Percentage. 
Multiplies the number by 100 ' - 'and displays in |\n' - ' | | fixed ("\'f\'") format, followed by a ' - 'percent sign. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - ' | None | For "float" this is like the "\'g\'" type, ' - 'except that when |\n' - ' | | fixed- point notation is used to format ' - 'the result, it |\n' - ' | | always includes at least one digit past ' - 'the decimal point, |\n' - ' | | and switches to the scientific notation ' - 'when "exp >= p - |\n' - ' | | 1". When the precision is not specified, ' - 'the latter will |\n' - ' | | be as large as needed to represent the ' - 'given value |\n' - ' | | faithfully. For "Decimal", this is the ' - 'same as either |\n' - ' | | "\'g\'" or "\'G\'" depending on the value ' - 'of |\n' - ' | | "context.capitals" for the current decimal ' - 'context. The |\n' - ' | | overall effect is to match the output of ' - '"str()" as |\n' - ' | | altered by the other format ' - 'modifiers. |\n' - ' ' - '+-----------+------------------------------------------------------------+\n' - '\n' - 'The result should be correctly rounded to a given precision ' - '"p" of\n' - 'digits after the decimal point. The rounding mode for ' - '"float" matches\n' - 'that of the "round()" builtin. For "Decimal", the rounding ' - 'mode of\n' - 'the current context will be used.\n' - '\n' - 'The available presentation types for "complex" are the same ' - 'as those\n' - 'for "float" ("\'%\'" is not allowed). Both the real and ' - 'imaginary\n' - 'components of a complex number are formatted as ' - 'floating-point\n' - 'numbers, according to the specified presentation type. ' - 'They are\n' - 'separated by the mandatory sign of the imaginary part, the ' - 'latter\n' - 'being terminated by a "j" suffix. If the presentation type ' - 'is\n' - 'missing, the result will match the output of "str()" ' - '(complex numbers\n' - 'with a non-zero real part are also surrounded by ' - 'parentheses),\n' - 'possibly altered by other format modifiers.\n' - '\n' - '\n' - 'Format examples\n' - '===============\n' - '\n' - 'This section contains examples of the "str.format()" syntax ' - 'and\n' - 'comparison with the old "%"-formatting.\n' - '\n' - 'In most of the cases the syntax is similar to the old ' - '"%"-formatting,\n' - 'with the addition of the "{}" and with ":" used instead of ' - '"%". 
For\n' - 'example, "\'%03.2f\'" can be translated to "\'{:03.2f}\'".\n' - '\n' - 'The new format syntax also supports new and different ' - 'options, shown\n' - 'in the following examples.\n' - '\n' - 'Accessing arguments by position:\n' - '\n' - " >>> '{0}, {1}, {2}'.format('a', 'b', 'c')\n" - " 'a, b, c'\n" - " >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only\n" - " 'a, b, c'\n" - " >>> '{2}, {1}, {0}'.format('a', 'b', 'c')\n" - " 'c, b, a'\n" - " >>> '{2}, {1}, {0}'.format(*'abc') # unpacking " - 'argument sequence\n' - " 'c, b, a'\n" - " >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' " - 'indices can be repeated\n' - " 'abracadabra'\n" - '\n' - 'Accessing arguments by name:\n' - '\n' - " >>> 'Coordinates: {latitude}, " - "{longitude}'.format(latitude='37.24N', " - "longitude='-115.81W')\n" - " 'Coordinates: 37.24N, -115.81W'\n" - " >>> coord = {'latitude': '37.24N', 'longitude': " - "'-115.81W'}\n" - " >>> 'Coordinates: {latitude}, " - "{longitude}'.format(**coord)\n" - " 'Coordinates: 37.24N, -115.81W'\n" - '\n' - 'Accessing arguments’ attributes:\n' - '\n' - ' >>> c = 3-5j\n' - " >>> ('The complex number {0} is formed from the real " - "part {0.real} '\n" - " ... 'and the imaginary part {0.imag}.').format(c)\n" - " 'The complex number (3-5j) is formed from the real part " - "3.0 and the imaginary part -5.0.'\n" - ' >>> class Point:\n' - ' ... def __init__(self, x, y):\n' - ' ... self.x, self.y = x, y\n' - ' ... def __str__(self):\n' - " ... return 'Point({self.x}, " - "{self.y})'.format(self=self)\n" - ' ...\n' - ' >>> str(Point(4, 2))\n' - " 'Point(4, 2)'\n" - '\n' - 'Accessing arguments’ items:\n' - '\n' - ' >>> coord = (3, 5)\n' - " >>> 'X: {0[0]}; Y: {0[1]}'.format(coord)\n" - " 'X: 3; Y: 5'\n" - '\n' - 'Replacing "%s" and "%r":\n' - '\n' - ' >>> "repr() shows quotes: {!r}; str() doesn\'t: ' - '{!s}".format(\'test1\', \'test2\')\n' - ' "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n' - '\n' - 'Aligning the text and specifying a width:\n' - '\n' - " >>> '{:<30}'.format('left aligned')\n" - " 'left aligned '\n" - " >>> '{:>30}'.format('right aligned')\n" - " ' right aligned'\n" - " >>> '{:^30}'.format('centered')\n" - " ' centered '\n" - " >>> '{:*^30}'.format('centered') # use '*' as a fill " - 'char\n' - " '***********centered***********'\n" - '\n' - 'Replacing "%+f", "%-f", and "% f" and specifying a sign:\n' - '\n' - " >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it " - 'always\n' - " '+3.140000; -3.140000'\n" - " >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space " - 'for positive numbers\n' - " ' 3.140000; -3.140000'\n" - " >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only the " - "minus -- same as '{:f}; {:f}'\n" - " '3.140000; -3.140000'\n" - '\n' - 'Replacing "%x" and "%o" and converting the value to ' - 'different bases:\n' - '\n' - ' >>> # format also supports binary numbers\n' - ' >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: ' - '{0:b}".format(42)\n' - " 'int: 42; hex: 2a; oct: 52; bin: 101010'\n" - ' >>> # with 0x, 0o, or 0b as prefix:\n' - ' >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: ' - '{0:#b}".format(42)\n' - " 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010'\n" - '\n' - 'Using the comma as a thousands separator:\n' - '\n' - " >>> '{:,}'.format(1234567890)\n" - " '1,234,567,890'\n" - '\n' - 'Expressing a percentage:\n' - '\n' - ' >>> points = 19\n' - ' >>> total = 22\n' - " >>> 'Correct answers: {:.2%}'.format(points/total)\n" - " 'Correct answers: 86.36%'\n" - '\n' - 'Using type-specific formatting:\n' - '\n' - ' >>> import datetime\n' - ' 
>>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n' - " >>> '{:%Y-%m-%d %H:%M:%S}'.format(d)\n" - " '2010-07-04 12:15:58'\n" - '\n' - 'Nesting arguments and more complex examples:\n' - '\n' - " >>> for align, text in zip('<^>', ['left', 'center', " - "'right']):\n" - " ... '{0:{fill}{align}16}'.format(text, fill=align, " - 'align=align)\n' - ' ...\n' - " 'left<<<<<<<<<<<<'\n" - " '^^^^^center^^^^^'\n" - " '>>>>>>>>>>>right'\n" - ' >>>\n' - ' >>> octets = [192, 168, 0, 1]\n' - " >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets)\n" - " 'C0A80001'\n" - ' >>> int(_, 16)\n' - ' 3232235521\n' - ' >>>\n' - ' >>> width = 5\n' - ' >>> for num in range(5,12): \n' - " ... for base in 'dXob':\n" - " ... print('{0:{width}{base}}'.format(num, " - "base=base, width=width), end=' ')\n" - ' ... print()\n' - ' ...\n' - ' 5 5 5 101\n' - ' 6 6 6 110\n' - ' 7 7 7 111\n' - ' 8 8 10 1000\n' - ' 9 9 11 1001\n' - ' 10 A 12 1010\n' - ' 11 B 13 1011\n', - 'function': 'Function definitions\n' - '********************\n' - '\n' - 'A function definition defines a user-defined function object ' - '(see\n' - 'section The standard type hierarchy):\n' - '\n' - ' funcdef ::= [decorators] "def" funcname ' - '[type_params] "(" [parameter_list] ")"\n' - ' ["->" expression] ":" suite\n' - ' decorators ::= decorator+\n' - ' decorator ::= "@" assignment_expression ' - 'NEWLINE\n' - ' parameter_list ::= defparameter ("," ' - 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n' - ' | parameter_list_no_posonly\n' - ' parameter_list_no_posonly ::= defparameter ("," ' - 'defparameter)* ["," [parameter_list_starargs]]\n' - ' | parameter_list_starargs\n' - ' parameter_list_starargs ::= "*" [star_parameter] ("," ' - 'defparameter)* ["," ["**" parameter [","]]]\n' - ' | "**" parameter [","]\n' - ' parameter ::= identifier [":" expression]\n' - ' star_parameter ::= identifier [":" ["*"] ' - 'expression]\n' - ' defparameter ::= parameter ["=" expression]\n' - ' funcname ::= identifier\n' - '\n' - 'A function definition is an executable statement. Its execution ' - 'binds\n' - 'the function name in the current local namespace to a function ' - 'object\n' - '(a wrapper around the executable code for the function). This\n' - 'function object contains a reference to the current global ' - 'namespace\n' - 'as the global namespace to be used when the function is called.\n' - '\n' - 'The function definition does not execute the function body; this ' - 'gets\n' - 'executed only when the function is called. [4]\n' - '\n' - 'A function definition may be wrapped by one or more *decorator*\n' - 'expressions. Decorator expressions are evaluated when the ' - 'function is\n' - 'defined, in the scope that contains the function definition. ' - 'The\n' - 'result must be a callable, which is invoked with the function ' - 'object\n' - 'as the only argument. The returned value is bound to the ' - 'function name\n' - 'instead of the function object. Multiple decorators are applied ' - 'in\n' - 'nested fashion. For example, the following code\n' - '\n' - ' @f1(arg)\n' - ' @f2\n' - ' def func(): pass\n' - '\n' - 'is roughly equivalent to\n' - '\n' - ' def func(): pass\n' - ' func = f1(arg)(f2(func))\n' - '\n' - 'except that the original function is not temporarily bound to ' - 'the name\n' - '"func".\n' - '\n' - 'Changed in version 3.9: Functions may be decorated with any ' - 'valid\n' - '"assignment_expression". 
Previously, the grammar was much more\n' - 'restrictive; see **PEP 614** for details.\n' - '\n' - 'A list of type parameters may be given in square brackets ' - 'between the\n' - 'function’s name and the opening parenthesis for its parameter ' - 'list.\n' - 'This indicates to static type checkers that the function is ' - 'generic.\n' - 'At runtime, the type parameters can be retrieved from the ' - 'function’s\n' - '"__type_params__" attribute. See Generic functions for more.\n' - '\n' - 'Changed in version 3.12: Type parameter lists are new in Python ' - '3.12.\n' - '\n' - 'When one or more *parameters* have the form *parameter* "="\n' - '*expression*, the function is said to have “default parameter ' - 'values.”\n' - 'For a parameter with a default value, the corresponding ' - '*argument* may\n' - 'be omitted from a call, in which case the parameter’s default ' - 'value is\n' - 'substituted. If a parameter has a default value, all following\n' - 'parameters up until the “"*"” must also have a default value — ' - 'this is\n' - 'a syntactic restriction that is not expressed by the grammar.\n' - '\n' - '**Default parameter values are evaluated from left to right when ' - 'the\n' - 'function definition is executed.** This means that the ' - 'expression is\n' - 'evaluated once, when the function is defined, and that the same ' - '“pre-\n' - 'computed” value is used for each call. This is especially ' - 'important\n' - 'to understand when a default parameter value is a mutable ' - 'object, such\n' - 'as a list or a dictionary: if the function modifies the object ' - '(e.g.\n' - 'by appending an item to a list), the default parameter value is ' - 'in\n' - 'effect modified. This is generally not what was intended. A ' - 'way\n' - 'around this is to use "None" as the default, and explicitly test ' - 'for\n' - 'it in the body of the function, e.g.:\n' - '\n' - ' def whats_on_the_telly(penguin=None):\n' - ' if penguin is None:\n' - ' penguin = []\n' - ' penguin.append("property of the zoo")\n' - ' return penguin\n' - '\n' - 'Function call semantics are described in more detail in section ' - 'Calls.\n' - 'A function call always assigns values to all parameters ' - 'mentioned in\n' - 'the parameter list, either from positional arguments, from ' - 'keyword\n' - 'arguments, or from default values. If the form “"*identifier"” ' - 'is\n' - 'present, it is initialized to a tuple receiving any excess ' - 'positional\n' - 'parameters, defaulting to the empty tuple. If the form\n' - '“"**identifier"” is present, it is initialized to a new ordered\n' - 'mapping receiving any excess keyword arguments, defaulting to a ' - 'new\n' - 'empty mapping of the same type. Parameters after “"*"” or\n' - '“"*identifier"” are keyword-only parameters and may only be ' - 'passed by\n' - 'keyword arguments. Parameters before “"/"” are positional-only\n' - 'parameters and may only be passed by positional arguments.\n' - '\n' - 'Changed in version 3.8: The "/" function parameter syntax may be ' - 'used\n' - 'to indicate positional-only parameters. See **PEP 570** for ' - 'details.\n' - '\n' - 'Parameters may have an *annotation* of the form “": ' - 'expression"”\n' - 'following the parameter name. Any parameter may have an ' - 'annotation,\n' - 'even those of the form "*identifier" or "**identifier". (As a ' - 'special\n' - 'case, parameters of the form "*identifier" may have an ' - 'annotation “":\n' - '*expression"”.) Functions may have “return” annotation of the ' - 'form\n' - '“"-> expression"” after the parameter list. 
These annotations ' - 'can be\n' - 'any valid Python expression. The presence of annotations does ' - 'not\n' - 'change the semantics of a function. See Annotations for more\n' - 'information on annotations.\n' - '\n' - 'Changed in version 3.11: Parameters of the form “"*identifier"” ' - 'may\n' - 'have an annotation “": *expression"”. See **PEP 646**.\n' - '\n' - 'It is also possible to create anonymous functions (functions not ' - 'bound\n' - 'to a name), for immediate use in expressions. This uses lambda\n' - 'expressions, described in section Lambdas. Note that the ' - 'lambda\n' - 'expression is merely a shorthand for a simplified function ' - 'definition;\n' - 'a function defined in a “"def"” statement can be passed around ' - 'or\n' - 'assigned to another name just like a function defined by a ' - 'lambda\n' - 'expression. The “"def"” form is actually more powerful since ' - 'it\n' - 'allows the execution of multiple statements and annotations.\n' - '\n' - '**Programmer’s note:** Functions are first-class objects. A ' - '“"def"”\n' - 'statement executed inside a function definition defines a local\n' - 'function that can be returned or passed around. Free variables ' - 'used\n' - 'in the nested function can access the local variables of the ' - 'function\n' - 'containing the def. See section Naming and binding for ' - 'details.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3107** - Function Annotations\n' - ' The original specification for function annotations.\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Definition of a standard meaning for annotations: type ' - 'hints.\n' - '\n' - ' **PEP 526** - Syntax for Variable Annotations\n' - ' Ability to type hint variable declarations, including ' - 'class\n' - ' variables and instance variables.\n' - '\n' - ' **PEP 563** - Postponed Evaluation of Annotations\n' - ' Support for forward references within annotations by ' - 'preserving\n' - ' annotations in a string form at runtime instead of eager\n' - ' evaluation.\n' - '\n' - ' **PEP 318** - Decorators for Functions and Methods\n' - ' Function and method decorators were introduced. Class ' - 'decorators\n' - ' were introduced in **PEP 3129**.\n', - 'global': 'The "global" statement\n' - '**********************\n' - '\n' - ' global_stmt ::= "global" identifier ("," identifier)*\n' - '\n' - 'The "global" statement causes the listed identifiers to be ' - 'interpreted\n' - 'as globals. It would be impossible to assign to a global variable\n' - 'without "global", although free variables may refer to globals ' - 'without\n' - 'being declared global.\n' - '\n' - 'The "global" statement applies to the entire scope of a function ' - 'or\n' - 'class body. A "SyntaxError" is raised if a variable is used or\n' - 'assigned to prior to its global declaration in the scope.\n' - '\n' - '**Programmer’s note:** "global" is a directive to the parser. It\n' - 'applies only to code parsed at the same time as the "global"\n' - 'statement. In particular, a "global" statement contained in a ' - 'string\n' - 'or code object supplied to the built-in "exec()" function does ' - 'not\n' - 'affect the code block *containing* the function call, and code\n' - 'contained in such a string is unaffected by "global" statements in ' - 'the\n' - 'code containing the function call. 
The same applies to the ' - '"eval()"\n' - 'and "compile()" functions.\n', - 'id-classes': 'Reserved classes of identifiers\n' - '*******************************\n' - '\n' - 'Certain classes of identifiers (besides keywords) have ' - 'special\n' - 'meanings. These classes are identified by the patterns of ' - 'leading and\n' - 'trailing underscore characters:\n' - '\n' - '"_*"\n' - ' Not imported by "from module import *".\n' - '\n' - '"_"\n' - ' In a "case" pattern within a "match" statement, "_" is a ' - 'soft\n' - ' keyword that denotes a wildcard.\n' - '\n' - ' Separately, the interactive interpreter makes the result of ' - 'the\n' - ' last evaluation available in the variable "_". (It is ' - 'stored in the\n' - ' "builtins" module, alongside built-in functions like ' - '"print".)\n' - '\n' - ' Elsewhere, "_" is a regular identifier. It is often used to ' - 'name\n' - ' “special” items, but it is not special to Python itself.\n' - '\n' - ' Note:\n' - '\n' - ' The name "_" is often used in conjunction with\n' - ' internationalization; refer to the documentation for the\n' - ' "gettext" module for more information on this ' - 'convention.It is\n' - ' also commonly used for unused variables.\n' - '\n' - '"__*__"\n' - ' System-defined names, informally known as “dunder” names. ' - 'These\n' - ' names are defined by the interpreter and its ' - 'implementation\n' - ' (including the standard library). Current system names are\n' - ' discussed in the Special method names section and ' - 'elsewhere. More\n' - ' will likely be defined in future versions of Python. *Any* ' - 'use of\n' - ' "__*__" names, in any context, that does not follow ' - 'explicitly\n' - ' documented use, is subject to breakage without warning.\n' - '\n' - '"__*"\n' - ' Class-private names. Names in this category, when used ' - 'within the\n' - ' context of a class definition, are re-written to use a ' - 'mangled form\n' - ' to help avoid name clashes between “private” attributes of ' - 'base and\n' - ' derived classes. See section Identifiers (Names).\n', - 'identifiers': 'Identifiers and keywords\n' - '************************\n' - '\n' - 'Identifiers (also referred to as *names*) are described by ' - 'the\n' - 'following lexical definitions.\n' - '\n' - 'The syntax of identifiers in Python is based on the Unicode ' - 'standard\n' - 'annex UAX-31, with elaboration and changes as defined below; ' - 'see also\n' - '**PEP 3131** for further details.\n' - '\n' - 'Within the ASCII range (U+0001..U+007F), the valid characters ' - 'for\n' - 'identifiers include the uppercase and lowercase letters "A" ' - 'through\n' - '"Z", the underscore "_" and, except for the first character, ' - 'the\n' - 'digits "0" through "9". Python 3.0 introduced additional ' - 'characters\n' - 'from outside the ASCII range (see **PEP 3131**). For these\n' - 'characters, the classification uses the version of the ' - 'Unicode\n' - 'Character Database as included in the "unicodedata" module.\n' - '\n' - 'Identifiers are unlimited in length. 
Case is significant.\n' - '\n' - ' identifier ::= xid_start xid_continue*\n' - ' id_start ::= \n' - ' id_continue ::= \n' - ' xid_start ::= \n' - ' xid_continue ::= \n' - '\n' - 'The Unicode category codes mentioned above stand for:\n' - '\n' - '* *Lu* - uppercase letters\n' - '\n' - '* *Ll* - lowercase letters\n' - '\n' - '* *Lt* - titlecase letters\n' - '\n' - '* *Lm* - modifier letters\n' - '\n' - '* *Lo* - other letters\n' - '\n' - '* *Nl* - letter numbers\n' - '\n' - '* *Mn* - nonspacing marks\n' - '\n' - '* *Mc* - spacing combining marks\n' - '\n' - '* *Nd* - decimal numbers\n' - '\n' - '* *Pc* - connector punctuations\n' - '\n' - '* *Other_ID_Start* - explicit list of characters in ' - 'PropList.txt to\n' - ' support backwards compatibility\n' - '\n' - '* *Other_ID_Continue* - likewise\n' - '\n' - 'All identifiers are converted into the normal form NFKC while ' - 'parsing;\n' - 'comparison of identifiers is based on NFKC.\n' - '\n' - 'A non-normative HTML file listing all valid identifier ' - 'characters for\n' - 'Unicode 16.0.0 can be found at\n' - 'https://www.unicode.org/Public/16.0.0/ucd/DerivedCoreProperties.txt\n' - '\n' - '\n' - 'Keywords\n' - '========\n' - '\n' - 'The following identifiers are used as reserved words, or ' - '*keywords* of\n' - 'the language, and cannot be used as ordinary identifiers. ' - 'They must\n' - 'be spelled exactly as written here:\n' - '\n' - ' False await else import pass\n' - ' None break except in raise\n' - ' True class finally is return\n' - ' and continue for lambda try\n' - ' as def from nonlocal while\n' - ' assert del global not with\n' - ' async elif if or yield\n' - '\n' - '\n' - 'Soft Keywords\n' - '=============\n' - '\n' - 'Added in version 3.10.\n' - '\n' - 'Some identifiers are only reserved under specific contexts. ' - 'These are\n' - 'known as *soft keywords*. The identifiers "match", "case", ' - '"type" and\n' - '"_" can syntactically act as keywords in certain contexts, ' - 'but this\n' - 'distinction is done at the parser level, not when ' - 'tokenizing.\n' - '\n' - 'As soft keywords, their use in the grammar is possible while ' - 'still\n' - 'preserving compatibility with existing code that uses these ' - 'names as\n' - 'identifier names.\n' - '\n' - '"match", "case", and "_" are used in the "match" statement. ' - '"type" is\n' - 'used in the "type" statement.\n' - '\n' - 'Changed in version 3.12: "type" is now a soft keyword.\n' - '\n' - '\n' - 'Reserved classes of identifiers\n' - '===============================\n' - '\n' - 'Certain classes of identifiers (besides keywords) have ' - 'special\n' - 'meanings. These classes are identified by the patterns of ' - 'leading and\n' - 'trailing underscore characters:\n' - '\n' - '"_*"\n' - ' Not imported by "from module import *".\n' - '\n' - '"_"\n' - ' In a "case" pattern within a "match" statement, "_" is a ' - 'soft\n' - ' keyword that denotes a wildcard.\n' - '\n' - ' Separately, the interactive interpreter makes the result ' - 'of the\n' - ' last evaluation available in the variable "_". (It is ' - 'stored in the\n' - ' "builtins" module, alongside built-in functions like ' - '"print".)\n' - '\n' - ' Elsewhere, "_" is a regular identifier. 
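Editorial illustration (not part of the generated topics.py text), with a hypothetical function name, of the two roles of "_" described above: a wildcard in a "case" pattern, and an ordinary identifier elsewhere.

    def describe(value):
        match value:
            case 0:
                return "zero"
            case _:          # soft-keyword wildcard: matches anything, binds nothing
                return "something else"

    _ = describe(0)          # ordinary name, conventionally used for values
    print(_)                 # we do not otherwise care about; prints "zero"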
It is often used ' - 'to name\n' - ' “special” items, but it is not special to Python itself.\n' - '\n' - ' Note:\n' - '\n' - ' The name "_" is often used in conjunction with\n' - ' internationalization; refer to the documentation for ' - 'the\n' - ' "gettext" module for more information on this ' - 'convention.It is\n' - ' also commonly used for unused variables.\n' - '\n' - '"__*__"\n' - ' System-defined names, informally known as “dunder” names. ' - 'These\n' - ' names are defined by the interpreter and its ' - 'implementation\n' - ' (including the standard library). Current system names ' - 'are\n' - ' discussed in the Special method names section and ' - 'elsewhere. More\n' - ' will likely be defined in future versions of Python. ' - '*Any* use of\n' - ' "__*__" names, in any context, that does not follow ' - 'explicitly\n' - ' documented use, is subject to breakage without warning.\n' - '\n' - '"__*"\n' - ' Class-private names. Names in this category, when used ' - 'within the\n' - ' context of a class definition, are re-written to use a ' - 'mangled form\n' - ' to help avoid name clashes between “private” attributes of ' - 'base and\n' - ' derived classes. See section Identifiers (Names).\n', - 'if': 'The "if" statement\n' - '******************\n' - '\n' - 'The "if" statement is used for conditional execution:\n' - '\n' - ' if_stmt ::= "if" assignment_expression ":" suite\n' - ' ("elif" assignment_expression ":" suite)*\n' - ' ["else" ":" suite]\n' - '\n' - 'It selects exactly one of the suites by evaluating the expressions ' - 'one\n' - 'by one until one is found to be true (see section Boolean operations\n' - 'for the definition of true and false); then that suite is executed\n' - '(and no other part of the "if" statement is executed or evaluated).\n' - 'If all expressions are false, the suite of the "else" clause, if\n' - 'present, is executed.\n', - 'imaginary': 'Imaginary literals\n' - '******************\n' - '\n' - 'Imaginary literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' imagnumber ::= (floatnumber | digitpart) ("j" | "J")\n' - '\n' - 'An imaginary literal yields a complex number with a real part ' - 'of 0.0.\n' - 'Complex numbers are represented as a pair of floating-point ' - 'numbers\n' - 'and have the same restrictions on their range. To create a ' - 'complex\n' - 'number with a nonzero real part, add a floating-point number to ' - 'it,\n' - 'e.g., "(3+4j)". Some examples of imaginary literals:\n' - '\n' - ' 3.14j 10.j 10j .001j 1e100j 3.14e-10j ' - '3.14_15_93j\n', - 'import': 'The "import" statement\n' - '**********************\n' - '\n' - ' import_stmt ::= "import" module ["as" identifier] ("," ' - 'module ["as" identifier])*\n' - ' | "from" relative_module "import" identifier ' - '["as" identifier]\n' - ' ("," identifier ["as" identifier])*\n' - ' | "from" relative_module "import" "(" ' - 'identifier ["as" identifier]\n' - ' ("," identifier ["as" identifier])* [","] ")"\n' - ' | "from" relative_module "import" "*"\n' - ' module ::= (identifier ".")* identifier\n' - ' relative_module ::= "."* module | "."+\n' - '\n' - 'The basic import statement (no "from" clause) is executed in two\n' - 'steps:\n' - '\n' - '1. find a module, loading and initializing it if necessary\n' - '\n' - '2. 
define a name or names in the local namespace for the scope ' - 'where\n' - ' the "import" statement occurs.\n' - '\n' - 'When the statement contains multiple clauses (separated by commas) ' - 'the\n' - 'two steps are carried out separately for each clause, just as ' - 'though\n' - 'the clauses had been separated out into individual import ' - 'statements.\n' - '\n' - 'The details of the first step, finding and loading modules, are\n' - 'described in greater detail in the section on the import system, ' - 'which\n' - 'also describes the various types of packages and modules that can ' - 'be\n' - 'imported, as well as all the hooks that can be used to customize ' - 'the\n' - 'import system. Note that failures in this step may indicate ' - 'either\n' - 'that the module could not be located, *or* that an error occurred\n' - 'while initializing the module, which includes execution of the\n' - 'module’s code.\n' - '\n' - 'If the requested module is retrieved successfully, it will be ' - 'made\n' - 'available in the local namespace in one of three ways:\n' - '\n' - '* If the module name is followed by "as", then the name following ' - '"as"\n' - ' is bound directly to the imported module.\n' - '\n' - '* If no other name is specified, and the module being imported is ' - 'a\n' - ' top level module, the module’s name is bound in the local ' - 'namespace\n' - ' as a reference to the imported module\n' - '\n' - '* If the module being imported is *not* a top level module, then ' - 'the\n' - ' name of the top level package that contains the module is bound ' - 'in\n' - ' the local namespace as a reference to the top level package. ' - 'The\n' - ' imported module must be accessed using its full qualified name\n' - ' rather than directly\n' - '\n' - 'The "from" form uses a slightly more complex process:\n' - '\n' - '1. find the module specified in the "from" clause, loading and\n' - ' initializing it if necessary;\n' - '\n' - '2. for each of the identifiers specified in the "import" clauses:\n' - '\n' - ' 1. check if the imported module has an attribute by that name\n' - '\n' - ' 2. if not, attempt to import a submodule with that name and ' - 'then\n' - ' check the imported module again for that attribute\n' - '\n' - ' 3. if the attribute is not found, "ImportError" is raised.\n' - '\n' - ' 4. otherwise, a reference to that value is stored in the local\n' - ' namespace, using the name in the "as" clause if it is ' - 'present,\n' - ' otherwise using the attribute name\n' - '\n' - 'Examples:\n' - '\n' - ' import foo # foo imported and bound locally\n' - ' import foo.bar.baz # foo, foo.bar, and foo.bar.baz ' - 'imported, foo bound locally\n' - ' import foo.bar.baz as fbb # foo, foo.bar, and foo.bar.baz ' - 'imported, foo.bar.baz bound as fbb\n' - ' from foo.bar import baz # foo, foo.bar, and foo.bar.baz ' - 'imported, foo.bar.baz bound as baz\n' - ' from foo import attr # foo imported and foo.attr bound as ' - 'attr\n' - '\n' - 'If the list of identifiers is replaced by a star ("\'*\'"), all ' - 'public\n' - 'names defined in the module are bound in the local namespace for ' - 'the\n' - 'scope where the "import" statement occurs.\n' - '\n' - 'The *public names* defined by a module are determined by checking ' - 'the\n' - 'module’s namespace for a variable named "__all__"; if defined, it ' - 'must\n' - 'be a sequence of strings which are names defined or imported by ' - 'that\n' - 'module. The names given in "__all__" are all considered public ' - 'and\n' - 'are required to exist. 
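Editorial illustration (not part of the generated topics.py text), using a hypothetical module name, of how "__all__" controls what "from module import *" binds:

    # pkg_demo.py (hypothetical)
    __all__ = ["public_func"]       # the only name exported by "import *"

    def public_func():
        return "exported"

    def _helper():                  # leading underscore: private by convention
        return "internal"

    def unlisted():                 # importable by name, but omitted from __all__,
        return "not exported"       # so "import *" does not bind it

    # elsewhere:
    #     from pkg_demo import *         # binds only public_func
    #     from pkg_demo import unlisted  # explicit imports still work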
If "__all__" is not defined, the set of ' - 'public\n' - 'names includes all names found in the module’s namespace which do ' - 'not\n' - 'begin with an underscore character ("\'_\'"). "__all__" should ' - 'contain\n' - 'the entire public API. It is intended to avoid accidentally ' - 'exporting\n' - 'items that are not part of the API (such as library modules which ' - 'were\n' - 'imported and used within the module).\n' - '\n' - 'The wild card form of import — "from module import *" — is only\n' - 'allowed at the module level. Attempting to use it in class or\n' - 'function definitions will raise a "SyntaxError".\n' - '\n' - 'When specifying what module to import you do not have to specify ' - 'the\n' - 'absolute name of the module. When a module or package is ' - 'contained\n' - 'within another package it is possible to make a relative import ' - 'within\n' - 'the same top package without having to mention the package name. ' - 'By\n' - 'using leading dots in the specified module or package after "from" ' - 'you\n' - 'can specify how high to traverse up the current package hierarchy\n' - 'without specifying exact names. One leading dot means the current\n' - 'package where the module making the import exists. Two dots means ' - 'up\n' - 'one package level. Three dots is up two levels, etc. So if you ' - 'execute\n' - '"from . import mod" from a module in the "pkg" package then you ' - 'will\n' - 'end up importing "pkg.mod". If you execute "from ..subpkg2 import ' - 'mod"\n' - 'from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\n' - 'specification for relative imports is contained in the Package\n' - 'Relative Imports section.\n' - '\n' - '"importlib.import_module()" is provided to support applications ' - 'that\n' - 'determine dynamically the modules to be loaded.\n' - '\n' - 'Raises an auditing event "import" with arguments "module", ' - '"filename",\n' - '"sys.path", "sys.meta_path", "sys.path_hooks".\n' - '\n' - '\n' - 'Future statements\n' - '=================\n' - '\n' - 'A *future statement* is a directive to the compiler that a ' - 'particular\n' - 'module should be compiled using syntax or semantics that will be\n' - 'available in a specified future release of Python where the ' - 'feature\n' - 'becomes standard.\n' - '\n' - 'The future statement is intended to ease migration to future ' - 'versions\n' - 'of Python that introduce incompatible changes to the language. ' - 'It\n' - 'allows use of the new features on a per-module basis before the\n' - 'release in which the feature becomes standard.\n' - '\n' - ' future_stmt ::= "from" "__future__" "import" feature ["as" ' - 'identifier]\n' - ' ("," feature ["as" identifier])*\n' - ' | "from" "__future__" "import" "(" feature ' - '["as" identifier]\n' - ' ("," feature ["as" identifier])* [","] ")"\n' - ' feature ::= identifier\n' - '\n' - 'A future statement must appear near the top of the module. The ' - 'only\n' - 'lines that can appear before a future statement are:\n' - '\n' - '* the module docstring (if any),\n' - '\n' - '* comments,\n' - '\n' - '* blank lines, and\n' - '\n' - '* other future statements.\n' - '\n' - 'The only feature that requires using the future statement is\n' - '"annotations" (see **PEP 563**).\n' - '\n' - 'All historical features enabled by the future statement are still\n' - 'recognized by Python 3. The list includes "absolute_import",\n' - '"division", "generators", "generator_stop", "unicode_literals",\n' - '"print_function", "nested_scopes" and "with_statement". 
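Editorial illustration (not part of the generated topics.py text): the one feature that still requires a future statement is "annotations"; the sketch below places it where a future statement must go, after the docstring and before any other statement. The historical features listed above need no such statement, as the next sentence explains.

    """Module docstring; it may precede the future statement."""
    from __future__ import annotations   # PEP 563: annotations are not evaluated eagerly

    def greet(who: DefinedLater) -> None:    # forward reference is fine here
        print(f"hello, {who}")

    class DefinedLater:                      # hypothetical name, defined after its use above
        pass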
They are ' - 'all\n' - 'redundant because they are always enabled, and only kept for ' - 'backwards\n' - 'compatibility.\n' - '\n' - 'A future statement is recognized and treated specially at compile\n' - 'time: Changes to the semantics of core constructs are often\n' - 'implemented by generating different code. It may even be the ' - 'case\n' - 'that a new feature introduces new incompatible syntax (such as a ' - 'new\n' - 'reserved word), in which case the compiler may need to parse the\n' - 'module differently. Such decisions cannot be pushed off until\n' - 'runtime.\n' - '\n' - 'For any given release, the compiler knows which feature names ' - 'have\n' - 'been defined, and raises a compile-time error if a future ' - 'statement\n' - 'contains a feature not known to it.\n' - '\n' - 'The direct runtime semantics are the same as for any import ' - 'statement:\n' - 'there is a standard module "__future__", described later, and it ' - 'will\n' - 'be imported in the usual way at the time the future statement is\n' - 'executed.\n' - '\n' - 'The interesting runtime semantics depend on the specific feature\n' - 'enabled by the future statement.\n' - '\n' - 'Note that there is nothing special about the statement:\n' - '\n' - ' import __future__ [as name]\n' - '\n' - 'That is not a future statement; it’s an ordinary import statement ' - 'with\n' - 'no special semantics or syntax restrictions.\n' - '\n' - 'Code compiled by calls to the built-in functions "exec()" and\n' - '"compile()" that occur in a module "M" containing a future ' - 'statement\n' - 'will, by default, use the new syntax or semantics associated with ' - 'the\n' - 'future statement. This can be controlled by optional arguments ' - 'to\n' - '"compile()" — see the documentation of that function for details.\n' - '\n' - 'A future statement typed at an interactive interpreter prompt ' - 'will\n' - 'take effect for the rest of the interpreter session. If an\n' - 'interpreter is started with the "-i" option, is passed a script ' - 'name\n' - 'to execute, and the script includes a future statement, it will be ' - 'in\n' - 'effect in the interactive session started after the script is\n' - 'executed.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 236** - Back to the __future__\n' - ' The original proposal for the __future__ mechanism.\n', - 'in': 'Membership test operations\n' - '**************************\n' - '\n' - 'The operators "in" and "not in" test for membership. "x in s"\n' - 'evaluates to "True" if *x* is a member of *s*, and "False" otherwise.\n' - '"x not in s" returns the negation of "x in s". All built-in ' - 'sequences\n' - 'and set types support this as well as dictionary, for which "in" ' - 'tests\n' - 'whether the dictionary has a given key. For container types such as\n' - 'list, tuple, set, frozenset, dict, or collections.deque, the\n' - 'expression "x in y" is equivalent to "any(x is e or x == e for e in\n' - 'y)".\n' - '\n' - 'For the string and bytes types, "x in y" is "True" if and only if *x*\n' - 'is a substring of *y*. 
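Editorial illustration (not part of the generated topics.py text), with a hypothetical class name, of the membership protocol described above: "in" consults "__contains__()" when it is defined.

    class EvenNumbers:
        def __contains__(self, item):
            # "x in EvenNumbers()" calls this; a true result means membership
            return isinstance(item, int) and item % 2 == 0

    evens = EvenNumbers()
    print(4 in evens)         # True
    print(3 not in evens)     # True: "not in" is the inverse of "in"
    print("py" in "python")   # True: substring test for strings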
An equivalent test is "y.find(x) != -1".\n' - 'Empty strings are always considered to be a substring of any other\n' - 'string, so """ in "abc"" will return "True".\n' - '\n' - 'For user-defined classes which define the "__contains__()" method, "x\n' - 'in y" returns "True" if "y.__contains__(x)" returns a true value, and\n' - '"False" otherwise.\n' - '\n' - 'For user-defined classes which do not define "__contains__()" but do\n' - 'define "__iter__()", "x in y" is "True" if some value "z", for which\n' - 'the expression "x is z or x == z" is true, is produced while ' - 'iterating\n' - 'over "y". If an exception is raised during the iteration, it is as if\n' - '"in" raised that exception.\n' - '\n' - 'Lastly, the old-style iteration protocol is tried: if a class defines\n' - '"__getitem__()", "x in y" is "True" if and only if there is a non-\n' - 'negative integer index *i* such that "x is y[i] or x == y[i]", and no\n' - 'lower integer index raises the "IndexError" exception. (If any other\n' - 'exception is raised, it is as if "in" raised that exception).\n' - '\n' - 'The operator "not in" is defined to have the inverse truth value of\n' - '"in".\n', - 'integers': 'Integer literals\n' - '****************\n' - '\n' - 'Integer literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' integer ::= decinteger | bininteger | octinteger | ' - 'hexinteger\n' - ' decinteger ::= nonzerodigit (["_"] digit)* | "0"+ (["_"] ' - '"0")*\n' - ' bininteger ::= "0" ("b" | "B") (["_"] bindigit)+\n' - ' octinteger ::= "0" ("o" | "O") (["_"] octdigit)+\n' - ' hexinteger ::= "0" ("x" | "X") (["_"] hexdigit)+\n' - ' nonzerodigit ::= "1"..."9"\n' - ' digit ::= "0"..."9"\n' - ' bindigit ::= "0" | "1"\n' - ' octdigit ::= "0"..."7"\n' - ' hexdigit ::= digit | "a"..."f" | "A"..."F"\n' - '\n' - 'There is no limit for the length of integer literals apart from ' - 'what\n' - 'can be stored in available memory.\n' - '\n' - 'Underscores are ignored for determining the numeric value of ' - 'the\n' - 'literal. They can be used to group digits for enhanced ' - 'readability.\n' - 'One underscore can occur between digits, and after base ' - 'specifiers\n' - 'like "0x".\n' - '\n' - 'Note that leading zeros in a non-zero decimal number are not ' - 'allowed.\n' - 'This is for disambiguation with C-style octal literals, which ' - 'Python\n' - 'used before version 3.0.\n' - '\n' - 'Some examples of integer literals:\n' - '\n' - ' 7 2147483647 0o177 0b100110111\n' - ' 3 79228162514264337593543950336 0o377 0xdeadbeef\n' - ' 100_000_000_000 0b_1110_0101\n' - '\n' - 'Changed in version 3.6: Underscores are now allowed for ' - 'grouping\n' - 'purposes in literals.\n', - 'lambda': 'Lambdas\n' - '*******\n' - '\n' - ' lambda_expr ::= "lambda" [parameter_list] ":" expression\n' - '\n' - 'Lambda expressions (sometimes called lambda forms) are used to ' - 'create\n' - 'anonymous functions. The expression "lambda parameters: ' - 'expression"\n' - 'yields a function object. 
The unnamed object behaves like a ' - 'function\n' - 'object defined with:\n' - '\n' - ' def (parameters):\n' - ' return expression\n' - '\n' - 'See section Function definitions for the syntax of parameter ' - 'lists.\n' - 'Note that functions created with lambda expressions cannot ' - 'contain\n' - 'statements or annotations.\n', - 'lists': 'List displays\n' - '*************\n' - '\n' - 'A list display is a possibly empty series of expressions enclosed ' - 'in\n' - 'square brackets:\n' - '\n' - ' list_display ::= "[" [flexible_expression_list | comprehension] ' - '"]"\n' - '\n' - 'A list display yields a new list object, the contents being ' - 'specified\n' - 'by either a list of expressions or a comprehension. When a comma-\n' - 'separated list of expressions is supplied, its elements are ' - 'evaluated\n' - 'from left to right and placed into the list object in that order.\n' - 'When a comprehension is supplied, the list is constructed from the\n' - 'elements resulting from the comprehension.\n', - 'naming': 'Naming and binding\n' - '******************\n' - '\n' - '\n' - 'Binding of names\n' - '================\n' - '\n' - '*Names* refer to objects. Names are introduced by name binding\n' - 'operations.\n' - '\n' - 'The following constructs bind names:\n' - '\n' - '* formal parameters to functions,\n' - '\n' - '* class definitions,\n' - '\n' - '* function definitions,\n' - '\n' - '* assignment expressions,\n' - '\n' - '* targets that are identifiers if occurring in an assignment:\n' - '\n' - ' * "for" loop header,\n' - '\n' - ' * after "as" in a "with" statement, "except" clause, "except*"\n' - ' clause, or in the as-pattern in structural pattern matching,\n' - '\n' - ' * in a capture pattern in structural pattern matching\n' - '\n' - '* "import" statements.\n' - '\n' - '* "type" statements.\n' - '\n' - '* type parameter lists.\n' - '\n' - 'The "import" statement of the form "from ... import *" binds all ' - 'names\n' - 'defined in the imported module, except those beginning with an\n' - 'underscore. This form may only be used at the module level.\n' - '\n' - 'A target occurring in a "del" statement is also considered bound ' - 'for\n' - 'this purpose (though the actual semantics are to unbind the ' - 'name).\n' - '\n' - 'Each assignment or import statement occurs within a block defined ' - 'by a\n' - 'class or function definition or at the module level (the ' - 'top-level\n' - 'code block).\n' - '\n' - 'If a name is bound in a block, it is a local variable of that ' - 'block,\n' - 'unless declared as "nonlocal" or "global". If a name is bound at ' - 'the\n' - 'module level, it is a global variable. (The variables of the ' - 'module\n' - 'code block are local and global.) If a variable is used in a ' - 'code\n' - 'block but not defined there, it is a *free variable*.\n' - '\n' - 'Each occurrence of a name in the program text refers to the ' - '*binding*\n' - 'of that name established by the following name resolution rules.\n' - '\n' - '\n' - 'Resolution of names\n' - '===================\n' - '\n' - 'A *scope* defines the visibility of a name within a block. If a ' - 'local\n' - 'variable is defined in a block, its scope includes that block. If ' - 'the\n' - 'definition occurs in a function block, the scope extends to any ' - 'blocks\n' - 'contained within the defining one, unless a contained block ' - 'introduces\n' - 'a different binding for the name.\n' - '\n' - 'When a name is used in a code block, it is resolved using the ' - 'nearest\n' - 'enclosing scope. 
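Editorial illustration (not part of the generated topics.py text), with hypothetical names, of resolution against the nearest enclosing scope:

    x = "module"

    def outer():
        x = "outer"
        def inner():
            # x is not bound here, so it is a free variable resolved in the
            # nearest enclosing scope that binds it: outer()'s local scope
            return x
        return inner()

    print(outer())   # "outer", not "module"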
The set of all such scopes visible to a code ' - 'block\n' - 'is called the block’s *environment*.\n' - '\n' - 'When a name is not found at all, a "NameError" exception is ' - 'raised. If\n' - 'the current scope is a function scope, and the name refers to a ' - 'local\n' - 'variable that has not yet been bound to a value at the point where ' - 'the\n' - 'name is used, an "UnboundLocalError" exception is raised.\n' - '"UnboundLocalError" is a subclass of "NameError".\n' - '\n' - 'If a name binding operation occurs anywhere within a code block, ' - 'all\n' - 'uses of the name within the block are treated as references to ' - 'the\n' - 'current block. This can lead to errors when a name is used within ' - 'a\n' - 'block before it is bound. This rule is subtle. Python lacks\n' - 'declarations and allows name binding operations to occur anywhere\n' - 'within a code block. The local variables of a code block can be\n' - 'determined by scanning the entire text of the block for name ' - 'binding\n' - 'operations. See the FAQ entry on UnboundLocalError for examples.\n' - '\n' - 'If the "global" statement occurs within a block, all uses of the ' - 'names\n' - 'specified in the statement refer to the bindings of those names in ' - 'the\n' - 'top-level namespace. Names are resolved in the top-level ' - 'namespace by\n' - 'searching the global namespace, i.e. the namespace of the module\n' - 'containing the code block, and the builtins namespace, the ' - 'namespace\n' - 'of the module "builtins". The global namespace is searched ' - 'first. If\n' - 'the names are not found there, the builtins namespace is searched\n' - 'next. If the names are also not found in the builtins namespace, ' - 'new\n' - 'variables are created in the global namespace. The global ' - 'statement\n' - 'must precede all uses of the listed names.\n' - '\n' - 'The "global" statement has the same scope as a name binding ' - 'operation\n' - 'in the same block. If the nearest enclosing scope for a free ' - 'variable\n' - 'contains a global statement, the free variable is treated as a ' - 'global.\n' - '\n' - 'The "nonlocal" statement causes corresponding names to refer to\n' - 'previously bound variables in the nearest enclosing function ' - 'scope.\n' - '"SyntaxError" is raised at compile time if the given name does ' - 'not\n' - 'exist in any enclosing function scope. Type parameters cannot be\n' - 'rebound with the "nonlocal" statement.\n' - '\n' - 'The namespace for a module is automatically created the first time ' - 'a\n' - 'module is imported. The main module for a script is always ' - 'called\n' - '"__main__".\n' - '\n' - 'Class definition blocks and arguments to "exec()" and "eval()" ' - 'are\n' - 'special in the context of name resolution. A class definition is ' - 'an\n' - 'executable statement that may use and define names. These ' - 'references\n' - 'follow the normal rules for name resolution with an exception ' - 'that\n' - 'unbound local variables are looked up in the global namespace. ' - 'The\n' - 'namespace of the class definition becomes the attribute dictionary ' - 'of\n' - 'the class. The scope of names defined in a class block is limited ' - 'to\n' - 'the class block; it does not extend to the code blocks of ' - 'methods.\n' - 'This includes comprehensions and generator expressions, but it ' - 'does\n' - 'not include annotation scopes, which have access to their ' - 'enclosing\n' - 'class scopes. 
This means that the following will fail:\n' - '\n' - ' class A:\n' - ' a = 42\n' - ' b = list(a + i for i in range(10))\n' - '\n' - 'However, the following will succeed:\n' - '\n' - ' class A:\n' - ' type Alias = Nested\n' - ' class Nested: pass\n' - '\n' - " print(A.Alias.__value__) # \n" - '\n' - '\n' - 'Annotation scopes\n' - '=================\n' - '\n' - '*Annotations*, type parameter lists and "type" statements ' - 'introduce\n' - '*annotation scopes*, which behave mostly like function scopes, ' - 'but\n' - 'with some exceptions discussed below.\n' - '\n' - 'Annotation scopes are used in the following contexts:\n' - '\n' - '* *Function annotations*.\n' - '\n' - '* *Variable annotations*.\n' - '\n' - '* Type parameter lists for generic type aliases.\n' - '\n' - '* Type parameter lists for generic functions. A generic ' - 'function’s\n' - ' annotations are executed within the annotation scope, but its\n' - ' defaults and decorators are not.\n' - '\n' - '* Type parameter lists for generic classes. A generic class’s ' - 'base\n' - ' classes and keyword arguments are executed within the ' - 'annotation\n' - ' scope, but its decorators are not.\n' - '\n' - '* The bounds, constraints, and default values for type parameters\n' - ' (lazily evaluated).\n' - '\n' - '* The value of type aliases (lazily evaluated).\n' - '\n' - 'Annotation scopes differ from function scopes in the following ' - 'ways:\n' - '\n' - '* Annotation scopes have access to their enclosing class ' - 'namespace. If\n' - ' an annotation scope is immediately within a class scope, or ' - 'within\n' - ' another annotation scope that is immediately within a class ' - 'scope,\n' - ' the code in the annotation scope can use names defined in the ' - 'class\n' - ' scope as if it were executed directly within the class body. ' - 'This\n' - ' contrasts with regular functions defined within classes, which\n' - ' cannot access names defined in the class scope.\n' - '\n' - '* Expressions in annotation scopes cannot contain "yield", "yield\n' - ' from", "await", or ":=" expressions. (These expressions are ' - 'allowed\n' - ' in other scopes contained within the annotation scope.)\n' - '\n' - '* Names defined in annotation scopes cannot be rebound with ' - '"nonlocal"\n' - ' statements in inner scopes. This includes only type parameters, ' - 'as\n' - ' no other syntactic elements that can appear within annotation ' - 'scopes\n' - ' can introduce new names.\n' - '\n' - '* While annotation scopes have an internal name, that name is not\n' - ' reflected in the *qualified name* of objects defined within the\n' - ' scope. Instead, the "__qualname__" of such objects is as if the\n' - ' object were defined in the enclosing scope.\n' - '\n' - 'Added in version 3.12: Annotation scopes were introduced in ' - 'Python\n' - '3.12 as part of **PEP 695**.\n' - '\n' - 'Changed in version 3.13: Annotation scopes are also used for type\n' - 'parameter defaults, as introduced by **PEP 696**.\n' - '\n' - 'Changed in version 3.14: Annotation scopes are now also used for\n' - 'annotations, as specified in **PEP 649** and **PEP 749**.\n' - '\n' - '\n' - 'Lazy evaluation\n' - '===============\n' - '\n' - 'Most annotation scopes are *lazily evaluated*. This includes\n' - 'annotations, the values of type aliases created through the ' - '"type"\n' - 'statement, and the bounds, constraints, and default values of ' - 'type\n' - 'variables created through the type parameter syntax. 
This means ' - 'that\n' - 'they are not evaluated when the type alias or type variable is\n' - 'created, or when the object carrying annotations is created. ' - 'Instead,\n' - 'they are only evaluated when necessary, for example when the\n' - '"__value__" attribute on a type alias is accessed.\n' - '\n' - 'Example:\n' - '\n' - ' >>> type Alias = 1/0\n' - ' >>> Alias.__value__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - ' >>> def func[T: 1/0](): pass\n' - ' >>> T = func.__type_params__[0]\n' - ' >>> T.__bound__\n' - ' Traceback (most recent call last):\n' - ' ...\n' - ' ZeroDivisionError: division by zero\n' - '\n' - 'Here the exception is raised only when the "__value__" attribute ' - 'of\n' - 'the type alias or the "__bound__" attribute of the type variable ' - 'is\n' - 'accessed.\n' - '\n' - 'This behavior is primarily useful for references to types that ' - 'have\n' - 'not yet been defined when the type alias or type variable is ' - 'created.\n' - 'For example, lazy evaluation enables creation of mutually ' - 'recursive\n' - 'type aliases:\n' - '\n' - ' from typing import Literal\n' - '\n' - ' type SimpleExpr = int | Parenthesized\n' - ' type Parenthesized = tuple[Literal["("], Expr, Literal[")"]]\n' - ' type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", "-"], ' - 'Expr]\n' - '\n' - 'Lazily evaluated values are evaluated in annotation scope, which ' - 'means\n' - 'that names that appear inside the lazily evaluated value are ' - 'looked up\n' - 'as if they were used in the immediately enclosing scope.\n' - '\n' - 'Added in version 3.12.\n' - '\n' - '\n' - 'Builtins and restricted execution\n' - '=================================\n' - '\n' - '**CPython implementation detail:** Users should not touch\n' - '"__builtins__"; it is strictly an implementation detail. Users\n' - 'wanting to override values in the builtins namespace should ' - '"import"\n' - 'the "builtins" module and modify its attributes appropriately.\n' - '\n' - 'The builtins namespace associated with the execution of a code ' - 'block\n' - 'is actually found by looking up the name "__builtins__" in its ' - 'global\n' - 'namespace; this should be a dictionary or a module (in the latter ' - 'case\n' - 'the module’s dictionary is used). By default, when in the ' - '"__main__"\n' - 'module, "__builtins__" is the built-in module "builtins"; when in ' - 'any\n' - 'other module, "__builtins__" is an alias for the dictionary of ' - 'the\n' - '"builtins" module itself.\n' - '\n' - '\n' - 'Interaction with dynamic features\n' - '=================================\n' - '\n' - 'Name resolution of free variables occurs at runtime, not at ' - 'compile\n' - 'time. This means that the following code will print 42:\n' - '\n' - ' i = 10\n' - ' def f():\n' - ' print(i)\n' - ' i = 42\n' - ' f()\n' - '\n' - 'The "eval()" and "exec()" functions do not have access to the ' - 'full\n' - 'environment for resolving names. Names may be resolved in the ' - 'local\n' - 'and global namespaces of the caller. Free variables are not ' - 'resolved\n' - 'in the nearest enclosing namespace, but in the global namespace. ' - '[1]\n' - 'The "exec()" and "eval()" functions have optional arguments to\n' - 'override the global and local namespace. 
If only one namespace ' - 'is\n' - 'specified, it is used for both.\n', - 'nonlocal': 'The "nonlocal" statement\n' - '************************\n' - '\n' - ' nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n' - '\n' - 'When the definition of a function or class is nested (enclosed) ' - 'within\n' - 'the definitions of other functions, its nonlocal scopes are the ' - 'local\n' - 'scopes of the enclosing functions. The "nonlocal" statement ' - 'causes the\n' - 'listed identifiers to refer to names previously bound in ' - 'nonlocal\n' - 'scopes. It allows encapsulated code to rebind such nonlocal\n' - 'identifiers. If a name is bound in more than one nonlocal ' - 'scope, the\n' - 'nearest binding is used. If a name is not bound in any nonlocal ' - 'scope,\n' - 'or if there is no nonlocal scope, a "SyntaxError" is raised.\n' - '\n' - 'The "nonlocal" statement applies to the entire scope of a ' - 'function or\n' - 'class body. A "SyntaxError" is raised if a variable is used or\n' - 'assigned to prior to its nonlocal declaration in the scope.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3104** - Access to Names in Outer Scopes\n' - ' The specification for the "nonlocal" statement.\n' - '\n' - '**Programmer’s note:** "nonlocal" is a directive to the parser ' - 'and\n' - 'applies only to code parsed along with it. See the note for ' - 'the\n' - '"global" statement.\n', - 'numbers': 'Numeric literals\n' - '****************\n' - '\n' - 'There are three types of numeric literals: integers, ' - 'floating-point\n' - 'numbers, and imaginary numbers. There are no complex literals\n' - '(complex numbers can be formed by adding a real number and an\n' - 'imaginary number).\n' - '\n' - 'Note that numeric literals do not include a sign; a phrase like ' - '"-1"\n' - 'is actually an expression composed of the unary operator ‘"-"’ ' - 'and the\n' - 'literal "1".\n', - 'numeric-types': 'Emulating numeric types\n' - '***********************\n' - '\n' - 'The following methods can be defined to emulate numeric ' - 'objects.\n' - 'Methods corresponding to operations that are not supported ' - 'by the\n' - 'particular kind of number implemented (e.g., bitwise ' - 'operations for\n' - 'non-integral numbers) should be left undefined.\n' - '\n' - 'object.__add__(self, other)\n' - 'object.__sub__(self, other)\n' - 'object.__mul__(self, other)\n' - 'object.__matmul__(self, other)\n' - 'object.__truediv__(self, other)\n' - 'object.__floordiv__(self, other)\n' - 'object.__mod__(self, other)\n' - 'object.__divmod__(self, other)\n' - 'object.__pow__(self, other[, modulo])\n' - 'object.__lshift__(self, other)\n' - 'object.__rshift__(self, other)\n' - 'object.__and__(self, other)\n' - 'object.__xor__(self, other)\n' - 'object.__or__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|"). For ' - 'instance, to\n' - ' evaluate the expression "x + y", where *x* is an ' - 'instance of a\n' - ' class that has an "__add__()" method, ' - '"type(x).__add__(x, y)" is\n' - ' called. The "__divmod__()" method should be the ' - 'equivalent to\n' - ' using "__floordiv__()" and "__mod__()"; it should not be ' - 'related to\n' - ' "__truediv__()". 
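Editorial illustration (not part of the generated topics.py text), with a hypothetical class name: a minimal "__add__()" following the convention described here; returning "NotImplemented" for unsupported operands is covered just below.

    class Vector2:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __add__(self, other):
            # "v + w" is evaluated as type(v).__add__(v, w)
            if not isinstance(other, Vector2):
                return NotImplemented    # unsupported operand type
            return Vector2(self.x + other.x, self.y + other.y)

    v = Vector2(1, 2) + Vector2(3, 4)
    print(v.x, v.y)   # 4 6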
Note that "__pow__()" should be ' - 'defined to accept\n' - ' an optional third argument if the ternary version of the ' - 'built-in\n' - ' "pow()" function is to be supported.\n' - '\n' - ' If one of those methods does not support the operation ' - 'with the\n' - ' supplied arguments, it should return "NotImplemented".\n' - '\n' - 'object.__radd__(self, other)\n' - 'object.__rsub__(self, other)\n' - 'object.__rmul__(self, other)\n' - 'object.__rmatmul__(self, other)\n' - 'object.__rtruediv__(self, other)\n' - 'object.__rfloordiv__(self, other)\n' - 'object.__rmod__(self, other)\n' - 'object.__rdivmod__(self, other)\n' - 'object.__rpow__(self, other[, modulo])\n' - 'object.__rlshift__(self, other)\n' - 'object.__rrshift__(self, other)\n' - 'object.__rand__(self, other)\n' - 'object.__rxor__(self, other)\n' - 'object.__ror__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' - '(swapped)\n' - ' operands. These functions are only called if the ' - 'operands are of\n' - ' different types, when the left operand does not support ' - 'the\n' - ' corresponding operation [3], or the right operand’s ' - 'class is\n' - ' derived from the left operand’s class. [4] For instance, ' - 'to\n' - ' evaluate the expression "x - y", where *y* is an ' - 'instance of a\n' - ' class that has an "__rsub__()" method, ' - '"type(y).__rsub__(y, x)" is\n' - ' called if "type(x).__sub__(x, y)" returns ' - '"NotImplemented" or\n' - ' "type(y)" is a subclass of "type(x)". [5]\n' - '\n' - ' Note that ternary "pow()" will not try calling ' - '"__rpow__()" (the\n' - ' coercion rules would become too complicated).\n' - '\n' - ' Note:\n' - '\n' - ' If the right operand’s type is a subclass of the left ' - 'operand’s\n' - ' type and that subclass provides a different ' - 'implementation of the\n' - ' reflected method for the operation, this method will ' - 'be called\n' - ' before the left operand’s non-reflected method. This ' - 'behavior\n' - ' allows subclasses to override their ancestors’ ' - 'operations.\n' - '\n' - 'object.__iadd__(self, other)\n' - 'object.__isub__(self, other)\n' - 'object.__imul__(self, other)\n' - 'object.__imatmul__(self, other)\n' - 'object.__itruediv__(self, other)\n' - 'object.__ifloordiv__(self, other)\n' - 'object.__imod__(self, other)\n' - 'object.__ipow__(self, other[, modulo])\n' - 'object.__ilshift__(self, other)\n' - 'object.__irshift__(self, other)\n' - 'object.__iand__(self, other)\n' - 'object.__ixor__(self, other)\n' - 'object.__ior__(self, other)\n' - '\n' - ' These methods are called to implement the augmented ' - 'arithmetic\n' - ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' - '"**=",\n' - ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' - 'attempt to\n' - ' do the operation in-place (modifying *self*) and return ' - 'the result\n' - ' (which could be, but does not have to be, *self*). If a ' - 'specific\n' - ' method is not defined, or if that method returns ' - '"NotImplemented",\n' - ' the augmented assignment falls back to the normal ' - 'methods. For\n' - ' instance, if *x* is an instance of a class with an ' - '"__iadd__()"\n' - ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . 
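Editorial illustration (not part of the generated topics.py text), with a hypothetical class name, of the equivalence just stated: an in-place "__iadd__()" mutates *self* and returns it, so "+=" rebinds the name to the same object.

    class Accumulator:
        def __init__(self):
            self.total = 0

        def __iadd__(self, other):
            self.total += other   # modify in place...
            return self           # ...and return the result (here, self)

    acc = Accumulator()
    alias = acc
    acc += 10                         # equivalent to acc = acc.__iadd__(10)
    print(acc is alias, acc.total)    # True 10: same object, mutated in place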
' - 'If\n' - ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' - 'returns\n' - ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" ' - 'are\n' - ' considered, as with the evaluation of "x + y". In ' - 'certain\n' - ' situations, augmented assignment can result in ' - 'unexpected errors\n' - ' (see Why does a_tuple[i] += [‘item’] raise an exception ' - 'when the\n' - ' addition works?), but this behavior is in fact part of ' - 'the data\n' - ' model.\n' - '\n' - 'object.__neg__(self)\n' - 'object.__pos__(self)\n' - 'object.__abs__(self)\n' - 'object.__invert__(self)\n' - '\n' - ' Called to implement the unary arithmetic operations ' - '("-", "+",\n' - ' "abs()" and "~").\n' - '\n' - 'object.__complex__(self)\n' - 'object.__int__(self)\n' - 'object.__float__(self)\n' - '\n' - ' Called to implement the built-in functions "complex()", ' - '"int()" and\n' - ' "float()". Should return a value of the appropriate ' - 'type.\n' - '\n' - 'object.__index__(self)\n' - '\n' - ' Called to implement "operator.index()", and whenever ' - 'Python needs\n' - ' to losslessly convert the numeric object to an integer ' - 'object (such\n' - ' as in slicing, or in the built-in "bin()", "hex()" and ' - '"oct()"\n' - ' functions). Presence of this method indicates that the ' - 'numeric\n' - ' object is an integer type. Must return an integer.\n' - '\n' - ' If "__int__()", "__float__()" and "__complex__()" are ' - 'not defined\n' - ' then corresponding built-in functions "int()", "float()" ' - 'and\n' - ' "complex()" fall back to "__index__()".\n' - '\n' - 'object.__round__(self[, ndigits])\n' - 'object.__trunc__(self)\n' - 'object.__floor__(self)\n' - 'object.__ceil__(self)\n' - '\n' - ' Called to implement the built-in function "round()" and ' - '"math"\n' - ' functions "trunc()", "floor()" and "ceil()". Unless ' - '*ndigits* is\n' - ' passed to "__round__()" all these methods should return ' - 'the value\n' - ' of the object truncated to an "Integral" (typically an ' - '"int").\n' - '\n' - ' Changed in version 3.14: "int()" no longer delegates to ' - 'the\n' - ' "__trunc__()" method.\n', - 'objects': 'Objects, values and types\n' - '*************************\n' - '\n' - '*Objects* are Python’s abstraction for data. All data in a ' - 'Python\n' - 'program is represented by objects or by relations between ' - 'objects. (In\n' - 'a sense, and in conformance to Von Neumann’s model of a “stored\n' - 'program computer”, code is also represented by objects.)\n' - '\n' - 'Every object has an identity, a type and a value. An object’s\n' - '*identity* never changes once it has been created; you may think ' - 'of it\n' - 'as the object’s address in memory. The "is" operator compares ' - 'the\n' - 'identity of two objects; the "id()" function returns an integer\n' - 'representing its identity.\n' - '\n' - '**CPython implementation detail:** For CPython, "id(x)" is the ' - 'memory\n' - 'address where "x" is stored.\n' - '\n' - 'An object’s type determines the operations that the object ' - 'supports\n' - '(e.g., “does it have a length?”) and also defines the possible ' - 'values\n' - 'for objects of that type. The "type()" function returns an ' - 'object’s\n' - 'type (which is an object itself). Like its identity, an ' - 'object’s\n' - '*type* is also unchangeable. [1]\n' - '\n' - 'The *value* of some objects can change. Objects whose value can\n' - 'change are said to be *mutable*; objects whose value is ' - 'unchangeable\n' - 'once they are created are called *immutable*. 
(The value of an\n' - 'immutable container object that contains a reference to a ' - 'mutable\n' - 'object can change when the latter’s value is changed; however ' - 'the\n' - 'container is still considered immutable, because the collection ' - 'of\n' - 'objects it contains cannot be changed. So, immutability is not\n' - 'strictly the same as having an unchangeable value, it is more ' - 'subtle.)\n' - 'An object’s mutability is determined by its type; for instance,\n' - 'numbers, strings and tuples are immutable, while dictionaries ' - 'and\n' - 'lists are mutable.\n' - '\n' - 'Objects are never explicitly destroyed; however, when they ' - 'become\n' - 'unreachable they may be garbage-collected. An implementation is\n' - 'allowed to postpone garbage collection or omit it altogether — it ' - 'is a\n' - 'matter of implementation quality how garbage collection is\n' - 'implemented, as long as no objects are collected that are still\n' - 'reachable.\n' - '\n' - '**CPython implementation detail:** CPython currently uses a ' - 'reference-\n' - 'counting scheme with (optional) delayed detection of cyclically ' - 'linked\n' - 'garbage, which collects most objects as soon as they become\n' - 'unreachable, but is not guaranteed to collect garbage containing\n' - 'circular references. See the documentation of the "gc" module ' - 'for\n' - 'information on controlling the collection of cyclic garbage. ' - 'Other\n' - 'implementations act differently and CPython may change. Do not ' - 'depend\n' - 'on immediate finalization of objects when they become unreachable ' - '(so\n' - 'you should always close files explicitly).\n' - '\n' - 'Note that the use of the implementation’s tracing or debugging\n' - 'facilities may keep objects alive that would normally be ' - 'collectable.\n' - 'Also note that catching an exception with a "try"…"except" ' - 'statement\n' - 'may keep objects alive.\n' - '\n' - 'Some objects contain references to “external” resources such as ' - 'open\n' - 'files or windows. It is understood that these resources are ' - 'freed\n' - 'when the object is garbage-collected, but since garbage ' - 'collection is\n' - 'not guaranteed to happen, such objects also provide an explicit ' - 'way to\n' - 'release the external resource, usually a "close()" method. ' - 'Programs\n' - 'are strongly recommended to explicitly close such objects. The\n' - '"try"…"finally" statement and the "with" statement provide ' - 'convenient\n' - 'ways to do this.\n' - '\n' - 'Some objects contain references to other objects; these are ' - 'called\n' - '*containers*. Examples of containers are tuples, lists and\n' - 'dictionaries. The references are part of a container’s value. ' - 'In\n' - 'most cases, when we talk about the value of a container, we imply ' - 'the\n' - 'values, not the identities of the contained objects; however, ' - 'when we\n' - 'talk about the mutability of a container, only the identities of ' - 'the\n' - 'immediately contained objects are implied. So, if an immutable\n' - 'container (like a tuple) contains a reference to a mutable ' - 'object, its\n' - 'value changes if that mutable object is changed.\n' - '\n' - 'Types affect almost all aspects of object behavior. Even the\n' - 'importance of object identity is affected in some sense: for ' - 'immutable\n' - 'types, operations that compute new values may actually return a\n' - 'reference to any existing object with the same type and value, ' - 'while\n' - 'for mutable objects this is not allowed. 
For example, after "a = ' - '1; b\n' - '= 1", *a* and *b* may or may not refer to the same object with ' - 'the\n' - 'value one, depending on the implementation. This is because "int" ' - 'is\n' - 'an immutable type, so the reference to "1" can be reused. This\n' - 'behaviour depends on the implementation used, so should not be ' - 'relied\n' - 'upon, but is something to be aware of when making use of object\n' - 'identity tests. However, after "c = []; d = []", *c* and *d* are\n' - 'guaranteed to refer to two different, unique, newly created ' - 'empty\n' - 'lists. (Note that "e = f = []" assigns the *same* object to both ' - '*e*\n' - 'and *f*.)\n', - 'operator-summary': 'Operator precedence\n' - '*******************\n' - '\n' - 'The following table summarizes the operator precedence ' - 'in Python, from\n' - 'highest precedence (most binding) to lowest precedence ' - '(least\n' - 'binding). Operators in the same box have the same ' - 'precedence. Unless\n' - 'the syntax is explicitly given, operators are binary. ' - 'Operators in\n' - 'the same box group left to right (except for ' - 'exponentiation and\n' - 'conditional expressions, which group from right to ' - 'left).\n' - '\n' - 'Note that comparisons, membership tests, and identity ' - 'tests, all have\n' - 'the same precedence and have a left-to-right chaining ' - 'feature as\n' - 'described in the Comparisons section.\n' - '\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| Operator | ' - 'Description |\n' - '|=================================================|=======================================|\n' - '| "(expressions...)", "[expressions...]", "{key: | ' - 'Binding or parenthesized expression, |\n' - '| value...}", "{expressions...}" | list ' - 'display, dictionary display, set |\n' - '| | ' - 'display |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "x[index]", "x[index:index]", | ' - 'Subscription, slicing, call, |\n' - '| "x(arguments...)", "x.attribute" | ' - 'attribute reference |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "await x" | ' - 'Await expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "**" | ' - 'Exponentiation [5] |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "+x", "-x", "~x" | ' - 'Positive, negative, bitwise NOT |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "*", "@", "/", "//", "%" | ' - 'Multiplication, matrix |\n' - '| | ' - 'multiplication, division, floor |\n' - '| | ' - 'division, remainder [6] |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "+", "-" | ' - 'Addition and subtraction |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "<<", ">>" | ' - 'Shifts |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "&" | ' - 'Bitwise AND |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "^" | ' - 'Bitwise XOR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "|" | ' - 'Bitwise OR |\n' - 
'+-------------------------------------------------+---------------------------------------+\n' - '| "in", "not in", "is", "is not", "<", "<=", ">", | ' - 'Comparisons, including membership |\n' - '| ">=", "!=", "==" | ' - 'tests and identity tests |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "not x" | ' - 'Boolean NOT |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "and" | ' - 'Boolean AND |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "or" | ' - 'Boolean OR |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "if" – "else" | ' - 'Conditional expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| "lambda" | ' - 'Lambda expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '| ":=" | ' - 'Assignment expression |\n' - '+-------------------------------------------------+---------------------------------------+\n' - '\n' - '-[ Footnotes ]-\n' - '\n' - '[1] While "abs(x%y) < abs(y)" is true mathematically, ' - 'for floats it\n' - ' may not be true numerically due to roundoff. For ' - 'example, and\n' - ' assuming a platform on which a Python float is an ' - 'IEEE 754 double-\n' - ' precision number, in order that "-1e-100 % 1e100" ' - 'have the same\n' - ' sign as "1e100", the computed result is "-1e-100 + ' - '1e100", which\n' - ' is numerically exactly equal to "1e100". The ' - 'function\n' - ' "math.fmod()" returns a result whose sign matches ' - 'the sign of the\n' - ' first argument instead, and so returns "-1e-100" in ' - 'this case.\n' - ' Which approach is more appropriate depends on the ' - 'application.\n' - '\n' - '[2] If x is very close to an exact integer multiple of ' - 'y, it’s\n' - ' possible for "x//y" to be one larger than ' - '"(x-x%y)//y" due to\n' - ' rounding. In such cases, Python returns the latter ' - 'result, in\n' - ' order to preserve that "divmod(x,y)[0] * y + x % y" ' - 'be very close\n' - ' to "x".\n' - '\n' - '[3] The Unicode standard distinguishes between *code ' - 'points* (e.g.\n' - ' U+0041) and *abstract characters* (e.g. “LATIN ' - 'CAPITAL LETTER A”).\n' - ' While most abstract characters in Unicode are only ' - 'represented\n' - ' using one code point, there is a number of abstract ' - 'characters\n' - ' that can in addition be represented using a sequence ' - 'of more than\n' - ' one code point. For example, the abstract character ' - '“LATIN\n' - ' CAPITAL LETTER C WITH CEDILLA” can be represented as ' - 'a single\n' - ' *precomposed character* at code position U+00C7, or ' - 'as a sequence\n' - ' of a *base character* at code position U+0043 (LATIN ' - 'CAPITAL\n' - ' LETTER C), followed by a *combining character* at ' - 'code position\n' - ' U+0327 (COMBINING CEDILLA).\n' - '\n' - ' The comparison operators on strings compare at the ' - 'level of\n' - ' Unicode code points. This may be counter-intuitive ' - 'to humans. 
For\n' - ' example, ""\\u00C7" == "\\u0043\\u0327"" is "False", ' - 'even though both\n' - ' strings represent the same abstract character “LATIN ' - 'CAPITAL\n' - ' LETTER C WITH CEDILLA”.\n' - '\n' - ' To compare strings at the level of abstract ' - 'characters (that is,\n' - ' in a way intuitive to humans), use ' - '"unicodedata.normalize()".\n' - '\n' - '[4] Due to automatic garbage-collection, free lists, and ' - 'the dynamic\n' - ' nature of descriptors, you may notice seemingly ' - 'unusual behaviour\n' - ' in certain uses of the "is" operator, like those ' - 'involving\n' - ' comparisons between instance methods, or constants. ' - 'Check their\n' - ' documentation for more info.\n' - '\n' - '[5] The power operator "**" binds less tightly than an ' - 'arithmetic or\n' - ' bitwise unary operator on its right, that is, ' - '"2**-1" is "0.5".\n' - '\n' - '[6] The "%" operator is also used for string formatting; ' - 'the same\n' - ' precedence applies.\n', - 'pass': 'The "pass" statement\n' - '********************\n' - '\n' - ' pass_stmt ::= "pass"\n' - '\n' - '"pass" is a null operation — when it is executed, nothing happens. ' - 'It\n' - 'is useful as a placeholder when a statement is required ' - 'syntactically,\n' - 'but no code needs to be executed, for example:\n' - '\n' - ' def f(arg): pass # a function that does nothing (yet)\n' - '\n' - ' class C: pass # a class with no methods (yet)\n', - 'power': 'The power operator\n' - '******************\n' - '\n' - 'The power operator binds more tightly than unary operators on its\n' - 'left; it binds less tightly than unary operators on its right. ' - 'The\n' - 'syntax is:\n' - '\n' - ' power ::= (await_expr | primary) ["**" u_expr]\n' - '\n' - 'Thus, in an unparenthesized sequence of power and unary operators, ' - 'the\n' - 'operators are evaluated from right to left (this does not ' - 'constrain\n' - 'the evaluation order for the operands): "-1**2" results in "-1".\n' - '\n' - 'The power operator has the same semantics as the built-in "pow()"\n' - 'function, when called with two arguments: it yields its left ' - 'argument\n' - 'raised to the power of its right argument. The numeric arguments ' - 'are\n' - 'first converted to a common type, and the result is of that type.\n' - '\n' - 'For int operands, the result has the same type as the operands ' - 'unless\n' - 'the second argument is negative; in that case, all arguments are\n' - 'converted to float and a float result is delivered. For example,\n' - '"10**2" returns "100", but "10**-2" returns "0.01".\n' - '\n' - 'Raising "0.0" to a negative power results in a ' - '"ZeroDivisionError".\n' - 'Raising a negative number to a fractional power results in a ' - '"complex"\n' - 'number. (In earlier versions it raised a "ValueError".)\n' - '\n' - 'This operation can be customized using the special "__pow__()" and\n' - '"__rpow__()" methods.\n', - 'raise': 'The "raise" statement\n' - '*********************\n' - '\n' - ' raise_stmt ::= "raise" [expression ["from" expression]]\n' - '\n' - 'If no expressions are present, "raise" re-raises the exception that ' - 'is\n' - 'currently being handled, which is also known as the *active\n' - 'exception*. If there isn’t currently an active exception, a\n' - '"RuntimeError" exception is raised indicating that this is an ' - 'error.\n' - '\n' - 'Otherwise, "raise" evaluates the first expression as the exception\n' - 'object. It must be either a subclass or an instance of\n' - '"BaseException". 
If it is a class, the exception instance will be\n' - 'obtained when needed by instantiating the class with no arguments.\n' - '\n' - 'The *type* of the exception is the exception instance’s class, the\n' - '*value* is the instance itself.\n' - '\n' - 'A traceback object is normally created automatically when an ' - 'exception\n' - 'is raised and attached to it as the "__traceback__" attribute. You ' - 'can\n' - 'create an exception and set your own traceback in one step using ' - 'the\n' - '"with_traceback()" exception method (which returns the same ' - 'exception\n' - 'instance, with its traceback set to its argument), like so:\n' - '\n' - ' raise Exception("foo occurred").with_traceback(tracebackobj)\n' - '\n' - 'The "from" clause is used for exception chaining: if given, the ' - 'second\n' - '*expression* must be another exception class or instance. If the\n' - 'second expression is an exception instance, it will be attached to ' - 'the\n' - 'raised exception as the "__cause__" attribute (which is writable). ' - 'If\n' - 'the expression is an exception class, the class will be ' - 'instantiated\n' - 'and the resulting exception instance will be attached to the ' - 'raised\n' - 'exception as the "__cause__" attribute. If the raised exception is ' - 'not\n' - 'handled, both exceptions will be printed:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except Exception as exc:\n' - ' ... raise RuntimeError("Something bad happened") from exc\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 2, in \n' - ' print(1 / 0)\n' - ' ~~^~~\n' - ' ZeroDivisionError: division by zero\n' - '\n' - ' The above exception was the direct cause of the following ' - 'exception:\n' - '\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' raise RuntimeError("Something bad happened") from exc\n' - ' RuntimeError: Something bad happened\n' - '\n' - 'A similar mechanism works implicitly if a new exception is raised ' - 'when\n' - 'an exception is already being handled. An exception may be ' - 'handled\n' - 'when an "except" or "finally" clause, or a "with" statement, is ' - 'used.\n' - 'The previous exception is then attached as the new exception’s\n' - '"__context__" attribute:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except:\n' - ' ... raise RuntimeError("Something bad happened")\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 2, in \n' - ' print(1 / 0)\n' - ' ~~^~~\n' - ' ZeroDivisionError: division by zero\n' - '\n' - ' During handling of the above exception, another exception ' - 'occurred:\n' - '\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' raise RuntimeError("Something bad happened")\n' - ' RuntimeError: Something bad happened\n' - '\n' - 'Exception chaining can be explicitly suppressed by specifying ' - '"None"\n' - 'in the "from" clause:\n' - '\n' - ' >>> try:\n' - ' ... print(1 / 0)\n' - ' ... except:\n' - ' ... 
raise RuntimeError("Something bad happened") from None\n' - ' ...\n' - ' Traceback (most recent call last):\n' - ' File "", line 4, in \n' - ' RuntimeError: Something bad happened\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information about handling exceptions is in ' - 'section\n' - 'The try statement.\n' - '\n' - 'Changed in version 3.3: "None" is now permitted as "Y" in "raise X\n' - 'from Y".Added the "__suppress_context__" attribute to suppress\n' - 'automatic display of the exception context.\n' - '\n' - 'Changed in version 3.11: If the traceback of the active exception ' - 'is\n' - 'modified in an "except" clause, a subsequent "raise" statement re-\n' - 'raises the exception with the modified traceback. Previously, the\n' - 'exception was re-raised with the traceback it had when it was ' - 'caught.\n', - 'return': 'The "return" statement\n' - '**********************\n' - '\n' - ' return_stmt ::= "return" [expression_list]\n' - '\n' - '"return" may only occur syntactically nested in a function ' - 'definition,\n' - 'not within a nested class definition.\n' - '\n' - 'If an expression list is present, it is evaluated, else "None" is\n' - 'substituted.\n' - '\n' - '"return" leaves the current function call with the expression list ' - '(or\n' - '"None") as return value.\n' - '\n' - 'When "return" passes control out of a "try" statement with a ' - '"finally"\n' - 'clause, that "finally" clause is executed before really leaving ' - 'the\n' - 'function.\n' - '\n' - 'In a generator function, the "return" statement indicates that ' - 'the\n' - 'generator is done and will cause "StopIteration" to be raised. ' - 'The\n' - 'returned value (if any) is used as an argument to construct\n' - '"StopIteration" and becomes the "StopIteration.value" attribute.\n' - '\n' - 'In an asynchronous generator function, an empty "return" ' - 'statement\n' - 'indicates that the asynchronous generator is done and will cause\n' - '"StopAsyncIteration" to be raised. A non-empty "return" statement ' - 'is\n' - 'a syntax error in an asynchronous generator function.\n', - 'sequence-types': 'Emulating container types\n' - '*************************\n' - '\n' - 'The following methods can be defined to implement ' - 'container objects.\n' - 'None of them are provided by the "object" class itself. ' - 'Containers\n' - 'usually are *sequences* (such as "lists" or "tuples") or ' - '*mappings*\n' - '(like *dictionaries*), but can represent other containers ' - 'as well.\n' - 'The first set of methods is used either to emulate a ' - 'sequence or to\n' - 'emulate a mapping; the difference is that for a sequence, ' - 'the\n' - 'allowable keys should be the integers *k* for which "0 <= ' - 'k < N" where\n' - '*N* is the length of the sequence, or "slice" objects, ' - 'which define a\n' - 'range of items. 
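Before continuing with container emulation, here is a small illustrative sketch of the generator behaviour of "return" described in the entry above; the produce() function is invented for the example:

    def produce():
        yield 1
        return "done"              # becomes StopIteration.value

    gen = produce()
    print(next(gen))               # 1
    try:
        next(gen)
    except StopIteration as exc:
        print(exc.value)           # done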
It is also recommended that mappings ' - 'provide the\n' - 'methods "keys()", "values()", "items()", "get()", ' - '"clear()",\n' - '"setdefault()", "pop()", "popitem()", "copy()", and ' - '"update()"\n' - 'behaving similar to those for Python’s standard ' - '"dictionary" objects.\n' - 'The "collections.abc" module provides a "MutableMapping" ' - '*abstract\n' - 'base class* to help create those methods from a base set ' - 'of\n' - '"__getitem__()", "__setitem__()", "__delitem__()", and ' - '"keys()".\n' - 'Mutable sequences should provide methods "append()", ' - '"count()",\n' - '"index()", "extend()", "insert()", "pop()", "remove()", ' - '"reverse()"\n' - 'and "sort()", like Python standard "list" objects. ' - 'Finally, sequence\n' - 'types should implement addition (meaning concatenation) ' - 'and\n' - 'multiplication (meaning repetition) by defining the ' - 'methods\n' - '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' - '"__rmul__()" and\n' - '"__imul__()" described below; they should not define other ' - 'numerical\n' - 'operators. It is recommended that both mappings and ' - 'sequences\n' - 'implement the "__contains__()" method to allow efficient ' - 'use of the\n' - '"in" operator; for mappings, "in" should search the ' - 'mapping’s keys;\n' - 'for sequences, it should search through the values. It is ' - 'further\n' - 'recommended that both mappings and sequences implement ' - 'the\n' - '"__iter__()" method to allow efficient iteration through ' - 'the\n' - 'container; for mappings, "__iter__()" should iterate ' - 'through the\n' - 'object’s keys; for sequences, it should iterate through ' - 'the values.\n' - '\n' - 'object.__len__(self)\n' - '\n' - ' Called to implement the built-in function "len()". ' - 'Should return\n' - ' the length of the object, an integer ">=" 0. Also, an ' - 'object that\n' - ' doesn’t define a "__bool__()" method and whose ' - '"__len__()" method\n' - ' returns zero is considered to be false in a Boolean ' - 'context.\n' - '\n' - ' **CPython implementation detail:** In CPython, the ' - 'length is\n' - ' required to be at most "sys.maxsize". If the length is ' - 'larger than\n' - ' "sys.maxsize" some features (such as "len()") may ' - 'raise\n' - ' "OverflowError". To prevent raising "OverflowError" by ' - 'truth value\n' - ' testing, an object must define a "__bool__()" method.\n' - '\n' - 'object.__length_hint__(self)\n' - '\n' - ' Called to implement "operator.length_hint()". Should ' - 'return an\n' - ' estimated length for the object (which may be greater ' - 'or less than\n' - ' the actual length). The length must be an integer ">=" ' - '0. The\n' - ' return value may also be "NotImplemented", which is ' - 'treated the\n' - ' same as if the "__length_hint__" method didn’t exist at ' - 'all. This\n' - ' method is purely an optimization and is never required ' - 'for\n' - ' correctness.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - 'Note:\n' - '\n' - ' Slicing is done exclusively with the following three ' - 'methods. A\n' - ' call like\n' - '\n' - ' a[1:2] = b\n' - '\n' - ' is translated to\n' - '\n' - ' a[slice(1, 2, None)] = b\n' - '\n' - ' and so forth. Missing slice items are always filled in ' - 'with "None".\n' - '\n' - 'object.__getitem__(self, key)\n' - '\n' - ' Called to implement evaluation of "self[key]". For ' - '*sequence*\n' - ' types, the accepted keys should be integers. ' - 'Optionally, they may\n' - ' support "slice" objects as well. Negative index ' - 'support is also\n' - ' optional. 
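A minimal sequence sketch along the lines just described, assuming nothing beyond what the text above states (the Squares class is invented for the example): it defines "__len__()" and "__getitem__()", raises "IndexError" for out-of-range indexes so that "for" loops terminate, and optionally accepts negative indexes and "slice" objects.

    class Squares:
        """Read-only sequence of the first n perfect squares (illustrative sketch)."""

        def __init__(self, n):
            self._n = n

        def __len__(self):
            return self._n

        def __getitem__(self, index):
            if isinstance(index, slice):              # optional slice support
                return [self[i] for i in range(*index.indices(self._n))]
            if index < 0:                             # optional negative-index support
                index += self._n
            if not 0 <= index < self._n:
                raise IndexError(index)               # lets iteration detect the end
            return index * index

    s = Squares(5)
    print(len(s), s[2], s[-1], s[1:4], list(s))       # 5 4 16 [1, 4, 9] [0, 1, 4, 9, 16]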
If *key* is of an inappropriate type, ' - '"TypeError" may be\n' - ' raised; if *key* is a value outside the set of indexes ' - 'for the\n' - ' sequence (after any special interpretation of negative ' - 'values),\n' - ' "IndexError" should be raised. For *mapping* types, if ' - '*key* is\n' - ' missing (not in the container), "KeyError" should be ' - 'raised.\n' - '\n' - ' Note:\n' - '\n' - ' "for" loops expect that an "IndexError" will be ' - 'raised for\n' - ' illegal indexes to allow proper detection of the end ' - 'of the\n' - ' sequence.\n' - '\n' - ' Note:\n' - '\n' - ' When subscripting a *class*, the special class ' - 'method\n' - ' "__class_getitem__()" may be called instead of ' - '"__getitem__()".\n' - ' See __class_getitem__ versus __getitem__ for more ' - 'details.\n' - '\n' - 'object.__setitem__(self, key, value)\n' - '\n' - ' Called to implement assignment to "self[key]". Same ' - 'note as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support changes to the values for keys, or ' - 'if new keys\n' - ' can be added, or for sequences if elements can be ' - 'replaced. The\n' - ' same exceptions should be raised for improper *key* ' - 'values as for\n' - ' the "__getitem__()" method.\n' - '\n' - 'object.__delitem__(self, key)\n' - '\n' - ' Called to implement deletion of "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support removal of keys, or for sequences ' - 'if elements\n' - ' can be removed from the sequence. The same exceptions ' - 'should be\n' - ' raised for improper *key* values as for the ' - '"__getitem__()" method.\n' - '\n' - 'object.__missing__(self, key)\n' - '\n' - ' Called by "dict"."__getitem__()" to implement ' - '"self[key]" for dict\n' - ' subclasses when key is not in the dictionary.\n' - '\n' - 'object.__iter__(self)\n' - '\n' - ' This method is called when an *iterator* is required ' - 'for a\n' - ' container. This method should return a new iterator ' - 'object that can\n' - ' iterate over all the objects in the container. For ' - 'mappings, it\n' - ' should iterate over the keys of the container.\n' - '\n' - 'object.__reversed__(self)\n' - '\n' - ' Called (if present) by the "reversed()" built-in to ' - 'implement\n' - ' reverse iteration. It should return a new iterator ' - 'object that\n' - ' iterates over all the objects in the container in ' - 'reverse order.\n' - '\n' - ' If the "__reversed__()" method is not provided, the ' - '"reversed()"\n' - ' built-in will fall back to using the sequence protocol ' - '("__len__()"\n' - ' and "__getitem__()"). Objects that support the ' - 'sequence protocol\n' - ' should only provide "__reversed__()" if they can ' - 'provide an\n' - ' implementation that is more efficient than the one ' - 'provided by\n' - ' "reversed()".\n' - '\n' - 'The membership test operators ("in" and "not in") are ' - 'normally\n' - 'implemented as an iteration through a container. However, ' - 'container\n' - 'objects can supply the following special method with a ' - 'more efficient\n' - 'implementation, which also does not require the object be ' - 'iterable.\n' - '\n' - 'object.__contains__(self, item)\n' - '\n' - ' Called to implement membership test operators. Should ' - 'return true\n' - ' if *item* is in *self*, false otherwise. 
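To illustrate the mapping-flavoured recommendations above (iteration and membership tests working on keys), here is a small sketch; LowerDict and its behaviour are invented for the example, not taken from the documentation text:

    class LowerDict:
        """Mapping-like wrapper that normalises keys to lower case (sketch)."""

        def __init__(self, data):
            self._data = {k.lower(): v for k, v in data.items()}

        def __getitem__(self, key):
            return self._data[key.lower()]

        def __iter__(self):                 # iterate over keys, as mappings should
            return iter(self._data)

        def __contains__(self, key):        # membership tests consider keys only
            return key.lower() in self._data

    d = LowerDict({"Spam": 1})
    print("SPAM" in d, list(d), d["spam"])   # True ['spam'] 1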
For mapping ' - 'objects, this\n' - ' should consider the keys of the mapping rather than the ' - 'values or\n' - ' the key-item pairs.\n' - '\n' - ' For objects that don’t define "__contains__()", the ' - 'membership test\n' - ' first tries iteration via "__iter__()", then the old ' - 'sequence\n' - ' iteration protocol via "__getitem__()", see this ' - 'section in the\n' - ' language reference.\n', - 'shifting': 'Shifting operations\n' - '*******************\n' - '\n' - 'The shifting operations have lower priority than the arithmetic\n' - 'operations:\n' - '\n' - ' shift_expr ::= a_expr | shift_expr ("<<" | ">>") a_expr\n' - '\n' - 'These operators accept integers as arguments. They shift the ' - 'first\n' - 'argument to the left or right by the number of bits given by ' - 'the\n' - 'second argument.\n' - '\n' - 'The left shift operation can be customized using the special\n' - '"__lshift__()" and "__rlshift__()" methods. The right shift ' - 'operation\n' - 'can be customized using the special "__rshift__()" and ' - '"__rrshift__()"\n' - 'methods.\n' - '\n' - 'A right shift by *n* bits is defined as floor division by ' - '"pow(2,n)".\n' - 'A left shift by *n* bits is defined as multiplication with ' - '"pow(2,n)".\n', - 'slicings': 'Slicings\n' - '********\n' - '\n' - 'A slicing selects a range of items in a sequence object (e.g., ' - 'a\n' - 'string, tuple or list). Slicings may be used as expressions or ' - 'as\n' - 'targets in assignment or "del" statements. The syntax for a ' - 'slicing:\n' - '\n' - ' slicing ::= primary "[" slice_list "]"\n' - ' slice_list ::= slice_item ("," slice_item)* [","]\n' - ' slice_item ::= expression | proper_slice\n' - ' proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" ' - '[stride] ]\n' - ' lower_bound ::= expression\n' - ' upper_bound ::= expression\n' - ' stride ::= expression\n' - '\n' - 'There is ambiguity in the formal syntax here: anything that ' - 'looks like\n' - 'an expression list also looks like a slice list, so any ' - 'subscription\n' - 'can be interpreted as a slicing. Rather than further ' - 'complicating the\n' - 'syntax, this is disambiguated by defining that in this case the\n' - 'interpretation as a subscription takes priority over the\n' - 'interpretation as a slicing (this is the case if the slice list\n' - 'contains no proper slice).\n' - '\n' - 'The semantics for a slicing are as follows. The primary is ' - 'indexed\n' - '(using the same "__getitem__()" method as normal subscription) ' - 'with a\n' - 'key that is constructed from the slice list, as follows. If the ' - 'slice\n' - 'list contains at least one comma, the key is a tuple containing ' - 'the\n' - 'conversion of the slice items; otherwise, the conversion of the ' - 'lone\n' - 'slice item is the key. The conversion of a slice item that is ' - 'an\n' - 'expression is that expression. The conversion of a proper slice ' - 'is a\n' - 'slice object (see section The standard type hierarchy) whose ' - '"start",\n' - '"stop" and "step" attributes are the values of the expressions ' - 'given\n' - 'as lower bound, upper bound and stride, respectively, ' - 'substituting\n' - '"None" for missing expressions.\n', - 'specialattrs': 'Special Attributes\n' - '******************\n' - '\n' - 'The implementation adds a few special read-only attributes ' - 'to several\n' - 'object types, where they are relevant. 
Some of these are ' - 'not reported\n' - 'by the "dir()" built-in function.\n' - '\n' - 'definition.__name__\n' - '\n' - ' The name of the class, function, method, descriptor, or ' - 'generator\n' - ' instance.\n' - '\n' - 'definition.__qualname__\n' - '\n' - ' The *qualified name* of the class, function, method, ' - 'descriptor, or\n' - ' generator instance.\n' - '\n' - ' Added in version 3.3.\n' - '\n' - 'definition.__module__\n' - '\n' - ' The name of the module in which a class or function was ' - 'defined.\n' - '\n' - 'definition.__doc__\n' - '\n' - ' The documentation string of a class or function, or ' - '"None" if\n' - ' undefined.\n' - '\n' - 'definition.__type_params__\n' - '\n' - ' The type parameters of generic classes, functions, and ' - 'type\n' - ' aliases. For classes and functions that are not generic, ' - 'this will\n' - ' be an empty tuple.\n' - '\n' - ' Added in version 3.12.\n', - 'specialnames': 'Special method names\n' - '********************\n' - '\n' - 'A class can implement certain operations that are invoked by ' - 'special\n' - 'syntax (such as arithmetic operations or subscripting and ' - 'slicing) by\n' - 'defining methods with special names. This is Python’s ' - 'approach to\n' - '*operator overloading*, allowing classes to define their own ' - 'behavior\n' - 'with respect to language operators. For instance, if a ' - 'class defines\n' - 'a method named "__getitem__()", and "x" is an instance of ' - 'this class,\n' - 'then "x[i]" is roughly equivalent to "type(x).__getitem__(x, ' - 'i)".\n' - 'Except where mentioned, attempts to execute an operation ' - 'raise an\n' - 'exception when no appropriate method is defined (typically\n' - '"AttributeError" or "TypeError").\n' - '\n' - 'Setting a special method to "None" indicates that the ' - 'corresponding\n' - 'operation is not available. For example, if a class sets ' - '"__iter__()"\n' - 'to "None", the class is not iterable, so calling "iter()" on ' - 'its\n' - 'instances will raise a "TypeError" (without falling back to\n' - '"__getitem__()"). [2]\n' - '\n' - 'When implementing a class that emulates any built-in type, ' - 'it is\n' - 'important that the emulation only be implemented to the ' - 'degree that it\n' - 'makes sense for the object being modelled. For example, ' - 'some\n' - 'sequences may work well with retrieval of individual ' - 'elements, but\n' - 'extracting a slice may not make sense. (One example of this ' - 'is the\n' - '"NodeList" interface in the W3C’s Document Object Model.)\n' - '\n' - '\n' - 'Basic customization\n' - '===================\n' - '\n' - 'object.__new__(cls[, ...])\n' - '\n' - ' Called to create a new instance of class *cls*. ' - '"__new__()" is a\n' - ' static method (special-cased so you need not declare it ' - 'as such)\n' - ' that takes the class of which an instance was requested ' - 'as its\n' - ' first argument. The remaining arguments are those passed ' - 'to the\n' - ' object constructor expression (the call to the class). 
' - 'The return\n' - ' value of "__new__()" should be the new object instance ' - '(usually an\n' - ' instance of *cls*).\n' - '\n' - ' Typical implementations create a new instance of the ' - 'class by\n' - ' invoking the superclass’s "__new__()" method using\n' - ' "super().__new__(cls[, ...])" with appropriate arguments ' - 'and then\n' - ' modifying the newly created instance as necessary before ' - 'returning\n' - ' it.\n' - '\n' - ' If "__new__()" is invoked during object construction and ' - 'it returns\n' - ' an instance of *cls*, then the new instance’s ' - '"__init__()" method\n' - ' will be invoked like "__init__(self[, ...])", where ' - '*self* is the\n' - ' new instance and the remaining arguments are the same as ' - 'were\n' - ' passed to the object constructor.\n' - '\n' - ' If "__new__()" does not return an instance of *cls*, then ' - 'the new\n' - ' instance’s "__init__()" method will not be invoked.\n' - '\n' - ' "__new__()" is intended mainly to allow subclasses of ' - 'immutable\n' - ' types (like int, str, or tuple) to customize instance ' - 'creation. It\n' - ' is also commonly overridden in custom metaclasses in ' - 'order to\n' - ' customize class creation.\n' - '\n' - 'object.__init__(self[, ...])\n' - '\n' - ' Called after the instance has been created (by ' - '"__new__()"), but\n' - ' before it is returned to the caller. The arguments are ' - 'those\n' - ' passed to the class constructor expression. If a base ' - 'class has an\n' - ' "__init__()" method, the derived class’s "__init__()" ' - 'method, if\n' - ' any, must explicitly call it to ensure proper ' - 'initialization of the\n' - ' base class part of the instance; for example:\n' - ' "super().__init__([args...])".\n' - '\n' - ' Because "__new__()" and "__init__()" work together in ' - 'constructing\n' - ' objects ("__new__()" to create it, and "__init__()" to ' - 'customize\n' - ' it), no non-"None" value may be returned by "__init__()"; ' - 'doing so\n' - ' will cause a "TypeError" to be raised at runtime.\n' - '\n' - 'object.__del__(self)\n' - '\n' - ' Called when the instance is about to be destroyed. This ' - 'is also\n' - ' called a finalizer or (improperly) a destructor. If a ' - 'base class\n' - ' has a "__del__()" method, the derived class’s "__del__()" ' - 'method,\n' - ' if any, must explicitly call it to ensure proper deletion ' - 'of the\n' - ' base class part of the instance.\n' - '\n' - ' It is possible (though not recommended!) for the ' - '"__del__()" method\n' - ' to postpone destruction of the instance by creating a new ' - 'reference\n' - ' to it. This is called object *resurrection*. It is\n' - ' implementation-dependent whether "__del__()" is called a ' - 'second\n' - ' time when a resurrected object is about to be destroyed; ' - 'the\n' - ' current *CPython* implementation only calls it once.\n' - '\n' - ' It is not guaranteed that "__del__()" methods are called ' - 'for\n' - ' objects that still exist when the interpreter exits.\n' - ' "weakref.finalize" provides a straightforward way to ' - 'register a\n' - ' cleanup function to be called when an object is garbage ' - 'collected.\n' - '\n' - ' Note:\n' - '\n' - ' "del x" doesn’t directly call "x.__del__()" — the ' - 'former\n' - ' decrements the reference count for "x" by one, and the ' - 'latter is\n' - ' only called when "x"’s reference count reaches zero.\n' - '\n' - ' **CPython implementation detail:** It is possible for a ' - 'reference\n' - ' cycle to prevent the reference count of an object from ' - 'going to\n' - ' zero. 
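A compact sketch of the "__new__()"/"__init__()" interplay described above, using an immutable built-in as the base; the Celsius class is invented for the example:

    class Celsius(float):
        """float subclass customised in __new__, since floats are immutable (sketch)."""

        def __new__(cls, degrees):
            return super().__new__(cls, degrees)

        def __init__(self, degrees):
            # __init__ runs after __new__ and must not return a non-None value
            self.unit = "C"

    t = Celsius(21.5)
    print(t, t.unit, isinstance(t, float))   # 21.5 C True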
In this case, the cycle will be later detected and ' - 'deleted\n' - ' by the *cyclic garbage collector*. A common cause of ' - 'reference\n' - ' cycles is when an exception has been caught in a local ' - 'variable.\n' - ' The frame’s locals then reference the exception, which ' - 'references\n' - ' its own traceback, which references the locals of all ' - 'frames caught\n' - ' in the traceback.\n' - '\n' - ' See also: Documentation for the "gc" module.\n' - '\n' - ' Warning:\n' - '\n' - ' Due to the precarious circumstances under which ' - '"__del__()"\n' - ' methods are invoked, exceptions that occur during their ' - 'execution\n' - ' are ignored, and a warning is printed to "sys.stderr" ' - 'instead.\n' - ' In particular:\n' - '\n' - ' * "__del__()" can be invoked when arbitrary code is ' - 'being\n' - ' executed, including from any arbitrary thread. If ' - '"__del__()"\n' - ' needs to take a lock or invoke any other blocking ' - 'resource, it\n' - ' may deadlock as the resource may already be taken by ' - 'the code\n' - ' that gets interrupted to execute "__del__()".\n' - '\n' - ' * "__del__()" can be executed during interpreter ' - 'shutdown. As a\n' - ' consequence, the global variables it needs to access ' - '(including\n' - ' other modules) may already have been deleted or set ' - 'to "None".\n' - ' Python guarantees that globals whose name begins with ' - 'a single\n' - ' underscore are deleted from their module before other ' - 'globals\n' - ' are deleted; if no other references to such globals ' - 'exist, this\n' - ' may help in assuring that imported modules are still ' - 'available\n' - ' at the time when the "__del__()" method is called.\n' - '\n' - 'object.__repr__(self)\n' - '\n' - ' Called by the "repr()" built-in function to compute the ' - '“official”\n' - ' string representation of an object. If at all possible, ' - 'this\n' - ' should look like a valid Python expression that could be ' - 'used to\n' - ' recreate an object with the same value (given an ' - 'appropriate\n' - ' environment). If this is not possible, a string of the ' - 'form\n' - ' "<...some useful description...>" should be returned. The ' - 'return\n' - ' value must be a string object. If a class defines ' - '"__repr__()" but\n' - ' not "__str__()", then "__repr__()" is also used when an ' - '“informal”\n' - ' string representation of instances of that class is ' - 'required.\n' - '\n' - ' This is typically used for debugging, so it is important ' - 'that the\n' - ' representation is information-rich and unambiguous. A ' - 'default\n' - ' implementation is provided by the "object" class itself.\n' - '\n' - 'object.__str__(self)\n' - '\n' - ' Called by "str(object)", the default "__format__()" ' - 'implementation,\n' - ' and the built-in function "print()", to compute the ' - '“informal” or\n' - ' nicely printable string representation of an object. The ' - 'return\n' - ' value must be a str object.\n' - '\n' - ' This method differs from "object.__repr__()" in that ' - 'there is no\n' - ' expectation that "__str__()" return a valid Python ' - 'expression: a\n' - ' more convenient or concise representation can be used.\n' - '\n' - ' The default implementation defined by the built-in type ' - '"object"\n' - ' calls "object.__repr__()".\n' - '\n' - 'object.__bytes__(self)\n' - '\n' - ' Called by bytes to compute a byte-string representation ' - 'of an\n' - ' object. This should return a "bytes" object. 
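The string-conversion hooks described above can be seen together in one small sketch (the Point class is invented for illustration): "__repr__()" aims at an unambiguous, debugger-friendly form, "__str__()" at a readable one, and "__bytes__()" supplies the result of "bytes(p)".

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

        def __repr__(self):                      # unambiguous, for debugging
            return f"Point(x={self.x!r}, y={self.y!r})"

        def __str__(self):                       # readable, for display
            return f"({self.x}, {self.y})"

        def __bytes__(self):                     # used by bytes(p)
            return str(self).encode("ascii")

    p = Point(1, 2)
    print(repr(p), str(p), bytes(p))             # Point(x=1, y=2) (1, 2) b'(1, 2)'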
The "object" ' - 'class\n' - ' itself does not provide this method.\n' - '\n' - 'object.__format__(self, format_spec)\n' - '\n' - ' Called by the "format()" built-in function, and by ' - 'extension,\n' - ' evaluation of formatted string literals and the ' - '"str.format()"\n' - ' method, to produce a “formatted” string representation of ' - 'an\n' - ' object. The *format_spec* argument is a string that ' - 'contains a\n' - ' description of the formatting options desired. The ' - 'interpretation\n' - ' of the *format_spec* argument is up to the type ' - 'implementing\n' - ' "__format__()", however most classes will either ' - 'delegate\n' - ' formatting to one of the built-in types, or use a ' - 'similar\n' - ' formatting option syntax.\n' - '\n' - ' See Format Specification Mini-Language for a description ' - 'of the\n' - ' standard formatting syntax.\n' - '\n' - ' The return value must be a string object.\n' - '\n' - ' The default implementation by the "object" class should ' - 'be given an\n' - ' empty *format_spec* string. It delegates to "__str__()".\n' - '\n' - ' Changed in version 3.4: The __format__ method of "object" ' - 'itself\n' - ' raises a "TypeError" if passed any non-empty string.\n' - '\n' - ' Changed in version 3.7: "object.__format__(x, \'\')" is ' - 'now\n' - ' equivalent to "str(x)" rather than "format(str(x), ' - '\'\')".\n' - '\n' - 'object.__lt__(self, other)\n' - 'object.__le__(self, other)\n' - 'object.__eq__(self, other)\n' - 'object.__ne__(self, other)\n' - 'object.__gt__(self, other)\n' - 'object.__ge__(self, other)\n' - '\n' - ' These are the so-called “rich comparison” methods. The\n' - ' correspondence between operator symbols and method names ' - 'is as\n' - ' follows: "xy" calls\n' - ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' - '\n' - ' A rich comparison method may return the singleton ' - '"NotImplemented"\n' - ' if it does not implement the operation for a given pair ' - 'of\n' - ' arguments. By convention, "False" and "True" are returned ' - 'for a\n' - ' successful comparison. However, these methods can return ' - 'any value,\n' - ' so if the comparison operator is used in a Boolean ' - 'context (e.g.,\n' - ' in the condition of an "if" statement), Python will call ' - '"bool()"\n' - ' on the value to determine if the result is true or ' - 'false.\n' - '\n' - ' By default, "object" implements "__eq__()" by using "is", ' - 'returning\n' - ' "NotImplemented" in the case of a false comparison: "True ' - 'if x is y\n' - ' else NotImplemented". For "__ne__()", by default it ' - 'delegates to\n' - ' "__eq__()" and inverts the result unless it is ' - '"NotImplemented".\n' - ' There are no other implied relationships among the ' - 'comparison\n' - ' operators or default implementations; for example, the ' - 'truth of\n' - ' "(x.__hash__".\n' - '\n' - ' If a class that does not override "__eq__()" wishes to ' - 'suppress\n' - ' hash support, it should include "__hash__ = None" in the ' - 'class\n' - ' definition. A class which defines its own "__hash__()" ' - 'that\n' - ' explicitly raises a "TypeError" would be incorrectly ' - 'identified as\n' - ' hashable by an "isinstance(obj, ' - 'collections.abc.Hashable)" call.\n' - '\n' - ' Note:\n' - '\n' - ' By default, the "__hash__()" values of str and bytes ' - 'objects are\n' - ' “salted” with an unpredictable random value. 
Although ' - 'they\n' - ' remain constant within an individual Python process, ' - 'they are not\n' - ' predictable between repeated invocations of Python.This ' - 'is\n' - ' intended to provide protection against a ' - 'denial-of-service caused\n' - ' by carefully chosen inputs that exploit the worst case\n' - ' performance of a dict insertion, *O*(*n*^2) ' - 'complexity. See\n' - ' http://ocert.org/advisories/ocert-2011-003.html for\n' - ' details.Changing hash values affects the iteration ' - 'order of sets.\n' - ' Python has never made guarantees about this ordering ' - '(and it\n' - ' typically varies between 32-bit and 64-bit builds).See ' - 'also\n' - ' "PYTHONHASHSEED".\n' - '\n' - ' Changed in version 3.3: Hash randomization is enabled by ' - 'default.\n' - '\n' - 'object.__bool__(self)\n' - '\n' - ' Called to implement truth value testing and the built-in ' - 'operation\n' - ' "bool()"; should return "False" or "True". When this ' - 'method is not\n' - ' defined, "__len__()" is called, if it is defined, and the ' - 'object is\n' - ' considered true if its result is nonzero. If a class ' - 'defines\n' - ' neither "__len__()" nor "__bool__()" (which is true of ' - 'the "object"\n' - ' class itself), all its instances are considered true.\n' - '\n' - '\n' - 'Customizing attribute access\n' - '============================\n' - '\n' - 'The following methods can be defined to customize the ' - 'meaning of\n' - 'attribute access (use of, assignment to, or deletion of ' - '"x.name") for\n' - 'class instances.\n' - '\n' - 'object.__getattr__(self, name)\n' - '\n' - ' Called when the default attribute access fails with an\n' - ' "AttributeError" (either "__getattribute__()" raises an\n' - ' "AttributeError" because *name* is not an instance ' - 'attribute or an\n' - ' attribute in the class tree for "self"; or "__get__()" of ' - 'a *name*\n' - ' property raises "AttributeError"). This method should ' - 'either\n' - ' return the (computed) attribute value or raise an ' - '"AttributeError"\n' - ' exception. The "object" class itself does not provide ' - 'this method.\n' - '\n' - ' Note that if the attribute is found through the normal ' - 'mechanism,\n' - ' "__getattr__()" is not called. (This is an intentional ' - 'asymmetry\n' - ' between "__getattr__()" and "__setattr__()".) This is ' - 'done both for\n' - ' efficiency reasons and because otherwise "__getattr__()" ' - 'would have\n' - ' no way to access other attributes of the instance. Note ' - 'that at\n' - ' least for instance variables, you can take total control ' - 'by not\n' - ' inserting any values in the instance attribute dictionary ' - '(but\n' - ' instead inserting them in another object). See the\n' - ' "__getattribute__()" method below for a way to actually ' - 'get total\n' - ' control over attribute access.\n' - '\n' - 'object.__getattribute__(self, name)\n' - '\n' - ' Called unconditionally to implement attribute accesses ' - 'for\n' - ' instances of the class. If the class also defines ' - '"__getattr__()",\n' - ' the latter will not be called unless "__getattribute__()" ' - 'either\n' - ' calls it explicitly or raises an "AttributeError". This ' - 'method\n' - ' should return the (computed) attribute value or raise an\n' - ' "AttributeError" exception. 
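A short sketch of the "__getattr__()" behaviour described above (the Proxy class is invented for illustration): because the hook only runs when normal lookup fails, attributes stored in the instance dictionary are found directly and no recursion occurs.

    class Proxy:
        """Delegates unknown attribute lookups to a wrapped object (sketch)."""

        def __init__(self, target):
            self._target = target

        def __getattr__(self, name):
            # Only called when normal lookup fails, so '_target' is found normally
            return getattr(self._target, name)

    p = Proxy([1, 2, 3])
    p.append(4)             # 'append' is resolved via __getattr__
    print(p._target)        # [1, 2, 3, 4]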
In order to avoid infinite ' - 'recursion in\n' - ' this method, its implementation should always call the ' - 'base class\n' - ' method with the same name to access any attributes it ' - 'needs, for\n' - ' example, "object.__getattribute__(self, name)".\n' - '\n' - ' Note:\n' - '\n' - ' This method may still be bypassed when looking up ' - 'special methods\n' - ' as the result of implicit invocation via language ' - 'syntax or\n' - ' built-in functions. See Special method lookup.\n' - '\n' - ' For certain sensitive attribute accesses, raises an ' - 'auditing event\n' - ' "object.__getattr__" with arguments "obj" and "name".\n' - '\n' - 'object.__setattr__(self, name, value)\n' - '\n' - ' Called when an attribute assignment is attempted. This ' - 'is called\n' - ' instead of the normal mechanism (i.e. store the value in ' - 'the\n' - ' instance dictionary). *name* is the attribute name, ' - '*value* is the\n' - ' value to be assigned to it.\n' - '\n' - ' If "__setattr__()" wants to assign to an instance ' - 'attribute, it\n' - ' should call the base class method with the same name, for ' - 'example,\n' - ' "object.__setattr__(self, name, value)".\n' - '\n' - ' For certain sensitive attribute assignments, raises an ' - 'auditing\n' - ' event "object.__setattr__" with arguments "obj", "name", ' - '"value".\n' - '\n' - 'object.__delattr__(self, name)\n' - '\n' - ' Like "__setattr__()" but for attribute deletion instead ' - 'of\n' - ' assignment. This should only be implemented if "del ' - 'obj.name" is\n' - ' meaningful for the object.\n' - '\n' - ' For certain sensitive attribute deletions, raises an ' - 'auditing event\n' - ' "object.__delattr__" with arguments "obj" and "name".\n' - '\n' - 'object.__dir__(self)\n' - '\n' - ' Called when "dir()" is called on the object. An iterable ' - 'must be\n' - ' returned. "dir()" converts the returned iterable to a ' - 'list and\n' - ' sorts it.\n' - '\n' - '\n' - 'Customizing module attribute access\n' - '-----------------------------------\n' - '\n' - 'Special names "__getattr__" and "__dir__" can be also used ' - 'to\n' - 'customize access to module attributes. The "__getattr__" ' - 'function at\n' - 'the module level should accept one argument which is the ' - 'name of an\n' - 'attribute and return the computed value or raise an ' - '"AttributeError".\n' - 'If an attribute is not found on a module object through the ' - 'normal\n' - 'lookup, i.e. "object.__getattribute__()", then "__getattr__" ' - 'is\n' - 'searched in the module "__dict__" before raising an ' - '"AttributeError".\n' - 'If found, it is called with the attribute name and the ' - 'result is\n' - 'returned.\n' - '\n' - 'The "__dir__" function should accept no arguments, and ' - 'return an\n' - 'iterable of strings that represents the names accessible on ' - 'module. If\n' - 'present, this function overrides the standard "dir()" search ' - 'on a\n' - 'module.\n' - '\n' - 'For a more fine grained customization of the module behavior ' - '(setting\n' - 'attributes, properties, etc.), one can set the "__class__" ' - 'attribute\n' - 'of a module object to a subclass of "types.ModuleType". 
For ' - 'example:\n' - '\n' - ' import sys\n' - ' from types import ModuleType\n' - '\n' - ' class VerboseModule(ModuleType):\n' - ' def __repr__(self):\n' - " return f'Verbose {self.__name__}'\n" - '\n' - ' def __setattr__(self, attr, value):\n' - " print(f'Setting {attr}...')\n" - ' super().__setattr__(attr, value)\n' - '\n' - ' sys.modules[__name__].__class__ = VerboseModule\n' - '\n' - 'Note:\n' - '\n' - ' Defining module "__getattr__" and setting module ' - '"__class__" only\n' - ' affect lookups made using the attribute access syntax – ' - 'directly\n' - ' accessing the module globals (whether by code within the ' - 'module, or\n' - ' via a reference to the module’s globals dictionary) is ' - 'unaffected.\n' - '\n' - 'Changed in version 3.5: "__class__" module attribute is now ' - 'writable.\n' - '\n' - 'Added in version 3.7: "__getattr__" and "__dir__" module ' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 562** - Module __getattr__ and __dir__\n' - ' Describes the "__getattr__" and "__dir__" functions on ' - 'modules.\n' - '\n' - '\n' - 'Implementing Descriptors\n' - '------------------------\n' - '\n' - 'The following methods only apply when an instance of the ' - 'class\n' - 'containing the method (a so-called *descriptor* class) ' - 'appears in an\n' - '*owner* class (the descriptor must be in either the owner’s ' - 'class\n' - 'dictionary or in the class dictionary for one of its ' - 'parents). In the\n' - 'examples below, “the attribute” refers to the attribute ' - 'whose name is\n' - 'the key of the property in the owner class’ "__dict__". The ' - '"object"\n' - 'class itself does not implement any of these protocols.\n' - '\n' - 'object.__get__(self, instance, owner=None)\n' - '\n' - ' Called to get the attribute of the owner class (class ' - 'attribute\n' - ' access) or of an instance of that class (instance ' - 'attribute\n' - ' access). The optional *owner* argument is the owner ' - 'class, while\n' - ' *instance* is the instance that the attribute was ' - 'accessed through,\n' - ' or "None" when the attribute is accessed through the ' - '*owner*.\n' - '\n' - ' This method should return the computed attribute value or ' - 'raise an\n' - ' "AttributeError" exception.\n' - '\n' - ' **PEP 252** specifies that "__get__()" is callable with ' - 'one or two\n' - ' arguments. Python’s own built-in descriptors support ' - 'this\n' - ' specification; however, it is likely that some ' - 'third-party tools\n' - ' have descriptors that require both arguments. Python’s ' - 'own\n' - ' "__getattribute__()" implementation always passes in both ' - 'arguments\n' - ' whether they are required or not.\n' - '\n' - 'object.__set__(self, instance, value)\n' - '\n' - ' Called to set the attribute on an instance *instance* of ' - 'the owner\n' - ' class to a new value, *value*.\n' - '\n' - ' Note, adding "__set__()" or "__delete__()" changes the ' - 'kind of\n' - ' descriptor to a “data descriptor”. 
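A small data-descriptor sketch in the spirit of the protocol just described; Typed, Order and the error message are invented for the example. Defining "__set__()" makes it a data descriptor, and "__set_name__()" lets it learn the attribute name it was assigned to.

    class Typed:
        """Data descriptor enforcing a type on an attribute (sketch)."""

        def __init__(self, kind):
            self.kind = kind

        def __set_name__(self, owner, name):
            self.name = name

        def __get__(self, instance, owner=None):
            if instance is None:              # accessed on the class itself
                return self
            return instance.__dict__[self.name]

        def __set__(self, instance, value):
            if not isinstance(value, self.kind):
                raise TypeError(f"{self.name} must be {self.kind.__name__}")
            instance.__dict__[self.name] = value

    class Order:
        quantity = Typed(int)

    o = Order()
    o.quantity = 3
    print(o.quantity)                         # 3
    try:
        o.quantity = "many"
    except TypeError as exc:
        print(exc)                            # quantity must be int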
See Invoking ' - 'Descriptors for\n' - ' more details.\n' - '\n' - 'object.__delete__(self, instance)\n' - '\n' - ' Called to delete the attribute on an instance *instance* ' - 'of the\n' - ' owner class.\n' - '\n' - 'Instances of descriptors may also have the "__objclass__" ' - 'attribute\n' - 'present:\n' - '\n' - 'object.__objclass__\n' - '\n' - ' The attribute "__objclass__" is interpreted by the ' - '"inspect" module\n' - ' as specifying the class where this object was defined ' - '(setting this\n' - ' appropriately can assist in runtime introspection of ' - 'dynamic class\n' - ' attributes). For callables, it may indicate that an ' - 'instance of the\n' - ' given type (or a subclass) is expected or required as the ' - 'first\n' - ' positional argument (for example, CPython sets this ' - 'attribute for\n' - ' unbound methods that are implemented in C).\n' - '\n' - '\n' - 'Invoking Descriptors\n' - '--------------------\n' - '\n' - 'In general, a descriptor is an object attribute with ' - '“binding\n' - 'behavior”, one whose attribute access has been overridden by ' - 'methods\n' - 'in the descriptor protocol: "__get__()", "__set__()", and\n' - '"__delete__()". If any of those methods are defined for an ' - 'object, it\n' - 'is said to be a descriptor.\n' - '\n' - 'The default behavior for attribute access is to get, set, or ' - 'delete\n' - 'the attribute from an object’s dictionary. For instance, ' - '"a.x" has a\n' - 'lookup chain starting with "a.__dict__[\'x\']", then\n' - '"type(a).__dict__[\'x\']", and continuing through the base ' - 'classes of\n' - '"type(a)" excluding metaclasses.\n' - '\n' - 'However, if the looked-up value is an object defining one of ' - 'the\n' - 'descriptor methods, then Python may override the default ' - 'behavior and\n' - 'invoke the descriptor method instead. Where this occurs in ' - 'the\n' - 'precedence chain depends on which descriptor methods were ' - 'defined and\n' - 'how they were called.\n' - '\n' - 'The starting point for descriptor invocation is a binding, ' - '"a.x". How\n' - 'the arguments are assembled depends on "a":\n' - '\n' - 'Direct Call\n' - ' The simplest and least common call is when user code ' - 'directly\n' - ' invokes a descriptor method: "x.__get__(a)".\n' - '\n' - 'Instance Binding\n' - ' If binding to an object instance, "a.x" is transformed ' - 'into the\n' - ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' - '\n' - 'Class Binding\n' - ' If binding to a class, "A.x" is transformed into the ' - 'call:\n' - ' "A.__dict__[\'x\'].__get__(None, A)".\n' - '\n' - 'Super Binding\n' - ' A dotted lookup such as "super(A, a).x" searches\n' - ' "a.__class__.__mro__" for a base class "B" following "A" ' - 'and then\n' - ' returns "B.__dict__[\'x\'].__get__(a, A)". If not a ' - 'descriptor, "x"\n' - ' is returned unchanged.\n' - '\n' - 'For instance bindings, the precedence of descriptor ' - 'invocation depends\n' - 'on which descriptor methods are defined. A descriptor can ' - 'define any\n' - 'combination of "__get__()", "__set__()" and "__delete__()". ' - 'If it\n' - 'does not define "__get__()", then accessing the attribute ' - 'will return\n' - 'the descriptor object itself unless there is a value in the ' - 'object’s\n' - 'instance dictionary. If the descriptor defines "__set__()" ' - 'and/or\n' - '"__delete__()", it is a data descriptor; if it defines ' - 'neither, it is\n' - 'a non-data descriptor. 
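The practical difference between data and non-data descriptors can be observed with a small sketch (class and attribute names invented for illustration): a plain method, being a non-data descriptor, is shadowed by an instance attribute, while a property, being a data descriptor, is not.

    class C:
        def method(self):                 # functions are non-data descriptors
            return "from the class"

        @property
        def prop(self):                   # property() is a data descriptor
            return "from the property"

    c = C()
    c.__dict__["method"] = lambda: "from the instance"
    print(c.method())                     # from the instance (instance dict wins)

    try:
        c.prop = "shadow"                 # the data descriptor cannot be overridden
    except AttributeError as exc:
        print("AttributeError:", exc)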
Normally, data descriptors define ' - 'both\n' - '"__get__()" and "__set__()", while non-data descriptors have ' - 'just the\n' - '"__get__()" method. Data descriptors with "__get__()" and ' - '"__set__()"\n' - '(and/or "__delete__()") defined always override a ' - 'redefinition in an\n' - 'instance dictionary. In contrast, non-data descriptors can ' - 'be\n' - 'overridden by instances.\n' - '\n' - 'Python methods (including those decorated with ' - '"@staticmethod" and\n' - '"@classmethod") are implemented as non-data descriptors. ' - 'Accordingly,\n' - 'instances can redefine and override methods. This allows ' - 'individual\n' - 'instances to acquire behaviors that differ from other ' - 'instances of the\n' - 'same class.\n' - '\n' - 'The "property()" function is implemented as a data ' - 'descriptor.\n' - 'Accordingly, instances cannot override the behavior of a ' - 'property.\n' - '\n' - '\n' - '__slots__\n' - '---------\n' - '\n' - '*__slots__* allow us to explicitly declare data members ' - '(like\n' - 'properties) and deny the creation of "__dict__" and ' - '*__weakref__*\n' - '(unless explicitly declared in *__slots__* or available in a ' - 'parent.)\n' - '\n' - 'The space saved over using "__dict__" can be significant. ' - 'Attribute\n' - 'lookup speed can be significantly improved as well.\n' - '\n' - 'object.__slots__\n' - '\n' - ' This class variable can be assigned a string, iterable, ' - 'or sequence\n' - ' of strings with variable names used by instances. ' - '*__slots__*\n' - ' reserves space for the declared variables and prevents ' - 'the\n' - ' automatic creation of "__dict__" and *__weakref__* for ' - 'each\n' - ' instance.\n' - '\n' - 'Notes on using *__slots__*:\n' - '\n' - '* When inheriting from a class without *__slots__*, the ' - '"__dict__" and\n' - ' *__weakref__* attribute of the instances will always be ' - 'accessible.\n' - '\n' - '* Without a "__dict__" variable, instances cannot be ' - 'assigned new\n' - ' variables not listed in the *__slots__* definition. ' - 'Attempts to\n' - ' assign to an unlisted variable name raises ' - '"AttributeError". If\n' - ' dynamic assignment of new variables is desired, then add\n' - ' "\'__dict__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' - '\n' - '* Without a *__weakref__* variable for each instance, ' - 'classes defining\n' - ' *__slots__* do not support "weak references" to its ' - 'instances. If\n' - ' weak reference support is needed, then add ' - '"\'__weakref__\'" to the\n' - ' sequence of strings in the *__slots__* declaration.\n' - '\n' - '* *__slots__* are implemented at the class level by ' - 'creating\n' - ' descriptors for each variable name. As a result, class ' - 'attributes\n' - ' cannot be used to set default values for instance ' - 'variables defined\n' - ' by *__slots__*; otherwise, the class attribute would ' - 'overwrite the\n' - ' descriptor assignment.\n' - '\n' - '* The action of a *__slots__* declaration is not limited to ' - 'the class\n' - ' where it is defined. *__slots__* declared in parents are ' - 'available\n' - ' in child classes. 
However, instances of a child subclass ' - 'will get a\n' - ' "__dict__" and *__weakref__* unless the subclass also ' - 'defines\n' - ' *__slots__* (which should only contain names of any ' - '*additional*\n' - ' slots).\n' - '\n' - '* If a class defines a slot also defined in a base class, ' - 'the instance\n' - ' variable defined by the base class slot is inaccessible ' - '(except by\n' - ' retrieving its descriptor directly from the base class). ' - 'This\n' - ' renders the meaning of the program undefined. In the ' - 'future, a\n' - ' check may be added to prevent this.\n' - '\n' - '* "TypeError" will be raised if nonempty *__slots__* are ' - 'defined for a\n' - ' class derived from a ""variable-length" built-in type" ' - 'such as\n' - ' "int", "bytes", and "tuple".\n' - '\n' - '* Any non-string *iterable* may be assigned to *__slots__*.\n' - '\n' - '* If a "dictionary" is used to assign *__slots__*, the ' - 'dictionary keys\n' - ' will be used as the slot names. The values of the ' - 'dictionary can be\n' - ' used to provide per-attribute docstrings that will be ' - 'recognised by\n' - ' "inspect.getdoc()" and displayed in the output of ' - '"help()".\n' - '\n' - '* "__class__" assignment works only if both classes have the ' - 'same\n' - ' *__slots__*.\n' - '\n' - '* Multiple inheritance with multiple slotted parent classes ' - 'can be\n' - ' used, but only one parent is allowed to have attributes ' - 'created by\n' - ' slots (the other bases must have empty slot layouts) - ' - 'violations\n' - ' raise "TypeError".\n' - '\n' - '* If an *iterator* is used for *__slots__* then a ' - '*descriptor* is\n' - ' created for each of the iterator’s values. However, the ' - '*__slots__*\n' - ' attribute will be an empty iterator.\n' - '\n' - '\n' - 'Customizing class creation\n' - '==========================\n' - '\n' - 'Whenever a class inherits from another class, ' - '"__init_subclass__()" is\n' - 'called on the parent class. This way, it is possible to ' - 'write classes\n' - 'which change the behavior of subclasses. This is closely ' - 'related to\n' - 'class decorators, but where class decorators only affect the ' - 'specific\n' - 'class they’re applied to, "__init_subclass__" solely applies ' - 'to future\n' - 'subclasses of the class defining the method.\n' - '\n' - 'classmethod object.__init_subclass__(cls)\n' - '\n' - ' This method is called whenever the containing class is ' - 'subclassed.\n' - ' *cls* is then the new subclass. If defined as a normal ' - 'instance\n' - ' method, this method is implicitly converted to a class ' - 'method.\n' - '\n' - ' Keyword arguments which are given to a new class are ' - 'passed to the\n' - ' parent class’s "__init_subclass__". For compatibility ' - 'with other\n' - ' classes using "__init_subclass__", one should take out ' - 'the needed\n' - ' keyword arguments and pass the others over to the base ' - 'class, as\n' - ' in:\n' - '\n' - ' class Philosopher:\n' - ' def __init_subclass__(cls, /, default_name, ' - '**kwargs):\n' - ' super().__init_subclass__(**kwargs)\n' - ' cls.default_name = default_name\n' - '\n' - ' class AustralianPhilosopher(Philosopher, ' - 'default_name="Bruce"):\n' - ' pass\n' - '\n' - ' The default implementation "object.__init_subclass__" ' - 'does nothing,\n' - ' but raises an error if it is called with any arguments.\n' - '\n' - ' Note:\n' - '\n' - ' The metaclass hint "metaclass" is consumed by the rest ' - 'of the\n' - ' type machinery, and is never passed to ' - '"__init_subclass__"\n' - ' implementations. 
The actual metaclass (rather than the ' - 'explicit\n' - ' hint) can be accessed as "type(cls)".\n' - '\n' - ' Added in version 3.6.\n' - '\n' - 'When a class is created, "type.__new__()" scans the class ' - 'variables\n' - 'and makes callbacks to those with a "__set_name__()" hook.\n' - '\n' - 'object.__set_name__(self, owner, name)\n' - '\n' - ' Automatically called at the time the owning class *owner* ' - 'is\n' - ' created. The object has been assigned to *name* in that ' - 'class:\n' - '\n' - ' class A:\n' - ' x = C() # Automatically calls: x.__set_name__(A, ' - "'x')\n" - '\n' - ' If the class variable is assigned after the class is ' - 'created,\n' - ' "__set_name__()" will not be called automatically. If ' - 'needed,\n' - ' "__set_name__()" can be called directly:\n' - '\n' - ' class A:\n' - ' pass\n' - '\n' - ' c = C()\n' - ' A.x = c # The hook is not called\n' - " c.__set_name__(A, 'x') # Manually invoke the hook\n" - '\n' - ' See Creating the class object for more details.\n' - '\n' - ' Added in version 3.6.\n' - '\n' - '\n' - 'Metaclasses\n' - '-----------\n' - '\n' - 'By default, classes are constructed using "type()". The ' - 'class body is\n' - 'executed in a new namespace and the class name is bound ' - 'locally to the\n' - 'result of "type(name, bases, namespace)".\n' - '\n' - 'The class creation process can be customized by passing the\n' - '"metaclass" keyword argument in the class definition line, ' - 'or by\n' - 'inheriting from an existing class that included such an ' - 'argument. In\n' - 'the following example, both "MyClass" and "MySubclass" are ' - 'instances\n' - 'of "Meta":\n' - '\n' - ' class Meta(type):\n' - ' pass\n' - '\n' - ' class MyClass(metaclass=Meta):\n' - ' pass\n' - '\n' - ' class MySubclass(MyClass):\n' - ' pass\n' - '\n' - 'Any other keyword arguments that are specified in the class ' - 'definition\n' - 'are passed through to all metaclass operations described ' - 'below.\n' - '\n' - 'When a class definition is executed, the following steps ' - 'occur:\n' - '\n' - '* MRO entries are resolved;\n' - '\n' - '* the appropriate metaclass is determined;\n' - '\n' - '* the class namespace is prepared;\n' - '\n' - '* the class body is executed;\n' - '\n' - '* the class object is created.\n' - '\n' - '\n' - 'Resolving MRO entries\n' - '---------------------\n' - '\n' - 'object.__mro_entries__(self, bases)\n' - '\n' - ' If a base that appears in a class definition is not an ' - 'instance of\n' - ' "type", then an "__mro_entries__()" method is searched on ' - 'the base.\n' - ' If an "__mro_entries__()" method is found, the base is ' - 'substituted\n' - ' with the result of a call to "__mro_entries__()" when ' - 'creating the\n' - ' class. The method is called with the original bases tuple ' - 'passed to\n' - ' the *bases* parameter, and must return a tuple of classes ' - 'that will\n' - ' be used instead of the base. 
The returned tuple may be ' - 'empty: in\n' - ' these cases, the original base is ignored.\n' - '\n' - 'See also:\n' - '\n' - ' "types.resolve_bases()"\n' - ' Dynamically resolve bases that are not instances of ' - '"type".\n' - '\n' - ' "types.get_original_bases()"\n' - ' Retrieve a class’s “original bases” prior to ' - 'modifications by\n' - ' "__mro_entries__()".\n' - '\n' - ' **PEP 560**\n' - ' Core support for typing module and generic types.\n' - '\n' - '\n' - 'Determining the appropriate metaclass\n' - '-------------------------------------\n' - '\n' - 'The appropriate metaclass for a class definition is ' - 'determined as\n' - 'follows:\n' - '\n' - '* if no bases and no explicit metaclass are given, then ' - '"type()" is\n' - ' used;\n' - '\n' - '* if an explicit metaclass is given and it is *not* an ' - 'instance of\n' - ' "type()", then it is used directly as the metaclass;\n' - '\n' - '* if an instance of "type()" is given as the explicit ' - 'metaclass, or\n' - ' bases are defined, then the most derived metaclass is ' - 'used.\n' - '\n' - 'The most derived metaclass is selected from the explicitly ' - 'specified\n' - 'metaclass (if any) and the metaclasses (i.e. "type(cls)") of ' - 'all\n' - 'specified base classes. The most derived metaclass is one ' - 'which is a\n' - 'subtype of *all* of these candidate metaclasses. If none of ' - 'the\n' - 'candidate metaclasses meets that criterion, then the class ' - 'definition\n' - 'will fail with "TypeError".\n' - '\n' - '\n' - 'Preparing the class namespace\n' - '-----------------------------\n' - '\n' - 'Once the appropriate metaclass has been identified, then the ' - 'class\n' - 'namespace is prepared. If the metaclass has a "__prepare__" ' - 'attribute,\n' - 'it is called as "namespace = metaclass.__prepare__(name, ' - 'bases,\n' - '**kwds)" (where the additional keyword arguments, if any, ' - 'come from\n' - 'the class definition). The "__prepare__" method should be ' - 'implemented\n' - 'as a "classmethod". The namespace returned by "__prepare__" ' - 'is passed\n' - 'in to "__new__", but when the final class object is created ' - 'the\n' - 'namespace is copied into a new "dict".\n' - '\n' - 'If the metaclass has no "__prepare__" attribute, then the ' - 'class\n' - 'namespace is initialised as an empty ordered mapping.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3115** - Metaclasses in Python 3000\n' - ' Introduced the "__prepare__" namespace hook\n' - '\n' - '\n' - 'Executing the class body\n' - '------------------------\n' - '\n' - 'The class body is executed (approximately) as "exec(body, ' - 'globals(),\n' - 'namespace)". The key difference from a normal call to ' - '"exec()" is that\n' - 'lexical scoping allows the class body (including any ' - 'methods) to\n' - 'reference names from the current and outer scopes when the ' - 'class\n' - 'definition occurs inside a function.\n' - '\n' - 'However, even when the class definition occurs inside the ' - 'function,\n' - 'methods defined inside the class still cannot see names ' - 'defined at the\n' - 'class scope. 
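A sketch of the "__prepare__" hook described above (OrderedMeta and Config are invented names): the metaclass supplies the namespace in which the class body executes, and "__new__" can inspect that namespace before "type.__new__" copies it into the final class "__dict__".

    class OrderedMeta(type):
        @classmethod
        def __prepare__(mcls, name, bases, **kwds):
            return {}                          # any mapping can serve as the namespace

        def __new__(mcls, name, bases, namespace, **kwds):
            cls = super().__new__(mcls, name, bases, dict(namespace), **kwds)
            # record the non-dunder names in definition order
            cls.member_order = [k for k in namespace if not k.startswith("__")]
            return cls

    class Config(metaclass=OrderedMeta):
        host = "localhost"
        port = 8080

    print(Config.member_order)     # ['host', 'port']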
Class variables must be accessed through the ' - 'first\n' - 'parameter of instance or class methods, or through the ' - 'implicit\n' - 'lexically scoped "__class__" reference described in the next ' - 'section.\n' - '\n' - '\n' - 'Creating the class object\n' - '-------------------------\n' - '\n' - 'Once the class namespace has been populated by executing the ' - 'class\n' - 'body, the class object is created by calling ' - '"metaclass(name, bases,\n' - 'namespace, **kwds)" (the additional keywords passed here are ' - 'the same\n' - 'as those passed to "__prepare__").\n' - '\n' - 'This class object is the one that will be referenced by the ' - 'zero-\n' - 'argument form of "super()". "__class__" is an implicit ' - 'closure\n' - 'reference created by the compiler if any methods in a class ' - 'body refer\n' - 'to either "__class__" or "super". This allows the zero ' - 'argument form\n' - 'of "super()" to correctly identify the class being defined ' - 'based on\n' - 'lexical scoping, while the class or instance that was used ' - 'to make the\n' - 'current call is identified based on the first argument ' - 'passed to the\n' - 'method.\n' - '\n' - '**CPython implementation detail:** In CPython 3.6 and later, ' - 'the\n' - '"__class__" cell is passed to the metaclass as a ' - '"__classcell__" entry\n' - 'in the class namespace. If present, this must be propagated ' - 'up to the\n' - '"type.__new__" call in order for the class to be ' - 'initialised\n' - 'correctly. Failing to do so will result in a "RuntimeError" ' - 'in Python\n' - '3.8.\n' - '\n' - 'When using the default metaclass "type", or any metaclass ' - 'that\n' - 'ultimately calls "type.__new__", the following additional\n' - 'customization steps are invoked after creating the class ' - 'object:\n' - '\n' - '1. The "type.__new__" method collects all of the attributes ' - 'in the\n' - ' class namespace that define a "__set_name__()" method;\n' - '\n' - '2. Those "__set_name__" methods are called with the class ' - 'being\n' - ' defined and the assigned name of that particular ' - 'attribute;\n' - '\n' - '3. The "__init_subclass__()" hook is called on the immediate ' - 'parent of\n' - ' the new class in its method resolution order.\n' - '\n' - 'After the class object is created, it is passed to the ' - 'class\n' - 'decorators included in the class definition (if any) and the ' - 'resulting\n' - 'object is bound in the local namespace as the defined ' - 'class.\n' - '\n' - 'When a new class is created by "type.__new__", the object ' - 'provided as\n' - 'the namespace parameter is copied to a new ordered mapping ' - 'and the\n' - 'original object is discarded. The new copy is wrapped in a ' - 'read-only\n' - 'proxy, which becomes the "__dict__" attribute of the class ' - 'object.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3135** - New super\n' - ' Describes the implicit "__class__" closure reference\n' - '\n' - '\n' - 'Uses for metaclasses\n' - '--------------------\n' - '\n' - 'The potential uses for metaclasses are boundless. 
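The order of the class-creation hooks enumerated above ("__set_name__()", then "__init_subclass__()", then any class decorators) can be observed with a small sketch; all names below are invented for illustration:

    class Recorder:
        def __set_name__(self, owner, name):
            print("1. __set_name__:", owner.__name__, name)

    class Base:
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__(**kwargs)
            print("2. __init_subclass__:", cls.__name__)

    def decorate(cls):
        print("3. class decorator:", cls.__name__)
        return cls

    @decorate
    class Child(Base):
        attr = Recorder()

    # Printed order: __set_name__, then __init_subclass__, then the decorator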
Some ideas ' - 'that have\n' - 'been explored include enum, logging, interface checking, ' - 'automatic\n' - 'delegation, automatic property creation, proxies, ' - 'frameworks, and\n' - 'automatic resource locking/synchronization.\n' - '\n' - '\n' - 'Customizing instance and subclass checks\n' - '========================================\n' - '\n' - 'The following methods are used to override the default ' - 'behavior of the\n' - '"isinstance()" and "issubclass()" built-in functions.\n' - '\n' - 'In particular, the metaclass "abc.ABCMeta" implements these ' - 'methods in\n' - 'order to allow the addition of Abstract Base Classes (ABCs) ' - 'as\n' - '“virtual base classes” to any class or type (including ' - 'built-in\n' - 'types), including other ABCs.\n' - '\n' - 'type.__instancecheck__(self, instance)\n' - '\n' - ' Return true if *instance* should be considered a (direct ' - 'or\n' - ' indirect) instance of *class*. If defined, called to ' - 'implement\n' - ' "isinstance(instance, class)".\n' - '\n' - 'type.__subclasscheck__(self, subclass)\n' - '\n' - ' Return true if *subclass* should be considered a (direct ' - 'or\n' - ' indirect) subclass of *class*. If defined, called to ' - 'implement\n' - ' "issubclass(subclass, class)".\n' - '\n' - 'Note that these methods are looked up on the type ' - '(metaclass) of a\n' - 'class. They cannot be defined as class methods in the ' - 'actual class.\n' - 'This is consistent with the lookup of special methods that ' - 'are called\n' - 'on instances, only in this case the instance is itself a ' - 'class.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 3119** - Introducing Abstract Base Classes\n' - ' Includes the specification for customizing ' - '"isinstance()" and\n' - ' "issubclass()" behavior through "__instancecheck__()" ' - 'and\n' - ' "__subclasscheck__()", with motivation for this ' - 'functionality in\n' - ' the context of adding Abstract Base Classes (see the ' - '"abc"\n' - ' module) to the language.\n' - '\n' - '\n' - 'Emulating generic types\n' - '=======================\n' - '\n' - 'When using *type annotations*, it is often useful to ' - '*parameterize* a\n' - '*generic type* using Python’s square-brackets notation. For ' - 'example,\n' - 'the annotation "list[int]" might be used to signify a "list" ' - 'in which\n' - 'all the elements are of type "int".\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 484** - Type Hints\n' - ' Introducing Python’s framework for type annotations\n' - '\n' - ' Generic Alias Types\n' - ' Documentation for objects representing parameterized ' - 'generic\n' - ' classes\n' - '\n' - ' Generics, user-defined generics and "typing.Generic"\n' - ' Documentation on how to implement generic classes that ' - 'can be\n' - ' parameterized at runtime and understood by static ' - 'type-checkers.\n' - '\n' - 'A class can *generally* only be parameterized if it defines ' - 'the\n' - 'special class method "__class_getitem__()".\n' - '\n' - 'classmethod object.__class_getitem__(cls, key)\n' - '\n' - ' Return an object representing the specialization of a ' - 'generic class\n' - ' by type arguments found in *key*.\n' - '\n' - ' When defined on a class, "__class_getitem__()" is ' - 'automatically a\n' - ' class method. 
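As an illustration of the "__instancecheck__()" and "__subclasscheck__()" hooks above, a small sketch (the class names are invented for the example, and the hooks live on the metaclass, not on the class itself):

   class DuckMeta(type):
       # Looked up on the metaclass, so isinstance()/issubclass()
       # calls involving classes that use this metaclass land here.
       def __instancecheck__(cls, instance):
           return hasattr(instance, "quack")

       def __subclasscheck__(cls, subclass):
           return hasattr(subclass, "quack")

   class Duck(metaclass=DuckMeta):
       pass

   class Mallard:
       def quack(self):
           return "Quack!"

   isinstance(Mallard(), Duck)   # True, via DuckMeta.__instancecheck__
   issubclass(Mallard, Duck)     # True, via DuckMeta.__subclasscheck__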
As such, there is no need for it to be ' - 'decorated with\n' - ' "@classmethod" when it is defined.\n' - '\n' - '\n' - 'The purpose of *__class_getitem__*\n' - '----------------------------------\n' - '\n' - 'The purpose of "__class_getitem__()" is to allow runtime\n' - 'parameterization of standard-library generic classes in ' - 'order to more\n' - 'easily apply *type hints* to these classes.\n' - '\n' - 'To implement custom generic classes that can be ' - 'parameterized at\n' - 'runtime and understood by static type-checkers, users should ' - 'either\n' - 'inherit from a standard library class that already ' - 'implements\n' - '"__class_getitem__()", or inherit from "typing.Generic", ' - 'which has its\n' - 'own implementation of "__class_getitem__()".\n' - '\n' - 'Custom implementations of "__class_getitem__()" on classes ' - 'defined\n' - 'outside of the standard library may not be understood by ' - 'third-party\n' - 'type-checkers such as mypy. Using "__class_getitem__()" on ' - 'any class\n' - 'for purposes other than type hinting is discouraged.\n' - '\n' - '\n' - '*__class_getitem__* versus *__getitem__*\n' - '----------------------------------------\n' - '\n' - 'Usually, the subscription of an object using square brackets ' - 'will call\n' - 'the "__getitem__()" instance method defined on the object’s ' - 'class.\n' - 'However, if the object being subscribed is itself a class, ' - 'the class\n' - 'method "__class_getitem__()" may be called instead.\n' - '"__class_getitem__()" should return a GenericAlias object if ' - 'it is\n' - 'properly defined.\n' - '\n' - 'Presented with the *expression* "obj[x]", the Python ' - 'interpreter\n' - 'follows something like the following process to decide ' - 'whether\n' - '"__getitem__()" or "__class_getitem__()" should be called:\n' - '\n' - ' from inspect import isclass\n' - '\n' - ' def subscribe(obj, x):\n' - ' """Return the result of the expression \'obj[x]\'"""\n' - '\n' - ' class_of_obj = type(obj)\n' - '\n' - ' # If the class of obj defines __getitem__,\n' - ' # call class_of_obj.__getitem__(obj, x)\n' - " if hasattr(class_of_obj, '__getitem__'):\n" - ' return class_of_obj.__getitem__(obj, x)\n' - '\n' - ' # Else, if obj is a class and defines ' - '__class_getitem__,\n' - ' # call obj.__class_getitem__(x)\n' - ' elif isclass(obj) and hasattr(obj, ' - "'__class_getitem__'):\n" - ' return obj.__class_getitem__(x)\n' - '\n' - ' # Else, raise an exception\n' - ' else:\n' - ' raise TypeError(\n' - ' f"\'{class_of_obj.__name__}\' object is not ' - 'subscriptable"\n' - ' )\n' - '\n' - 'In Python, all classes are themselves instances of other ' - 'classes. The\n' - 'class of a class is known as that class’s *metaclass*, and ' - 'most\n' - 'classes have the "type" class as their metaclass. 
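One possible minimal opt-in to subscription, assuming "types.GenericAlias" as the return type (the "Bag" name is invented; as noted above, inheriting from "typing.Generic" is usually preferable for real code):

   from types import GenericAlias

   class Bag:
       # Implicitly a classmethod: "Bag[int]" calls Bag.__class_getitem__(int)
       # because Bag is a class and type(Bag) does not define __getitem__().
       def __class_getitem__(cls, item):
           return GenericAlias(cls, item)

   Bag[int]          # a GenericAlias that can be used as a type hint
   type(Bag[int])    # <class 'types.GenericAlias'>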
"type" ' - 'does not\n' - 'define "__getitem__()", meaning that expressions such as ' - '"list[int]",\n' - '"dict[str, float]" and "tuple[str, bytes]" all result in\n' - '"__class_getitem__()" being called:\n' - '\n' - ' >>> # list has class "type" as its metaclass, like most ' - 'classes:\n' - ' >>> type(list)\n' - " \n" - ' >>> type(dict) == type(list) == type(tuple) == type(str) ' - '== type(bytes)\n' - ' True\n' - ' >>> # "list[int]" calls "list.__class_getitem__(int)"\n' - ' >>> list[int]\n' - ' list[int]\n' - ' >>> # list.__class_getitem__ returns a GenericAlias ' - 'object:\n' - ' >>> type(list[int])\n' - " \n" - '\n' - 'However, if a class has a custom metaclass that defines\n' - '"__getitem__()", subscribing the class may result in ' - 'different\n' - 'behaviour. An example of this can be found in the "enum" ' - 'module:\n' - '\n' - ' >>> from enum import Enum\n' - ' >>> class Menu(Enum):\n' - ' ... """A breakfast menu"""\n' - " ... SPAM = 'spam'\n" - " ... BACON = 'bacon'\n" - ' ...\n' - ' >>> # Enum classes have a custom metaclass:\n' - ' >>> type(Menu)\n' - " \n" - ' >>> # EnumMeta defines __getitem__,\n' - ' >>> # so __class_getitem__ is not called,\n' - ' >>> # and the result is not a GenericAlias object:\n' - " >>> Menu['SPAM']\n" - " \n" - " >>> type(Menu['SPAM'])\n" - " \n" - '\n' - 'See also:\n' - '\n' - ' **PEP 560** - Core Support for typing module and generic ' - 'types\n' - ' Introducing "__class_getitem__()", and outlining when ' - 'a\n' - ' subscription results in "__class_getitem__()" being ' - 'called\n' - ' instead of "__getitem__()"\n' - '\n' - '\n' - 'Emulating callable objects\n' - '==========================\n' - '\n' - 'object.__call__(self[, args...])\n' - '\n' - ' Called when the instance is “called” as a function; if ' - 'this method\n' - ' is defined, "x(arg1, arg2, ...)" roughly translates to\n' - ' "type(x).__call__(x, arg1, ...)". The "object" class ' - 'itself does\n' - ' not provide this method.\n' - '\n' - '\n' - 'Emulating container types\n' - '=========================\n' - '\n' - 'The following methods can be defined to implement container ' - 'objects.\n' - 'None of them are provided by the "object" class itself. ' - 'Containers\n' - 'usually are *sequences* (such as "lists" or "tuples") or ' - '*mappings*\n' - '(like *dictionaries*), but can represent other containers as ' - 'well.\n' - 'The first set of methods is used either to emulate a ' - 'sequence or to\n' - 'emulate a mapping; the difference is that for a sequence, ' - 'the\n' - 'allowable keys should be the integers *k* for which "0 <= k ' - '< N" where\n' - '*N* is the length of the sequence, or "slice" objects, which ' - 'define a\n' - 'range of items. It is also recommended that mappings ' - 'provide the\n' - 'methods "keys()", "values()", "items()", "get()", ' - '"clear()",\n' - '"setdefault()", "pop()", "popitem()", "copy()", and ' - '"update()"\n' - 'behaving similar to those for Python’s standard "dictionary" ' - 'objects.\n' - 'The "collections.abc" module provides a "MutableMapping" ' - '*abstract\n' - 'base class* to help create those methods from a base set of\n' - '"__getitem__()", "__setitem__()", "__delitem__()", and ' - '"keys()".\n' - 'Mutable sequences should provide methods "append()", ' - '"count()",\n' - '"index()", "extend()", "insert()", "pop()", "remove()", ' - '"reverse()"\n' - 'and "sort()", like Python standard "list" objects. 
Finally, ' - 'sequence\n' - 'types should implement addition (meaning concatenation) and\n' - 'multiplication (meaning repetition) by defining the methods\n' - '"__add__()", "__radd__()", "__iadd__()", "__mul__()", ' - '"__rmul__()" and\n' - '"__imul__()" described below; they should not define other ' - 'numerical\n' - 'operators. It is recommended that both mappings and ' - 'sequences\n' - 'implement the "__contains__()" method to allow efficient use ' - 'of the\n' - '"in" operator; for mappings, "in" should search the ' - 'mapping’s keys;\n' - 'for sequences, it should search through the values. It is ' - 'further\n' - 'recommended that both mappings and sequences implement the\n' - '"__iter__()" method to allow efficient iteration through ' - 'the\n' - 'container; for mappings, "__iter__()" should iterate through ' - 'the\n' - 'object’s keys; for sequences, it should iterate through the ' - 'values.\n' - '\n' - 'object.__len__(self)\n' - '\n' - ' Called to implement the built-in function "len()". ' - 'Should return\n' - ' the length of the object, an integer ">=" 0. Also, an ' - 'object that\n' - ' doesn’t define a "__bool__()" method and whose ' - '"__len__()" method\n' - ' returns zero is considered to be false in a Boolean ' - 'context.\n' - '\n' - ' **CPython implementation detail:** In CPython, the length ' - 'is\n' - ' required to be at most "sys.maxsize". If the length is ' - 'larger than\n' - ' "sys.maxsize" some features (such as "len()") may raise\n' - ' "OverflowError". To prevent raising "OverflowError" by ' - 'truth value\n' - ' testing, an object must define a "__bool__()" method.\n' - '\n' - 'object.__length_hint__(self)\n' - '\n' - ' Called to implement "operator.length_hint()". Should ' - 'return an\n' - ' estimated length for the object (which may be greater or ' - 'less than\n' - ' the actual length). The length must be an integer ">=" 0. ' - 'The\n' - ' return value may also be "NotImplemented", which is ' - 'treated the\n' - ' same as if the "__length_hint__" method didn’t exist at ' - 'all. This\n' - ' method is purely an optimization and is never required ' - 'for\n' - ' correctness.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - 'Note:\n' - '\n' - ' Slicing is done exclusively with the following three ' - 'methods. A\n' - ' call like\n' - '\n' - ' a[1:2] = b\n' - '\n' - ' is translated to\n' - '\n' - ' a[slice(1, 2, None)] = b\n' - '\n' - ' and so forth. Missing slice items are always filled in ' - 'with "None".\n' - '\n' - 'object.__getitem__(self, key)\n' - '\n' - ' Called to implement evaluation of "self[key]". For ' - '*sequence*\n' - ' types, the accepted keys should be integers. Optionally, ' - 'they may\n' - ' support "slice" objects as well. Negative index support ' - 'is also\n' - ' optional. If *key* is of an inappropriate type, ' - '"TypeError" may be\n' - ' raised; if *key* is a value outside the set of indexes ' - 'for the\n' - ' sequence (after any special interpretation of negative ' - 'values),\n' - ' "IndexError" should be raised. 
For *mapping* types, if ' - '*key* is\n' - ' missing (not in the container), "KeyError" should be ' - 'raised.\n' - '\n' - ' Note:\n' - '\n' - ' "for" loops expect that an "IndexError" will be raised ' - 'for\n' - ' illegal indexes to allow proper detection of the end of ' - 'the\n' - ' sequence.\n' - '\n' - ' Note:\n' - '\n' - ' When subscripting a *class*, the special class method\n' - ' "__class_getitem__()" may be called instead of ' - '"__getitem__()".\n' - ' See __class_getitem__ versus __getitem__ for more ' - 'details.\n' - '\n' - 'object.__setitem__(self, key, value)\n' - '\n' - ' Called to implement assignment to "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support changes to the values for keys, or if ' - 'new keys\n' - ' can be added, or for sequences if elements can be ' - 'replaced. The\n' - ' same exceptions should be raised for improper *key* ' - 'values as for\n' - ' the "__getitem__()" method.\n' - '\n' - 'object.__delitem__(self, key)\n' - '\n' - ' Called to implement deletion of "self[key]". Same note ' - 'as for\n' - ' "__getitem__()". This should only be implemented for ' - 'mappings if\n' - ' the objects support removal of keys, or for sequences if ' - 'elements\n' - ' can be removed from the sequence. The same exceptions ' - 'should be\n' - ' raised for improper *key* values as for the ' - '"__getitem__()" method.\n' - '\n' - 'object.__missing__(self, key)\n' - '\n' - ' Called by "dict"."__getitem__()" to implement "self[key]" ' - 'for dict\n' - ' subclasses when key is not in the dictionary.\n' - '\n' - 'object.__iter__(self)\n' - '\n' - ' This method is called when an *iterator* is required for ' - 'a\n' - ' container. This method should return a new iterator ' - 'object that can\n' - ' iterate over all the objects in the container. For ' - 'mappings, it\n' - ' should iterate over the keys of the container.\n' - '\n' - 'object.__reversed__(self)\n' - '\n' - ' Called (if present) by the "reversed()" built-in to ' - 'implement\n' - ' reverse iteration. It should return a new iterator ' - 'object that\n' - ' iterates over all the objects in the container in reverse ' - 'order.\n' - '\n' - ' If the "__reversed__()" method is not provided, the ' - '"reversed()"\n' - ' built-in will fall back to using the sequence protocol ' - '("__len__()"\n' - ' and "__getitem__()"). Objects that support the sequence ' - 'protocol\n' - ' should only provide "__reversed__()" if they can provide ' - 'an\n' - ' implementation that is more efficient than the one ' - 'provided by\n' - ' "reversed()".\n' - '\n' - 'The membership test operators ("in" and "not in") are ' - 'normally\n' - 'implemented as an iteration through a container. However, ' - 'container\n' - 'objects can supply the following special method with a more ' - 'efficient\n' - 'implementation, which also does not require the object be ' - 'iterable.\n' - '\n' - 'object.__contains__(self, item)\n' - '\n' - ' Called to implement membership test operators. Should ' - 'return true\n' - ' if *item* is in *self*, false otherwise. 
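A sketch of a custom mapping built on "collections.abc.MutableMapping" as recommended above (the "LowerDict" name is invented for the example):

   from collections.abc import MutableMapping

   class LowerDict(MutableMapping):
       """Mapping that normalises string keys to lowercase."""

       def __init__(self, *args, **kwargs):
           self._data = {}
           self.update(*args, **kwargs)    # update() is a mixin method

       def __getitem__(self, key):
           return self._data[key.lower()]

       def __setitem__(self, key, value):
           self._data[key.lower()] = value

       def __delitem__(self, key):
           del self._data[key.lower()]

       def __iter__(self):
           return iter(self._data)

       def __len__(self):
           return len(self._data)

   d = LowerDict(Spam=1)
   d["SPAM"]        # 1
   "spam" in d      # True

With only the five methods above, "LowerDict" also gains "__contains__()", "keys()", "items()", "get()", "pop()", "setdefault()" and the other mixin methods of the ABC.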
For mapping ' - 'objects, this\n' - ' should consider the keys of the mapping rather than the ' - 'values or\n' - ' the key-item pairs.\n' - '\n' - ' For objects that don’t define "__contains__()", the ' - 'membership test\n' - ' first tries iteration via "__iter__()", then the old ' - 'sequence\n' - ' iteration protocol via "__getitem__()", see this section ' - 'in the\n' - ' language reference.\n' - '\n' - '\n' - 'Emulating numeric types\n' - '=======================\n' - '\n' - 'The following methods can be defined to emulate numeric ' - 'objects.\n' - 'Methods corresponding to operations that are not supported ' - 'by the\n' - 'particular kind of number implemented (e.g., bitwise ' - 'operations for\n' - 'non-integral numbers) should be left undefined.\n' - '\n' - 'object.__add__(self, other)\n' - 'object.__sub__(self, other)\n' - 'object.__mul__(self, other)\n' - 'object.__matmul__(self, other)\n' - 'object.__truediv__(self, other)\n' - 'object.__floordiv__(self, other)\n' - 'object.__mod__(self, other)\n' - 'object.__divmod__(self, other)\n' - 'object.__pow__(self, other[, modulo])\n' - 'object.__lshift__(self, other)\n' - 'object.__rshift__(self, other)\n' - 'object.__and__(self, other)\n' - 'object.__xor__(self, other)\n' - 'object.__or__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, ' - 'to\n' - ' evaluate the expression "x + y", where *x* is an instance ' - 'of a\n' - ' class that has an "__add__()" method, "type(x).__add__(x, ' - 'y)" is\n' - ' called. The "__divmod__()" method should be the ' - 'equivalent to\n' - ' using "__floordiv__()" and "__mod__()"; it should not be ' - 'related to\n' - ' "__truediv__()". Note that "__pow__()" should be defined ' - 'to accept\n' - ' an optional third argument if the ternary version of the ' - 'built-in\n' - ' "pow()" function is to be supported.\n' - '\n' - ' If one of those methods does not support the operation ' - 'with the\n' - ' supplied arguments, it should return "NotImplemented".\n' - '\n' - 'object.__radd__(self, other)\n' - 'object.__rsub__(self, other)\n' - 'object.__rmul__(self, other)\n' - 'object.__rmatmul__(self, other)\n' - 'object.__rtruediv__(self, other)\n' - 'object.__rfloordiv__(self, other)\n' - 'object.__rmod__(self, other)\n' - 'object.__rdivmod__(self, other)\n' - 'object.__rpow__(self, other[, modulo])\n' - 'object.__rlshift__(self, other)\n' - 'object.__rrshift__(self, other)\n' - 'object.__rand__(self, other)\n' - 'object.__rxor__(self, other)\n' - 'object.__ror__(self, other)\n' - '\n' - ' These methods are called to implement the binary ' - 'arithmetic\n' - ' operations ("+", "-", "*", "@", "/", "//", "%", ' - '"divmod()",\n' - ' "pow()", "**", "<<", ">>", "&", "^", "|") with reflected ' - '(swapped)\n' - ' operands. These functions are only called if the ' - 'operands are of\n' - ' different types, when the left operand does not support ' - 'the\n' - ' corresponding operation [3], or the right operand’s class ' - 'is\n' - ' derived from the left operand’s class. [4] For instance, ' - 'to\n' - ' evaluate the expression "x - y", where *y* is an instance ' - 'of a\n' - ' class that has an "__rsub__()" method, ' - '"type(y).__rsub__(y, x)" is\n' - ' called if "type(x).__sub__(x, y)" returns ' - '"NotImplemented" or\n' - ' "type(y)" is a subclass of "type(x)". 
[5]\n' - '\n' - ' Note that ternary "pow()" will not try calling ' - '"__rpow__()" (the\n' - ' coercion rules would become too complicated).\n' - '\n' - ' Note:\n' - '\n' - ' If the right operand’s type is a subclass of the left ' - 'operand’s\n' - ' type and that subclass provides a different ' - 'implementation of the\n' - ' reflected method for the operation, this method will be ' - 'called\n' - ' before the left operand’s non-reflected method. This ' - 'behavior\n' - ' allows subclasses to override their ancestors’ ' - 'operations.\n' - '\n' - 'object.__iadd__(self, other)\n' - 'object.__isub__(self, other)\n' - 'object.__imul__(self, other)\n' - 'object.__imatmul__(self, other)\n' - 'object.__itruediv__(self, other)\n' - 'object.__ifloordiv__(self, other)\n' - 'object.__imod__(self, other)\n' - 'object.__ipow__(self, other[, modulo])\n' - 'object.__ilshift__(self, other)\n' - 'object.__irshift__(self, other)\n' - 'object.__iand__(self, other)\n' - 'object.__ixor__(self, other)\n' - 'object.__ior__(self, other)\n' - '\n' - ' These methods are called to implement the augmented ' - 'arithmetic\n' - ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' - '"**=",\n' - ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' - 'attempt to\n' - ' do the operation in-place (modifying *self*) and return ' - 'the result\n' - ' (which could be, but does not have to be, *self*). If a ' - 'specific\n' - ' method is not defined, or if that method returns ' - '"NotImplemented",\n' - ' the augmented assignment falls back to the normal ' - 'methods. For\n' - ' instance, if *x* is an instance of a class with an ' - '"__iadd__()"\n' - ' method, "x += y" is equivalent to "x = x.__iadd__(y)" . ' - 'If\n' - ' "__iadd__()" does not exist, or if "x.__iadd__(y)" ' - 'returns\n' - ' "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are\n' - ' considered, as with the evaluation of "x + y". In ' - 'certain\n' - ' situations, augmented assignment can result in unexpected ' - 'errors\n' - ' (see Why does a_tuple[i] += [‘item’] raise an exception ' - 'when the\n' - ' addition works?), but this behavior is in fact part of ' - 'the data\n' - ' model.\n' - '\n' - 'object.__neg__(self)\n' - 'object.__pos__(self)\n' - 'object.__abs__(self)\n' - 'object.__invert__(self)\n' - '\n' - ' Called to implement the unary arithmetic operations ("-", ' - '"+",\n' - ' "abs()" and "~").\n' - '\n' - 'object.__complex__(self)\n' - 'object.__int__(self)\n' - 'object.__float__(self)\n' - '\n' - ' Called to implement the built-in functions "complex()", ' - '"int()" and\n' - ' "float()". Should return a value of the appropriate ' - 'type.\n' - '\n' - 'object.__index__(self)\n' - '\n' - ' Called to implement "operator.index()", and whenever ' - 'Python needs\n' - ' to losslessly convert the numeric object to an integer ' - 'object (such\n' - ' as in slicing, or in the built-in "bin()", "hex()" and ' - '"oct()"\n' - ' functions). Presence of this method indicates that the ' - 'numeric\n' - ' object is an integer type. Must return an integer.\n' - '\n' - ' If "__int__()", "__float__()" and "__complex__()" are not ' - 'defined\n' - ' then corresponding built-in functions "int()", "float()" ' - 'and\n' - ' "complex()" fall back to "__index__()".\n' - '\n' - 'object.__round__(self[, ndigits])\n' - 'object.__trunc__(self)\n' - 'object.__floor__(self)\n' - 'object.__ceil__(self)\n' - '\n' - ' Called to implement the built-in function "round()" and ' - '"math"\n' - ' functions "trunc()", "floor()" and "ceil()". 
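A compact sketch of the binary, reflected and "NotImplemented" protocol described above (the "Metres" class is invented for the example):

   class Metres:
       def __init__(self, value):
           self.value = value

       def __repr__(self):
           return f"Metres({self.value})"

       def __add__(self, other):
           if isinstance(other, Metres):
               return Metres(self.value + other.value)
           if isinstance(other, (int, float)):
               return Metres(self.value + other)
           return NotImplemented      # let the other operand try

       # Reflected form; addition is commutative here, so reuse __add__.
       __radd__ = __add__

   Metres(3) + 2      # Metres(5)
   2 + Metres(3)      # Metres(5): int.__add__ returns NotImplemented,
                      # so Metres.__radd__ is tried next
   # Metres(3) + "x" raises TypeError because neither operand
   # supports the operation.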
Unless ' - '*ndigits* is\n' - ' passed to "__round__()" all these methods should return ' - 'the value\n' - ' of the object truncated to an "Integral" (typically an ' - '"int").\n' - '\n' - ' Changed in version 3.14: "int()" no longer delegates to ' - 'the\n' - ' "__trunc__()" method.\n' - '\n' - '\n' - 'With Statement Context Managers\n' - '===============================\n' - '\n' - 'A *context manager* is an object that defines the runtime ' - 'context to\n' - 'be established when executing a "with" statement. The ' - 'context manager\n' - 'handles the entry into, and the exit from, the desired ' - 'runtime context\n' - 'for the execution of the block of code. Context managers ' - 'are normally\n' - 'invoked using the "with" statement (described in section The ' - 'with\n' - 'statement), but can also be used by directly invoking their ' - 'methods.\n' - '\n' - 'Typical uses of context managers include saving and ' - 'restoring various\n' - 'kinds of global state, locking and unlocking resources, ' - 'closing opened\n' - 'files, etc.\n' - '\n' - 'For more information on context managers, see Context ' - 'Manager Types.\n' - 'The "object" class itself does not provide the context ' - 'manager\n' - 'methods.\n' - '\n' - 'object.__enter__(self)\n' - '\n' - ' Enter the runtime context related to this object. The ' - '"with"\n' - ' statement will bind this method’s return value to the ' - 'target(s)\n' - ' specified in the "as" clause of the statement, if any.\n' - '\n' - 'object.__exit__(self, exc_type, exc_value, traceback)\n' - '\n' - ' Exit the runtime context related to this object. The ' - 'parameters\n' - ' describe the exception that caused the context to be ' - 'exited. If the\n' - ' context was exited without an exception, all three ' - 'arguments will\n' - ' be "None".\n' - '\n' - ' If an exception is supplied, and the method wishes to ' - 'suppress the\n' - ' exception (i.e., prevent it from being propagated), it ' - 'should\n' - ' return a true value. Otherwise, the exception will be ' - 'processed\n' - ' normally upon exit from this method.\n' - '\n' - ' Note that "__exit__()" methods should not reraise the ' - 'passed-in\n' - ' exception; this is the caller’s responsibility.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the ' - 'Python "with"\n' - ' statement.\n' - '\n' - '\n' - 'Customizing positional arguments in class pattern matching\n' - '==========================================================\n' - '\n' - 'When using a class name in a pattern, positional arguments ' - 'in the\n' - 'pattern are not allowed by default, i.e. "case MyClass(x, ' - 'y)" is\n' - 'typically invalid without special support in "MyClass". To ' - 'be able to\n' - 'use that kind of pattern, the class needs to define a ' - '*__match_args__*\n' - 'attribute.\n' - '\n' - 'object.__match_args__\n' - '\n' - ' This class variable can be assigned a tuple of strings. ' - 'When this\n' - ' class is used in a class pattern with positional ' - 'arguments, each\n' - ' positional argument will be converted into a keyword ' - 'argument,\n' - ' using the corresponding value in *__match_args__* as the ' - 'keyword.\n' - ' The absence of this attribute is equivalent to setting it ' - 'to "()".\n' - '\n' - 'For example, if "MyClass.__match_args__" is "("left", ' - '"center",\n' - '"right")" that means that "case MyClass(x, y)" is equivalent ' - 'to "case\n' - 'MyClass(left=x, center=y)". 
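A short sketch with an invented "Point3D" class:

   class Point3D:
       __match_args__ = ("x", "y", "z")

       def __init__(self, x, y, z):
           self.x, self.y, self.z = x, y, z

   def describe(p):
       match p:
           # Positional sub-patterns are mapped onto __match_args__,
           # so this is equivalent to Point3D(x=0, y=0, z=z).
           case Point3D(0, 0, z):
               return f"on the z axis at {z}"
           case Point3D(x, y, z):
               return f"at ({x}, {y}, {z})"
           case _:
               return "not a point"

   describe(Point3D(0, 0, 5))    # 'on the z axis at 5'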
Note that the number of ' - 'arguments in the\n' - 'pattern must be smaller than or equal to the number of ' - 'elements in\n' - '*__match_args__*; if it is larger, the pattern match attempt ' - 'will\n' - 'raise a "TypeError".\n' - '\n' - 'Added in version 3.10.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 634** - Structural Pattern Matching\n' - ' The specification for the Python "match" statement.\n' - '\n' - '\n' - 'Emulating buffer types\n' - '======================\n' - '\n' - 'The buffer protocol provides a way for Python objects to ' - 'expose\n' - 'efficient access to a low-level memory array. This protocol ' - 'is\n' - 'implemented by builtin types such as "bytes" and ' - '"memoryview", and\n' - 'third-party libraries may define additional buffer types.\n' - '\n' - 'While buffer types are usually implemented in C, it is also ' - 'possible\n' - 'to implement the protocol in Python.\n' - '\n' - 'object.__buffer__(self, flags)\n' - '\n' - ' Called when a buffer is requested from *self* (for ' - 'example, by the\n' - ' "memoryview" constructor). The *flags* argument is an ' - 'integer\n' - ' representing the kind of buffer requested, affecting for ' - 'example\n' - ' whether the returned buffer is read-only or writable.\n' - ' "inspect.BufferFlags" provides a convenient way to ' - 'interpret the\n' - ' flags. The method must return a "memoryview" object.\n' - '\n' - 'object.__release_buffer__(self, buffer)\n' - '\n' - ' Called when a buffer is no longer needed. The *buffer* ' - 'argument is\n' - ' a "memoryview" object that was previously returned by\n' - ' "__buffer__()". The method must release any resources ' - 'associated\n' - ' with the buffer. This method should return "None". Buffer ' - 'objects\n' - ' that do not need to perform any cleanup are not required ' - 'to\n' - ' implement this method.\n' - '\n' - 'Added in version 3.12.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 688** - Making the buffer protocol accessible in ' - 'Python\n' - ' Introduces the Python "__buffer__" and ' - '"__release_buffer__"\n' - ' methods.\n' - '\n' - ' "collections.abc.Buffer"\n' - ' ABC for buffer types.\n' - '\n' - '\n' - 'Annotations\n' - '===========\n' - '\n' - 'Functions, classes, and modules may contain *annotations*, ' - 'which are a\n' - 'way to associate information (usually *type hints*) with a ' - 'symbol.\n' - '\n' - 'object.__annotations__\n' - '\n' - ' This attribute contains the annotations for an object. It ' - 'is lazily\n' - ' evaluated, so accessing the attribute may execute ' - 'arbitrary code\n' - ' and raise exceptions. If evaluation is successful, the ' - 'attribute is\n' - ' set to a dictionary mapping from variable names to ' - 'annotations.\n' - '\n' - ' Changed in version 3.14: Annotations are now lazily ' - 'evaluated.\n' - '\n' - 'object.__annotate__(format)\n' - '\n' - ' An *annotate function*. Returns a new dictionary object ' - 'mapping\n' - ' attribute/parameter names to their annotation values.\n' - '\n' - ' Takes a format parameter specifying the format in which ' - 'annotations\n' - ' values should be provided. It must be a member of the\n' - ' "annotationlib.Format" enum, or an integer with a value\n' - ' corresponding to a member of the enum.\n' - '\n' - ' If an annotate function doesn’t support the requested ' - 'format, it\n' - ' must raise "NotImplementedError". 
Annotate functions must ' - 'always\n' - ' support "VALUE" format; they must not raise ' - '"NotImplementedError()"\n' - ' when called with this format.\n' - '\n' - ' When called with "VALUE" format, an annotate function ' - 'may raise\n' - ' "NameError"; it must not raise "NameError" when called ' - 'requesting\n' - ' any other format.\n' - '\n' - ' If an object does not have any annotations, ' - '"__annotate__" should\n' - ' preferably be set to "None" (it can’t be deleted), rather ' - 'than set\n' - ' to a function that returns an empty dict.\n' - '\n' - ' Added in version 3.14.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 649** — Deferred evaluation of annotation using ' - 'descriptors\n' - ' Introduces lazy evaluation of annotations and the ' - '"__annotate__"\n' - ' function.\n' - '\n' - '\n' - 'Special method lookup\n' - '=====================\n' - '\n' - 'For custom classes, implicit invocations of special methods ' - 'are only\n' - 'guaranteed to work correctly if defined on an object’s type, ' - 'not in\n' - 'the object’s instance dictionary. That behaviour is the ' - 'reason why\n' - 'the following code raises an exception:\n' - '\n' - ' >>> class C:\n' - ' ... pass\n' - ' ...\n' - ' >>> c = C()\n' - ' >>> c.__len__ = lambda: 5\n' - ' >>> len(c)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: object of type 'C' has no len()\n" - '\n' - 'The rationale behind this behaviour lies with a number of ' - 'special\n' - 'methods such as "__hash__()" and "__repr__()" that are ' - 'implemented by\n' - 'all objects, including type objects. If the implicit lookup ' - 'of these\n' - 'methods used the conventional lookup process, they would ' - 'fail when\n' - 'invoked on the type object itself:\n' - '\n' - ' >>> 1 .__hash__() == hash(1)\n' - ' True\n' - ' >>> int.__hash__() == hash(int)\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " TypeError: descriptor '__hash__' of 'int' object needs an " - 'argument\n' - '\n' - 'Incorrectly attempting to invoke an unbound method of a ' - 'class in this\n' - 'way is sometimes referred to as ‘metaclass confusion’, and ' - 'is avoided\n' - 'by bypassing the instance when looking up special methods:\n' - '\n' - ' >>> type(1).__hash__(1) == hash(1)\n' - ' True\n' - ' >>> type(int).__hash__(int) == hash(int)\n' - ' True\n' - '\n' - 'In addition to bypassing any instance attributes in the ' - 'interest of\n' - 'correctness, implicit special method lookup generally also ' - 'bypasses\n' - 'the "__getattribute__()" method even of the object’s ' - 'metaclass:\n' - '\n' - ' >>> class Meta(type):\n' - ' ... def __getattribute__(*args):\n' - ' ... print("Metaclass getattribute invoked")\n' - ' ... return type.__getattribute__(*args)\n' - ' ...\n' - ' >>> class C(object, metaclass=Meta):\n' - ' ... def __len__(self):\n' - ' ... return 10\n' - ' ... def __getattribute__(*args):\n' - ' ... print("Class getattribute invoked")\n' - ' ... 
return object.__getattribute__(*args)\n' - ' ...\n' - ' >>> c = C()\n' - ' >>> c.__len__() # Explicit lookup via ' - 'instance\n' - ' Class getattribute invoked\n' - ' 10\n' - ' >>> type(c).__len__(c) # Explicit lookup via ' - 'type\n' - ' Metaclass getattribute invoked\n' - ' 10\n' - ' >>> len(c) # Implicit lookup\n' - ' 10\n' - '\n' - 'Bypassing the "__getattribute__()" machinery in this fashion ' - 'provides\n' - 'significant scope for speed optimisations within the ' - 'interpreter, at\n' - 'the cost of some flexibility in the handling of special ' - 'methods (the\n' - 'special method *must* be set on the class object itself in ' - 'order to be\n' - 'consistently invoked by the interpreter).\n', - 'string-methods': 'String Methods\n' - '**************\n' - '\n' - 'Strings implement all of the common sequence operations, ' - 'along with\n' - 'the additional methods described below.\n' - '\n' - 'Strings also support two styles of string formatting, one ' - 'providing a\n' - 'large degree of flexibility and customization (see ' - '"str.format()",\n' - 'Format String Syntax and Custom String Formatting) and the ' - 'other based\n' - 'on C "printf" style formatting that handles a narrower ' - 'range of types\n' - 'and is slightly harder to use correctly, but is often ' - 'faster for the\n' - 'cases it can handle (printf-style String Formatting).\n' - '\n' - 'The Text Processing Services section of the standard ' - 'library covers a\n' - 'number of other modules that provide various text related ' - 'utilities\n' - '(including regular expression support in the "re" ' - 'module).\n' - '\n' - 'str.capitalize()\n' - '\n' - ' Return a copy of the string with its first character ' - 'capitalized\n' - ' and the rest lowercased.\n' - '\n' - ' Changed in version 3.8: The first character is now put ' - 'into\n' - ' titlecase rather than uppercase. This means that ' - 'characters like\n' - ' digraphs will only have their first letter capitalized, ' - 'instead of\n' - ' the full character.\n' - '\n' - 'str.casefold()\n' - '\n' - ' Return a casefolded copy of the string. Casefolded ' - 'strings may be\n' - ' used for caseless matching.\n' - '\n' - ' Casefolding is similar to lowercasing but more ' - 'aggressive because\n' - ' it is intended to remove all case distinctions in a ' - 'string. For\n' - ' example, the German lowercase letter "\'ß\'" is ' - 'equivalent to ""ss"".\n' - ' Since it is already lowercase, "lower()" would do ' - 'nothing to "\'ß\'";\n' - ' "casefold()" converts it to ""ss"".\n' - '\n' - ' The casefolding algorithm is described in section 3.13 ' - '‘Default\n' - ' Case Folding’ of the Unicode Standard.\n' - '\n' - ' Added in version 3.3.\n' - '\n' - 'str.center(width[, fillchar])\n' - '\n' - ' Return centered in a string of length *width*. Padding ' - 'is done\n' - ' using the specified *fillchar* (default is an ASCII ' - 'space). The\n' - ' original string is returned if *width* is less than or ' - 'equal to\n' - ' "len(s)".\n' - '\n' - 'str.count(sub[, start[, end]])\n' - '\n' - ' Return the number of non-overlapping occurrences of ' - 'substring *sub*\n' - ' in the range [*start*, *end*]. 
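For example, "casefold()" above is more aggressive than "lower()":

   >>> 'Straße'.lower()
   'straße'
   >>> 'Straße'.casefold()
   'strasse'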
Optional arguments ' - '*start* and\n' - ' *end* are interpreted as in slice notation.\n' - '\n' - ' If *sub* is empty, returns the number of empty strings ' - 'between\n' - ' characters which is the length of the string plus one.\n' - '\n' - "str.encode(encoding='utf-8', errors='strict')\n" - '\n' - ' Return the string encoded to "bytes".\n' - '\n' - ' *encoding* defaults to "\'utf-8\'"; see Standard ' - 'Encodings for\n' - ' possible values.\n' - '\n' - ' *errors* controls how encoding errors are handled. If ' - '"\'strict\'"\n' - ' (the default), a "UnicodeError" exception is raised. ' - 'Other possible\n' - ' values are "\'ignore\'", "\'replace\'", ' - '"\'xmlcharrefreplace\'",\n' - ' "\'backslashreplace\'" and any other name registered ' - 'via\n' - ' "codecs.register_error()". See Error Handlers for ' - 'details.\n' - '\n' - ' For performance reasons, the value of *errors* is not ' - 'checked for\n' - ' validity unless an encoding error actually occurs, ' - 'Python\n' - ' Development Mode is enabled or a debug build is used.\n' - '\n' - ' Changed in version 3.1: Added support for keyword ' - 'arguments.\n' - '\n' - ' Changed in version 3.9: The value of the *errors* ' - 'argument is now\n' - ' checked in Python Development Mode and in debug mode.\n' - '\n' - 'str.endswith(suffix[, start[, end]])\n' - '\n' - ' Return "True" if the string ends with the specified ' - '*suffix*,\n' - ' otherwise return "False". *suffix* can also be a tuple ' - 'of suffixes\n' - ' to look for. With optional *start*, test beginning at ' - 'that\n' - ' position. With optional *end*, stop comparing at that ' - 'position.\n' - '\n' - 'str.expandtabs(tabsize=8)\n' - '\n' - ' Return a copy of the string where all tab characters ' - 'are replaced\n' - ' by one or more spaces, depending on the current column ' - 'and the\n' - ' given tab size. Tab positions occur every *tabsize* ' - 'characters\n' - ' (default is 8, giving tab positions at columns 0, 8, 16 ' - 'and so on).\n' - ' To expand the string, the current column is set to zero ' - 'and the\n' - ' string is examined character by character. If the ' - 'character is a\n' - ' tab ("\\t"), one or more space characters are inserted ' - 'in the result\n' - ' until the current column is equal to the next tab ' - 'position. (The\n' - ' tab character itself is not copied.) If the character ' - 'is a newline\n' - ' ("\\n") or return ("\\r"), it is copied and the current ' - 'column is\n' - ' reset to zero. Any other character is copied unchanged ' - 'and the\n' - ' current column is incremented by one regardless of how ' - 'the\n' - ' character is represented when printed.\n' - '\n' - " >>> '01\\t012\\t0123\\t01234'.expandtabs()\n" - " '01 012 0123 01234'\n" - " >>> '01\\t012\\t0123\\t01234'.expandtabs(4)\n" - " '01 012 0123 01234'\n" - '\n' - 'str.find(sub[, start[, end]])\n' - '\n' - ' Return the lowest index in the string where substring ' - '*sub* is\n' - ' found within the slice "s[start:end]". Optional ' - 'arguments *start*\n' - ' and *end* are interpreted as in slice notation. Return ' - '"-1" if\n' - ' *sub* is not found.\n' - '\n' - ' Note:\n' - '\n' - ' The "find()" method should be used only if you need ' - 'to know the\n' - ' position of *sub*. To check if *sub* is a substring ' - 'or not, use\n' - ' the "in" operator:\n' - '\n' - " >>> 'Py' in 'Python'\n" - ' True\n' - '\n' - 'str.format(*args, **kwargs)\n' - '\n' - ' Perform a string formatting operation. 
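For example, the *errors* handlers of "str.encode()" and the tuple form of "str.endswith()" described above behave as follows:

   >>> 'café'.encode()
   b'caf\xc3\xa9'
   >>> 'café'.encode('ascii', errors='backslashreplace')
   b'caf\\xe9'
   >>> 'photo.png'.endswith(('.jpg', '.png'))
   True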
The string on ' - 'which this\n' - ' method is called can contain literal text or ' - 'replacement fields\n' - ' delimited by braces "{}". Each replacement field ' - 'contains either\n' - ' the numeric index of a positional argument, or the name ' - 'of a\n' - ' keyword argument. Returns a copy of the string where ' - 'each\n' - ' replacement field is replaced with the string value of ' - 'the\n' - ' corresponding argument.\n' - '\n' - ' >>> "The sum of 1 + 2 is {0}".format(1+2)\n' - " 'The sum of 1 + 2 is 3'\n" - '\n' - ' See Format String Syntax for a description of the ' - 'various\n' - ' formatting options that can be specified in format ' - 'strings.\n' - '\n' - ' Note:\n' - '\n' - ' When formatting a number ("int", "float", "complex",\n' - ' "decimal.Decimal" and subclasses) with the "n" type ' - '(ex:\n' - ' "\'{:n}\'.format(1234)"), the function temporarily ' - 'sets the\n' - ' "LC_CTYPE" locale to the "LC_NUMERIC" locale to ' - 'decode\n' - ' "decimal_point" and "thousands_sep" fields of ' - '"localeconv()" if\n' - ' they are non-ASCII or longer than 1 byte, and the ' - '"LC_NUMERIC"\n' - ' locale is different than the "LC_CTYPE" locale. This ' - 'temporary\n' - ' change affects other threads.\n' - '\n' - ' Changed in version 3.7: When formatting a number with ' - 'the "n" type,\n' - ' the function sets temporarily the "LC_CTYPE" locale to ' - 'the\n' - ' "LC_NUMERIC" locale in some cases.\n' - '\n' - 'str.format_map(mapping, /)\n' - '\n' - ' Similar to "str.format(**mapping)", except that ' - '"mapping" is used\n' - ' directly and not copied to a "dict". This is useful if ' - 'for example\n' - ' "mapping" is a dict subclass:\n' - '\n' - ' >>> class Default(dict):\n' - ' ... def __missing__(self, key):\n' - ' ... return key\n' - ' ...\n' - " >>> '{name} was born in " - "{country}'.format_map(Default(name='Guido'))\n" - " 'Guido was born in country'\n" - '\n' - ' Added in version 3.2.\n' - '\n' - 'str.index(sub[, start[, end]])\n' - '\n' - ' Like "find()", but raise "ValueError" when the ' - 'substring is not\n' - ' found.\n' - '\n' - 'str.isalnum()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'alphanumeric and\n' - ' there is at least one character, "False" otherwise. A ' - 'character\n' - ' "c" is alphanumeric if one of the following returns ' - '"True":\n' - ' "c.isalpha()", "c.isdecimal()", "c.isdigit()", or ' - '"c.isnumeric()".\n' - '\n' - 'str.isalpha()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'alphabetic and\n' - ' there is at least one character, "False" otherwise. ' - 'Alphabetic\n' - ' characters are those characters defined in the Unicode ' - 'character\n' - ' database as “Letter”, i.e., those with general category ' - 'property\n' - ' being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. Note ' - 'that this is\n' - ' different from the Alphabetic property defined in the ' - 'section 4.10\n' - ' ‘Letters, Alphabetic, and Ideographic’ of the Unicode ' - 'Standard.\n' - '\n' - 'str.isascii()\n' - '\n' - ' Return "True" if the string is empty or all characters ' - 'in the\n' - ' string are ASCII, "False" otherwise. ASCII characters ' - 'have code\n' - ' points in the range U+0000-U+007F.\n' - '\n' - ' Added in version 3.7.\n' - '\n' - 'str.isdecimal()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'decimal\n' - ' characters and there is at least one character, "False" ' - 'otherwise.\n' - ' Decimal characters are those that can be used to form ' - 'numbers in\n' - ' base 10, e.g. 
U+0660, ARABIC-INDIC DIGIT ZERO. ' - 'Formally a decimal\n' - ' character is a character in the Unicode General ' - 'Category “Nd”.\n' - '\n' - 'str.isdigit()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'digits and there\n' - ' is at least one character, "False" otherwise. Digits ' - 'include\n' - ' decimal characters and digits that need special ' - 'handling, such as\n' - ' the compatibility superscript digits. This covers ' - 'digits which\n' - ' cannot be used to form numbers in base 10, like the ' - 'Kharosthi\n' - ' numbers. Formally, a digit is a character that has the ' - 'property\n' - ' value Numeric_Type=Digit or Numeric_Type=Decimal.\n' - '\n' - 'str.isidentifier()\n' - '\n' - ' Return "True" if the string is a valid identifier ' - 'according to the\n' - ' language definition, section Identifiers and keywords.\n' - '\n' - ' "keyword.iskeyword()" can be used to test whether ' - 'string "s" is a\n' - ' reserved identifier, such as "def" and "class".\n' - '\n' - ' Example:\n' - '\n' - ' >>> from keyword import iskeyword\n' - '\n' - " >>> 'hello'.isidentifier(), iskeyword('hello')\n" - ' (True, False)\n' - " >>> 'def'.isidentifier(), iskeyword('def')\n" - ' (True, True)\n' - '\n' - 'str.islower()\n' - '\n' - ' Return "True" if all cased characters [4] in the string ' - 'are\n' - ' lowercase and there is at least one cased character, ' - '"False"\n' - ' otherwise.\n' - '\n' - 'str.isnumeric()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'numeric\n' - ' characters, and there is at least one character, ' - '"False" otherwise.\n' - ' Numeric characters include digit characters, and all ' - 'characters\n' - ' that have the Unicode numeric value property, e.g. ' - 'U+2155, VULGAR\n' - ' FRACTION ONE FIFTH. Formally, numeric characters are ' - 'those with\n' - ' the property value Numeric_Type=Digit, ' - 'Numeric_Type=Decimal or\n' - ' Numeric_Type=Numeric.\n' - '\n' - 'str.isprintable()\n' - '\n' - ' Return "True" if all characters in the string are ' - 'printable or the\n' - ' string is empty, "False" otherwise. Nonprintable ' - 'characters are\n' - ' those characters defined in the Unicode character ' - 'database as\n' - ' “Other” or “Separator”, excepting the ASCII space ' - '(0x20) which is\n' - ' considered printable. (Note that printable characters ' - 'in this\n' - ' context are those which should not be escaped when ' - '"repr()" is\n' - ' invoked on a string. 
It has no bearing on the handling ' - 'of strings\n' - ' written to "sys.stdout" or "sys.stderr".)\n' - '\n' - 'str.isspace()\n' - '\n' - ' Return "True" if there are only whitespace characters ' - 'in the string\n' - ' and there is at least one character, "False" ' - 'otherwise.\n' - '\n' - ' A character is *whitespace* if in the Unicode character ' - 'database\n' - ' (see "unicodedata"), either its general category is ' - '"Zs"\n' - ' (“Separator, space”), or its bidirectional class is one ' - 'of "WS",\n' - ' "B", or "S".\n' - '\n' - 'str.istitle()\n' - '\n' - ' Return "True" if the string is a titlecased string and ' - 'there is at\n' - ' least one character, for example uppercase characters ' - 'may only\n' - ' follow uncased characters and lowercase characters only ' - 'cased ones.\n' - ' Return "False" otherwise.\n' - '\n' - 'str.isupper()\n' - '\n' - ' Return "True" if all cased characters [4] in the string ' - 'are\n' - ' uppercase and there is at least one cased character, ' - '"False"\n' - ' otherwise.\n' - '\n' - " >>> 'BANANA'.isupper()\n" - ' True\n' - " >>> 'banana'.isupper()\n" - ' False\n' - " >>> 'baNana'.isupper()\n" - ' False\n' - " >>> ' '.isupper()\n" - ' False\n' - '\n' - 'str.join(iterable)\n' - '\n' - ' Return a string which is the concatenation of the ' - 'strings in\n' - ' *iterable*. A "TypeError" will be raised if there are ' - 'any non-\n' - ' string values in *iterable*, including "bytes" ' - 'objects. The\n' - ' separator between elements is the string providing this ' - 'method.\n' - '\n' - 'str.ljust(width[, fillchar])\n' - '\n' - ' Return the string left justified in a string of length ' - '*width*.\n' - ' Padding is done using the specified *fillchar* (default ' - 'is an ASCII\n' - ' space). The original string is returned if *width* is ' - 'less than or\n' - ' equal to "len(s)".\n' - '\n' - 'str.lower()\n' - '\n' - ' Return a copy of the string with all the cased ' - 'characters [4]\n' - ' converted to lowercase.\n' - '\n' - ' The lowercasing algorithm used is described in section ' - '3.13\n' - ' ‘Default Case Folding’ of the Unicode Standard.\n' - '\n' - 'str.lstrip([chars])\n' - '\n' - ' Return a copy of the string with leading characters ' - 'removed. The\n' - ' *chars* argument is a string specifying the set of ' - 'characters to be\n' - ' removed. If omitted or "None", the *chars* argument ' - 'defaults to\n' - ' removing whitespace. The *chars* argument is not a ' - 'prefix; rather,\n' - ' all combinations of its values are stripped:\n' - '\n' - " >>> ' spacious '.lstrip()\n" - " 'spacious '\n" - " >>> 'www.example.com'.lstrip('cmowz.')\n" - " 'example.com'\n" - '\n' - ' See "str.removeprefix()" for a method that will remove ' - 'a single\n' - ' prefix string rather than all of a set of characters. 
' - 'For example:\n' - '\n' - " >>> 'Arthur: three!'.lstrip('Arthur: ')\n" - " 'ee!'\n" - " >>> 'Arthur: three!'.removeprefix('Arthur: ')\n" - " 'three!'\n" - '\n' - 'static str.maketrans(x[, y[, z]])\n' - '\n' - ' This static method returns a translation table usable ' - 'for\n' - ' "str.translate()".\n' - '\n' - ' If there is only one argument, it must be a dictionary ' - 'mapping\n' - ' Unicode ordinals (integers) or characters (strings of ' - 'length 1) to\n' - ' Unicode ordinals, strings (of arbitrary lengths) or ' - '"None".\n' - ' Character keys will then be converted to ordinals.\n' - '\n' - ' If there are two arguments, they must be strings of ' - 'equal length,\n' - ' and in the resulting dictionary, each character in x ' - 'will be mapped\n' - ' to the character at the same position in y. If there ' - 'is a third\n' - ' argument, it must be a string, whose characters will be ' - 'mapped to\n' - ' "None" in the result.\n' - '\n' - 'str.partition(sep)\n' - '\n' - ' Split the string at the first occurrence of *sep*, and ' - 'return a\n' - ' 3-tuple containing the part before the separator, the ' - 'separator\n' - ' itself, and the part after the separator. If the ' - 'separator is not\n' - ' found, return a 3-tuple containing the string itself, ' - 'followed by\n' - ' two empty strings.\n' - '\n' - 'str.removeprefix(prefix, /)\n' - '\n' - ' If the string starts with the *prefix* string, return\n' - ' "string[len(prefix):]". Otherwise, return a copy of the ' - 'original\n' - ' string:\n' - '\n' - " >>> 'TestHook'.removeprefix('Test')\n" - " 'Hook'\n" - " >>> 'BaseTestCase'.removeprefix('Test')\n" - " 'BaseTestCase'\n" - '\n' - ' Added in version 3.9.\n' - '\n' - 'str.removesuffix(suffix, /)\n' - '\n' - ' If the string ends with the *suffix* string and that ' - '*suffix* is\n' - ' not empty, return "string[:-len(suffix)]". Otherwise, ' - 'return a copy\n' - ' of the original string:\n' - '\n' - " >>> 'MiscTests'.removesuffix('Tests')\n" - " 'Misc'\n" - " >>> 'TmpDirMixin'.removesuffix('Tests')\n" - " 'TmpDirMixin'\n" - '\n' - ' Added in version 3.9.\n' - '\n' - 'str.replace(old, new, count=-1)\n' - '\n' - ' Return a copy of the string with all occurrences of ' - 'substring *old*\n' - ' replaced by *new*. If *count* is given, only the first ' - '*count*\n' - ' occurrences are replaced. If *count* is not specified ' - 'or "-1", then\n' - ' all occurrences are replaced.\n' - '\n' - ' Changed in version 3.13: *count* is now supported as a ' - 'keyword\n' - ' argument.\n' - '\n' - 'str.rfind(sub[, start[, end]])\n' - '\n' - ' Return the highest index in the string where substring ' - '*sub* is\n' - ' found, such that *sub* is contained within ' - '"s[start:end]".\n' - ' Optional arguments *start* and *end* are interpreted as ' - 'in slice\n' - ' notation. Return "-1" on failure.\n' - '\n' - 'str.rindex(sub[, start[, end]])\n' - '\n' - ' Like "rfind()" but raises "ValueError" when the ' - 'substring *sub* is\n' - ' not found.\n' - '\n' - 'str.rjust(width[, fillchar])\n' - '\n' - ' Return the string right justified in a string of length ' - '*width*.\n' - ' Padding is done using the specified *fillchar* (default ' - 'is an ASCII\n' - ' space). The original string is returned if *width* is ' - 'less than or\n' - ' equal to "len(s)".\n' - '\n' - 'str.rpartition(sep)\n' - '\n' - ' Split the string at the last occurrence of *sep*, and ' - 'return a\n' - ' 3-tuple containing the part before the separator, the ' - 'separator\n' - ' itself, and the part after the separator. 
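For example, a table built with "str.maketrans()" above can be passed to "str.translate()":

   >>> table = str.maketrans('abc', 'xyz', '!')
   >>> 'cabbage!'.translate(table)
   'zxyyxge'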
If the ' - 'separator is not\n' - ' found, return a 3-tuple containing two empty strings, ' - 'followed by\n' - ' the string itself.\n' - '\n' - 'str.rsplit(sep=None, maxsplit=-1)\n' - '\n' - ' Return a list of the words in the string, using *sep* ' - 'as the\n' - ' delimiter string. If *maxsplit* is given, at most ' - '*maxsplit* splits\n' - ' are done, the *rightmost* ones. If *sep* is not ' - 'specified or\n' - ' "None", any whitespace string is a separator. Except ' - 'for splitting\n' - ' from the right, "rsplit()" behaves like "split()" which ' - 'is\n' - ' described in detail below.\n' - '\n' - 'str.rstrip([chars])\n' - '\n' - ' Return a copy of the string with trailing characters ' - 'removed. The\n' - ' *chars* argument is a string specifying the set of ' - 'characters to be\n' - ' removed. If omitted or "None", the *chars* argument ' - 'defaults to\n' - ' removing whitespace. The *chars* argument is not a ' - 'suffix; rather,\n' - ' all combinations of its values are stripped:\n' - '\n' - " >>> ' spacious '.rstrip()\n" - " ' spacious'\n" - " >>> 'mississippi'.rstrip('ipz')\n" - " 'mississ'\n" - '\n' - ' See "str.removesuffix()" for a method that will remove ' - 'a single\n' - ' suffix string rather than all of a set of characters. ' - 'For example:\n' - '\n' - " >>> 'Monty Python'.rstrip(' Python')\n" - " 'M'\n" - " >>> 'Monty Python'.removesuffix(' Python')\n" - " 'Monty'\n" - '\n' - 'str.split(sep=None, maxsplit=-1)\n' - '\n' - ' Return a list of the words in the string, using *sep* ' - 'as the\n' - ' delimiter string. If *maxsplit* is given, at most ' - '*maxsplit*\n' - ' splits are done (thus, the list will have at most ' - '"maxsplit+1"\n' - ' elements). If *maxsplit* is not specified or "-1", ' - 'then there is\n' - ' no limit on the number of splits (all possible splits ' - 'are made).\n' - '\n' - ' If *sep* is given, consecutive delimiters are not ' - 'grouped together\n' - ' and are deemed to delimit empty strings (for example,\n' - ' "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', ' - '\'2\']"). The *sep* argument\n' - ' may consist of multiple characters as a single ' - 'delimiter (to split\n' - ' with multiple delimiters, use "re.split()"). Splitting ' - 'an empty\n' - ' string with a specified separator returns "[\'\']".\n' - '\n' - ' For example:\n' - '\n' - " >>> '1,2,3'.split(',')\n" - " ['1', '2', '3']\n" - " >>> '1,2,3'.split(',', maxsplit=1)\n" - " ['1', '2,3']\n" - " >>> '1,2,,3,'.split(',')\n" - " ['1', '2', '', '3', '']\n" - " >>> '1<>2<>3<4'.split('<>')\n" - " ['1', '2', '3<4']\n" - '\n' - ' If *sep* is not specified or is "None", a different ' - 'splitting\n' - ' algorithm is applied: runs of consecutive whitespace ' - 'are regarded\n' - ' as a single separator, and the result will contain no ' - 'empty strings\n' - ' at the start or end if the string has leading or ' - 'trailing\n' - ' whitespace. Consequently, splitting an empty string or ' - 'a string\n' - ' consisting of just whitespace with a "None" separator ' - 'returns "[]".\n' - '\n' - ' For example:\n' - '\n' - " >>> '1 2 3'.split()\n" - " ['1', '2', '3']\n" - " >>> '1 2 3'.split(maxsplit=1)\n" - " ['1', '2 3']\n" - " >>> ' 1 2 3 '.split()\n" - " ['1', '2', '3']\n" - '\n' - 'str.splitlines(keepends=False)\n' - '\n' - ' Return a list of the lines in the string, breaking at ' - 'line\n' - ' boundaries. Line breaks are not included in the ' - 'resulting list\n' - ' unless *keepends* is given and true.\n' - '\n' - ' This method splits on the following line boundaries. 
' - 'In\n' - ' particular, the boundaries are a superset of *universal ' - 'newlines*.\n' - '\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | Representation | ' - 'Description |\n' - ' ' - '|=========================|===============================|\n' - ' | "\\n" | Line ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\r" | Carriage ' - 'Return |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\r\\n" | Carriage Return + Line ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\v" or "\\x0b" | Line ' - 'Tabulation |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\f" or "\\x0c" | Form ' - 'Feed |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1c" | File ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1d" | Group ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x1e" | Record ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\x85" | Next Line (C1 Control ' - 'Code) |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\u2028" | Line ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - ' | "\\u2029" | Paragraph ' - 'Separator |\n' - ' ' - '+-------------------------+-------------------------------+\n' - '\n' - ' Changed in version 3.2: "\\v" and "\\f" added to list ' - 'of line\n' - ' boundaries.\n' - '\n' - ' For example:\n' - '\n' - " >>> 'ab c\\n\\nde fg\\rkl\\r\\n'.splitlines()\n" - " ['ab c', '', 'de fg', 'kl']\n" - " >>> 'ab c\\n\\nde " - "fg\\rkl\\r\\n'.splitlines(keepends=True)\n" - " ['ab c\\n', '\\n', 'de fg\\r', 'kl\\r\\n']\n" - '\n' - ' Unlike "split()" when a delimiter string *sep* is ' - 'given, this\n' - ' method returns an empty list for the empty string, and ' - 'a terminal\n' - ' line break does not result in an extra line:\n' - '\n' - ' >>> "".splitlines()\n' - ' []\n' - ' >>> "One line\\n".splitlines()\n' - " ['One line']\n" - '\n' - ' For comparison, "split(\'\\n\')" gives:\n' - '\n' - " >>> ''.split('\\n')\n" - " ['']\n" - " >>> 'Two lines\\n'.split('\\n')\n" - " ['Two lines', '']\n" - '\n' - 'str.startswith(prefix[, start[, end]])\n' - '\n' - ' Return "True" if string starts with the *prefix*, ' - 'otherwise return\n' - ' "False". *prefix* can also be a tuple of prefixes to ' - 'look for.\n' - ' With optional *start*, test string beginning at that ' - 'position.\n' - ' With optional *end*, stop comparing string at that ' - 'position.\n' - '\n' - 'str.strip([chars])\n' - '\n' - ' Return a copy of the string with the leading and ' - 'trailing\n' - ' characters removed. The *chars* argument is a string ' - 'specifying the\n' - ' set of characters to be removed. If omitted or "None", ' - 'the *chars*\n' - ' argument defaults to removing whitespace. The *chars* ' - 'argument is\n' - ' not a prefix or suffix; rather, all combinations of its ' - 'values are\n' - ' stripped:\n' - '\n' - " >>> ' spacious '.strip()\n" - " 'spacious'\n" - " >>> 'www.example.com'.strip('cmowz.')\n" - " 'example'\n" - '\n' - ' The outermost leading and trailing *chars* argument ' - 'values are\n' - ' stripped from the string. 
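For example, "str.startswith()" above also accepts a tuple of prefixes:

   >>> 'https://example.com'.startswith(('http://', 'https://'))
   True
   >>> 'ftp://example.com'.startswith(('http://', 'https://'))
   False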
Characters are removed from ' - 'the leading\n' - ' end until reaching a string character that is not ' - 'contained in the\n' - ' set of characters in *chars*. A similar action takes ' - 'place on the\n' - ' trailing end. For example:\n' - '\n' - " >>> comment_string = '#....... Section 3.2.1 Issue " - "#32 .......'\n" - " >>> comment_string.strip('.#! ')\n" - " 'Section 3.2.1 Issue #32'\n" - '\n' - 'str.swapcase()\n' - '\n' - ' Return a copy of the string with uppercase characters ' - 'converted to\n' - ' lowercase and vice versa. Note that it is not ' - 'necessarily true that\n' - ' "s.swapcase().swapcase() == s".\n' - '\n' - 'str.title()\n' - '\n' - ' Return a titlecased version of the string where words ' - 'start with an\n' - ' uppercase character and the remaining characters are ' - 'lowercase.\n' - '\n' - ' For example:\n' - '\n' - " >>> 'Hello world'.title()\n" - " 'Hello World'\n" - '\n' - ' The algorithm uses a simple language-independent ' - 'definition of a\n' - ' word as groups of consecutive letters. The definition ' - 'works in\n' - ' many contexts but it means that apostrophes in ' - 'contractions and\n' - ' possessives form word boundaries, which may not be the ' - 'desired\n' - ' result:\n' - '\n' - ' >>> "they\'re bill\'s friends from the UK".title()\n' - ' "They\'Re Bill\'S Friends From The Uk"\n' - '\n' - ' The "string.capwords()" function does not have this ' - 'problem, as it\n' - ' splits words on spaces only.\n' - '\n' - ' Alternatively, a workaround for apostrophes can be ' - 'constructed\n' - ' using regular expressions:\n' - '\n' - ' >>> import re\n' - ' >>> def titlecase(s):\n' - ' ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n' - ' ... lambda mo: ' - 'mo.group(0).capitalize(),\n' - ' ... s)\n' - ' ...\n' - ' >>> titlecase("they\'re bill\'s friends.")\n' - ' "They\'re Bill\'s Friends."\n' - '\n' - 'str.translate(table)\n' - '\n' - ' Return a copy of the string in which each character has ' - 'been mapped\n' - ' through the given translation table. The table must be ' - 'an object\n' - ' that implements indexing via "__getitem__()", typically ' - 'a *mapping*\n' - ' or *sequence*. When indexed by a Unicode ordinal (an ' - 'integer), the\n' - ' table object can do any of the following: return a ' - 'Unicode ordinal\n' - ' or a string, to map the character to one or more other ' - 'characters;\n' - ' return "None", to delete the character from the return ' - 'string; or\n' - ' raise a "LookupError" exception, to map the character ' - 'to itself.\n' - '\n' - ' You can use "str.maketrans()" to create a translation ' - 'map from\n' - ' character-to-character mappings in different formats.\n' - '\n' - ' See also the "codecs" module for a more flexible ' - 'approach to custom\n' - ' character mappings.\n' - '\n' - 'str.upper()\n' - '\n' - ' Return a copy of the string with all the cased ' - 'characters [4]\n' - ' converted to uppercase. Note that ' - '"s.upper().isupper()" might be\n' - ' "False" if "s" contains uncased characters or if the ' - 'Unicode\n' - ' category of the resulting character(s) is not “Lu” ' - '(Letter,\n' - ' uppercase), but e.g. “Lt” (Letter, titlecase).\n' - '\n' - ' The uppercasing algorithm used is described in section ' - '3.13\n' - ' ‘Default Case Folding’ of the Unicode Standard.\n' - '\n' - 'str.zfill(width)\n' - '\n' - ' Return a copy of the string left filled with ASCII ' - '"\'0\'" digits to\n' - ' make a string of length *width*. 
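str.maketrans() mentioned above accepts either a mapping or the two/three string form; a short sketch of both, plus the title() apostrophe caveat (expected values in the comments):

    table = str.maketrans({"a": "4", "e": None})                # map 'a' -> '4', delete every 'e'
    print("generate".translate(table))                          # 'gnr4t'
    print("generate".translate(str.maketrans("a", "4", "e")))   # 'gnr4t' -- equivalent 3-string form

    print("they're bill's friends".title())  # "They'Re Bill'S Friends" -- apostrophes split words
    print("42".zfill(5), "-42".zfill(5))     # 00042 -0042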
A leading sign prefix\n' - ' ("\'+\'"/"\'-\'") is handled by inserting the padding ' - '*after* the sign\n' - ' character rather than before. The original string is ' - 'returned if\n' - ' *width* is less than or equal to "len(s)".\n' - '\n' - ' For example:\n' - '\n' - ' >>> "42".zfill(5)\n' - " '00042'\n" - ' >>> "-42".zfill(5)\n' - " '-0042'\n", - 'strings': 'String and Bytes literals\n' - '*************************\n' - '\n' - 'String literals are described by the following lexical ' - 'definitions:\n' - '\n' - ' stringliteral ::= [stringprefix](shortstring | longstring)\n' - ' stringprefix ::= "r" | "u" | "R" | "U" | "f" | "F"\n' - ' | "fr" | "Fr" | "fR" | "FR" | "rf" | "rF" | ' - '"Rf" | "RF"\n' - ' shortstring ::= "\'" shortstringitem* "\'" | \'"\' ' - 'shortstringitem* \'"\'\n' - ' longstring ::= "\'\'\'" longstringitem* "\'\'\'" | ' - '\'"""\' longstringitem* \'"""\'\n' - ' shortstringitem ::= shortstringchar | stringescapeseq\n' - ' longstringitem ::= longstringchar | stringescapeseq\n' - ' shortstringchar ::= \n' - ' longstringchar ::= \n' - ' stringescapeseq ::= "\\" \n' - '\n' - ' bytesliteral ::= bytesprefix(shortbytes | longbytes)\n' - ' bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | ' - '"rb" | "rB" | "Rb" | "RB"\n' - ' shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' ' - 'shortbytesitem* \'"\'\n' - ' longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' ' - 'longbytesitem* \'"""\'\n' - ' shortbytesitem ::= shortbyteschar | bytesescapeseq\n' - ' longbytesitem ::= longbyteschar | bytesescapeseq\n' - ' shortbyteschar ::= \n' - ' longbyteschar ::= \n' - ' bytesescapeseq ::= "\\" \n' - '\n' - 'One syntactic restriction not indicated by these productions is ' - 'that\n' - 'whitespace is not allowed between the "stringprefix" or ' - '"bytesprefix"\n' - 'and the rest of the literal. The source character set is defined ' - 'by\n' - 'the encoding declaration; it is UTF-8 if no encoding declaration ' - 'is\n' - 'given in the source file; see section Encoding declarations.\n' - '\n' - 'In plain English: Both types of literals can be enclosed in ' - 'matching\n' - 'single quotes ("\'") or double quotes ("""). They can also be ' - 'enclosed\n' - 'in matching groups of three single or double quotes (these are\n' - 'generally referred to as *triple-quoted strings*). The backslash ' - '("\\")\n' - 'character is used to give special meaning to otherwise ordinary\n' - 'characters like "n", which means ‘newline’ when escaped ("\\n"). ' - 'It can\n' - 'also be used to escape characters that otherwise have a special\n' - 'meaning, such as newline, backslash itself, or the quote ' - 'character.\n' - 'See escape sequences below for examples.\n' - '\n' - 'Bytes literals are always prefixed with "\'b\'" or "\'B\'"; they ' - 'produce\n' - 'an instance of the "bytes" type instead of the "str" type. They ' - 'may\n' - 'only contain ASCII characters; bytes with a numeric value of 128 ' - 'or\n' - 'greater must be expressed with escapes.\n' - '\n' - 'Both string and bytes literals may optionally be prefixed with a\n' - 'letter "\'r\'" or "\'R\'"; such constructs are called *raw ' - 'string\n' - 'literals* and *raw bytes literals* respectively and treat ' - 'backslashes\n' - 'as literal characters. 
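For reference, the literal prefixes discussed above side by side; the variable names are only illustrative (expected values in the comments):

    plain = "col1\tcol2"      # '\t' is interpreted: 9 characters containing a tab
    raw   = r"col1\tcol2"     # raw literal: the backslash is kept, 10 characters
    data  = b"\x48\x69"       # bytes literal: ASCII and escapes only -> b'Hi'
    quoted = '''a 'quoted' word'''   # triple-quoted: embedded quotes need no escaping

    print(len(plain), len(raw))   # 9 10
    print(data.decode("ascii"))   # Hi
    print(quoted)                 # a 'quoted' word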
As a result, in raw string literals, ' - '"\'\\U\'"\n' - 'and "\'\\u\'" escapes are not treated specially.\n' - '\n' - 'Added in version 3.3: The "\'rb\'" prefix of raw bytes literals ' - 'has been\n' - 'added as a synonym of "\'br\'".Support for the unicode legacy ' - 'literal\n' - '("u\'value\'") was reintroduced to simplify the maintenance of ' - 'dual\n' - 'Python 2.x and 3.x codebases. See **PEP 414** for more ' - 'information.\n' - '\n' - 'A string literal with "\'f\'" or "\'F\'" in its prefix is a ' - '*formatted\n' - 'string literal*; see f-strings. The "\'f\'" may be combined with ' - '"\'r\'",\n' - 'but not with "\'b\'" or "\'u\'", therefore raw formatted strings ' - 'are\n' - 'possible, but formatted bytes literals are not.\n' - '\n' - 'In triple-quoted literals, unescaped newlines and quotes are ' - 'allowed\n' - '(and are retained), except that three unescaped quotes in a row\n' - 'terminate the literal. (A “quote” is the character used to open ' - 'the\n' - 'literal, i.e. either "\'" or """.)\n' - '\n' - '\n' - 'Escape sequences\n' - '================\n' - '\n' - 'Unless an "\'r\'" or "\'R\'" prefix is present, escape sequences ' - 'in string\n' - 'and bytes literals are interpreted according to rules similar to ' - 'those\n' - 'used by Standard C. The recognized escape sequences are:\n' - '\n' - '+---------------------------+-----------------------------------+---------+\n' - '| Escape Sequence | Meaning | ' - 'Notes |\n' - '|===========================|===================================|=========|\n' - '| "\\" | Backslash and newline ignored ' - '| (1) |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\\\" | Backslash ' - '("\\") | |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\\'" | Single quote ' - '("\'") | |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\"" | Double quote (""") ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\a" | ASCII Bell (BEL) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\b" | ASCII Backspace (BS) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\f" | ASCII Formfeed (FF) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\n" | ASCII Linefeed (LF) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\r" | ASCII Carriage Return (CR) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\t" | ASCII Horizontal Tab (TAB) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\v" | ASCII Vertical Tab (VT) ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\*ooo*" | Character with octal value *ooo* ' - '| (2,4) |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\x*hh*" | Character with hex value *hh* ' - '| (3,4) |\n' - '+---------------------------+-----------------------------------+---------+\n' - '\n' - 'Escape sequences only recognized in string literals are:\n' - '\n' - '+---------------------------+-----------------------------------+---------+\n' - '| Escape Sequence | Meaning | ' - 'Notes |\n' - '|===========================|===================================|=========|\n' 
- '| "\\N{*name*}" | Character named *name* in the ' - '| (5) |\n' - '| | Unicode database ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\u*xxxx*" | Character with 16-bit hex value ' - '| (6) |\n' - '| | *xxxx* ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '| "\\U*xxxxxxxx*" | Character with 32-bit hex value ' - '| (7) |\n' - '| | *xxxxxxxx* ' - '| |\n' - '+---------------------------+-----------------------------------+---------+\n' - '\n' - 'Notes:\n' - '\n' - '1. A backslash can be added at the end of a line to ignore the\n' - ' newline:\n' - '\n' - " >>> 'This string will not include \\\n" - " ... backslashes or newline characters.'\n" - " 'This string will not include backslashes or newline " - "characters.'\n" - '\n' - ' The same result can be achieved using triple-quoted strings, ' - 'or\n' - ' parentheses and string literal concatenation.\n' - '\n' - '2. As in Standard C, up to three octal digits are accepted.\n' - '\n' - ' Changed in version 3.11: Octal escapes with value larger than\n' - ' "0o377" produce a "DeprecationWarning".\n' - '\n' - ' Changed in version 3.12: Octal escapes with value larger than\n' - ' "0o377" produce a "SyntaxWarning". In a future Python version ' - 'they\n' - ' will be eventually a "SyntaxError".\n' - '\n' - '3. Unlike in Standard C, exactly two hex digits are required.\n' - '\n' - '4. In a bytes literal, hexadecimal and octal escapes denote the ' - 'byte\n' - ' with the given value. In a string literal, these escapes ' - 'denote a\n' - ' Unicode character with the given value.\n' - '\n' - '5. Changed in version 3.3: Support for name aliases [1] has been\n' - ' added.\n' - '\n' - '6. Exactly four hex digits are required.\n' - '\n' - '7. Any Unicode character can be encoded this way. Exactly eight ' - 'hex\n' - ' digits are required.\n' - '\n' - 'Unlike Standard C, all unrecognized escape sequences are left in ' - 'the\n' - 'string unchanged, i.e., *the backslash is left in the result*. ' - '(This\n' - 'behavior is useful when debugging: if an escape sequence is ' - 'mistyped,\n' - 'the resulting output is more easily recognized as broken.) It is ' - 'also\n' - 'important to note that the escape sequences only recognized in ' - 'string\n' - 'literals fall into the category of unrecognized escapes for ' - 'bytes\n' - 'literals.\n' - '\n' - 'Changed in version 3.6: Unrecognized escape sequences produce a\n' - '"DeprecationWarning".\n' - '\n' - 'Changed in version 3.12: Unrecognized escape sequences produce a\n' - '"SyntaxWarning". In a future Python version they will be ' - 'eventually a\n' - '"SyntaxError".\n' - '\n' - 'Even in a raw literal, quotes can be escaped with a backslash, ' - 'but the\n' - 'backslash remains in the result; for example, "r"\\""" is a ' - 'valid\n' - 'string literal consisting of two characters: a backslash and a ' - 'double\n' - 'quote; "r"\\"" is not a valid string literal (even a raw string ' - 'cannot\n' - 'end in an odd number of backslashes). Specifically, *a raw ' - 'literal\n' - 'cannot end in a single backslash* (since the backslash would ' - 'escape\n' - 'the following quote character). 
Note also that a single ' - 'backslash\n' - 'followed by a newline is interpreted as those two characters as ' - 'part\n' - 'of the literal, *not* as a line continuation.\n', - 'subscriptions': 'Subscriptions\n' - '*************\n' - '\n' - 'The subscription of an instance of a container class will ' - 'generally\n' - 'select an element from the container. The subscription of a ' - '*generic\n' - 'class* will generally return a GenericAlias object.\n' - '\n' - ' subscription ::= primary "[" flexible_expression_list ' - '"]"\n' - '\n' - 'When an object is subscripted, the interpreter will ' - 'evaluate the\n' - 'primary and the expression list.\n' - '\n' - 'The primary must evaluate to an object that supports ' - 'subscription. An\n' - 'object may support subscription through defining one or ' - 'both of\n' - '"__getitem__()" and "__class_getitem__()". When the primary ' - 'is\n' - 'subscripted, the evaluated result of the expression list ' - 'will be\n' - 'passed to one of these methods. For more details on when\n' - '"__class_getitem__" is called instead of "__getitem__", ' - 'see\n' - '__class_getitem__ versus __getitem__.\n' - '\n' - 'If the expression list contains at least one comma, or if ' - 'any of the\n' - 'expressions are starred, the expression list will evaluate ' - 'to a\n' - '"tuple" containing the items of the expression list. ' - 'Otherwise, the\n' - 'expression list will evaluate to the value of the list’s ' - 'sole member.\n' - '\n' - 'Changed in version 3.11: Expressions in an expression list ' - 'may be\n' - 'starred. See **PEP 646**.\n' - '\n' - 'For built-in objects, there are two types of objects that ' - 'support\n' - 'subscription via "__getitem__()":\n' - '\n' - '1. Mappings. If the primary is a *mapping*, the expression ' - 'list must\n' - ' evaluate to an object whose value is one of the keys of ' - 'the\n' - ' mapping, and the subscription selects the value in the ' - 'mapping that\n' - ' corresponds to that key. An example of a builtin mapping ' - 'class is\n' - ' the "dict" class.\n' - '\n' - '2. Sequences. If the primary is a *sequence*, the ' - 'expression list must\n' - ' evaluate to an "int" or a "slice" (as discussed in the ' - 'following\n' - ' section). Examples of builtin sequence classes include ' - 'the "str",\n' - ' "list" and "tuple" classes.\n' - '\n' - 'The formal syntax makes no special provision for negative ' - 'indices in\n' - '*sequences*. However, built-in sequences all provide a ' - '"__getitem__()"\n' - 'method that interprets negative indices by adding the ' - 'length of the\n' - 'sequence to the index so that, for example, "x[-1]" selects ' - 'the last\n' - 'item of "x". The resulting value must be a nonnegative ' - 'integer less\n' - 'than the number of items in the sequence, and the ' - 'subscription selects\n' - 'the item whose index is that value (counting from zero). 
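A small sketch of the subscription machinery described above: __getitem__ receives whatever the expression list evaluates to (a tuple when it contains a comma), and __class_getitem__ handles subscription of the class itself. Grid is an illustrative name:

    class Grid:
        def __init__(self):
            self._cells = {}
        def __getitem__(self, key):        # key is the evaluated expression list
            return self._cells.get(key, 0)
        def __setitem__(self, key, value):
            self._cells[key] = value
        def __class_getitem__(cls, item):  # used when the *class* is subscripted
            return f"Grid[{item.__name__}]"

    g = Grid()
    g[1, 2] = "x"                # the expression list 1, 2 evaluates to the tuple (1, 2)
    print(g[1, 2], g[0, 0])      # x 0
    print(Grid[int])             # Grid[int]
    print(list[int])             # list[int] -- a GenericAlias for the built-in sequence type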
' - 'Since the\n' - 'support for negative indices and slicing occurs in the ' - 'object’s\n' - '"__getitem__()" method, subclasses overriding this method ' - 'will need to\n' - 'explicitly add that support.\n' - '\n' - 'A "string" is a special kind of sequence whose items are ' - '*characters*.\n' - 'A character is not a separate data type but a string of ' - 'exactly one\n' - 'character.\n', - 'truth': 'Truth Value Testing\n' - '*******************\n' - '\n' - 'Any object can be tested for truth value, for use in an "if" or\n' - '"while" condition or as operand of the Boolean operations below.\n' - '\n' - 'By default, an object is considered true unless its class defines\n' - 'either a "__bool__()" method that returns "False" or a "__len__()"\n' - 'method that returns zero, when called with the object. [1] Here ' - 'are\n' - 'most of the built-in objects considered false:\n' - '\n' - '* constants defined to be false: "None" and "False"\n' - '\n' - '* zero of any numeric type: "0", "0.0", "0j", "Decimal(0)",\n' - ' "Fraction(0, 1)"\n' - '\n' - '* empty sequences and collections: "\'\'", "()", "[]", "{}", ' - '"set()",\n' - ' "range(0)"\n' - '\n' - 'Operations and built-in functions that have a Boolean result ' - 'always\n' - 'return "0" or "False" for false and "1" or "True" for true, unless\n' - 'otherwise stated. (Important exception: the Boolean operations ' - '"or"\n' - 'and "and" always return one of their operands.)\n', - 'try': 'The "try" statement\n' - '*******************\n' - '\n' - 'The "try" statement specifies exception handlers and/or cleanup code\n' - 'for a group of statements:\n' - '\n' - ' try_stmt ::= try1_stmt | try2_stmt | try3_stmt\n' - ' try1_stmt ::= "try" ":" suite\n' - ' ("except" [expression ["as" identifier]] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try2_stmt ::= "try" ":" suite\n' - ' ("except" "*" expression ["as" identifier] ":" ' - 'suite)+\n' - ' ["else" ":" suite]\n' - ' ["finally" ":" suite]\n' - ' try3_stmt ::= "try" ":" suite\n' - ' "finally" ":" suite\n' - '\n' - 'Additional information on exceptions can be found in section\n' - 'Exceptions, and information on using the "raise" statement to ' - 'generate\n' - 'exceptions may be found in section The raise statement.\n' - '\n' - '\n' - '"except" clause\n' - '===============\n' - '\n' - 'The "except" clause(s) specify one or more exception handlers. When ' - 'no\n' - 'exception occurs in the "try" clause, no exception handler is\n' - 'executed. When an exception occurs in the "try" suite, a search for ' - 'an\n' - 'exception handler is started. This search inspects the "except"\n' - 'clauses in turn until one is found that matches the exception. An\n' - 'expression-less "except" clause, if present, must be last; it ' - 'matches\n' - 'any exception.\n' - '\n' - 'For an "except" clause with an expression, the expression must\n' - 'evaluate to an exception type or a tuple of exception types. The\n' - 'raised exception matches an "except" clause whose expression ' - 'evaluates\n' - 'to the class or a *non-virtual base class* of the exception object, ' - 'or\n' - 'to a tuple that contains such a class.\n' - '\n' - 'If no "except" clause matches the exception, the search for an\n' - 'exception handler continues in the surrounding code and on the\n' - 'invocation stack. 
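The matching rules for "except" clauses described above (a tuple of types, or a non-virtual base class) in a short sketch:

    def handle(exc):
        try:
            raise exc
        except (KeyError, IndexError):   # a tuple of exception types
            return "lookup failed"
        except OSError:                  # also matches subclasses such as FileNotFoundError
            return "os error"
        # anything else propagates to the caller's handlers

    print(handle(IndexError()))          # lookup failed
    print(handle(FileNotFoundError()))   # os error -- matched via the base class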
[1]\n' - '\n' - 'If the evaluation of an expression in the header of an "except" ' - 'clause\n' - 'raises an exception, the original search for a handler is canceled ' - 'and\n' - 'a search starts for the new exception in the surrounding code and on\n' - 'the call stack (it is treated as if the entire "try" statement ' - 'raised\n' - 'the exception).\n' - '\n' - 'When a matching "except" clause is found, the exception is assigned ' - 'to\n' - 'the target specified after the "as" keyword in that "except" clause,\n' - 'if present, and the "except" clause’s suite is executed. All ' - '"except"\n' - 'clauses must have an executable block. When the end of this block is\n' - 'reached, execution continues normally after the entire "try"\n' - 'statement. (This means that if two nested handlers exist for the ' - 'same\n' - 'exception, and the exception occurs in the "try" clause of the inner\n' - 'handler, the outer handler will not handle the exception.)\n' - '\n' - 'When an exception has been assigned using "as target", it is cleared\n' - 'at the end of the "except" clause. This is as if\n' - '\n' - ' except E as N:\n' - ' foo\n' - '\n' - 'was translated to\n' - '\n' - ' except E as N:\n' - ' try:\n' - ' foo\n' - ' finally:\n' - ' del N\n' - '\n' - 'This means the exception must be assigned to a different name to be\n' - 'able to refer to it after the "except" clause. Exceptions are ' - 'cleared\n' - 'because with the traceback attached to them, they form a reference\n' - 'cycle with the stack frame, keeping all locals in that frame alive\n' - 'until the next garbage collection occurs.\n' - '\n' - 'Before an "except" clause’s suite is executed, the exception is ' - 'stored\n' - 'in the "sys" module, where it can be accessed from within the body ' - 'of\n' - 'the "except" clause by calling "sys.exception()". When leaving an\n' - 'exception handler, the exception stored in the "sys" module is reset\n' - 'to its previous value:\n' - '\n' - ' >>> print(sys.exception())\n' - ' None\n' - ' >>> try:\n' - ' ... raise TypeError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... try:\n' - ' ... raise ValueError\n' - ' ... except:\n' - ' ... print(repr(sys.exception()))\n' - ' ... print(repr(sys.exception()))\n' - ' ...\n' - ' TypeError()\n' - ' ValueError()\n' - ' TypeError()\n' - ' >>> print(sys.exception())\n' - ' None\n' - '\n' - '\n' - '"except*" clause\n' - '================\n' - '\n' - 'The "except*" clause(s) are used for handling "ExceptionGroup"s. The\n' - 'exception type for matching is interpreted as in the case of ' - '"except",\n' - 'but in the case of exception groups we can have partial matches when\n' - 'the type matches some of the exceptions in the group. This means ' - 'that\n' - 'multiple "except*" clauses can execute, each handling part of the\n' - 'exception group. Each clause executes at most once and handles an\n' - 'exception group of all matching exceptions. Each exception in the\n' - 'group is handled by at most one "except*" clause, the first that\n' - 'matches it.\n' - '\n' - ' >>> try:\n' - ' ... raise ExceptionGroup("eg",\n' - ' ... [ValueError(1), TypeError(2), OSError(3), ' - 'OSError(4)])\n' - ' ... except* TypeError as e:\n' - " ... print(f'caught {type(e)} with nested {e.exceptions}')\n" - ' ... except* OSError as e:\n' - " ... 
print(f'caught {type(e)} with nested {e.exceptions}')\n" - ' ...\n' - " caught with nested (TypeError(2),)\n" - " caught with nested (OSError(3), " - 'OSError(4))\n' - ' + Exception Group Traceback (most recent call last):\n' - ' | File "", line 2, in \n' - ' | ExceptionGroup: eg\n' - ' +-+---------------- 1 ----------------\n' - ' | ValueError: 1\n' - ' +------------------------------------\n' - '\n' - 'Any remaining exceptions that were not handled by any "except*" ' - 'clause\n' - 'are re-raised at the end, along with all exceptions that were raised\n' - 'from within the "except*" clauses. If this list contains more than ' - 'one\n' - 'exception to reraise, they are combined into an exception group.\n' - '\n' - 'If the raised exception is not an exception group and its type ' - 'matches\n' - 'one of the "except*" clauses, it is caught and wrapped by an ' - 'exception\n' - 'group with an empty message string.\n' - '\n' - ' >>> try:\n' - ' ... raise BlockingIOError\n' - ' ... except* BlockingIOError as e:\n' - ' ... print(repr(e))\n' - ' ...\n' - " ExceptionGroup('', (BlockingIOError()))\n" - '\n' - 'An "except*" clause must have a matching expression; it cannot be\n' - '"except*:". Furthermore, this expression cannot contain exception\n' - 'group types, because that would have ambiguous semantics.\n' - '\n' - 'It is not possible to mix "except" and "except*" in the same "try".\n' - '"break", "continue" and "return" cannot appear in an "except*" ' - 'clause.\n' - '\n' - '\n' - '"else" clause\n' - '=============\n' - '\n' - 'The optional "else" clause is executed if the control flow leaves ' - 'the\n' - '"try" suite, no exception was raised, and no "return", "continue", ' - 'or\n' - '"break" statement was executed. Exceptions in the "else" clause are\n' - 'not handled by the preceding "except" clauses.\n' - '\n' - '\n' - '"finally" clause\n' - '================\n' - '\n' - 'If "finally" is present, it specifies a ‘cleanup’ handler. The ' - '"try"\n' - 'clause is executed, including any "except" and "else" clauses. If ' - 'an\n' - 'exception occurs in any of the clauses and is not handled, the\n' - 'exception is temporarily saved. The "finally" clause is executed. ' - 'If\n' - 'there is a saved exception it is re-raised at the end of the ' - '"finally"\n' - 'clause. If the "finally" clause raises another exception, the saved\n' - 'exception is set as the context of the new exception. If the ' - '"finally"\n' - 'clause executes a "return", "break" or "continue" statement, the ' - 'saved\n' - 'exception is discarded:\n' - '\n' - ' >>> def f():\n' - ' ... try:\n' - ' ... 1/0\n' - ' ... finally:\n' - ' ... return 42\n' - ' ...\n' - ' >>> f()\n' - ' 42\n' - '\n' - 'The exception information is not available to the program during\n' - 'execution of the "finally" clause.\n' - '\n' - 'When a "return", "break" or "continue" statement is executed in the\n' - '"try" suite of a "try"…"finally" statement, the "finally" clause is\n' - 'also executed ‘on the way out.’\n' - '\n' - 'The return value of a function is determined by the last "return"\n' - 'statement executed. Since the "finally" clause always executes, a\n' - '"return" statement executed in the "finally" clause will always be ' - 'the\n' - 'last one executed:\n' - '\n' - ' >>> def foo():\n' - ' ... try:\n' - " ... return 'try'\n" - ' ... finally:\n' - " ... 
return 'finally'\n" - ' ...\n' - ' >>> foo()\n' - " 'finally'\n" - '\n' - 'Changed in version 3.8: Prior to Python 3.8, a "continue" statement\n' - 'was illegal in the "finally" clause due to a problem with the\n' - 'implementation.\n', - 'types': 'The standard type hierarchy\n' - '***************************\n' - '\n' - 'Below is a list of the types that are built into Python. ' - 'Extension\n' - 'modules (written in C, Java, or other languages, depending on the\n' - 'implementation) can define additional types. Future versions of\n' - 'Python may add types to the type hierarchy (e.g., rational ' - 'numbers,\n' - 'efficiently stored arrays of integers, etc.), although such ' - 'additions\n' - 'will often be provided via the standard library instead.\n' - '\n' - 'Some of the type descriptions below contain a paragraph listing\n' - '‘special attributes.’ These are attributes that provide access to ' - 'the\n' - 'implementation and are not intended for general use. Their ' - 'definition\n' - 'may change in the future.\n' - '\n' - '\n' - 'None\n' - '====\n' - '\n' - 'This type has a single value. There is a single object with this\n' - 'value. This object is accessed through the built-in name "None". It ' - 'is\n' - 'used to signify the absence of a value in many situations, e.g., it ' - 'is\n' - 'returned from functions that don’t explicitly return anything. Its\n' - 'truth value is false.\n' - '\n' - '\n' - 'NotImplemented\n' - '==============\n' - '\n' - 'This type has a single value. There is a single object with this\n' - 'value. This object is accessed through the built-in name\n' - '"NotImplemented". Numeric methods and rich comparison methods ' - 'should\n' - 'return this value if they do not implement the operation for the\n' - 'operands provided. (The interpreter will then try the reflected\n' - 'operation, or some other fallback, depending on the operator.) It\n' - 'should not be evaluated in a boolean context.\n' - '\n' - 'See Implementing the arithmetic operations for more details.\n' - '\n' - 'Changed in version 3.9: Evaluating "NotImplemented" in a boolean\n' - 'context was deprecated.\n' - '\n' - 'Changed in version 3.14: Evaluating "NotImplemented" in a boolean\n' - 'context now raises a "TypeError". It previously evaluated to ' - '"True"\n' - 'and emitted a "DeprecationWarning" since Python 3.9.\n' - '\n' - '\n' - 'Ellipsis\n' - '========\n' - '\n' - 'This type has a single value. There is a single object with this\n' - 'value. This object is accessed through the literal "..." or the ' - 'built-\n' - 'in name "Ellipsis". Its truth value is true.\n' - '\n' - '\n' - '"numbers.Number"\n' - '================\n' - '\n' - 'These are created by numeric literals and returned as results by\n' - 'arithmetic operators and arithmetic built-in functions. Numeric\n' - 'objects are immutable; once created their value never changes. 
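The "NotImplemented" protocol described above, sketched with an illustrative Meters class: returning "NotImplemented" lets the interpreter try the reflected operation before giving up:

    class Meters:
        def __init__(self, value):
            self.value = value
        def __eq__(self, other):
            if not isinstance(other, Meters):
                return NotImplemented    # let the other operand have a try
            return self.value == other.value

    print(Meters(3) == Meters(3))         # True
    print(Meters(3) == 3)                 # False -- both sides declined, identity comparison used
    print(None is None, ... is Ellipsis)  # True True -- None and Ellipsis are singletons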
' - 'Python\n' - 'numbers are of course strongly related to mathematical numbers, ' - 'but\n' - 'subject to the limitations of numerical representation in ' - 'computers.\n' - '\n' - 'The string representations of the numeric classes, computed by\n' - '"__repr__()" and "__str__()", have the following properties:\n' - '\n' - '* They are valid numeric literals which, when passed to their ' - 'class\n' - ' constructor, produce an object having the value of the original\n' - ' numeric.\n' - '\n' - '* The representation is in base 10, when possible.\n' - '\n' - '* Leading zeros, possibly excepting a single zero before a decimal\n' - ' point, are not shown.\n' - '\n' - '* Trailing zeros, possibly excepting a single zero after a decimal\n' - ' point, are not shown.\n' - '\n' - '* A sign is shown only when the number is negative.\n' - '\n' - 'Python distinguishes between integers, floating-point numbers, and\n' - 'complex numbers:\n' - '\n' - '\n' - '"numbers.Integral"\n' - '------------------\n' - '\n' - 'These represent elements from the mathematical set of integers\n' - '(positive and negative).\n' - '\n' - 'Note:\n' - '\n' - ' The rules for integer representation are intended to give the ' - 'most\n' - ' meaningful interpretation of shift and mask operations involving\n' - ' negative integers.\n' - '\n' - 'There are two types of integers:\n' - '\n' - 'Integers ("int")\n' - ' These represent numbers in an unlimited range, subject to ' - 'available\n' - ' (virtual) memory only. For the purpose of shift and mask\n' - ' operations, a binary representation is assumed, and negative\n' - ' numbers are represented in a variant of 2’s complement which ' - 'gives\n' - ' the illusion of an infinite string of sign bits extending to ' - 'the\n' - ' left.\n' - '\n' - 'Booleans ("bool")\n' - ' These represent the truth values False and True. The two ' - 'objects\n' - ' representing the values "False" and "True" are the only Boolean\n' - ' objects. The Boolean type is a subtype of the integer type, and\n' - ' Boolean values behave like the values 0 and 1, respectively, in\n' - ' almost all contexts, the exception being that when converted to ' - 'a\n' - ' string, the strings ""False"" or ""True"" are returned,\n' - ' respectively.\n' - '\n' - '\n' - '"numbers.Real" ("float")\n' - '------------------------\n' - '\n' - 'These represent machine-level double precision floating-point ' - 'numbers.\n' - 'You are at the mercy of the underlying machine architecture (and C ' - 'or\n' - 'Java implementation) for the accepted range and handling of ' - 'overflow.\n' - 'Python does not support single-precision floating-point numbers; ' - 'the\n' - 'savings in processor and memory usage that are usually the reason ' - 'for\n' - 'using these are dwarfed by the overhead of using objects in Python, ' - 'so\n' - 'there is no reason to complicate the language with two kinds of\n' - 'floating-point numbers.\n' - '\n' - '\n' - '"numbers.Complex" ("complex")\n' - '-----------------------------\n' - '\n' - 'These represent complex numbers as a pair of machine-level double\n' - 'precision floating-point numbers. The same caveats apply as for\n' - 'floating-point numbers. The real and imaginary parts of a complex\n' - 'number "z" can be retrieved through the read-only attributes ' - '"z.real"\n' - 'and "z.imag".\n' - '\n' - '\n' - 'Sequences\n' - '=========\n' - '\n' - 'These represent finite ordered sets indexed by non-negative ' - 'numbers.\n' - 'The built-in function "len()" returns the number of items of a\n' - 'sequence. 
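A few one-liners exercising the numeric tower described above (expected output in the comments):

    print(2 ** 100)                 # 1267650600228229401496703205376 -- ints are unbounded
    print(-1 >> 2)                  # -1 -- the 'infinite string of sign bits' view of negatives
    print(True + True, str(True))   # 2 True -- bool is an int subtype except when stringified
    print(float(repr(0.1)) == 0.1)  # True -- numeric reprs round-trip through the constructor
    print((3 - 4j).real, (3 - 4j).imag)   # 3.0 -4.0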
When the length of a sequence is *n*, the index set ' - 'contains\n' - 'the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected ' - 'by\n' - '"a[i]". Some sequences, including built-in sequences, interpret\n' - 'negative subscripts by adding the sequence length. For example,\n' - '"a[-2]" equals "a[n-2]", the second to last item of sequence a ' - 'with\n' - 'length "n".\n' - '\n' - 'Sequences also support slicing: "a[i:j]" selects all items with ' - 'index\n' - '*k* such that *i* "<=" *k* "<" *j*. When used as an expression, a\n' - 'slice is a sequence of the same type. The comment above about ' - 'negative\n' - 'indexes also applies to negative slice positions.\n' - '\n' - 'Some sequences also support “extended slicing” with a third “step”\n' - 'parameter: "a[i:j:k]" selects all items of *a* with index *x* where ' - '"x\n' - '= i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*.\n' - '\n' - 'Sequences are distinguished according to their mutability:\n' - '\n' - '\n' - 'Immutable sequences\n' - '-------------------\n' - '\n' - 'An object of an immutable sequence type cannot change once it is\n' - 'created. (If the object contains references to other objects, ' - 'these\n' - 'other objects may be mutable and may be changed; however, the\n' - 'collection of objects directly referenced by an immutable object\n' - 'cannot change.)\n' - '\n' - 'The following types are immutable sequences:\n' - '\n' - 'Strings\n' - ' A string is a sequence of values that represent Unicode code\n' - ' points. All the code points in the range "U+0000 - U+10FFFF" can ' - 'be\n' - ' represented in a string. Python doesn’t have a char type; ' - 'instead,\n' - ' every code point in the string is represented as a string ' - 'object\n' - ' with length "1". The built-in function "ord()" converts a code\n' - ' point from its string form to an integer in the range "0 - ' - '10FFFF";\n' - ' "chr()" converts an integer in the range "0 - 10FFFF" to the\n' - ' corresponding length "1" string object. "str.encode()" can be ' - 'used\n' - ' to convert a "str" to "bytes" using the given text encoding, ' - 'and\n' - ' "bytes.decode()" can be used to achieve the opposite.\n' - '\n' - 'Tuples\n' - ' The items of a tuple are arbitrary Python objects. Tuples of two ' - 'or\n' - ' more items are formed by comma-separated lists of expressions. ' - 'A\n' - ' tuple of one item (a ‘singleton’) can be formed by affixing a ' - 'comma\n' - ' to an expression (an expression by itself does not create a ' - 'tuple,\n' - ' since parentheses must be usable for grouping of expressions). ' - 'An\n' - ' empty tuple can be formed by an empty pair of parentheses.\n' - '\n' - 'Bytes\n' - ' A bytes object is an immutable array. The items are 8-bit ' - 'bytes,\n' - ' represented by integers in the range 0 <= x < 256. Bytes ' - 'literals\n' - ' (like "b\'abc\'") and the built-in "bytes()" constructor can be ' - 'used\n' - ' to create bytes objects. Also, bytes objects can be decoded to\n' - ' strings via the "decode()" method.\n' - '\n' - '\n' - 'Mutable sequences\n' - '-----------------\n' - '\n' - 'Mutable sequences can be changed after they are created. 
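The indexing and slicing rules above, checked against a concrete list and the code-point helpers mentioned for strings:

    a = list(range(10))
    n = len(a)
    print(a[-2] == a[n - 2])      # True -- a negative index has the length added to it
    print(a[2:8:2])               # [2, 4, 6] -- items at i, i+k, ... below j
    print(ord("€"), chr(8364))    # 8364 € -- code point <-> length-1 string
    print(("solo",), ())          # ('solo',) () -- the comma makes the one-item tuple
    print(b"abc"[0], b"abc".decode())   # 97 abc -- bytes items are small integers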
The\n' - 'subscription and slicing notations can be used as the target of\n' - 'assignment and "del" (delete) statements.\n' - '\n' - 'Note:\n' - '\n' - ' The "collections" and "array" module provide additional examples ' - 'of\n' - ' mutable sequence types.\n' - '\n' - 'There are currently two intrinsic mutable sequence types:\n' - '\n' - 'Lists\n' - ' The items of a list are arbitrary Python objects. Lists are ' - 'formed\n' - ' by placing a comma-separated list of expressions in square\n' - ' brackets. (Note that there are no special cases needed to form\n' - ' lists of length 0 or 1.)\n' - '\n' - 'Byte Arrays\n' - ' A bytearray object is a mutable array. They are created by the\n' - ' built-in "bytearray()" constructor. Aside from being mutable ' - '(and\n' - ' hence unhashable), byte arrays otherwise provide the same ' - 'interface\n' - ' and functionality as immutable "bytes" objects.\n' - '\n' - '\n' - 'Set types\n' - '=========\n' - '\n' - 'These represent unordered, finite sets of unique, immutable ' - 'objects.\n' - 'As such, they cannot be indexed by any subscript. However, they can ' - 'be\n' - 'iterated over, and the built-in function "len()" returns the number ' - 'of\n' - 'items in a set. Common uses for sets are fast membership testing,\n' - 'removing duplicates from a sequence, and computing mathematical\n' - 'operations such as intersection, union, difference, and symmetric\n' - 'difference.\n' - '\n' - 'For set elements, the same immutability rules apply as for ' - 'dictionary\n' - 'keys. Note that numeric types obey the normal rules for numeric\n' - 'comparison: if two numbers compare equal (e.g., "1" and "1.0"), ' - 'only\n' - 'one of them can be contained in a set.\n' - '\n' - 'There are currently two intrinsic set types:\n' - '\n' - 'Sets\n' - ' These represent a mutable set. They are created by the built-in\n' - ' "set()" constructor and can be modified afterwards by several\n' - ' methods, such as "add()".\n' - '\n' - 'Frozen sets\n' - ' These represent an immutable set. They are created by the ' - 'built-in\n' - ' "frozenset()" constructor. As a frozenset is immutable and\n' - ' *hashable*, it can be used again as an element of another set, ' - 'or\n' - ' as a dictionary key.\n' - '\n' - '\n' - 'Mappings\n' - '========\n' - '\n' - 'These represent finite sets of objects indexed by arbitrary index\n' - 'sets. The subscript notation "a[k]" selects the item indexed by ' - '"k"\n' - 'from the mapping "a"; this can be used in expressions and as the\n' - 'target of assignments or "del" statements. The built-in function\n' - '"len()" returns the number of items in a mapping.\n' - '\n' - 'There is currently a single intrinsic mapping type:\n' - '\n' - '\n' - 'Dictionaries\n' - '------------\n' - '\n' - 'These represent finite sets of objects indexed by nearly arbitrary\n' - 'values. The only types of values not acceptable as keys are ' - 'values\n' - 'containing lists or dictionaries or other mutable types that are\n' - 'compared by value rather than by object identity, the reason being\n' - 'that the efficient implementation of dictionaries requires a key’s\n' - 'hash value to remain constant. 
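A short sketch of the mutability and hashability distinctions drawn above:

    buf = bytearray(b"abc")
    buf[0] = ord("A")                 # item assignment works: bytearray is mutable
    print(buf)                        # bytearray(b'Abc')

    print(len({1, 1.0, "1"}))         # 2 -- 1 and 1.0 compare equal, only one is kept
    perms = {frozenset({"read", "write"}): "rw"}   # frozenset is hashable, usable as a key
    print(perms[frozenset({"write", "read"})])     # rw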
Numeric types used for keys obey ' - 'the\n' - 'normal rules for numeric comparison: if two numbers compare equal\n' - '(e.g., "1" and "1.0") then they can be used interchangeably to ' - 'index\n' - 'the same dictionary entry.\n' - '\n' - 'Dictionaries preserve insertion order, meaning that keys will be\n' - 'produced in the same order they were added sequentially over the\n' - 'dictionary. Replacing an existing key does not change the order,\n' - 'however removing a key and re-inserting it will add it to the end\n' - 'instead of keeping its old place.\n' - '\n' - 'Dictionaries are mutable; they can be created by the "{}" notation\n' - '(see section Dictionary displays).\n' - '\n' - 'The extension modules "dbm.ndbm" and "dbm.gnu" provide additional\n' - 'examples of mapping types, as does the "collections" module.\n' - '\n' - 'Changed in version 3.7: Dictionaries did not preserve insertion ' - 'order\n' - 'in versions of Python before 3.6. In CPython 3.6, insertion order ' - 'was\n' - 'preserved, but it was considered an implementation detail at that ' - 'time\n' - 'rather than a language guarantee.\n' - '\n' - '\n' - 'Callable types\n' - '==============\n' - '\n' - 'These are the types to which the function call operation (see ' - 'section\n' - 'Calls) can be applied:\n' - '\n' - '\n' - 'User-defined functions\n' - '----------------------\n' - '\n' - 'A user-defined function object is created by a function definition\n' - '(see section Function definitions). It should be called with an\n' - 'argument list containing the same number of items as the ' - 'function’s\n' - 'formal parameter list.\n' - '\n' - '\n' - 'Special read-only attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| Attribute | ' - 'Meaning |\n' - '|====================================================|====================================================|\n' - '| function.__globals__ | A reference ' - 'to the "dictionary" that holds the |\n' - '| | function’s ' - 'global variables – the global namespace |\n' - '| | of the ' - 'module in which the function was defined. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__closure__ | "None" or a ' - '"tuple" of cells that contain bindings |\n' - '| | for the ' - 'names specified in the "co_freevars" |\n' - '| | attribute of ' - 'the function’s "code object". A cell |\n' - '| | object has ' - 'the attribute "cell_contents". This can |\n' - '| | be used to ' - 'get the value of the cell, as well as |\n' - '| | set the ' - 'value. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Special writable attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - 'Most of these attributes check the type of the assigned value:\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| Attribute | ' - 'Meaning |\n' - '|====================================================|====================================================|\n' - '| function.__doc__ | The ' - 'function’s documentation string, or "None" if |\n' - '| | ' - 'unavailable. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__name__ | The ' - 'function’s name. 
See also: "__name__ |\n' - '| | ' - 'attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__qualname__ | The ' - 'function’s *qualified name*. See also: |\n' - '| | ' - '"__qualname__ attributes". Added in version 3.3. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__module__ | The name of ' - 'the module the function was defined |\n' - '| | in, or ' - '"None" if unavailable. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__defaults__ | A "tuple" ' - 'containing default *parameter* values |\n' - '| | for those ' - 'parameters that have defaults, or "None" |\n' - '| | if no ' - 'parameters have a default value. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__code__ | The code ' - 'object representing the compiled function |\n' - '| | ' - 'body. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__dict__ | The ' - 'namespace supporting arbitrary function |\n' - '| | attributes. ' - 'See also: "__dict__ attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__annotations__ | A ' - '"dictionary" containing annotations of |\n' - '| | ' - '*parameters*. The keys of the dictionary are the |\n' - '| | parameter ' - 'names, and "\'return\'" for the return |\n' - '| | annotation, ' - 'if provided. See also: |\n' - '| | ' - '"object.__annotations__". Changed in version |\n' - '| | 3.14: ' - 'Annotations are now lazily evaluated. See |\n' - '| | **PEP ' - '649**. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__annotate__ | The ' - '*annotate function* for this function, or |\n' - '| | "None" if ' - 'the function has no annotations. See |\n' - '| | ' - '"object.__annotate__". Added in version 3.14. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__kwdefaults__ | A ' - '"dictionary" containing defaults for keyword- |\n' - '| | only ' - '*parameters*. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| function.__type_params__ | A "tuple" ' - 'containing the type parameters of a |\n' - '| | generic ' - 'function. Added in version 3.12. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'Function objects also support getting and setting arbitrary\n' - 'attributes, which can be used, for example, to attach metadata to\n' - 'functions. Regular attribute dot-notation is used to get and set ' - 'such\n' - 'attributes.\n' - '\n' - '**CPython implementation detail:** CPython’s current ' - 'implementation\n' - 'only supports function attributes on user-defined functions. 
' - 'Function\n' - 'attributes on built-in functions may be supported in the future.\n' - '\n' - 'Additional information about a function’s definition can be ' - 'retrieved\n' - 'from its code object (accessible via the "__code__" attribute).\n' - '\n' - '\n' - 'Instance methods\n' - '----------------\n' - '\n' - 'An instance method object combines a class, a class instance and ' - 'any\n' - 'callable object (normally a user-defined function).\n' - '\n' - 'Special read-only attributes:\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__self__ | Refers to ' - 'the class instance object to which the |\n' - '| | method is ' - 'bound |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__func__ | Refers to ' - 'the original function object |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__doc__ | The method’s ' - 'documentation (same as |\n' - '| | ' - '"method.__func__.__doc__"). A "string" if the |\n' - '| | original ' - 'function had a docstring, else "None". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__name__ | The name of ' - 'the method (same as |\n' - '| | ' - '"method.__func__.__name__") |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| method.__module__ | The name of ' - 'the module the method was defined in, |\n' - '| | or "None" if ' - 'unavailable. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'Methods also support accessing (but not setting) the arbitrary\n' - 'function attributes on the underlying function object.\n' - '\n' - 'User-defined method objects may be created when getting an ' - 'attribute\n' - 'of a class (perhaps via an instance of that class), if that ' - 'attribute\n' - 'is a user-defined function object or a "classmethod" object.\n' - '\n' - 'When an instance method object is created by retrieving a ' - 'user-defined\n' - 'function object from a class via one of its instances, its ' - '"__self__"\n' - 'attribute is the instance, and the method object is said to be\n' - '*bound*. The new method’s "__func__" attribute is the original\n' - 'function object.\n' - '\n' - 'When an instance method object is created by retrieving a\n' - '"classmethod" object from a class or instance, its "__self__"\n' - 'attribute is the class itself, and its "__func__" attribute is the\n' - 'function object underlying the class method.\n' - '\n' - 'When an instance method object is called, the underlying function\n' - '("__func__") is called, inserting the class instance ("__self__") ' - 'in\n' - 'front of the argument list. 
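The bound-method attributes described above, in a minimal sketch:

    class C:
        def f(self, arg):
            return arg * 2

    x = C()
    m = x.f                                # retrieval through the instance binds the method
    print(m.__self__ is x)                 # True
    print(m.__func__ is C.__dict__["f"])   # True -- the original function object
    print(m(1) == C.f(x, 1) == 2)          # True -- x.f(1) is equivalent to C.f(x, 1)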
For instance, when "C" is a class ' - 'which\n' - 'contains a definition for a function "f()", and "x" is an instance ' - 'of\n' - '"C", calling "x.f(1)" is equivalent to calling "C.f(x, 1)".\n' - '\n' - 'When an instance method object is derived from a "classmethod" ' - 'object,\n' - 'the “class instance” stored in "__self__" will actually be the ' - 'class\n' - 'itself, so that calling either "x.f(1)" or "C.f(1)" is equivalent ' - 'to\n' - 'calling "f(C,1)" where "f" is the underlying function.\n' - '\n' - 'It is important to note that user-defined functions which are\n' - 'attributes of a class instance are not converted to bound methods;\n' - 'this *only* happens when the function is an attribute of the ' - 'class.\n' - '\n' - '\n' - 'Generator functions\n' - '-------------------\n' - '\n' - 'A function or method which uses the "yield" statement (see section ' - 'The\n' - 'yield statement) is called a *generator function*. Such a ' - 'function,\n' - 'when called, always returns an *iterator* object which can be used ' - 'to\n' - 'execute the body of the function: calling the iterator’s\n' - '"iterator.__next__()" method will cause the function to execute ' - 'until\n' - 'it provides a value using the "yield" statement. When the ' - 'function\n' - 'executes a "return" statement or falls off the end, a ' - '"StopIteration"\n' - 'exception is raised and the iterator will have reached the end of ' - 'the\n' - 'set of values to be returned.\n' - '\n' - '\n' - 'Coroutine functions\n' - '-------------------\n' - '\n' - 'A function or method which is defined using "async def" is called ' - 'a\n' - '*coroutine function*. Such a function, when called, returns a\n' - '*coroutine* object. It may contain "await" expressions, as well ' - 'as\n' - '"async with" and "async for" statements. See also the Coroutine\n' - 'Objects section.\n' - '\n' - '\n' - 'Asynchronous generator functions\n' - '--------------------------------\n' - '\n' - 'A function or method which is defined using "async def" and which ' - 'uses\n' - 'the "yield" statement is called a *asynchronous generator ' - 'function*.\n' - 'Such a function, when called, returns an *asynchronous iterator*\n' - 'object which can be used in an "async for" statement to execute ' - 'the\n' - 'body of the function.\n' - '\n' - 'Calling the asynchronous iterator’s "aiterator.__anext__" method ' - 'will\n' - 'return an *awaitable* which when awaited will execute until it\n' - 'provides a value using the "yield" expression. When the function\n' - 'executes an empty "return" statement or falls off the end, a\n' - '"StopAsyncIteration" exception is raised and the asynchronous ' - 'iterator\n' - 'will have reached the end of the set of values to be yielded.\n' - '\n' - '\n' - 'Built-in functions\n' - '------------------\n' - '\n' - 'A built-in function object is a wrapper around a C function. ' - 'Examples\n' - 'of built-in functions are "len()" and "math.sin()" ("math" is a\n' - 'standard built-in module). The number and type of the arguments ' - 'are\n' - 'determined by the C function. Special read-only attributes:\n' - '\n' - '* "__doc__" is the function’s documentation string, or "None" if\n' - ' unavailable. See "function.__doc__".\n' - '\n' - '* "__name__" is the function’s name. See "function.__name__".\n' - '\n' - '* "__self__" is set to "None" (but see the next item).\n' - '\n' - '* "__module__" is the name of the module the function was defined ' - 'in\n' - ' or "None" if unavailable. 
See "function.__module__".\n' - '\n' - '\n' - 'Built-in methods\n' - '----------------\n' - '\n' - 'This is really a different disguise of a built-in function, this ' - 'time\n' - 'containing an object passed to the C function as an implicit extra\n' - 'argument. An example of a built-in method is "alist.append()",\n' - 'assuming *alist* is a list object. In this case, the special ' - 'read-only\n' - 'attribute "__self__" is set to the object denoted by *alist*. (The\n' - 'attribute has the same semantics as it does with "other instance\n' - 'methods".)\n' - '\n' - '\n' - 'Classes\n' - '-------\n' - '\n' - 'Classes are callable. These objects normally act as factories for ' - 'new\n' - 'instances of themselves, but variations are possible for class ' - 'types\n' - 'that override "__new__()". The arguments of the call are passed ' - 'to\n' - '"__new__()" and, in the typical case, to "__init__()" to ' - 'initialize\n' - 'the new instance.\n' - '\n' - '\n' - 'Class Instances\n' - '---------------\n' - '\n' - 'Instances of arbitrary classes can be made callable by defining a\n' - '"__call__()" method in their class.\n' - '\n' - '\n' - 'Modules\n' - '=======\n' - '\n' - 'Modules are a basic organizational unit of Python code, and are\n' - 'created by the import system as invoked either by the "import"\n' - 'statement, or by calling functions such as ' - '"importlib.import_module()"\n' - 'and built-in "__import__()". A module object has a namespace\n' - 'implemented by a "dictionary" object (this is the dictionary\n' - 'referenced by the "__globals__" attribute of functions defined in ' - 'the\n' - 'module). Attribute references are translated to lookups in this\n' - 'dictionary, e.g., "m.x" is equivalent to "m.__dict__["x"]". A ' - 'module\n' - 'object does not contain the code object used to initialize the ' - 'module\n' - '(since it isn’t needed once the initialization is done).\n' - '\n' - 'Attribute assignment updates the module’s namespace dictionary, ' - 'e.g.,\n' - '"m.x = 1" is equivalent to "m.__dict__["x"] = 1".\n' - '\n' - '\n' - 'Import-related attributes on module objects\n' - '-------------------------------------------\n' - '\n' - 'Module objects have the following attributes that relate to the ' - 'import\n' - 'system. When a module is created using the machinery associated ' - 'with\n' - 'the import system, these attributes are filled in based on the\n' - 'module’s *spec*, before the *loader* executes and loads the ' - 'module.\n' - '\n' - 'To create a module dynamically rather than using the import ' - 'system,\n' - 'it’s recommended to use "importlib.util.module_from_spec()", which\n' - 'will set the various import-controlled attributes to appropriate\n' - 'values. It’s also possible to use the "types.ModuleType" ' - 'constructor\n' - 'to create modules directly, but this technique is more error-prone, ' - 'as\n' - 'most attributes must be manually set on the module object after it ' - 'has\n' - 'been created when using this approach.\n' - '\n' - 'Caution:\n' - '\n' - ' With the exception of "__name__", it is **strongly** recommended\n' - ' that you rely on "__spec__" and its attributes instead of any of ' - 'the\n' - ' other individual attributes listed in this subsection. 
Note that\n' - ' updating an attribute on "__spec__" will not update the\n' - ' corresponding attribute on the module itself:\n' - '\n' - ' >>> import typing\n' - ' >>> typing.__name__, typing.__spec__.name\n' - " ('typing', 'typing')\n" - " >>> typing.__spec__.name = 'spelling'\n" - ' >>> typing.__name__, typing.__spec__.name\n' - " ('typing', 'spelling')\n" - " >>> typing.__name__ = 'keyboard_smashing'\n" - ' >>> typing.__name__, typing.__spec__.name\n' - " ('keyboard_smashing', 'spelling')\n" - '\n' - 'module.__name__\n' - '\n' - ' The name used to uniquely identify the module in the import ' - 'system.\n' - ' For a directly executed module, this will be set to ' - '""__main__"".\n' - '\n' - ' This attribute must be set to the fully qualified name of the\n' - ' module. It is expected to match the value of\n' - ' "module.__spec__.name".\n' - '\n' - 'module.__spec__\n' - '\n' - ' A record of the module’s import-system-related state.\n' - '\n' - ' Set to the "module spec" that was used when importing the ' - 'module.\n' - ' See Module specs for more details.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - 'module.__package__\n' - '\n' - ' The *package* a module belongs to.\n' - '\n' - ' If the module is top-level (that is, not a part of any specific\n' - ' package) then the attribute should be set to "\'\'" (the empty\n' - ' string). Otherwise, it should be set to the name of the ' - 'module’s\n' - ' package (which can be equal to "module.__name__" if the module\n' - ' itself is a package). See **PEP 366** for further details.\n' - '\n' - ' This attribute is used instead of "__name__" to calculate ' - 'explicit\n' - ' relative imports for main modules. It defaults to "None" for\n' - ' modules created dynamically using the "types.ModuleType"\n' - ' constructor; use "importlib.util.module_from_spec()" instead to\n' - ' ensure the attribute is set to a "str".\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.parent" instead of "module.__package__".\n' - ' "__package__" is now only used as a fallback if ' - '"__spec__.parent"\n' - ' is not set, and this fallback path is deprecated.\n' - '\n' - ' Changed in version 3.4: This attribute now defaults to "None" ' - 'for\n' - ' modules created dynamically using the "types.ModuleType"\n' - ' constructor. Previously the attribute was optional.\n' - '\n' - ' Changed in version 3.6: The value of "__package__" is expected ' - 'to\n' - ' be the same as "__spec__.parent". 
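Checking the naming attributes above against a real stdlib subpackage module (email.mime.text is just a convenient example):

    import email.mime.text as m

    print(m.__name__)          # email.mime.text -- matches __spec__.name
    print(m.__spec__.name)     # email.mime.text
    print(m.__spec__.parent)   # email.mime -- the preferred spelling
    print(m.__package__)       # email.mime -- legacy fallback, slated for removal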
"__package__" is now only used ' - 'as\n' - ' a fallback during import resolution if "__spec__.parent" is not\n' - ' defined.\n' - '\n' - ' Changed in version 3.10: "ImportWarning" is raised if an import\n' - ' resolution falls back to "__package__" instead of\n' - ' "__spec__.parent".\n' - '\n' - ' Changed in version 3.12: Raise "DeprecationWarning" instead of\n' - ' "ImportWarning" when falling back to "__package__" during ' - 'import\n' - ' resolution.\n' - '\n' - ' Deprecated since version 3.13, will be removed in version 3.15:\n' - ' "__package__" will cease to be set or taken into consideration ' - 'by\n' - ' the import system or standard library.\n' - '\n' - 'module.__loader__\n' - '\n' - ' The *loader* object that the import machinery used to load the\n' - ' module.\n' - '\n' - ' This attribute is mostly useful for introspection, but can be ' - 'used\n' - ' for additional loader-specific functionality, for example ' - 'getting\n' - ' data associated with a loader.\n' - '\n' - ' "__loader__" defaults to "None" for modules created dynamically\n' - ' using the "types.ModuleType" constructor; use\n' - ' "importlib.util.module_from_spec()" instead to ensure the ' - 'attribute\n' - ' is set to a *loader* object.\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.loader" instead of "module.__loader__".\n' - '\n' - ' Changed in version 3.4: This attribute now defaults to "None" ' - 'for\n' - ' modules created dynamically using the "types.ModuleType"\n' - ' constructor. Previously the attribute was optional.\n' - '\n' - ' Deprecated since version 3.12, will be removed in version 3.16:\n' - ' Setting "__loader__" on a module while failing to set\n' - ' "__spec__.loader" is deprecated. In Python 3.16, "__loader__" ' - 'will\n' - ' cease to be set or taken into consideration by the import system ' - 'or\n' - ' the standard library.\n' - '\n' - 'module.__path__\n' - '\n' - ' A (possibly empty) *sequence* of strings enumerating the ' - 'locations\n' - ' where the package’s submodules will be found. Non-package ' - 'modules\n' - ' should not have a "__path__" attribute. See __path__ attributes ' - 'on\n' - ' modules for more details.\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.submodule_search_locations" instead of\n' - ' "module.__path__".\n' - '\n' - 'module.__file__\n' - '\n' - 'module.__cached__\n' - '\n' - ' "__file__" and "__cached__" are both optional attributes that ' - 'may\n' - ' or may not be set. Both attributes should be a "str" when they ' - 'are\n' - ' available.\n' - '\n' - ' "__file__" indicates the pathname of the file from which the ' - 'module\n' - ' was loaded (if loaded from a file), or the pathname of the ' - 'shared\n' - ' library file for extension modules loaded dynamically from a ' - 'shared\n' - ' library. It might be missing for certain types of modules, such ' - 'as\n' - ' C modules that are statically linked into the interpreter, and ' - 'the\n' - ' import system may opt to leave it unset if it has no semantic\n' - ' meaning (for example, a module loaded from a database).\n' - '\n' - ' If "__file__" is set then the "__cached__" attribute might also ' - 'be\n' - ' set, which is the path to any compiled version of the code ' - '(for\n' - ' example, a byte-compiled file). 
The file does not need to exist ' - 'to\n' - ' set this attribute; the path can simply point to where the ' - 'compiled\n' - ' file *would* exist (see **PEP 3147**).\n' - '\n' - ' Note that "__cached__" may be set even if "__file__" is not ' - 'set.\n' - ' However, that scenario is quite atypical. Ultimately, the ' - '*loader*\n' - ' is what makes use of the module spec provided by the *finder* ' - '(from\n' - ' which "__file__" and "__cached__" are derived). So if a loader ' - 'can\n' - ' load from a cached module but otherwise does not load from a ' - 'file,\n' - ' that atypical scenario may be appropriate.\n' - '\n' - ' It is **strongly** recommended that you use\n' - ' "module.__spec__.cached" instead of "module.__cached__".\n' - '\n' - ' Deprecated since version 3.13, will be removed in version 3.15:\n' - ' Setting "__cached__" on a module while failing to set\n' - ' "__spec__.cached" is deprecated. In Python 3.15, "__cached__" ' - 'will\n' - ' cease to be set or taken into consideration by the import system ' - 'or\n' - ' standard library.\n' - '\n' - '\n' - 'Other writable attributes on module objects\n' - '-------------------------------------------\n' - '\n' - 'As well as the import-related attributes listed above, module ' - 'objects\n' - 'also have the following writable attributes:\n' - '\n' - 'module.__doc__\n' - '\n' - ' The module’s documentation string, or "None" if unavailable. ' - 'See\n' - ' also: "__doc__ attributes".\n' - '\n' - 'module.__annotations__\n' - '\n' - ' A dictionary containing *variable annotations* collected during\n' - ' module body execution. For best practices on working with\n' - ' "__annotations__", see "annotationlib".\n' - '\n' - ' Changed in version 3.14: Annotations are now lazily evaluated. ' - 'See\n' - ' **PEP 649**.\n' - '\n' - 'module.__annotate__\n' - '\n' - ' The *annotate function* for this module, or "None" if the ' - 'module\n' - ' has no annotations. See also: "__annotate__" attributes.\n' - '\n' - ' Added in version 3.14.\n' - '\n' - '\n' - 'Module dictionaries\n' - '-------------------\n' - '\n' - 'Module objects also have the following special read-only ' - 'attribute:\n' - '\n' - 'module.__dict__\n' - '\n' - ' The module’s namespace as a dictionary object. Uniquely among ' - 'the\n' - ' attributes listed here, "__dict__" cannot be accessed as a ' - 'global\n' - ' variable from within a module; it can only be accessed as an\n' - ' attribute on module objects.\n' - '\n' - ' **CPython implementation detail:** Because of the way CPython\n' - ' clears module dictionaries, the module dictionary will be ' - 'cleared\n' - ' when the module falls out of scope even if the dictionary still ' - 'has\n' - ' live references. To avoid this, copy the dictionary or keep ' - 'the\n' - ' module around while using its dictionary directly.\n' - '\n' - '\n' - 'Custom classes\n' - '==============\n' - '\n' - 'Custom class types are typically created by class definitions (see\n' - 'section Class definitions). A class has a namespace implemented by ' - 'a\n' - 'dictionary object. Class attribute references are translated to\n' - 'lookups in this dictionary, e.g., "C.x" is translated to\n' - '"C.__dict__["x"]" (although there are a number of hooks which ' - 'allow\n' - 'for other means of locating attributes). 
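A small illustrative sketch (both classes are invented for the example) of the translation from class attribute references to namespace-dictionary lookups described above:

    class Base:
        greeting = "hello"

    class C(Base):
        x = 1

    assert C.x == C.__dict__["x"]                    # found directly in C's own namespace
    assert "greeting" not in C.__dict__              # not defined on C itself ...
    assert C.greeting == Base.__dict__["greeting"]   # ... so the base classes are searched
    print(C.__mro__)   # (C, Base, object): the search order used for the lookup above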
When the attribute name ' - 'is\n' - 'not found there, the attribute search continues in the base ' - 'classes.\n' - 'This search of the base classes uses the C3 method resolution ' - 'order\n' - 'which behaves correctly even in the presence of ‘diamond’ ' - 'inheritance\n' - 'structures where there are multiple inheritance paths leading back ' - 'to\n' - 'a common ancestor. Additional details on the C3 MRO used by Python ' - 'can\n' - 'be found at The Python 2.3 Method Resolution Order.\n' - '\n' - 'When a class attribute reference (for class "C", say) would yield ' - 'a\n' - 'class method object, it is transformed into an instance method ' - 'object\n' - 'whose "__self__" attribute is "C". When it would yield a\n' - '"staticmethod" object, it is transformed into the object wrapped ' - 'by\n' - 'the static method object. See section Implementing Descriptors for\n' - 'another way in which attributes retrieved from a class may differ ' - 'from\n' - 'those actually contained in its "__dict__".\n' - '\n' - 'Class attribute assignments update the class’s dictionary, never ' - 'the\n' - 'dictionary of a base class.\n' - '\n' - 'A class object can be called (see above) to yield a class instance\n' - '(see below).\n' - '\n' - '\n' - 'Special attributes\n' - '------------------\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| Attribute | ' - 'Meaning |\n' - '|====================================================|====================================================|\n' - '| type.__name__ | The class’s ' - 'name. See also: "__name__ attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__qualname__ | The class’s ' - '*qualified name*. See also: |\n' - '| | ' - '"__qualname__ attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__module__ | The name of ' - 'the module in which the class was |\n' - '| | ' - 'defined. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__dict__ | A "mapping ' - 'proxy" providing a read-only view of |\n' - '| | the class’s ' - 'namespace. See also: "__dict__ |\n' - '| | ' - 'attributes". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__bases__ | A "tuple" ' - 'containing the class’s bases. In most |\n' - '| | cases, for a ' - 'class defined as "class X(A, B, C)", |\n' - '| | ' - '"X.__bases__" will be exactly equal to "(A, B, |\n' - '| | ' - 'C)". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__doc__ | The class’s ' - 'documentation string, or "None" if |\n' - '| | undefined. ' - 'Not inherited by subclasses. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__annotations__ | A dictionary ' - 'containing *variable annotations* |\n' - '| | collected ' - 'during class body execution. See also: |\n' - '| | ' - '"__annotations__ attributes". For best practices |\n' - '| | on working ' - 'with "__annotations__", please see |\n' - '| | ' - '"annotationlib". 
Caution: Accessing the |\n' - '| | ' - '"__annotations__" attribute of a class object |\n' - '| | directly may ' - 'yield incorrect results in the |\n' - '| | presence of ' - 'metaclasses. In addition, the |\n' - '| | attribute ' - 'may not exist for some classes. Use |\n' - '| | ' - '"annotationlib.get_annotations()" to retrieve |\n' - '| | class ' - 'annotations safely. Changed in version |\n' - '| | 3.14: ' - 'Annotations are now lazily evaluated. See |\n' - '| | **PEP ' - '649**. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__annotate__() | The ' - '*annotate function* for this class, or "None" |\n' - '| | if the class ' - 'has no annotations. See also: |\n' - '| | ' - '"__annotate__ attributes". Caution: Accessing |\n' - '| | the ' - '"__annotate__" attribute of a class object |\n' - '| | directly may ' - 'yield incorrect results in the |\n' - '| | presence of ' - 'metaclasses. Use |\n' - '| | ' - '"annotationlib.get_annotate_function()" to |\n' - '| | retrieve the ' - 'annotate function safely. Added in |\n' - '| | version ' - '3.14. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__type_params__ | A "tuple" ' - 'containing the type parameters of a |\n' - '| | generic ' - 'class. Added in version 3.12. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__static_attributes__ | A "tuple" ' - 'containing names of attributes of this |\n' - '| | class which ' - 'are assigned through "self.X" from any |\n' - '| | function in ' - 'its body. Added in version 3.13. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__firstlineno__ | The line ' - 'number of the first line of the class |\n' - '| | definition, ' - 'including decorators. Setting the |\n' - '| | "__module__" ' - 'attribute removes the |\n' - '| | ' - '"__firstlineno__" item from the type’s dictionary. |\n' - '| | Added in ' - 'version 3.13. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| type.__mro__ | The "tuple" ' - 'of classes that are considered when |\n' - '| | looking for ' - 'base classes during method resolution. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Special methods\n' - '---------------\n' - '\n' - 'In addition to the special attributes described above, all Python\n' - 'classes also have the following two methods available:\n' - '\n' - 'type.mro()\n' - '\n' - ' This method can be overridden by a metaclass to customize the\n' - ' method resolution order for its instances. It is called at ' - 'class\n' - ' instantiation, and its result is stored in "__mro__".\n' - '\n' - 'type.__subclasses__()\n' - '\n' - ' Each class keeps a list of weak references to its immediate\n' - ' subclasses. This method returns a list of all those references\n' - ' still alive. The list is in definition order. Example:\n' - '\n' - ' >>> class A: pass\n' - ' >>> class B(A): pass\n' - ' >>> A.__subclasses__()\n' - " []\n" - '\n' - '\n' - 'Class instances\n' - '===============\n' - '\n' - 'A class instance is created by calling a class object (see above). 
' - 'A\n' - 'class instance has a namespace implemented as a dictionary which ' - 'is\n' - 'the first place in which attribute references are searched. When ' - 'an\n' - 'attribute is not found there, and the instance’s class has an\n' - 'attribute by that name, the search continues with the class\n' - 'attributes. If a class attribute is found that is a user-defined\n' - 'function object, it is transformed into an instance method object\n' - 'whose "__self__" attribute is the instance. Static method and ' - 'class\n' - 'method objects are also transformed; see above under “Classes”. ' - 'See\n' - 'section Implementing Descriptors for another way in which ' - 'attributes\n' - 'of a class retrieved via its instances may differ from the objects\n' - 'actually stored in the class’s "__dict__". If no class attribute ' - 'is\n' - 'found, and the object’s class has a "__getattr__()" method, that ' - 'is\n' - 'called to satisfy the lookup.\n' - '\n' - 'Attribute assignments and deletions update the instance’s ' - 'dictionary,\n' - 'never a class’s dictionary. If the class has a "__setattr__()" or\n' - '"__delattr__()" method, this is called instead of updating the\n' - 'instance dictionary directly.\n' - '\n' - 'Class instances can pretend to be numbers, sequences, or mappings ' - 'if\n' - 'they have methods with certain special names. See section Special\n' - 'method names.\n' - '\n' - '\n' - 'Special attributes\n' - '------------------\n' - '\n' - 'object.__class__\n' - '\n' - ' The class to which a class instance belongs.\n' - '\n' - 'object.__dict__\n' - '\n' - ' A dictionary or other mapping object used to store an object’s\n' - ' (writable) attributes. Not all instances have a "__dict__"\n' - ' attribute; see the section on __slots__ for more details.\n' - '\n' - '\n' - 'I/O objects (also known as file objects)\n' - '========================================\n' - '\n' - 'A *file object* represents an open file. Various shortcuts are\n' - 'available to create file objects: the "open()" built-in function, ' - 'and\n' - 'also "os.popen()", "os.fdopen()", and the "makefile()" method of\n' - 'socket objects (and perhaps by other functions or methods provided ' - 'by\n' - 'extension modules).\n' - '\n' - 'The objects "sys.stdin", "sys.stdout" and "sys.stderr" are ' - 'initialized\n' - 'to file objects corresponding to the interpreter’s standard input,\n' - 'output and error streams; they are all open in text mode and ' - 'therefore\n' - 'follow the interface defined by the "io.TextIOBase" abstract ' - 'class.\n' - '\n' - '\n' - 'Internal types\n' - '==============\n' - '\n' - 'A few types used internally by the interpreter are exposed to the\n' - 'user. Their definitions may change with future versions of the\n' - 'interpreter, but they are mentioned here for completeness.\n' - '\n' - '\n' - 'Code objects\n' - '------------\n' - '\n' - 'Code objects represent *byte-compiled* executable Python code, or\n' - '*bytecode*. The difference between a code object and a function ' - 'object\n' - 'is that the function object contains an explicit reference to the\n' - 'function’s globals (the module in which it was defined), while a ' - 'code\n' - 'object contains no context; also the default argument values are\n' - 'stored in the function object, not in the code object (because ' - 'they\n' - 'represent values calculated at run-time). 
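A short hedged sketch contrasting what lives on a function object with what lives on its code object, per the paragraph above (the example function is invented):

    def greet(name, punctuation="!"):
        return "Hello, " + name + punctuation

    # Context is carried by the function object ...
    print(greet.__defaults__)               # ('!',)
    print(greet.__globals__ is globals())   # True when defined at module level

    # ... while the immutable code object only describes the compiled body.
    code = greet.__code__
    print(code.co_name, code.co_argcount)   # greet 2
    print(code.co_varnames)                 # ('name', 'punctuation')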
Unlike function ' - 'objects,\n' - 'code objects are immutable and contain no references (directly or\n' - 'indirectly) to mutable objects.\n' - '\n' - '\n' - 'Special read-only attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_name | The function ' - 'name |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_qualname | The fully ' - 'qualified function name Added in |\n' - '| | version ' - '3.11. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_argcount | The total ' - 'number of positional *parameters* |\n' - '| | (including ' - 'positional-only parameters and |\n' - '| | parameters ' - 'with default values) that the function |\n' - '| | ' - 'has |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_posonlyargcount | The number ' - 'of positional-only *parameters* |\n' - '| | (including ' - 'arguments with default values) that the |\n' - '| | function ' - 'has |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_kwonlyargcount | The number ' - 'of keyword-only *parameters* (including |\n' - '| | arguments ' - 'with default values) that the function |\n' - '| | ' - 'has |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_nlocals | The number ' - 'of local variables used by the function |\n' - '| | (including ' - 'parameters) |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_varnames | A "tuple" ' - 'containing the names of the local |\n' - '| | variables in ' - 'the function (starting with the |\n' - '| | parameter ' - 'names) |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_cellvars | A "tuple" ' - 'containing the names of local variables |\n' - '| | that are ' - 'referenced from at least one *nested |\n' - '| | scope* ' - 'inside the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_freevars | A "tuple" ' - 'containing the names of *free (closure) |\n' - '| | variables* ' - 'that a *nested scope* references in an |\n' - '| | outer scope. ' - 'See also "function.__closure__". |\n' - '| | Note: ' - 'references to global and builtin names are |\n' - '| | *not* ' - 'included. 
|\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_code | A string ' - 'representing the sequence of *bytecode* |\n' - '| | instructions ' - 'in the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_consts | A "tuple" ' - 'containing the literals used by the |\n' - '| | *bytecode* ' - 'in the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_names | A "tuple" ' - 'containing the names used by the |\n' - '| | *bytecode* ' - 'in the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_filename | The name of ' - 'the file from which the code was |\n' - '| | ' - 'compiled |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_firstlineno | The line ' - 'number of the first line of the function |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_lnotab | A string ' - 'encoding the mapping from *bytecode* |\n' - '| | offsets to ' - 'line numbers. For details, see the |\n' - '| | source code ' - 'of the interpreter. Deprecated since |\n' - '| | version ' - '3.12: This attribute of code objects is |\n' - '| | deprecated, ' - 'and may be removed in Python 3.15. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_stacksize | The required ' - 'stack size of the code object |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| codeobject.co_flags | An "integer" ' - 'encoding a number of flags for the |\n' - '| | ' - 'interpreter. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'The following flag bits are defined for "co_flags": bit "0x04" is ' - 'set\n' - 'if the function uses the "*arguments" syntax to accept an ' - 'arbitrary\n' - 'number of positional arguments; bit "0x08" is set if the function ' - 'uses\n' - 'the "**keywords" syntax to accept arbitrary keyword arguments; bit\n' - '"0x20" is set if the function is a generator. 
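A quick sketch of testing the "co_flags" bits just listed; the example functions are invented, and the constants exposed by the "inspect" module mirror the same bit values:

    import inspect

    def takes_positional(*args): ...
    def takes_keywords(**kwargs): ...
    def generator(): yield

    assert inspect.CO_VARARGS == 0x04 and inspect.CO_VARKEYWORDS == 0x08
    print(bool(takes_positional.__code__.co_flags & 0x04))   # True
    print(bool(takes_keywords.__code__.co_flags & 0x08))     # True
    print(bool(generator.__code__.co_flags & 0x20))          # True (generator)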
See Code Objects Bit\n' - 'Flags for details on the semantics of each flags that might be\n' - 'present.\n' - '\n' - 'Future feature declarations ("from __future__ import division") ' - 'also\n' - 'use bits in "co_flags" to indicate whether a code object was ' - 'compiled\n' - 'with a particular feature enabled: bit "0x2000" is set if the ' - 'function\n' - 'was compiled with future division enabled; bits "0x10" and ' - '"0x1000"\n' - 'were used in earlier versions of Python.\n' - '\n' - 'Other bits in "co_flags" are reserved for internal use.\n' - '\n' - 'If a code object represents a function and has a docstring, the ' - 'first\n' - 'item in "co_consts" is the docstring of the function.\n' - '\n' - '\n' - 'Methods on code objects\n' - '~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - 'codeobject.co_positions()\n' - '\n' - ' Returns an iterable over the source code positions of each\n' - ' *bytecode* instruction in the code object.\n' - '\n' - ' The iterator returns "tuple"s containing the "(start_line,\n' - ' end_line, start_column, end_column)". The *i-th* tuple ' - 'corresponds\n' - ' to the position of the source code that compiled to the *i-th* ' - 'code\n' - ' unit. Column information is 0-indexed utf-8 byte offsets on the\n' - ' given source line.\n' - '\n' - ' This positional information can be missing. A non-exhaustive ' - 'lists\n' - ' of cases where this may happen:\n' - '\n' - ' * Running the interpreter with "-X" "no_debug_ranges".\n' - '\n' - ' * Loading a pyc file compiled while using "-X" ' - '"no_debug_ranges".\n' - '\n' - ' * Position tuples corresponding to artificial instructions.\n' - '\n' - ' * Line and column numbers that can’t be represented due to\n' - ' implementation specific limitations.\n' - '\n' - ' When this occurs, some or all of the tuple elements can be ' - '"None".\n' - '\n' - ' Added in version 3.11.\n' - '\n' - ' Note:\n' - '\n' - ' This feature requires storing column positions in code ' - 'objects\n' - ' which may result in a small increase of disk usage of ' - 'compiled\n' - ' Python files or interpreter memory usage. To avoid storing ' - 'the\n' - ' extra information and/or deactivate printing the extra ' - 'traceback\n' - ' information, the "-X" "no_debug_ranges" command line flag or ' - 'the\n' - ' "PYTHONNODEBUGRANGES" environment variable can be used.\n' - '\n' - 'codeobject.co_lines()\n' - '\n' - ' Returns an iterator that yields information about successive ' - 'ranges\n' - ' of *bytecode*s. Each item yielded is a "(start, end, lineno)"\n' - ' "tuple":\n' - '\n' - ' * "start" (an "int") represents the offset (inclusive) of the ' - 'start\n' - ' of the *bytecode* range\n' - '\n' - ' * "end" (an "int") represents the offset (exclusive) of the end ' - 'of\n' - ' the *bytecode* range\n' - '\n' - ' * "lineno" is an "int" representing the line number of the\n' - ' *bytecode* range, or "None" if the bytecodes in the given ' - 'range\n' - ' have no line number\n' - '\n' - ' The items yielded will have the following properties:\n' - '\n' - ' * The first range yielded will have a "start" of 0.\n' - '\n' - ' * The "(start, end)" ranges will be non-decreasing and ' - 'consecutive.\n' - ' That is, for any pair of "tuple"s, the "start" of the second ' - 'will\n' - ' be equal to the "end" of the first.\n' - '\n' - ' * No range will be backwards: "end >= start" for all triples.\n' - '\n' - ' * The last "tuple" yielded will have "end" equal to the size of ' - 'the\n' - ' *bytecode*.\n' - '\n' - ' Zero-width ranges, where "start == end", are allowed. 
' - 'Zero-width\n' - ' ranges are used for lines that are present in the source code, ' - 'but\n' - ' have been eliminated by the *bytecode* compiler.\n' - '\n' - ' Added in version 3.10.\n' - '\n' - ' See also:\n' - '\n' - ' **PEP 626** - Precise line numbers for debugging and other ' - 'tools.\n' - ' The PEP that introduced the "co_lines()" method.\n' - '\n' - 'codeobject.replace(**kwargs)\n' - '\n' - ' Return a copy of the code object with new values for the ' - 'specified\n' - ' fields.\n' - '\n' - ' Code objects are also supported by the generic function\n' - ' "copy.replace()".\n' - '\n' - ' Added in version 3.8.\n' - '\n' - '\n' - 'Frame objects\n' - '-------------\n' - '\n' - 'Frame objects represent execution frames. They may occur in ' - 'traceback\n' - 'objects, and are also passed to registered trace functions.\n' - '\n' - '\n' - 'Special read-only attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_back | Points to ' - 'the previous stack frame (towards the |\n' - '| | caller), or ' - '"None" if this is the bottom stack |\n' - '| | ' - 'frame |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_code | The code ' - 'object being executed in this frame. |\n' - '| | Accessing ' - 'this attribute raises an auditing event |\n' - '| | ' - '"object.__getattr__" with arguments "obj" and |\n' - '| | ' - '""f_code"". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_locals | The mapping ' - 'used by the frame to look up local |\n' - '| | variables. ' - 'If the frame refers to an *optimized |\n' - '| | scope*, this ' - 'may return a write-through proxy |\n' - '| | object. ' - 'Changed in version 3.13: Return a proxy |\n' - '| | for ' - 'optimized scopes. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_globals | The ' - 'dictionary used by the frame to look up global |\n' - '| | ' - 'variables |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_builtins | The ' - 'dictionary used by the frame to look up built- |\n' - '| | in ' - '(intrinsic) names |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_lasti | The “precise ' - 'instruction” of the frame object |\n' - '| | (this is an ' - 'index into the *bytecode* string of |\n' - '| | the code ' - 'object) |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Special writable attributes\n' - '~~~~~~~~~~~~~~~~~~~~~~~~~~~\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_trace | If not ' - '"None", this is a function called for |\n' - '| | various ' - 'events during code execution (this is used |\n' - '| | by ' - 'debuggers). Normally an event is triggered for |\n' - '| | each new ' - 'source line (see "f_trace_lines"). 
|\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_trace_lines | Set this ' - 'attribute to "False" to disable |\n' - '| | triggering a ' - 'tracing event for each source line. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_trace_opcodes | Set this ' - 'attribute to "True" to allow per-opcode |\n' - '| | events to be ' - 'requested. Note that this may lead to |\n' - '| | undefined ' - 'interpreter behaviour if exceptions |\n' - '| | raised by ' - 'the trace function escape to the |\n' - '| | function ' - 'being traced. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| frame.f_lineno | The current ' - 'line number of the frame – writing to |\n' - '| | this from ' - 'within a trace function jumps to the |\n' - '| | given line ' - '(only for the bottom-most frame). A |\n' - '| | debugger can ' - 'implement a Jump command (aka Set |\n' - '| | Next ' - 'Statement) by writing to this attribute. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - '\n' - 'Frame object methods\n' - '~~~~~~~~~~~~~~~~~~~~\n' - '\n' - 'Frame objects support one method:\n' - '\n' - 'frame.clear()\n' - '\n' - ' This method clears all references to local variables held by ' - 'the\n' - ' frame. Also, if the frame belonged to a *generator*, the ' - 'generator\n' - ' is finalized. This helps break reference cycles involving ' - 'frame\n' - ' objects (for example when catching an exception and storing its\n' - ' traceback for later use).\n' - '\n' - ' "RuntimeError" is raised if the frame is currently executing or\n' - ' suspended.\n' - '\n' - ' Added in version 3.4.\n' - '\n' - ' Changed in version 3.13: Attempting to clear a suspended frame\n' - ' raises "RuntimeError" (as has always been the case for ' - 'executing\n' - ' frames).\n' - '\n' - '\n' - 'Traceback objects\n' - '-----------------\n' - '\n' - 'Traceback objects represent the stack trace of an exception. A\n' - 'traceback object is implicitly created when an exception occurs, ' - 'and\n' - 'may also be explicitly created by calling "types.TracebackType".\n' - '\n' - 'Changed in version 3.7: Traceback objects can now be explicitly\n' - 'instantiated from Python code.\n' - '\n' - 'For implicitly created tracebacks, when the search for an ' - 'exception\n' - 'handler unwinds the execution stack, at each unwound level a ' - 'traceback\n' - 'object is inserted in front of the current traceback. When an\n' - 'exception handler is entered, the stack trace is made available to ' - 'the\n' - 'program. (See section The try statement.) 
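A small sketch (the function and exception are invented) of walking an implicitly created traceback through the attributes described just below, from the handler towards the frame where the exception was raised:

    def inner():
        raise ValueError("boom")

    try:
        inner()
    except ValueError as exc:
        tb = exc.__traceback__             # the traceback attached to the exception
        while tb is not None:
            frame = tb.tb_frame            # execution frame at this level
            print(frame.f_code.co_name, tb.tb_lineno)
            tb = tb.tb_next                # next level, towards the raise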
It is accessible as the\n' - 'third item of the tuple returned by "sys.exc_info()", and as the\n' - '"__traceback__" attribute of the caught exception.\n' - '\n' - 'When the program contains no suitable handler, the stack trace is\n' - 'written (nicely formatted) to the standard error stream; if the\n' - 'interpreter is interactive, it is also made available to the user ' - 'as\n' - '"sys.last_traceback".\n' - '\n' - 'For explicitly created tracebacks, it is up to the creator of the\n' - 'traceback to determine how the "tb_next" attributes should be ' - 'linked\n' - 'to form a full stack trace.\n' - '\n' - 'Special read-only attributes:\n' - '\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| traceback.tb_frame | Points to ' - 'the execution frame of the current |\n' - '| | level. ' - 'Accessing this attribute raises an |\n' - '| | auditing ' - 'event "object.__getattr__" with arguments |\n' - '| | "obj" and ' - '""tb_frame"". |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| traceback.tb_lineno | Gives the ' - 'line number where the exception occurred |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '| traceback.tb_lasti | Indicates ' - 'the “precise instruction”. |\n' - '+----------------------------------------------------+----------------------------------------------------+\n' - '\n' - 'The line number and last instruction in the traceback may differ ' - 'from\n' - 'the line number of its frame object if the exception occurred in a\n' - '"try" statement with no matching except clause or with a "finally"\n' - 'clause.\n' - '\n' - 'traceback.tb_next\n' - '\n' - ' The special writable attribute "tb_next" is the next level in ' - 'the\n' - ' stack trace (towards the frame where the exception occurred), ' - 'or\n' - ' "None" if there is no next level.\n' - '\n' - ' Changed in version 3.7: This attribute is now writable\n' - '\n' - '\n' - 'Slice objects\n' - '-------------\n' - '\n' - 'Slice objects are used to represent slices for "__getitem__()"\n' - 'methods. They are also created by the built-in "slice()" ' - 'function.\n' - '\n' - 'Special read-only attributes: "start" is the lower bound; "stop" ' - 'is\n' - 'the upper bound; "step" is the step value; each is "None" if ' - 'omitted.\n' - 'These attributes can have any type.\n' - '\n' - 'Slice objects support one method:\n' - '\n' - 'slice.indices(self, length)\n' - '\n' - ' This method takes a single integer argument *length* and ' - 'computes\n' - ' information about the slice that the slice object would describe ' - 'if\n' - ' applied to a sequence of *length* items. It returns a tuple of\n' - ' three integers; respectively these are the *start* and *stop*\n' - ' indices and the *step* or stride length of the slice. Missing ' - 'or\n' - ' out-of-bounds indices are handled in a manner consistent with\n' - ' regular slices.\n' - '\n' - '\n' - 'Static method objects\n' - '---------------------\n' - '\n' - 'Static method objects provide a way of defeating the transformation ' - 'of\n' - 'function objects to method objects described above. A static ' - 'method\n' - 'object is a wrapper around any other object, usually a ' - 'user-defined\n' - 'method object. 
When a static method object is retrieved from a ' - 'class\n' - 'or a class instance, the object actually returned is the wrapped\n' - 'object, which is not subject to any further transformation. Static\n' - 'method objects are also callable. Static method objects are created ' - 'by\n' - 'the built-in "staticmethod()" constructor.\n' - '\n' - '\n' - 'Class method objects\n' - '--------------------\n' - '\n' - 'A class method object, like a static method object, is a wrapper\n' - 'around another object that alters the way in which that object is\n' - 'retrieved from classes and class instances. The behaviour of class\n' - 'method objects upon such retrieval is described above, under ' - '“instance\n' - 'methods”. Class method objects are created by the built-in\n' - '"classmethod()" constructor.\n', - 'typesfunctions': 'Functions\n' - '*********\n' - '\n' - 'Function objects are created by function definitions. The ' - 'only\n' - 'operation on a function object is to call it: ' - '"func(argument-list)".\n' - '\n' - 'There are really two flavors of function objects: built-in ' - 'functions\n' - 'and user-defined functions. Both support the same ' - 'operation (to call\n' - 'the function), but the implementation is different, hence ' - 'the\n' - 'different object types.\n' - '\n' - 'See Function definitions for more information.\n', - 'typesmapping': 'Mapping Types — "dict"\n' - '**********************\n' - '\n' - 'A *mapping* object maps *hashable* values to arbitrary ' - 'objects.\n' - 'Mappings are mutable objects. There is currently only one ' - 'standard\n' - 'mapping type, the *dictionary*. (For other containers see ' - 'the built-\n' - 'in "list", "set", and "tuple" classes, and the "collections" ' - 'module.)\n' - '\n' - 'A dictionary’s keys are *almost* arbitrary values. Values ' - 'that are\n' - 'not *hashable*, that is, values containing lists, ' - 'dictionaries or\n' - 'other mutable types (that are compared by value rather than ' - 'by object\n' - 'identity) may not be used as keys. Values that compare equal ' - '(such as\n' - '"1", "1.0", and "True") can be used interchangeably to index ' - 'the same\n' - 'dictionary entry.\n' - '\n' - 'class dict(**kwargs)\n' - 'class dict(mapping, **kwargs)\n' - 'class dict(iterable, **kwargs)\n' - '\n' - ' Return a new dictionary initialized from an optional ' - 'positional\n' - ' argument and a possibly empty set of keyword arguments.\n' - '\n' - ' Dictionaries can be created by several means:\n' - '\n' - ' * Use a comma-separated list of "key: value" pairs within ' - 'braces:\n' - ' "{\'jack\': 4098, \'sjoerd\': 4127}" or "{4098: ' - "'jack', 4127:\n" - ' \'sjoerd\'}"\n' - '\n' - ' * Use a dict comprehension: "{}", "{x: x ** 2 for x in ' - 'range(10)}"\n' - '\n' - ' * Use the type constructor: "dict()", "dict([(\'foo\', ' - "100), ('bar',\n" - ' 200)])", "dict(foo=100, bar=200)"\n' - '\n' - ' If no positional argument is given, an empty dictionary ' - 'is created.\n' - ' If a positional argument is given and it defines a ' - '"keys()" method,\n' - ' a dictionary is created by calling "__getitem__()" on the ' - 'argument\n' - ' with each returned key from the method. Otherwise, the ' - 'positional\n' - ' argument must be an *iterable* object. Each item in the ' - 'iterable\n' - ' must itself be an iterable with exactly two elements. ' - 'The first\n' - ' element of each item becomes a key in the new dictionary, ' - 'and the\n' - ' second element the corresponding value. 
If a key occurs ' - 'more than\n' - ' once, the last value for that key becomes the ' - 'corresponding value\n' - ' in the new dictionary.\n' - '\n' - ' If keyword arguments are given, the keyword arguments and ' - 'their\n' - ' values are added to the dictionary created from the ' - 'positional\n' - ' argument. If a key being added is already present, the ' - 'value from\n' - ' the keyword argument replaces the value from the ' - 'positional\n' - ' argument.\n' - '\n' - ' To illustrate, the following examples all return a ' - 'dictionary equal\n' - ' to "{"one": 1, "two": 2, "three": 3}":\n' - '\n' - ' >>> a = dict(one=1, two=2, three=3)\n' - " >>> b = {'one': 1, 'two': 2, 'three': 3}\n" - " >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))\n" - " >>> d = dict([('two', 2), ('one', 1), ('three', 3)])\n" - " >>> e = dict({'three': 3, 'one': 1, 'two': 2})\n" - " >>> f = dict({'one': 1, 'three': 3}, two=2)\n" - ' >>> a == b == c == d == e == f\n' - ' True\n' - '\n' - ' Providing keyword arguments as in the first example only ' - 'works for\n' - ' keys that are valid Python identifiers. Otherwise, any ' - 'valid keys\n' - ' can be used.\n' - '\n' - ' These are the operations that dictionaries support (and ' - 'therefore,\n' - ' custom mapping types should support too):\n' - '\n' - ' list(d)\n' - '\n' - ' Return a list of all the keys used in the dictionary ' - '*d*.\n' - '\n' - ' len(d)\n' - '\n' - ' Return the number of items in the dictionary *d*.\n' - '\n' - ' d[key]\n' - '\n' - ' Return the item of *d* with key *key*. Raises a ' - '"KeyError" if\n' - ' *key* is not in the map.\n' - '\n' - ' If a subclass of dict defines a method "__missing__()" ' - 'and *key*\n' - ' is not present, the "d[key]" operation calls that ' - 'method with\n' - ' the key *key* as argument. The "d[key]" operation ' - 'then returns\n' - ' or raises whatever is returned or raised by the\n' - ' "__missing__(key)" call. No other operations or ' - 'methods invoke\n' - ' "__missing__()". If "__missing__()" is not defined, ' - '"KeyError"\n' - ' is raised. "__missing__()" must be a method; it cannot ' - 'be an\n' - ' instance variable:\n' - '\n' - ' >>> class Counter(dict):\n' - ' ... def __missing__(self, key):\n' - ' ... return 0\n' - ' ...\n' - ' >>> c = Counter()\n' - " >>> c['red']\n" - ' 0\n' - " >>> c['red'] += 1\n" - " >>> c['red']\n" - ' 1\n' - '\n' - ' The example above shows part of the implementation of\n' - ' "collections.Counter". A different "__missing__" ' - 'method is used\n' - ' by "collections.defaultdict".\n' - '\n' - ' d[key] = value\n' - '\n' - ' Set "d[key]" to *value*.\n' - '\n' - ' del d[key]\n' - '\n' - ' Remove "d[key]" from *d*. Raises a "KeyError" if ' - '*key* is not\n' - ' in the map.\n' - '\n' - ' key in d\n' - '\n' - ' Return "True" if *d* has a key *key*, else "False".\n' - '\n' - ' key not in d\n' - '\n' - ' Equivalent to "not key in d".\n' - '\n' - ' iter(d)\n' - '\n' - ' Return an iterator over the keys of the dictionary. ' - 'This is a\n' - ' shortcut for "iter(d.keys())".\n' - '\n' - ' clear()\n' - '\n' - ' Remove all items from the dictionary.\n' - '\n' - ' copy()\n' - '\n' - ' Return a shallow copy of the dictionary.\n' - '\n' - ' classmethod fromkeys(iterable, value=None, /)\n' - '\n' - ' Create a new dictionary with keys from *iterable* and ' - 'values set\n' - ' to *value*.\n' - '\n' - ' "fromkeys()" is a class method that returns a new ' - 'dictionary.\n' - ' *value* defaults to "None". 
All of the values refer ' - 'to just a\n' - ' single instance, so it generally doesn’t make sense ' - 'for *value*\n' - ' to be a mutable object such as an empty list. To get ' - 'distinct\n' - ' values, use a dict comprehension instead.\n' - '\n' - ' get(key, default=None)\n' - '\n' - ' Return the value for *key* if *key* is in the ' - 'dictionary, else\n' - ' *default*. If *default* is not given, it defaults to ' - '"None", so\n' - ' that this method never raises a "KeyError".\n' - '\n' - ' items()\n' - '\n' - ' Return a new view of the dictionary’s items ("(key, ' - 'value)"\n' - ' pairs). See the documentation of view objects.\n' - '\n' - ' keys()\n' - '\n' - ' Return a new view of the dictionary’s keys. See the\n' - ' documentation of view objects.\n' - '\n' - ' pop(key[, default])\n' - '\n' - ' If *key* is in the dictionary, remove it and return ' - 'its value,\n' - ' else return *default*. If *default* is not given and ' - '*key* is\n' - ' not in the dictionary, a "KeyError" is raised.\n' - '\n' - ' popitem()\n' - '\n' - ' Remove and return a "(key, value)" pair from the ' - 'dictionary.\n' - ' Pairs are returned in LIFO (last-in, first-out) ' - 'order.\n' - '\n' - ' "popitem()" is useful to destructively iterate over a\n' - ' dictionary, as often used in set algorithms. If the ' - 'dictionary\n' - ' is empty, calling "popitem()" raises a "KeyError".\n' - '\n' - ' Changed in version 3.7: LIFO order is now guaranteed. ' - 'In prior\n' - ' versions, "popitem()" would return an arbitrary ' - 'key/value pair.\n' - '\n' - ' reversed(d)\n' - '\n' - ' Return a reverse iterator over the keys of the ' - 'dictionary. This\n' - ' is a shortcut for "reversed(d.keys())".\n' - '\n' - ' Added in version 3.8.\n' - '\n' - ' setdefault(key, default=None)\n' - '\n' - ' If *key* is in the dictionary, return its value. If ' - 'not, insert\n' - ' *key* with a value of *default* and return *default*. ' - '*default*\n' - ' defaults to "None".\n' - '\n' - ' update([other])\n' - '\n' - ' Update the dictionary with the key/value pairs from ' - '*other*,\n' - ' overwriting existing keys. Return "None".\n' - '\n' - ' "update()" accepts either another object with a ' - '"keys()" method\n' - ' (in which case "__getitem__()" is called with every ' - 'key returned\n' - ' from the method) or an iterable of key/value pairs (as ' - 'tuples or\n' - ' other iterables of length two). If keyword arguments ' - 'are\n' - ' specified, the dictionary is then updated with those ' - 'key/value\n' - ' pairs: "d.update(red=1, blue=2)".\n' - '\n' - ' values()\n' - '\n' - ' Return a new view of the dictionary’s values. See ' - 'the\n' - ' documentation of view objects.\n' - '\n' - ' An equality comparison between one "dict.values()" ' - 'view and\n' - ' another will always return "False". This also applies ' - 'when\n' - ' comparing "dict.values()" to itself:\n' - '\n' - " >>> d = {'a': 1}\n" - ' >>> d.values() == d.values()\n' - ' False\n' - '\n' - ' d | other\n' - '\n' - ' Create a new dictionary with the merged keys and ' - 'values of *d*\n' - ' and *other*, which must both be dictionaries. The ' - 'values of\n' - ' *other* take priority when *d* and *other* share ' - 'keys.\n' - '\n' - ' Added in version 3.9.\n' - '\n' - ' d |= other\n' - '\n' - ' Update the dictionary *d* with keys and values from ' - '*other*,\n' - ' which may be either a *mapping* or an *iterable* of ' - 'key/value\n' - ' pairs. 
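A quick sketch of the "|" and "|=" merge operations described above (the dictionaries are invented; both operators require Python 3.9 or later):

    defaults = {"colour": "blue", "size": "M"}
    overrides = {"size": "L"}

    merged = defaults | overrides   # new dict; the right-hand operand wins on shared keys
    print(merged)                   # {'colour': 'blue', 'size': 'L'}

    defaults |= overrides           # in-place update with the same precedence
    print(defaults)                 # {'colour': 'blue', 'size': 'L'}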
The values of *other* take priority when *d* ' - 'and *other*\n' - ' share keys.\n' - '\n' - ' Added in version 3.9.\n' - '\n' - ' Dictionaries compare equal if and only if they have the ' - 'same "(key,\n' - ' value)" pairs (regardless of ordering). Order comparisons ' - '(‘<’,\n' - ' ‘<=’, ‘>=’, ‘>’) raise "TypeError".\n' - '\n' - ' Dictionaries preserve insertion order. Note that ' - 'updating a key\n' - ' does not affect the order. Keys added after deletion are ' - 'inserted\n' - ' at the end.\n' - '\n' - ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n' - ' >>> d\n' - " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> list(d)\n' - " ['one', 'two', 'three', 'four']\n" - ' >>> list(d.values())\n' - ' [1, 2, 3, 4]\n' - ' >>> d["one"] = 42\n' - ' >>> d\n' - " {'one': 42, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> del d["two"]\n' - ' >>> d["two"] = None\n' - ' >>> d\n' - " {'one': 42, 'three': 3, 'four': 4, 'two': None}\n" - '\n' - ' Changed in version 3.7: Dictionary order is guaranteed to ' - 'be\n' - ' insertion order. This behavior was an implementation ' - 'detail of\n' - ' CPython from 3.6.\n' - '\n' - ' Dictionaries and dictionary views are reversible.\n' - '\n' - ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n' - ' >>> d\n' - " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n" - ' >>> list(reversed(d))\n' - " ['four', 'three', 'two', 'one']\n" - ' >>> list(reversed(d.values()))\n' - ' [4, 3, 2, 1]\n' - ' >>> list(reversed(d.items()))\n' - " [('four', 4), ('three', 3), ('two', 2), ('one', 1)]\n" - '\n' - ' Changed in version 3.8: Dictionaries are now reversible.\n' - '\n' - 'See also:\n' - '\n' - ' "types.MappingProxyType" can be used to create a read-only ' - 'view of a\n' - ' "dict".\n' - '\n' - '\n' - 'Dictionary view objects\n' - '=======================\n' - '\n' - 'The objects returned by "dict.keys()", "dict.values()" and\n' - '"dict.items()" are *view objects*. They provide a dynamic ' - 'view on the\n' - 'dictionary’s entries, which means that when the dictionary ' - 'changes,\n' - 'the view reflects these changes.\n' - '\n' - 'Dictionary views can be iterated over to yield their ' - 'respective data,\n' - 'and support membership tests:\n' - '\n' - 'len(dictview)\n' - '\n' - ' Return the number of entries in the dictionary.\n' - '\n' - 'iter(dictview)\n' - '\n' - ' Return an iterator over the keys, values or items ' - '(represented as\n' - ' tuples of "(key, value)") in the dictionary.\n' - '\n' - ' Keys and values are iterated over in insertion order. ' - 'This allows\n' - ' the creation of "(value, key)" pairs using "zip()": ' - '"pairs =\n' - ' zip(d.values(), d.keys())". Another way to create the ' - 'same list is\n' - ' "pairs = [(v, k) for (k, v) in d.items()]".\n' - '\n' - ' Iterating views while adding or deleting entries in the ' - 'dictionary\n' - ' may raise a "RuntimeError" or fail to iterate over all ' - 'entries.\n' - '\n' - ' Changed in version 3.7: Dictionary order is guaranteed to ' - 'be\n' - ' insertion order.\n' - '\n' - 'x in dictview\n' - '\n' - ' Return "True" if *x* is in the underlying dictionary’s ' - 'keys, values\n' - ' or items (in the latter case, *x* should be a "(key, ' - 'value)"\n' - ' tuple).\n' - '\n' - 'reversed(dictview)\n' - '\n' - ' Return a reverse iterator over the keys, values or items ' - 'of the\n' - ' dictionary. 
The view will be iterated in reverse order of ' - 'the\n' - ' insertion.\n' - '\n' - ' Changed in version 3.8: Dictionary views are now ' - 'reversible.\n' - '\n' - 'dictview.mapping\n' - '\n' - ' Return a "types.MappingProxyType" that wraps the ' - 'original\n' - ' dictionary to which the view refers.\n' - '\n' - ' Added in version 3.10.\n' - '\n' - 'Keys views are set-like since their entries are unique and ' - '*hashable*.\n' - 'Items views also have set-like operations since the (key, ' - 'value) pairs\n' - 'are unique and the keys are hashable. If all values in an ' - 'items view\n' - 'are hashable as well, then the items view can interoperate ' - 'with other\n' - 'sets. (Values views are not treated as set-like since the ' - 'entries are\n' - 'generally not unique.) For set-like views, all of the ' - 'operations\n' - 'defined for the abstract base class "collections.abc.Set" ' - 'are\n' - 'available (for example, "==", "<", or "^"). While using ' - 'set\n' - 'operators, set-like views accept any iterable as the other ' - 'operand,\n' - 'unlike sets which only accept sets as the input.\n' - '\n' - 'An example of dictionary view usage:\n' - '\n' - " >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, " - "'spam': 500}\n" - ' >>> keys = dishes.keys()\n' - ' >>> values = dishes.values()\n' - '\n' - ' >>> # iteration\n' - ' >>> n = 0\n' - ' >>> for val in values:\n' - ' ... n += val\n' - ' ...\n' - ' >>> print(n)\n' - ' 504\n' - '\n' - ' >>> # keys and values are iterated over in the same order ' - '(insertion order)\n' - ' >>> list(keys)\n' - " ['eggs', 'sausage', 'bacon', 'spam']\n" - ' >>> list(values)\n' - ' [2, 1, 1, 500]\n' - '\n' - ' >>> # view objects are dynamic and reflect dict changes\n' - " >>> del dishes['eggs']\n" - " >>> del dishes['sausage']\n" - ' >>> list(keys)\n' - " ['bacon', 'spam']\n" - '\n' - ' >>> # set operations\n' - " >>> keys & {'eggs', 'bacon', 'salad'}\n" - " {'bacon'}\n" - " >>> keys ^ {'sausage', 'juice'} == {'juice', 'sausage', " - "'bacon', 'spam'}\n" - ' True\n' - " >>> keys | ['juice', 'juice', 'juice'] == {'bacon', " - "'spam', 'juice'}\n" - ' True\n' - '\n' - ' >>> # get back a read-only proxy for the original ' - 'dictionary\n' - ' >>> values.mapping\n' - " mappingproxy({'bacon': 1, 'spam': 500})\n" - " >>> values.mapping['spam']\n" - ' 500\n', - 'typesmethods': 'Methods\n' - '*******\n' - '\n' - 'Methods are functions that are called using the attribute ' - 'notation.\n' - 'There are two flavors: built-in methods (such as "append()" ' - 'on lists)\n' - 'and class instance method. Built-in methods are described ' - 'with the\n' - 'types that support them.\n' - '\n' - 'If you access a method (a function defined in a class ' - 'namespace)\n' - 'through an instance, you get a special object: a *bound ' - 'method* (also\n' - 'called instance method) object. When called, it will add the ' - '"self"\n' - 'argument to the argument list. Bound methods have two ' - 'special read-\n' - 'only attributes: "m.__self__" is the object on which the ' - 'method\n' - 'operates, and "m.__func__" is the function implementing the ' - 'method.\n' - 'Calling "m(arg-1, arg-2, ..., arg-n)" is completely ' - 'equivalent to\n' - 'calling "m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)".\n' - '\n' - 'Like function objects, bound method objects support getting ' - 'arbitrary\n' - 'attributes. However, since method attributes are actually ' - 'stored on\n' - 'the underlying function object ("method.__func__"), setting ' - 'method\n' - 'attributes on bound methods is disallowed. 
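A hedged sketch of the "__self__"/"__func__" relationship described above (the class is invented for the example):

    class Greeter:
        def hello(self, name):
            return "hello " + name

    g = Greeter()
    m = g.hello                            # a bound method object
    print(m.__self__ is g)                 # True
    print(m.__func__ is Greeter.hello)     # True
    print(m("world") == m.__func__(m.__self__, "world"))   # True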
Attempting to ' - 'set an\n' - 'attribute on a method results in an "AttributeError" being ' - 'raised. In\n' - 'order to set a method attribute, you need to explicitly set ' - 'it on the\n' - 'underlying function object:\n' - '\n' - ' >>> class C:\n' - ' ... def method(self):\n' - ' ... pass\n' - ' ...\n' - ' >>> c = C()\n' - " >>> c.method.whoami = 'my name is method' # can't set on " - 'the method\n' - ' Traceback (most recent call last):\n' - ' File "", line 1, in \n' - " AttributeError: 'method' object has no attribute " - "'whoami'\n" - " >>> c.method.__func__.whoami = 'my name is method'\n" - ' >>> c.method.whoami\n' - " 'my name is method'\n" - '\n' - 'See Instance methods for more information.\n', - 'typesmodules': 'Modules\n' - '*******\n' - '\n' - 'The only special operation on a module is attribute access: ' - '"m.name",\n' - 'where *m* is a module and *name* accesses a name defined in ' - '*m*’s\n' - 'symbol table. Module attributes can be assigned to. (Note ' - 'that the\n' - '"import" statement is not, strictly speaking, an operation ' - 'on a module\n' - 'object; "import foo" does not require a module object named ' - '*foo* to\n' - 'exist, rather it requires an (external) *definition* for a ' - 'module\n' - 'named *foo* somewhere.)\n' - '\n' - 'A special attribute of every module is "__dict__". This is ' - 'the\n' - 'dictionary containing the module’s symbol table. Modifying ' - 'this\n' - 'dictionary will actually change the module’s symbol table, ' - 'but direct\n' - 'assignment to the "__dict__" attribute is not possible (you ' - 'can write\n' - '"m.__dict__[\'a\'] = 1", which defines "m.a" to be "1", but ' - 'you can’t\n' - 'write "m.__dict__ = {}"). Modifying "__dict__" directly is ' - 'not\n' - 'recommended.\n' - '\n' - 'Modules built into the interpreter are written like this: ' - '"". If loaded from a file, they are ' - 'written as\n' - '"".\n', - 'typesseq': 'Sequence Types — "list", "tuple", "range"\n' - '*****************************************\n' - '\n' - 'There are three basic sequence types: lists, tuples, and range\n' - 'objects. Additional sequence types tailored for processing of ' - 'binary\n' - 'data and text strings are described in dedicated sections.\n' - '\n' - '\n' - 'Common Sequence Operations\n' - '==========================\n' - '\n' - 'The operations in the following table are supported by most ' - 'sequence\n' - 'types, both mutable and immutable. The ' - '"collections.abc.Sequence" ABC\n' - 'is provided to make it easier to correctly implement these ' - 'operations\n' - 'on custom sequence types.\n' - '\n' - 'This table lists the sequence operations sorted in ascending ' - 'priority.\n' - 'In the table, *s* and *t* are sequences of the same type, *n*, ' - '*i*,\n' - '*j* and *k* are integers and *x* is an arbitrary object that ' - 'meets any\n' - 'type and value restrictions imposed by *s*.\n' - '\n' - 'The "in" and "not in" operations have the same priorities as ' - 'the\n' - 'comparison operations. The "+" (concatenation) and "*" ' - '(repetition)\n' - 'operations have the same priority as the corresponding numeric\n' - 'operations. 
[3]\n' - '\n' - '+----------------------------+----------------------------------+------------+\n' - '| Operation | Result ' - '| Notes |\n' - '|============================|==================================|============|\n' - '| "x in s" | "True" if an item of *s* is ' - '| (1) |\n' - '| | equal to *x*, else "False" ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "x not in s" | "False" if an item of *s* is ' - '| (1) |\n' - '| | equal to *x*, else "True" ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s + t" | the concatenation of *s* and *t* ' - '| (6)(7) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s * n" or "n * s" | equivalent to adding *s* to ' - '| (2)(7) |\n' - '| | itself *n* times ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i]" | *i*th item of *s*, origin 0 ' - '| (3) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i:j]" | slice of *s* from *i* to *j* ' - '| (3)(4) |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s[i:j:k]" | slice of *s* from *i* to *j* ' - '| (3)(5) |\n' - '| | with step *k* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "len(s)" | length of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "min(s)" | smallest item of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "max(s)" | largest item of *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s.index(x[, i[, j]])" | index of the first occurrence of ' - '| (8) |\n' - '| | *x* in *s* (at or after index ' - '| |\n' - '| | *i* and before index *j*) ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '| "s.count(x)" | total number of occurrences of ' - '| |\n' - '| | *x* in *s* ' - '| |\n' - '+----------------------------+----------------------------------+------------+\n' - '\n' - 'Sequences of the same type also support comparisons. In ' - 'particular,\n' - 'tuples and lists are compared lexicographically by comparing\n' - 'corresponding elements. This means that to compare equal, every\n' - 'element must compare equal and the two sequences must be of the ' - 'same\n' - 'type and have the same length. (For full details see ' - 'Comparisons in\n' - 'the language reference.)\n' - '\n' - 'Forward and reversed iterators over mutable sequences access ' - 'values\n' - 'using an index. That index will continue to march forward (or\n' - 'backward) even if the underlying sequence is mutated. The ' - 'iterator\n' - 'terminates only when an "IndexError" or a "StopIteration" is\n' - 'encountered (or when the index drops below zero).\n' - '\n' - 'Notes:\n' - '\n' - '1. While the "in" and "not in" operations are used only for ' - 'simple\n' - ' containment testing in the general case, some specialised ' - 'sequences\n' - ' (such as "str", "bytes" and "bytearray") also use them for\n' - ' subsequence testing:\n' - '\n' - ' >>> "gg" in "eggs"\n' - ' True\n' - '\n' - '2. Values of *n* less than "0" are treated as "0" (which yields ' - 'an\n' - ' empty sequence of the same type as *s*). 
Note that items in ' - 'the\n' - ' sequence *s* are not copied; they are referenced multiple ' - 'times.\n' - ' This often haunts new Python programmers; consider:\n' - '\n' - ' >>> lists = [[]] * 3\n' - ' >>> lists\n' - ' [[], [], []]\n' - ' >>> lists[0].append(3)\n' - ' >>> lists\n' - ' [[3], [3], [3]]\n' - '\n' - ' What has happened is that "[[]]" is a one-element list ' - 'containing\n' - ' an empty list, so all three elements of "[[]] * 3" are ' - 'references\n' - ' to this single empty list. Modifying any of the elements of\n' - ' "lists" modifies this single list. You can create a list of\n' - ' different lists this way:\n' - '\n' - ' >>> lists = [[] for i in range(3)]\n' - ' >>> lists[0].append(3)\n' - ' >>> lists[1].append(5)\n' - ' >>> lists[2].append(7)\n' - ' >>> lists\n' - ' [[3], [5], [7]]\n' - '\n' - ' Further explanation is available in the FAQ entry How do I ' - 'create a\n' - ' multidimensional list?.\n' - '\n' - '3. If *i* or *j* is negative, the index is relative to the end ' - 'of\n' - ' sequence *s*: "len(s) + i" or "len(s) + j" is substituted. ' - 'But\n' - ' note that "-0" is still "0".\n' - '\n' - '4. The slice of *s* from *i* to *j* is defined as the sequence ' - 'of\n' - ' items with index *k* such that "i <= k < j". If *i* or *j* ' - 'is\n' - ' greater than "len(s)", use "len(s)". If *i* is omitted or ' - '"None",\n' - ' use "0". If *j* is omitted or "None", use "len(s)". If *i* ' - 'is\n' - ' greater than or equal to *j*, the slice is empty.\n' - '\n' - '5. The slice of *s* from *i* to *j* with step *k* is defined as ' - 'the\n' - ' sequence of items with index "x = i + n*k" such that "0 <= n ' - '<\n' - ' (j-i)/k". In other words, the indices are "i", "i+k", ' - '"i+2*k",\n' - ' "i+3*k" and so on, stopping when *j* is reached (but never\n' - ' including *j*). When *k* is positive, *i* and *j* are ' - 'reduced to\n' - ' "len(s)" if they are greater. When *k* is negative, *i* and ' - '*j* are\n' - ' reduced to "len(s) - 1" if they are greater. If *i* or *j* ' - 'are\n' - ' omitted or "None", they become “end” values (which end ' - 'depends on\n' - ' the sign of *k*). Note, *k* cannot be zero. If *k* is ' - '"None", it\n' - ' is treated like "1".\n' - '\n' - '6. Concatenating immutable sequences always results in a new ' - 'object.\n' - ' This means that building up a sequence by repeated ' - 'concatenation\n' - ' will have a quadratic runtime cost in the total sequence ' - 'length.\n' - ' To get a linear runtime cost, you must switch to one of the\n' - ' alternatives below:\n' - '\n' - ' * if concatenating "str" objects, you can build a list and ' - 'use\n' - ' "str.join()" at the end or else write to an "io.StringIO"\n' - ' instance and retrieve its value when complete\n' - '\n' - ' * if concatenating "bytes" objects, you can similarly use\n' - ' "bytes.join()" or "io.BytesIO", or you can do in-place\n' - ' concatenation with a "bytearray" object. "bytearray" ' - 'objects are\n' - ' mutable and have an efficient overallocation mechanism\n' - '\n' - ' * if concatenating "tuple" objects, extend a "list" instead\n' - '\n' - ' * for other types, investigate the relevant class ' - 'documentation\n' - '\n' - '7. Some sequence types (such as "range") only support item ' - 'sequences\n' - ' that follow specific patterns, and hence don’t support ' - 'sequence\n' - ' concatenation or repetition.\n' - '\n' - '8. "index" raises "ValueError" when *x* is not found in *s*. Not ' - 'all\n' - ' implementations support passing the additional arguments *i* ' - 'and\n' - ' *j*. 
These arguments allow efficient searching of subsections ' - 'of\n' - ' the sequence. Passing the extra arguments is roughly ' - 'equivalent to\n' - ' using "s[i:j].index(x)", only without copying any data and ' - 'with the\n' - ' returned index being relative to the start of the sequence ' - 'rather\n' - ' than the start of the slice.\n' - '\n' - '\n' - 'Immutable Sequence Types\n' - '========================\n' - '\n' - 'The only operation that immutable sequence types generally ' - 'implement\n' - 'that is not also implemented by mutable sequence types is ' - 'support for\n' - 'the "hash()" built-in.\n' - '\n' - 'This support allows immutable sequences, such as "tuple" ' - 'instances, to\n' - 'be used as "dict" keys and stored in "set" and "frozenset" ' - 'instances.\n' - '\n' - 'Attempting to hash an immutable sequence that contains ' - 'unhashable\n' - 'values will result in "TypeError".\n' - '\n' - '\n' - 'Mutable Sequence Types\n' - '======================\n' - '\n' - 'The operations in the following table are defined on mutable ' - 'sequence\n' - 'types. The "collections.abc.MutableSequence" ABC is provided to ' - 'make\n' - 'it easier to correctly implement these operations on custom ' - 'sequence\n' - 'types.\n' - '\n' - 'In the table *s* is an instance of a mutable sequence type, *t* ' - 'is any\n' - 'iterable object and *x* is an arbitrary object that meets any ' - 'type and\n' - 'value restrictions imposed by *s* (for example, "bytearray" ' - 'only\n' - 'accepts integers that meet the value restriction "0 <= x <= ' - '255").\n' - '\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| Operation | ' - 'Result | Notes |\n' - '|================================|==================================|=======================|\n' - '| "s[i] = x" | item *i* of *s* is replaced ' - 'by | |\n' - '| | ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j] = t" | slice of *s* from *i* to *j* ' - 'is | |\n' - '| | replaced by the contents of ' - 'the | |\n' - '| | iterable ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j]" | same as "s[i:j] = ' - '[]" | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j:k] = t" | the elements of "s[i:j:k]" ' - 'are | (1) |\n' - '| | replaced by those of ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j:k]" | removes the elements ' - 'of | |\n' - '| | "s[i:j:k]" from the ' - 'list | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.append(x)" | appends *x* to the end of ' - 'the | |\n' - '| | sequence (same ' - 'as | |\n' - '| | "s[len(s):len(s)] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.clear()" | removes all items from *s* ' - '(same | (5) |\n' - '| | as "del ' - 's[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.copy()" | creates a shallow copy of ' - '*s* | (5) |\n' - '| | (same as ' - '"s[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.extend(t)" or "s += t" | extends *s* with the contents ' - 'of | |\n' - '| | *t* 
(for the most part the ' - 'same | |\n' - '| | as "s[len(s):len(s)] = ' - 't") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s *= n" | updates *s* with its ' - 'contents | (6) |\n' - '| | repeated *n* ' - 'times | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.insert(i, x)" | inserts *x* into *s* at ' - 'the | |\n' - '| | index given by *i* (same ' - 'as | |\n' - '| | "s[i:i] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.pop()" or "s.pop(i)" | retrieves the item at *i* ' - 'and | (2) |\n' - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | removes the first item from ' - '*s* | (3) |\n' - '| | where "s[i]" is equal to ' - '*x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.reverse()" | reverses the items of *s* ' - 'in | (4) |\n' - '| | ' - 'place | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '\n' - 'Notes:\n' - '\n' - '1. If *k* is not equal to "1", *t* must have the same length as ' - 'the\n' - ' slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that by ' - 'default the\n' - ' last item is removed and returned.\n' - '\n' - '3. "remove()" raises "ValueError" when *x* is not found in *s*.\n' - '\n' - '4. The "reverse()" method modifies the sequence in place for ' - 'economy\n' - ' of space when reversing a large sequence. To remind users ' - 'that it\n' - ' operates by side effect, it does not return the reversed ' - 'sequence.\n' - '\n' - '5. "clear()" and "copy()" are included for consistency with the\n' - ' interfaces of mutable containers that don’t support slicing\n' - ' operations (such as "dict" and "set"). "copy()" is not part ' - 'of the\n' - ' "collections.abc.MutableSequence" ABC, but most concrete ' - 'mutable\n' - ' sequence classes provide it.\n' - '\n' - ' Added in version 3.3: "clear()" and "copy()" methods.\n' - '\n' - '6. The value *n* is an integer, or an object implementing\n' - ' "__index__()". Zero and negative values of *n* clear the ' - 'sequence.\n' - ' Items in the sequence are not copied; they are referenced ' - 'multiple\n' - ' times, as explained for "s * n" under Common Sequence ' - 'Operations.\n' - '\n' - '\n' - 'Lists\n' - '=====\n' - '\n' - 'Lists are mutable sequences, typically used to store collections ' - 'of\n' - 'homogeneous items (where the precise degree of similarity will ' - 'vary by\n' - 'application).\n' - '\n' - 'class list([iterable])\n' - '\n' - ' Lists may be constructed in several ways:\n' - '\n' - ' * Using a pair of square brackets to denote the empty list: ' - '"[]"\n' - '\n' - ' * Using square brackets, separating items with commas: "[a]", ' - '"[a,\n' - ' b, c]"\n' - '\n' - ' * Using a list comprehension: "[x for x in iterable]"\n' - '\n' - ' * Using the type constructor: "list()" or "list(iterable)"\n' - '\n' - ' The constructor builds a list whose items are the same and in ' - 'the\n' - ' same order as *iterable*’s items. *iterable* may be either ' - 'a\n' - ' sequence, a container that supports iteration, or an ' - 'iterator\n' - ' object. If *iterable* is already a list, a copy is made and\n' - ' returned, similar to "iterable[:]". 
For example, ' - '"list(\'abc\')"\n' - ' returns "[\'a\', \'b\', \'c\']" and "list( (1, 2, 3) )" ' - 'returns "[1, 2,\n' - ' 3]". If no argument is given, the constructor creates a new ' - 'empty\n' - ' list, "[]".\n' - '\n' - ' Many other operations also produce lists, including the ' - '"sorted()"\n' - ' built-in.\n' - '\n' - ' Lists implement all of the common and mutable sequence ' - 'operations.\n' - ' Lists also provide the following additional method:\n' - '\n' - ' sort(*, key=None, reverse=False)\n' - '\n' - ' This method sorts the list in place, using only "<" ' - 'comparisons\n' - ' between items. Exceptions are not suppressed - if any ' - 'comparison\n' - ' operations fail, the entire sort operation will fail (and ' - 'the\n' - ' list will likely be left in a partially modified state).\n' - '\n' - ' "sort()" accepts two arguments that can only be passed by\n' - ' keyword (keyword-only arguments):\n' - '\n' - ' *key* specifies a function of one argument that is used ' - 'to\n' - ' extract a comparison key from each list element (for ' - 'example,\n' - ' "key=str.lower"). The key corresponding to each item in ' - 'the list\n' - ' is calculated once and then used for the entire sorting ' - 'process.\n' - ' The default value of "None" means that list items are ' - 'sorted\n' - ' directly without calculating a separate key value.\n' - '\n' - ' The "functools.cmp_to_key()" utility is available to ' - 'convert a\n' - ' 2.x style *cmp* function to a *key* function.\n' - '\n' - ' *reverse* is a boolean value. If set to "True", then the ' - 'list\n' - ' elements are sorted as if each comparison were reversed.\n' - '\n' - ' This method modifies the sequence in place for economy of ' - 'space\n' - ' when sorting a large sequence. To remind users that it ' - 'operates\n' - ' by side effect, it does not return the sorted sequence ' - '(use\n' - ' "sorted()" to explicitly request a new sorted list ' - 'instance).\n' - '\n' - ' The "sort()" method is guaranteed to be stable. A sort ' - 'is\n' - ' stable if it guarantees not to change the relative order ' - 'of\n' - ' elements that compare equal — this is helpful for sorting ' - 'in\n' - ' multiple passes (for example, sort by department, then by ' - 'salary\n' - ' grade).\n' - '\n' - ' For sorting examples and a brief sorting tutorial, see ' - 'Sorting\n' - ' Techniques.\n' - '\n' - ' **CPython implementation detail:** While a list is being ' - 'sorted,\n' - ' the effect of attempting to mutate, or even inspect, the ' - 'list is\n' - ' undefined. The C implementation of Python makes the list ' - 'appear\n' - ' empty for the duration, and raises "ValueError" if it can ' - 'detect\n' - ' that the list has been mutated during a sort.\n' - '\n' - '\n' - 'Tuples\n' - '======\n' - '\n' - 'Tuples are immutable sequences, typically used to store ' - 'collections of\n' - 'heterogeneous data (such as the 2-tuples produced by the ' - '"enumerate()"\n' - 'built-in). 
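A minimal sketch of "sort()" with *key* and *reverse*, using an illustrative word list:

   words = ['banana', 'Apple', 'cherry']
   words.sort(key=str.lower)            # case-insensitive comparison key
   print(words)                         # ['Apple', 'banana', 'cherry']
   words.sort(key=len, reverse=True)    # stable: 'banana' stays before 'cherry'
   print(words)                         # ['banana', 'cherry', 'Apple']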
Tuples are also used for cases where an immutable ' - 'sequence\n' - 'of homogeneous data is needed (such as allowing storage in a ' - '"set" or\n' - '"dict" instance).\n' - '\n' - 'class tuple([iterable])\n' - '\n' - ' Tuples may be constructed in a number of ways:\n' - '\n' - ' * Using a pair of parentheses to denote the empty tuple: ' - '"()"\n' - '\n' - ' * Using a trailing comma for a singleton tuple: "a," or ' - '"(a,)"\n' - '\n' - ' * Separating items with commas: "a, b, c" or "(a, b, c)"\n' - '\n' - ' * Using the "tuple()" built-in: "tuple()" or ' - '"tuple(iterable)"\n' - '\n' - ' The constructor builds a tuple whose items are the same and ' - 'in the\n' - ' same order as *iterable*’s items. *iterable* may be either ' - 'a\n' - ' sequence, a container that supports iteration, or an ' - 'iterator\n' - ' object. If *iterable* is already a tuple, it is returned\n' - ' unchanged. For example, "tuple(\'abc\')" returns "(\'a\', ' - '\'b\', \'c\')"\n' - ' and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". If no argument ' - 'is\n' - ' given, the constructor creates a new empty tuple, "()".\n' - '\n' - ' Note that it is actually the comma which makes a tuple, not ' - 'the\n' - ' parentheses. The parentheses are optional, except in the ' - 'empty\n' - ' tuple case, or when they are needed to avoid syntactic ' - 'ambiguity.\n' - ' For example, "f(a, b, c)" is a function call with three ' - 'arguments,\n' - ' while "f((a, b, c))" is a function call with a 3-tuple as the ' - 'sole\n' - ' argument.\n' - '\n' - ' Tuples implement all of the common sequence operations.\n' - '\n' - 'For heterogeneous collections of data where access by name is ' - 'clearer\n' - 'than access by index, "collections.namedtuple()" may be a more\n' - 'appropriate choice than a simple tuple object.\n' - '\n' - '\n' - 'Ranges\n' - '======\n' - '\n' - 'The "range" type represents an immutable sequence of numbers and ' - 'is\n' - 'commonly used for looping a specific number of times in "for" ' - 'loops.\n' - '\n' - 'class range(stop)\n' - 'class range(start, stop[, step])\n' - '\n' - ' The arguments to the range constructor must be integers ' - '(either\n' - ' built-in "int" or any object that implements the ' - '"__index__()"\n' - ' special method). If the *step* argument is omitted, it ' - 'defaults to\n' - ' "1". If the *start* argument is omitted, it defaults to "0". ' - 'If\n' - ' *step* is zero, "ValueError" is raised.\n' - '\n' - ' For a positive *step*, the contents of a range "r" are ' - 'determined\n' - ' by the formula "r[i] = start + step*i" where "i >= 0" and ' - '"r[i] <\n' - ' stop".\n' - '\n' - ' For a negative *step*, the contents of the range are still\n' - ' determined by the formula "r[i] = start + step*i", but the\n' - ' constraints are "i >= 0" and "r[i] > stop".\n' - '\n' - ' A range object will be empty if "r[0]" does not meet the ' - 'value\n' - ' constraint. 
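A small sketch of the rule that the comma, not the parentheses, makes the tuple (names are illustrative):

   t = 1, 2, 3            # parentheses are optional here
   single = (1,)          # the trailing comma creates a one-element tuple
   not_a_tuple = (1)      # just a parenthesized integer
   print(type(t), type(single), type(not_a_tuple))
   # <class 'tuple'> <class 'tuple'> <class 'int'>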
Ranges do support negative indices, but these ' - 'are\n' - ' interpreted as indexing from the end of the sequence ' - 'determined by\n' - ' the positive indices.\n' - '\n' - ' Ranges containing absolute values larger than "sys.maxsize" ' - 'are\n' - ' permitted but some features (such as "len()") may raise\n' - ' "OverflowError".\n' - '\n' - ' Range examples:\n' - '\n' - ' >>> list(range(10))\n' - ' [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n' - ' >>> list(range(1, 11))\n' - ' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n' - ' >>> list(range(0, 30, 5))\n' - ' [0, 5, 10, 15, 20, 25]\n' - ' >>> list(range(0, 10, 3))\n' - ' [0, 3, 6, 9]\n' - ' >>> list(range(0, -10, -1))\n' - ' [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n' - ' >>> list(range(0))\n' - ' []\n' - ' >>> list(range(1, 0))\n' - ' []\n' - '\n' - ' Ranges implement all of the common sequence operations ' - 'except\n' - ' concatenation and repetition (due to the fact that range ' - 'objects\n' - ' can only represent sequences that follow a strict pattern ' - 'and\n' - ' repetition and concatenation will usually violate that ' - 'pattern).\n' - '\n' - ' start\n' - '\n' - ' The value of the *start* parameter (or "0" if the ' - 'parameter was\n' - ' not supplied)\n' - '\n' - ' stop\n' - '\n' - ' The value of the *stop* parameter\n' - '\n' - ' step\n' - '\n' - ' The value of the *step* parameter (or "1" if the parameter ' - 'was\n' - ' not supplied)\n' - '\n' - 'The advantage of the "range" type over a regular "list" or ' - '"tuple" is\n' - 'that a "range" object will always take the same (small) amount ' - 'of\n' - 'memory, no matter the size of the range it represents (as it ' - 'only\n' - 'stores the "start", "stop" and "step" values, calculating ' - 'individual\n' - 'items and subranges as needed).\n' - '\n' - 'Range objects implement the "collections.abc.Sequence" ABC, and\n' - 'provide features such as containment tests, element index ' - 'lookup,\n' - 'slicing and support for negative indices (see Sequence Types — ' - 'list,\n' - 'tuple, range):\n' - '\n' - '>>> r = range(0, 20, 2)\n' - '>>> r\n' - 'range(0, 20, 2)\n' - '>>> 11 in r\n' - 'False\n' - '>>> 10 in r\n' - 'True\n' - '>>> r.index(10)\n' - '5\n' - '>>> r[5]\n' - '10\n' - '>>> r[:5]\n' - 'range(0, 10, 2)\n' - '>>> r[-1]\n' - '18\n' - '\n' - 'Testing range objects for equality with "==" and "!=" compares ' - 'them as\n' - 'sequences. That is, two range objects are considered equal if ' - 'they\n' - 'represent the same sequence of values. (Note that two range ' - 'objects\n' - 'that compare equal might have different "start", "stop" and ' - '"step"\n' - 'attributes, for example "range(0) == range(2, 1, 3)" or ' - '"range(0, 3,\n' - '2) == range(0, 4, 2)".)\n' - '\n' - 'Changed in version 3.2: Implement the Sequence ABC. Support ' - 'slicing\n' - 'and negative indices. Test "int" objects for membership in ' - 'constant\n' - 'time instead of iterating through all items.\n' - '\n' - 'Changed in version 3.3: Define ‘==’ and ‘!=’ to compare range ' - 'objects\n' - 'based on the sequence of values they define (instead of ' - 'comparing\n' - 'based on object identity).Added the "start", "stop" and "step"\n' - 'attributes.\n' - '\n' - 'See also:\n' - '\n' - ' * The linspace recipe shows how to implement a lazy version of ' - 'range\n' - ' suitable for floating-point applications.\n', - 'typesseq-mutable': 'Mutable Sequence Types\n' - '**********************\n' - '\n' - 'The operations in the following table are defined on ' - 'mutable sequence\n' - 'types. 
The "collections.abc.MutableSequence" ABC is ' - 'provided to make\n' - 'it easier to correctly implement these operations on ' - 'custom sequence\n' - 'types.\n' - '\n' - 'In the table *s* is an instance of a mutable sequence ' - 'type, *t* is any\n' - 'iterable object and *x* is an arbitrary object that ' - 'meets any type and\n' - 'value restrictions imposed by *s* (for example, ' - '"bytearray" only\n' - 'accepts integers that meet the value restriction "0 <= x ' - '<= 255").\n' - '\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| Operation | ' - 'Result | Notes ' - '|\n' - '|================================|==================================|=======================|\n' - '| "s[i] = x" | item *i* of *s* is ' - 'replaced by | |\n' - '| | ' - '*x* | ' - '|\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j] = t" | slice of *s* from *i* ' - 'to *j* is | |\n' - '| | replaced by the ' - 'contents of the | |\n' - '| | iterable ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j]" | same as "s[i:j] = ' - '[]" | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s[i:j:k] = t" | the elements of ' - '"s[i:j:k]" are | (1) |\n' - '| | replaced by those of ' - '*t* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "del s[i:j:k]" | removes the elements ' - 'of | |\n' - '| | "s[i:j:k]" from the ' - 'list | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.append(x)" | appends *x* to the ' - 'end of the | |\n' - '| | sequence (same ' - 'as | |\n' - '| | "s[len(s):len(s)] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.clear()" | removes all items ' - 'from *s* (same | (5) |\n' - '| | as "del ' - 's[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.copy()" | creates a shallow ' - 'copy of *s* | (5) |\n' - '| | (same as ' - '"s[:]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.extend(t)" or "s += t" | extends *s* with the ' - 'contents of | |\n' - '| | *t* (for the most ' - 'part the same | |\n' - '| | as "s[len(s):len(s)] ' - '= t") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s *= n" | updates *s* with its ' - 'contents | (6) |\n' - '| | repeated *n* ' - 'times | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.insert(i, x)" | inserts *x* into *s* ' - 'at the | |\n' - '| | index given by *i* ' - '(same as | |\n' - '| | "s[i:i] = ' - '[x]") | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.pop()" or "s.pop(i)" | retrieves the item at ' - '*i* and | (2) |\n' - '| | also removes it from ' - '*s* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '| "s.remove(x)" | removes the first ' - 'item from *s* | (3) |\n' - '| | where "s[i]" is equal ' - 'to *x* | |\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - 
'| "s.reverse()" | reverses the items of ' - '*s* in | (4) |\n' - '| | ' - 'place | ' - '|\n' - '+--------------------------------+----------------------------------+-----------------------+\n' - '\n' - 'Notes:\n' - '\n' - '1. If *k* is not equal to "1", *t* must have the same ' - 'length as the\n' - ' slice it is replacing.\n' - '\n' - '2. The optional argument *i* defaults to "-1", so that ' - 'by default the\n' - ' last item is removed and returned.\n' - '\n' - '3. "remove()" raises "ValueError" when *x* is not found ' - 'in *s*.\n' - '\n' - '4. The "reverse()" method modifies the sequence in place ' - 'for economy\n' - ' of space when reversing a large sequence. To remind ' - 'users that it\n' - ' operates by side effect, it does not return the ' - 'reversed sequence.\n' - '\n' - '5. "clear()" and "copy()" are included for consistency ' - 'with the\n' - ' interfaces of mutable containers that don’t support ' - 'slicing\n' - ' operations (such as "dict" and "set"). "copy()" is ' - 'not part of the\n' - ' "collections.abc.MutableSequence" ABC, but most ' - 'concrete mutable\n' - ' sequence classes provide it.\n' - '\n' - ' Added in version 3.3: "clear()" and "copy()" ' - 'methods.\n' - '\n' - '6. The value *n* is an integer, or an object ' - 'implementing\n' - ' "__index__()". Zero and negative values of *n* clear ' - 'the sequence.\n' - ' Items in the sequence are not copied; they are ' - 'referenced multiple\n' - ' times, as explained for "s * n" under Common Sequence ' - 'Operations.\n', - 'unary': 'Unary arithmetic and bitwise operations\n' - '***************************************\n' - '\n' - 'All unary arithmetic and bitwise operations have the same ' - 'priority:\n' - '\n' - ' u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n' - '\n' - 'The unary "-" (minus) operator yields the negation of its numeric\n' - 'argument; the operation can be overridden with the "__neg__()" ' - 'special\n' - 'method.\n' - '\n' - 'The unary "+" (plus) operator yields its numeric argument ' - 'unchanged;\n' - 'the operation can be overridden with the "__pos__()" special ' - 'method.\n' - '\n' - 'The unary "~" (invert) operator yields the bitwise inversion of ' - 'its\n' - 'integer argument. The bitwise inversion of "x" is defined as\n' - '"-(x+1)". It only applies to integral numbers or to custom ' - 'objects\n' - 'that override the "__invert__()" special method.\n' - '\n' - 'In all three cases, if the argument does not have the proper type, ' - 'a\n' - '"TypeError" exception is raised.\n', - 'while': 'The "while" statement\n' - '*********************\n' - '\n' - 'The "while" statement is used for repeated execution as long as an\n' - 'expression is true:\n' - '\n' - ' while_stmt ::= "while" assignment_expression ":" suite\n' - ' ["else" ":" suite]\n' - '\n' - 'This repeatedly tests the expression and, if it is true, executes ' - 'the\n' - 'first suite; if the expression is false (which may be the first ' - 'time\n' - 'it is tested) the suite of the "else" clause, if present, is ' - 'executed\n' - 'and the loop terminates.\n' - '\n' - 'A "break" statement executed in the first suite terminates the ' - 'loop\n' - 'without executing the "else" clause’s suite. 
A "continue" ' - 'statement\n' - 'executed in the first suite skips the rest of the suite and goes ' - 'back\n' - 'to testing the expression.\n', - 'with': 'The "with" statement\n' - '********************\n' - '\n' - 'The "with" statement is used to wrap the execution of a block with\n' - 'methods defined by a context manager (see section With Statement\n' - 'Context Managers). This allows common "try"…"except"…"finally" ' - 'usage\n' - 'patterns to be encapsulated for convenient reuse.\n' - '\n' - ' with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | ' - 'with_stmt_contents ) ":" suite\n' - ' with_stmt_contents ::= with_item ("," with_item)*\n' - ' with_item ::= expression ["as" target]\n' - '\n' - 'The execution of the "with" statement with one “item” proceeds as\n' - 'follows:\n' - '\n' - '1. The context expression (the expression given in the "with_item") ' - 'is\n' - ' evaluated to obtain a context manager.\n' - '\n' - '2. The context manager’s "__enter__()" is loaded for later use.\n' - '\n' - '3. The context manager’s "__exit__()" is loaded for later use.\n' - '\n' - '4. The context manager’s "__enter__()" method is invoked.\n' - '\n' - '5. If a target was included in the "with" statement, the return ' - 'value\n' - ' from "__enter__()" is assigned to it.\n' - '\n' - ' Note:\n' - '\n' - ' The "with" statement guarantees that if the "__enter__()" ' - 'method\n' - ' returns without an error, then "__exit__()" will always be\n' - ' called. Thus, if an error occurs during the assignment to the\n' - ' target list, it will be treated the same as an error occurring\n' - ' within the suite would be. See step 7 below.\n' - '\n' - '6. The suite is executed.\n' - '\n' - '7. The context manager’s "__exit__()" method is invoked. If an\n' - ' exception caused the suite to be exited, its type, value, and\n' - ' traceback are passed as arguments to "__exit__()". Otherwise, ' - 'three\n' - ' "None" arguments are supplied.\n' - '\n' - ' If the suite was exited due to an exception, and the return ' - 'value\n' - ' from the "__exit__()" method was false, the exception is ' - 'reraised.\n' - ' If the return value was true, the exception is suppressed, and\n' - ' execution continues with the statement following the "with"\n' - ' statement.\n' - '\n' - ' If the suite was exited for any reason other than an exception, ' - 'the\n' - ' return value from "__exit__()" is ignored, and execution ' - 'proceeds\n' - ' at the normal location for the kind of exit that was taken.\n' - '\n' - 'The following code:\n' - '\n' - ' with EXPRESSION as TARGET:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' manager = (EXPRESSION)\n' - ' enter = type(manager).__enter__\n' - ' exit = type(manager).__exit__\n' - ' value = enter(manager)\n' - ' hit_except = False\n' - '\n' - ' try:\n' - ' TARGET = value\n' - ' SUITE\n' - ' except:\n' - ' hit_except = True\n' - ' if not exit(manager, *sys.exc_info()):\n' - ' raise\n' - ' finally:\n' - ' if not hit_except:\n' - ' exit(manager, None, None, None)\n' - '\n' - 'With more than one item, the context managers are processed as if\n' - 'multiple "with" statements were nested:\n' - '\n' - ' with A() as a, B() as b:\n' - ' SUITE\n' - '\n' - 'is semantically equivalent to:\n' - '\n' - ' with A() as a:\n' - ' with B() as b:\n' - ' SUITE\n' - '\n' - 'You can also write multi-item context managers in multiple lines if\n' - 'the items are surrounded by parentheses. 
For example:\n' - '\n' - ' with (\n' - ' A() as a,\n' - ' B() as b,\n' - ' ):\n' - ' SUITE\n' - '\n' - 'Changed in version 3.1: Support for multiple context expressions.\n' - '\n' - 'Changed in version 3.10: Support for using grouping parentheses to\n' - 'break the statement in multiple lines.\n' - '\n' - 'See also:\n' - '\n' - ' **PEP 343** - The “with” statement\n' - ' The specification, background, and examples for the Python ' - '"with"\n' - ' statement.\n', - 'yield': 'The "yield" statement\n' - '*********************\n' - '\n' - ' yield_stmt ::= yield_expression\n' - '\n' - 'A "yield" statement is semantically equivalent to a yield ' - 'expression.\n' - 'The "yield" statement can be used to omit the parentheses that ' - 'would\n' - 'otherwise be required in the equivalent yield expression ' - 'statement.\n' - 'For example, the yield statements\n' - '\n' - ' yield \n' - ' yield from \n' - '\n' - 'are equivalent to the yield expression statements\n' - '\n' - ' (yield )\n' - ' (yield from )\n' - '\n' - 'Yield expressions and statements are only used when defining a\n' - '*generator* function, and are only used in the body of the ' - 'generator\n' - 'function. Using "yield" in a function definition is sufficient to\n' - 'cause that definition to create a generator function instead of a\n' - 'normal function.\n' - '\n' - 'For full details of "yield" semantics, refer to the Yield ' - 'expressions\n' - 'section.\n'} + +topics = { + 'assert': r'''The "assert" statement +********************** + +Assert statements are a convenient way to insert debugging assertions +into a program: + + assert_stmt ::= "assert" expression ["," expression] + +The simple form, "assert expression", is equivalent to + + if __debug__: + if not expression: raise AssertionError + +The extended form, "assert expression1, expression2", is equivalent to + + if __debug__: + if not expression1: raise AssertionError(expression2) + +These equivalences assume that "__debug__" and "AssertionError" refer +to the built-in variables with those names. In the current +implementation, the built-in variable "__debug__" is "True" under +normal circumstances, "False" when optimization is requested (command +line option "-O"). The current code generator emits no code for an +"assert" statement when optimization is requested at compile time. +Note that it is unnecessary to include the source code for the +expression that failed in the error message; it will be displayed as +part of the stack trace. + +Assignments to "__debug__" are illegal. The value for the built-in +variable is determined when the interpreter starts. +''', + 'assignment': r'''Assignment statements +********************* + +Assignment statements are used to (re)bind names to values and to +modify attributes or items of mutable objects: + + assignment_stmt ::= (target_list "=")+ (starred_expression | yield_expression) + target_list ::= target ("," target)* [","] + target ::= identifier + | "(" [target_list] ")" + | "[" [target_list] "]" + | attributeref + | subscription + | slicing + | "*" target + +(See section Primaries for the syntax definitions for *attributeref*, +*subscription*, and *slicing*.) + +An assignment statement evaluates the expression list (remember that +this can be a single expression or a comma-separated list, the latter +yielding a tuple) and assigns the single resulting object to each of +the target lists, from left to right. + +Assignment is defined recursively depending on the form of the target +(list). 
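A minimal sketch of the "assert" form described above; the function and message are illustrative, and the check is removed under "python -O":

   def average(values):
       # roughly: if __debug__ and not values: raise AssertionError(...)
       assert values, 'values must be non-empty'
       return sum(values) / len(values)

   print(average([1, 2, 3]))    # 2.0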
When a target is part of a mutable object (an attribute +reference, subscription or slicing), the mutable object must +ultimately perform the assignment and decide about its validity, and +may raise an exception if the assignment is unacceptable. The rules +observed by various types and the exceptions raised are given with the +definition of the object types (see section The standard type +hierarchy). + +Assignment of an object to a target list, optionally enclosed in +parentheses or square brackets, is recursively defined as follows. + +* If the target list is a single target with no trailing comma, + optionally in parentheses, the object is assigned to that target. + +* Else: + + * If the target list contains one target prefixed with an asterisk, + called a “starred” target: The object must be an iterable with at + least as many items as there are targets in the target list, minus + one. The first items of the iterable are assigned, from left to + right, to the targets before the starred target. The final items + of the iterable are assigned to the targets after the starred + target. A list of the remaining items in the iterable is then + assigned to the starred target (the list can be empty). + + * Else: The object must be an iterable with the same number of items + as there are targets in the target list, and the items are + assigned, from left to right, to the corresponding targets. + +Assignment of an object to a single target is recursively defined as +follows. + +* If the target is an identifier (name): + + * If the name does not occur in a "global" or "nonlocal" statement + in the current code block: the name is bound to the object in the + current local namespace. + + * Otherwise: the name is bound to the object in the global namespace + or the outer namespace determined by "nonlocal", respectively. + + The name is rebound if it was already bound. This may cause the + reference count for the object previously bound to the name to reach + zero, causing the object to be deallocated and its destructor (if it + has one) to be called. + +* If the target is an attribute reference: The primary expression in + the reference is evaluated. It should yield an object with + assignable attributes; if this is not the case, "TypeError" is + raised. That object is then asked to assign the assigned object to + the given attribute; if it cannot perform the assignment, it raises + an exception (usually but not necessarily "AttributeError"). + + Note: If the object is a class instance and the attribute reference + occurs on both sides of the assignment operator, the right-hand side + expression, "a.x" can access either an instance attribute or (if no + instance attribute exists) a class attribute. The left-hand side + target "a.x" is always set as an instance attribute, creating it if + necessary. Thus, the two occurrences of "a.x" do not necessarily + refer to the same attribute: if the right-hand side expression + refers to a class attribute, the left-hand side creates a new + instance attribute as the target of the assignment: + + class Cls: + x = 3 # class variable + inst = Cls() + inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3 + + This description does not necessarily apply to descriptor + attributes, such as properties created with "property()". + +* If the target is a subscription: The primary expression in the + reference is evaluated. It should yield either a mutable sequence + object (such as a list) or a mapping object (such as a dictionary). 
+ Next, the subscript expression is evaluated. + + If the primary is a mutable sequence object (such as a list), the + subscript must yield an integer. If it is negative, the sequence’s + length is added to it. The resulting value must be a nonnegative + integer less than the sequence’s length, and the sequence is asked + to assign the assigned object to its item with that index. If the + index is out of range, "IndexError" is raised (assignment to a + subscripted sequence cannot add new items to a list). + + If the primary is a mapping object (such as a dictionary), the + subscript must have a type compatible with the mapping’s key type, + and the mapping is then asked to create a key/value pair which maps + the subscript to the assigned object. This can either replace an + existing key/value pair with the same key value, or insert a new + key/value pair (if no key with the same value existed). + + For user-defined objects, the "__setitem__()" method is called with + appropriate arguments. + +* If the target is a slicing: The primary expression in the reference + is evaluated. It should yield a mutable sequence object (such as a + list). The assigned object should be a sequence object of the same + type. Next, the lower and upper bound expressions are evaluated, + insofar they are present; defaults are zero and the sequence’s + length. The bounds should evaluate to integers. If either bound is + negative, the sequence’s length is added to it. The resulting + bounds are clipped to lie between zero and the sequence’s length, + inclusive. Finally, the sequence object is asked to replace the + slice with the items of the assigned sequence. The length of the + slice may be different from the length of the assigned sequence, + thus changing the length of the target sequence, if the target + sequence allows it. + +**CPython implementation detail:** In the current implementation, the +syntax for targets is taken to be the same as for expressions, and +invalid syntax is rejected during the code generation phase, causing +less detailed error messages. + +Although the definition of assignment implies that overlaps between +the left-hand side and the right-hand side are ‘simultaneous’ (for +example "a, b = b, a" swaps two variables), overlaps *within* the +collection of assigned-to variables occur left-to-right, sometimes +resulting in confusion. For instance, the following program prints +"[0, 2]": + + x = [0, 1] + i = 0 + i, x[i] = 1, 2 # i is updated, then x[i] is updated + print(x) + +See also: + + **PEP 3132** - Extended Iterable Unpacking + The specification for the "*target" feature. + + +Augmented assignment statements +=============================== + +Augmented assignment is the combination, in a single statement, of a +binary operation and an assignment statement: + + augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression) + augtarget ::= identifier | attributeref | subscription | slicing + augop ::= "+=" | "-=" | "*=" | "@=" | "/=" | "//=" | "%=" | "**=" + | ">>=" | "<<=" | "&=" | "^=" | "|=" + +(See section Primaries for the syntax definitions of the last three +symbols.) + +An augmented assignment evaluates the target (which, unlike normal +assignment statements, cannot be an unpacking) and the expression +list, performs the binary operation specific to the type of assignment +on the two operands, and assigns the result to the original target. +The target is only evaluated once. 
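A minimal sketch of the in-place behaviour of augmented assignment, contrasted with an immutable type (names are illustrative):

   a = [1, 2]
   b = a
   a += [3]            # list.__iadd__ extends the same object in place
   print(a is b, b)    # True [1, 2, 3]

   t = (1, 2)
   u = t
   t += (3,)           # tuples are immutable: a new object is bound to t
   print(t is u, u)    # False (1, 2)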
+ +An augmented assignment statement like "x += 1" can be rewritten as "x += x + 1" to achieve a similar, but not exactly equal effect. In the +augmented version, "x" is only evaluated once. Also, when possible, +the actual operation is performed *in-place*, meaning that rather than +creating a new object and assigning that to the target, the old object +is modified instead. + +Unlike normal assignments, augmented assignments evaluate the left- +hand side *before* evaluating the right-hand side. For example, "a[i] ++= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and performs +the addition, and lastly, it writes the result back to "a[i]". + +With the exception of assigning to tuples and multiple targets in a +single statement, the assignment done by augmented assignment +statements is handled the same way as normal assignments. Similarly, +with the exception of the possible *in-place* behavior, the binary +operation performed by augmented assignment is the same as the normal +binary operations. + +For targets which are attribute references, the same caveat about +class and instance attributes applies as for regular assignments. + + +Annotated assignment statements +=============================== + +*Annotation* assignment is the combination, in a single statement, of +a variable or attribute annotation and an optional assignment +statement: + + annotated_assignment_stmt ::= augtarget ":" expression + ["=" (starred_expression | yield_expression)] + +The difference from normal Assignment statements is that only a single +target is allowed. + +The assignment target is considered “simple” if it consists of a +single name that is not enclosed in parentheses. For simple assignment +targets, if in class or module scope, the annotations are gathered in +a lazily evaluated annotation scope. The annotations can be evaluated +using the "__annotations__" attribute of a class or module, or using +the facilities in the "annotationlib" module. + +If the assignment target is not simple (an attribute, subscript node, +or parenthesized name), the annotation is never evaluated. + +If a name is annotated in a function scope, then this name is local +for that scope. Annotations are never evaluated and stored in function +scopes. + +If the right hand side is present, an annotated assignment performs +the actual assignment as if there was no annotation present. If the +right hand side is not present for an expression target, then the +interpreter evaluates the target except for the last "__setitem__()" +or "__setattr__()" call. + +See also: + + **PEP 526** - Syntax for Variable Annotations + The proposal that added syntax for annotating the types of + variables (including class variables and instance variables), + instead of expressing them through comments. + + **PEP 484** - Type hints + The proposal that added the "typing" module to provide a standard + syntax for type annotations that can be used in static analysis + tools and IDEs. + +Changed in version 3.8: Now annotated assignments allow the same +expressions in the right hand side as regular assignments. Previously, +some expressions (like un-parenthesized tuple expressions) caused a +syntax error. + +Changed in version 3.14: Annotations are now lazily evaluated in a +separate annotation scope. If the assignment target is not simple, +annotations are never evaluated. 
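A brief sketch of annotated assignment in a class body; the class and attribute names are only examples:

   class Point:
       x: int          # annotation only: no class attribute "x" is created
       y: int = 0      # annotated assignment: annotation plus a real assignment

   print(hasattr(Point, 'x'))     # False
   print(Point.y)                 # 0
   print(Point.__annotations__)   # {'x': <class 'int'>, 'y': <class 'int'>}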
+''', + 'assignment-expressions': r'''Assignment expressions +********************** + + assignment_expression ::= [identifier ":="] expression + +An assignment expression (sometimes also called a “named expression” +or “walrus”) assigns an "expression" to an "identifier", while also +returning the value of the "expression". + +One common use case is when handling matched regular expressions: + + if matching := pattern.search(data): + do_something(matching) + +Or, when processing a file stream in chunks: + + while chunk := file.read(9000): + process(chunk) + +Assignment expressions must be surrounded by parentheses when used as +expression statements and when used as sub-expressions in slicing, +conditional, lambda, keyword-argument, and comprehension-if +expressions and in "assert", "with", and "assignment" statements. In +all other places where they can be used, parentheses are not required, +including in "if" and "while" statements. + +Added in version 3.8: See **PEP 572** for more details about +assignment expressions. +''', + 'async': r'''Coroutines +********** + +Added in version 3.5. + + +Coroutine function definition +============================= + + async_funcdef ::= [decorators] "async" "def" funcname "(" [parameter_list] ")" + ["->" expression] ":" suite + +Execution of Python coroutines can be suspended and resumed at many +points (see *coroutine*). "await" expressions, "async for" and "async +with" can only be used in the body of a coroutine function. + +Functions defined with "async def" syntax are always coroutine +functions, even if they do not contain "await" or "async" keywords. + +It is a "SyntaxError" to use a "yield from" expression inside the body +of a coroutine function. + +An example of a coroutine function: + + async def func(param1, param2): + do_stuff() + await some_coroutine() + +Changed in version 3.7: "await" and "async" are now keywords; +previously they were only treated as such inside the body of a +coroutine function. + + +The "async for" statement +========================= + + async_for_stmt ::= "async" for_stmt + +An *asynchronous iterable* provides an "__aiter__" method that +directly returns an *asynchronous iterator*, which can call +asynchronous code in its "__anext__" method. + +The "async for" statement allows convenient iteration over +asynchronous iterables. + +The following code: + + async for TARGET in ITER: + SUITE + else: + SUITE2 + +Is semantically equivalent to: + + iter = (ITER) + iter = type(iter).__aiter__(iter) + running = True + + while running: + try: + TARGET = await type(iter).__anext__(iter) + except StopAsyncIteration: + running = False + else: + SUITE + else: + SUITE2 + +See also "__aiter__()" and "__anext__()" for details. + +It is a "SyntaxError" to use an "async for" statement outside the body +of a coroutine function. + + +The "async with" statement +========================== + + async_with_stmt ::= "async" with_stmt + +An *asynchronous context manager* is a *context manager* that is able +to suspend execution in its *enter* and *exit* methods. 
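A runnable sketch of the "async for" protocol, using a minimal asynchronous iterator (class and function names are illustrative):

   import asyncio

   class Countdown:
       def __init__(self, n):
           self.n = n

       def __aiter__(self):           # returns the asynchronous iterator itself
           return self

       async def __anext__(self):     # may await; StopAsyncIteration ends the loop
           if self.n <= 0:
               raise StopAsyncIteration
           await asyncio.sleep(0)
           self.n -= 1
           return self.n + 1

   async def main():
       async for value in Countdown(3):
           print(value)               # 3, 2, 1

   asyncio.run(main())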
+ +The following code: + + async with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + aenter = type(manager).__aenter__ + aexit = type(manager).__aexit__ + value = await aenter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not await aexit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + await aexit(manager, None, None, None) + +See also "__aenter__()" and "__aexit__()" for details. + +It is a "SyntaxError" to use an "async with" statement outside the +body of a coroutine function. + +See also: + + **PEP 492** - Coroutines with async and await syntax + The proposal that made coroutines a proper standalone concept in + Python, and added supporting syntax. +''', + 'atom-identifiers': r'''Identifiers (Names) +******************* + +An identifier occurring as an atom is a name. See section Identifiers +and keywords for lexical definition and section Naming and binding for +documentation of naming and binding. + +When the name is bound to an object, evaluation of the atom yields +that object. When a name is not bound, an attempt to evaluate it +raises a "NameError" exception. + + +Private name mangling +===================== + +When an identifier that textually occurs in a class definition begins +with two or more underscore characters and does not end in two or more +underscores, it is considered a *private name* of that class. + +See also: The class specifications. + +More precisely, private names are transformed to a longer form before +code is generated for them. If the transformed name is longer than +255 characters, implementation-defined truncation may happen. + +The transformation is independent of the syntactical context in which +the identifier is used but only the following private identifiers are +mangled: + +* Any name used as the name of a variable that is assigned or read or + any name of an attribute being accessed. + + The "__name__" attribute of nested functions, classes, and type + aliases is however not mangled. + +* The name of imported modules, e.g., "__spam" in "import __spam". If + the module is part of a package (i.e., its name contains a dot), the + name is *not* mangled, e.g., the "__foo" in "import __foo.bar" is + not mangled. + +* The name of an imported member, e.g., "__f" in "from spam import + __f". + +The transformation rule is defined as follows: + +* The class name, with leading underscores removed and a single + leading underscore inserted, is inserted in front of the identifier, + e.g., the identifier "__spam" occurring in a class named "Foo", + "_Foo" or "__Foo" is transformed to "_Foo__spam". + +* If the class name consists only of underscores, the transformation + is the identity, e.g., the identifier "__spam" occurring in a class + named "_" or "__" is left as is. +''', + 'atom-literals': r'''Literals +******** + +Python supports string and bytes literals and various numeric +literals: + + literal ::= stringliteral | bytesliteral + | integer | floatnumber | imagnumber + +Evaluation of a literal yields an object of the given type (string, +bytes, integer, floating-point number, complex number) with the given +value. The value may be approximated in the case of floating-point +and imaginary (complex) literals. See section Literals for details. + +All literals correspond to immutable data types, and hence the +object’s identity is less important than its value. 
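A short illustration of the name-mangling rule; the class and attribute names are only examples:

   class Foo:
       def __init__(self):
           self.__spam = 1        # stored under the mangled name _Foo__spam

   f = Foo()
   print(f._Foo__spam)            # 1
   print(hasattr(f, '__spam'))    # False: the unmangled name does not exist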
Multiple +evaluations of literals with the same value (either the same +occurrence in the program text or a different occurrence) may obtain +the same object or a different object with the same value. +''', + 'attribute-access': r'''Customizing attribute access +**************************** + +The following methods can be defined to customize the meaning of +attribute access (use of, assignment to, or deletion of "x.name") for +class instances. + +object.__getattr__(self, name) + + Called when the default attribute access fails with an + "AttributeError" (either "__getattribute__()" raises an + "AttributeError" because *name* is not an instance attribute or an + attribute in the class tree for "self"; or "__get__()" of a *name* + property raises "AttributeError"). This method should either + return the (computed) attribute value or raise an "AttributeError" + exception. The "object" class itself does not provide this method. + + Note that if the attribute is found through the normal mechanism, + "__getattr__()" is not called. (This is an intentional asymmetry + between "__getattr__()" and "__setattr__()".) This is done both for + efficiency reasons and because otherwise "__getattr__()" would have + no way to access other attributes of the instance. Note that at + least for instance variables, you can take total control by not + inserting any values in the instance attribute dictionary (but + instead inserting them in another object). See the + "__getattribute__()" method below for a way to actually get total + control over attribute access. + +object.__getattribute__(self, name) + + Called unconditionally to implement attribute accesses for + instances of the class. If the class also defines "__getattr__()", + the latter will not be called unless "__getattribute__()" either + calls it explicitly or raises an "AttributeError". This method + should return the (computed) attribute value or raise an + "AttributeError" exception. In order to avoid infinite recursion in + this method, its implementation should always call the base class + method with the same name to access any attributes it needs, for + example, "object.__getattribute__(self, name)". + + Note: + + This method may still be bypassed when looking up special methods + as the result of implicit invocation via language syntax or + built-in functions. See Special method lookup. + + For certain sensitive attribute accesses, raises an auditing event + "object.__getattr__" with arguments "obj" and "name". + +object.__setattr__(self, name, value) + + Called when an attribute assignment is attempted. This is called + instead of the normal mechanism (i.e. store the value in the + instance dictionary). *name* is the attribute name, *value* is the + value to be assigned to it. + + If "__setattr__()" wants to assign to an instance attribute, it + should call the base class method with the same name, for example, + "object.__setattr__(self, name, value)". + + For certain sensitive attribute assignments, raises an auditing + event "object.__setattr__" with arguments "obj", "name", "value". + +object.__delattr__(self, name) + + Like "__setattr__()" but for attribute deletion instead of + assignment. This should only be implemented if "del obj.name" is + meaningful for the object. + + For certain sensitive attribute deletions, raises an auditing event + "object.__delattr__" with arguments "obj" and "name". + +object.__dir__(self) + + Called when "dir()" is called on the object. An iterable must be + returned. 
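A minimal sketch combining "__getattr__()" and "__setattr__()"; the class is purely illustrative:

   class Record:
       def __init__(self):
           # bypass our own __setattr__ to create the backing dict
           object.__setattr__(self, '_data', {})

       def __getattr__(self, name):         # called only when normal lookup fails
           try:
               return self._data[name]
           except KeyError:
               raise AttributeError(name) from None

       def __setattr__(self, name, value):  # called for every attribute assignment
           self._data[name] = value

   r = Record()
   r.x = 10
   print(r.x)         # 10
   print(r._data)     # {'x': 10}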
"dir()" converts the returned iterable to a list and + sorts it. + + +Customizing module attribute access +=================================== + +Special names "__getattr__" and "__dir__" can be also used to +customize access to module attributes. The "__getattr__" function at +the module level should accept one argument which is the name of an +attribute and return the computed value or raise an "AttributeError". +If an attribute is not found on a module object through the normal +lookup, i.e. "object.__getattribute__()", then "__getattr__" is +searched in the module "__dict__" before raising an "AttributeError". +If found, it is called with the attribute name and the result is +returned. + +The "__dir__" function should accept no arguments, and return an +iterable of strings that represents the names accessible on module. If +present, this function overrides the standard "dir()" search on a +module. + +For a more fine grained customization of the module behavior (setting +attributes, properties, etc.), one can set the "__class__" attribute +of a module object to a subclass of "types.ModuleType". For example: + + import sys + from types import ModuleType + + class VerboseModule(ModuleType): + def __repr__(self): + return f'Verbose {self.__name__}' + + def __setattr__(self, attr, value): + print(f'Setting {attr}...') + super().__setattr__(attr, value) + + sys.modules[__name__].__class__ = VerboseModule + +Note: + + Defining module "__getattr__" and setting module "__class__" only + affect lookups made using the attribute access syntax – directly + accessing the module globals (whether by code within the module, or + via a reference to the module’s globals dictionary) is unaffected. + +Changed in version 3.5: "__class__" module attribute is now writable. + +Added in version 3.7: "__getattr__" and "__dir__" module attributes. + +See also: + + **PEP 562** - Module __getattr__ and __dir__ + Describes the "__getattr__" and "__dir__" functions on modules. + + +Implementing Descriptors +======================== + +The following methods only apply when an instance of the class +containing the method (a so-called *descriptor* class) appears in an +*owner* class (the descriptor must be in either the owner’s class +dictionary or in the class dictionary for one of its parents). In the +examples below, “the attribute” refers to the attribute whose name is +the key of the property in the owner class’ "__dict__". The "object" +class itself does not implement any of these protocols. + +object.__get__(self, instance, owner=None) + + Called to get the attribute of the owner class (class attribute + access) or of an instance of that class (instance attribute + access). The optional *owner* argument is the owner class, while + *instance* is the instance that the attribute was accessed through, + or "None" when the attribute is accessed through the *owner*. + + This method should return the computed attribute value or raise an + "AttributeError" exception. + + **PEP 252** specifies that "__get__()" is callable with one or two + arguments. Python’s own built-in descriptors support this + specification; however, it is likely that some third-party tools + have descriptors that require both arguments. Python’s own + "__getattribute__()" implementation always passes in both arguments + whether they are required or not. + +object.__set__(self, instance, value) + + Called to set the attribute on an instance *instance* of the owner + class to a new value, *value*. 
+ + Note, adding "__set__()" or "__delete__()" changes the kind of + descriptor to a “data descriptor”. See Invoking Descriptors for + more details. + +object.__delete__(self, instance) + + Called to delete the attribute on an instance *instance* of the + owner class. + +Instances of descriptors may also have the "__objclass__" attribute +present: + +object.__objclass__ + + The attribute "__objclass__" is interpreted by the "inspect" module + as specifying the class where this object was defined (setting this + appropriately can assist in runtime introspection of dynamic class + attributes). For callables, it may indicate that an instance of the + given type (or a subclass) is expected or required as the first + positional argument (for example, CPython sets this attribute for + unbound methods that are implemented in C). + + +Invoking Descriptors +==================== + +In general, a descriptor is an object attribute with “binding +behavior”, one whose attribute access has been overridden by methods +in the descriptor protocol: "__get__()", "__set__()", and +"__delete__()". If any of those methods are defined for an object, it +is said to be a descriptor. + +The default behavior for attribute access is to get, set, or delete +the attribute from an object’s dictionary. For instance, "a.x" has a +lookup chain starting with "a.__dict__['x']", then +"type(a).__dict__['x']", and continuing through the base classes of +"type(a)" excluding metaclasses. + +However, if the looked-up value is an object defining one of the +descriptor methods, then Python may override the default behavior and +invoke the descriptor method instead. Where this occurs in the +precedence chain depends on which descriptor methods were defined and +how they were called. + +The starting point for descriptor invocation is a binding, "a.x". How +the arguments are assembled depends on "a": + +Direct Call + The simplest and least common call is when user code directly + invokes a descriptor method: "x.__get__(a)". + +Instance Binding + If binding to an object instance, "a.x" is transformed into the + call: "type(a).__dict__['x'].__get__(a, type(a))". + +Class Binding + If binding to a class, "A.x" is transformed into the call: + "A.__dict__['x'].__get__(None, A)". + +Super Binding + A dotted lookup such as "super(A, a).x" searches + "a.__class__.__mro__" for a base class "B" following "A" and then + returns "B.__dict__['x'].__get__(a, A)". If not a descriptor, "x" + is returned unchanged. + +For instance bindings, the precedence of descriptor invocation depends +on which descriptor methods are defined. A descriptor can define any +combination of "__get__()", "__set__()" and "__delete__()". If it +does not define "__get__()", then accessing the attribute will return +the descriptor object itself unless there is a value in the object’s +instance dictionary. If the descriptor defines "__set__()" and/or +"__delete__()", it is a data descriptor; if it defines neither, it is +a non-data descriptor. Normally, data descriptors define both +"__get__()" and "__set__()", while non-data descriptors have just the +"__get__()" method. Data descriptors with "__get__()" and "__set__()" +(and/or "__delete__()") defined always override a redefinition in an +instance dictionary. In contrast, non-data descriptors can be +overridden by instances. + +Python methods (including those decorated with "@staticmethod" and +"@classmethod") are implemented as non-data descriptors. Accordingly, +instances can redefine and override methods. 
This allows individual +instances to acquire behaviors that differ from other instances of the +same class. + +The "property()" function is implemented as a data descriptor. +Accordingly, instances cannot override the behavior of a property. + + +__slots__ +========= + +*__slots__* allow us to explicitly declare data members (like +properties) and deny the creation of "__dict__" and *__weakref__* +(unless explicitly declared in *__slots__* or available in a parent.) + +The space saved over using "__dict__" can be significant. Attribute +lookup speed can be significantly improved as well. + +object.__slots__ + + This class variable can be assigned a string, iterable, or sequence + of strings with variable names used by instances. *__slots__* + reserves space for the declared variables and prevents the + automatic creation of "__dict__" and *__weakref__* for each + instance. + +Notes on using *__slots__*: + +* When inheriting from a class without *__slots__*, the "__dict__" and + *__weakref__* attribute of the instances will always be accessible. + +* Without a "__dict__" variable, instances cannot be assigned new + variables not listed in the *__slots__* definition. Attempts to + assign to an unlisted variable name raises "AttributeError". If + dynamic assignment of new variables is desired, then add + "'__dict__'" to the sequence of strings in the *__slots__* + declaration. + +* Without a *__weakref__* variable for each instance, classes defining + *__slots__* do not support "weak references" to its instances. If + weak reference support is needed, then add "'__weakref__'" to the + sequence of strings in the *__slots__* declaration. + +* *__slots__* are implemented at the class level by creating + descriptors for each variable name. As a result, class attributes + cannot be used to set default values for instance variables defined + by *__slots__*; otherwise, the class attribute would overwrite the + descriptor assignment. + +* The action of a *__slots__* declaration is not limited to the class + where it is defined. *__slots__* declared in parents are available + in child classes. However, instances of a child subclass will get a + "__dict__" and *__weakref__* unless the subclass also defines + *__slots__* (which should only contain names of any *additional* + slots). + +* If a class defines a slot also defined in a base class, the instance + variable defined by the base class slot is inaccessible (except by + retrieving its descriptor directly from the base class). This + renders the meaning of the program undefined. In the future, a + check may be added to prevent this. + +* "TypeError" will be raised if nonempty *__slots__* are defined for a + class derived from a ""variable-length" built-in type" such as + "int", "bytes", and "tuple". + +* Any non-string *iterable* may be assigned to *__slots__*. + +* If a "dictionary" is used to assign *__slots__*, the dictionary keys + will be used as the slot names. The values of the dictionary can be + used to provide per-attribute docstrings that will be recognised by + "inspect.getdoc()" and displayed in the output of "help()". + +* "__class__" assignment works only if both classes have the same + *__slots__*. + +* Multiple inheritance with multiple slotted parent classes can be + used, but only one parent is allowed to have attributes created by + slots (the other bases must have empty slot layouts) - violations + raise "TypeError". + +* If an *iterator* is used for *__slots__* then a *descriptor* is + created for each of the iterator’s values. 
However, the *__slots__* + attribute will be an empty iterator. +''', + 'attribute-references': r'''Attribute references +******************** + +An attribute reference is a primary followed by a period and a name: + + attributeref ::= primary "." identifier + +The primary must evaluate to an object of a type that supports +attribute references, which most objects do. This object is then +asked to produce the attribute whose name is the identifier. The type +and value produced is determined by the object. Multiple evaluations +of the same attribute reference may yield different objects. + +This production can be customized by overriding the +"__getattribute__()" method or the "__getattr__()" method. The +"__getattribute__()" method is called first and either returns a value +or raises "AttributeError" if the attribute is not available. + +If an "AttributeError" is raised and the object has a "__getattr__()" +method, that method is called as a fallback. +''', + 'augassign': r'''Augmented assignment statements +******************************* + +Augmented assignment is the combination, in a single statement, of a +binary operation and an assignment statement: + + augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression) + augtarget ::= identifier | attributeref | subscription | slicing + augop ::= "+=" | "-=" | "*=" | "@=" | "/=" | "//=" | "%=" | "**=" + | ">>=" | "<<=" | "&=" | "^=" | "|=" + +(See section Primaries for the syntax definitions of the last three +symbols.) + +An augmented assignment evaluates the target (which, unlike normal +assignment statements, cannot be an unpacking) and the expression +list, performs the binary operation specific to the type of assignment +on the two operands, and assigns the result to the original target. +The target is only evaluated once. + +An augmented assignment statement like "x += 1" can be rewritten as "x += x + 1" to achieve a similar, but not exactly equal effect. In the +augmented version, "x" is only evaluated once. Also, when possible, +the actual operation is performed *in-place*, meaning that rather than +creating a new object and assigning that to the target, the old object +is modified instead. + +Unlike normal assignments, augmented assignments evaluate the left- +hand side *before* evaluating the right-hand side. For example, "a[i] ++= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and performs +the addition, and lastly, it writes the result back to "a[i]". + +With the exception of assigning to tuples and multiple targets in a +single statement, the assignment done by augmented assignment +statements is handled the same way as normal assignments. Similarly, +with the exception of the possible *in-place* behavior, the binary +operation performed by augmented assignment is the same as the normal +binary operations. + +For targets which are attribute references, the same caveat about +class and instance attributes applies as for regular assignments. +''', + 'await': r'''Await expression +**************** + +Suspend the execution of *coroutine* on an *awaitable* object. Can +only be used inside a *coroutine function*. + + await_expr ::= "await" primary + +Added in version 3.5. +''', + 'binary': r'''Binary arithmetic operations +**************************** + +The binary arithmetic operations have the conventional priority +levels. Note that some of these operations also apply to certain non- +numeric types. 
Apart from the power operator, there are only two +levels, one for multiplicative operators and one for additive +operators: + + m_expr ::= u_expr | m_expr "*" u_expr | m_expr "@" m_expr | + m_expr "//" u_expr | m_expr "/" u_expr | + m_expr "%" u_expr + a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr + +The "*" (multiplication) operator yields the product of its arguments. +The arguments must either both be numbers, or one argument must be an +integer and the other must be a sequence. In the former case, the +numbers are converted to a common real type and then multiplied +together. In the latter case, sequence repetition is performed; a +negative repetition factor yields an empty sequence. + +This operation can be customized using the special "__mul__()" and +"__rmul__()" methods. + +Changed in version 3.14: If only one operand is a complex number, the +other operand is converted to a floating-point number. + +The "@" (at) operator is intended to be used for matrix +multiplication. No builtin Python types implement this operator. + +This operation can be customized using the special "__matmul__()" and +"__rmatmul__()" methods. + +Added in version 3.5. + +The "/" (division) and "//" (floor division) operators yield the +quotient of their arguments. The numeric arguments are first +converted to a common type. Division of integers yields a float, while +floor division of integers results in an integer; the result is that +of mathematical division with the ‘floor’ function applied to the +result. Division by zero raises the "ZeroDivisionError" exception. + +The division operation can be customized using the special +"__truediv__()" and "__rtruediv__()" methods. The floor division +operation can be customized using the special "__floordiv__()" and +"__rfloordiv__()" methods. + +The "%" (modulo) operator yields the remainder from the division of +the first argument by the second. The numeric arguments are first +converted to a common type. A zero right argument raises the +"ZeroDivisionError" exception. The arguments may be floating-point +numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals "4*0.7 + +0.34".) The modulo operator always yields a result with the same sign +as its second operand (or zero); the absolute value of the result is +strictly smaller than the absolute value of the second operand [1]. + +The floor division and modulo operators are connected by the following +identity: "x == (x//y)*y + (x%y)". Floor division and modulo are also +connected with the built-in function "divmod()": "divmod(x, y) == +(x//y, x%y)". [2]. + +In addition to performing the modulo operation on numbers, the "%" +operator is also overloaded by string objects to perform old-style +string formatting (also known as interpolation). The syntax for +string formatting is described in the Python Library Reference, +section printf-style String Formatting. + +The *modulo* operation can be customized using the special "__mod__()" +and "__rmod__()" methods. + +The floor division operator, the modulo operator, and the "divmod()" +function are not defined for complex numbers. Instead, convert to a +floating-point number using the "abs()" function if appropriate. + +The "+" (addition) operator yields the sum of its arguments. The +arguments must either both be numbers or both be sequences of the same +type. In the former case, the numbers are converted to a common real +type and then added together. In the latter case, the sequences are +concatenated. 
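For illustration, here are two doctest-style lines showing the cases
described above (numeric operands converted to a common type, and
concatenation of sequences of the same type):

   >>> 1 + 2.5          # numbers are converted to a common type
   3.5
   >>> [1, 2] + [3]     # sequences of the same type are concatenated
   [1, 2, 3]
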
+ +This operation can be customized using the special "__add__()" and +"__radd__()" methods. + +Changed in version 3.14: If only one operand is a complex number, the +other operand is converted to a floating-point number. + +The "-" (subtraction) operator yields the difference of its arguments. +The numeric arguments are first converted to a common real type. + +This operation can be customized using the special "__sub__()" and +"__rsub__()" methods. + +Changed in version 3.14: If only one operand is a complex number, the +other operand is converted to a floating-point number. +''', + 'bitwise': r'''Binary bitwise operations +************************* + +Each of the three bitwise operations has a different priority level: + + and_expr ::= shift_expr | and_expr "&" shift_expr + xor_expr ::= and_expr | xor_expr "^" and_expr + or_expr ::= xor_expr | or_expr "|" xor_expr + +The "&" operator yields the bitwise AND of its arguments, which must +be integers or one of them must be a custom object overriding +"__and__()" or "__rand__()" special methods. + +The "^" operator yields the bitwise XOR (exclusive OR) of its +arguments, which must be integers or one of them must be a custom +object overriding "__xor__()" or "__rxor__()" special methods. + +The "|" operator yields the bitwise (inclusive) OR of its arguments, +which must be integers or one of them must be a custom object +overriding "__or__()" or "__ror__()" special methods. +''', + 'bltin-code-objects': r'''Code Objects +************ + +Code objects are used by the implementation to represent “pseudo- +compiled” executable Python code such as a function body. They differ +from function objects because they don’t contain a reference to their +global execution environment. Code objects are returned by the built- +in "compile()" function and can be extracted from function objects +through their "__code__" attribute. See also the "code" module. + +Accessing "__code__" raises an auditing event "object.__getattr__" +with arguments "obj" and ""__code__"". + +A code object can be executed or evaluated by passing it (instead of a +source string) to the "exec()" or "eval()" built-in functions. + +See The standard type hierarchy for more information. +''', + 'bltin-ellipsis-object': r'''The Ellipsis Object +******************* + +This object is commonly used by slicing (see Slicings). It supports +no special operations. There is exactly one ellipsis object, named +"Ellipsis" (a built-in name). "type(Ellipsis)()" produces the +"Ellipsis" singleton. + +It is written as "Ellipsis" or "...". +''', + 'bltin-null-object': r'''The Null Object +*************** + +This object is returned by functions that don’t explicitly return a +value. It supports no special operations. There is exactly one null +object, named "None" (a built-in name). "type(None)()" produces the +same singleton. + +It is written as "None". +''', + 'bltin-type-objects': r'''Type Objects +************ + +Type objects represent the various object types. An object’s type is +accessed by the built-in function "type()". There are no special +operations on types. The standard module "types" defines names for +all standard built-in types. + +Types are written like this: "". 
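For illustration, this is how the interactive interpreter displays a
type object (output shown for CPython):

   >>> type(1)
   <class 'int'>
   >>> import types
   >>> type(lambda: None) is types.FunctionType
   True
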
+''', + 'booleans': r'''Boolean operations +****************** + + or_test ::= and_test | or_test "or" and_test + and_test ::= not_test | and_test "and" not_test + not_test ::= comparison | "not" not_test + +In the context of Boolean operations, and also when expressions are +used by control flow statements, the following values are interpreted +as false: "False", "None", numeric zero of all types, and empty +strings and containers (including strings, tuples, lists, +dictionaries, sets and frozensets). All other values are interpreted +as true. User-defined objects can customize their truth value by +providing a "__bool__()" method. + +The operator "not" yields "True" if its argument is false, "False" +otherwise. + +The expression "x and y" first evaluates *x*; if *x* is false, its +value is returned; otherwise, *y* is evaluated and the resulting value +is returned. + +The expression "x or y" first evaluates *x*; if *x* is true, its value +is returned; otherwise, *y* is evaluated and the resulting value is +returned. + +Note that neither "and" nor "or" restrict the value and type they +return to "False" and "True", but rather return the last evaluated +argument. This is sometimes useful, e.g., if "s" is a string that +should be replaced by a default value if it is empty, the expression +"s or 'foo'" yields the desired value. Because "not" has to create a +new value, it returns a boolean value regardless of the type of its +argument (for example, "not 'foo'" produces "False" rather than "''".) +''', + 'break': r'''The "break" statement +********************* + + break_stmt ::= "break" + +"break" may only occur syntactically nested in a "for" or "while" +loop, but not nested in a function or class definition within that +loop. + +It terminates the nearest enclosing loop, skipping the optional "else" +clause if the loop has one. + +If a "for" loop is terminated by "break", the loop control target +keeps its current value. + +When "break" passes control out of a "try" statement with a "finally" +clause, that "finally" clause is executed before really leaving the +loop. +''', + 'callable-types': r'''Emulating callable objects +************************** + +object.__call__(self[, args...]) + + Called when the instance is “called” as a function; if this method + is defined, "x(arg1, arg2, ...)" roughly translates to + "type(x).__call__(x, arg1, ...)". The "object" class itself does + not provide this method. +''', + 'calls': r'''Calls +***** + +A call calls a callable object (e.g., a *function*) with a possibly +empty series of *arguments*: + + call ::= primary "(" [argument_list [","] | comprehension] ")" + argument_list ::= positional_arguments ["," starred_and_keywords] + ["," keywords_arguments] + | starred_and_keywords ["," keywords_arguments] + | keywords_arguments + positional_arguments ::= positional_item ("," positional_item)* + positional_item ::= assignment_expression | "*" expression + starred_and_keywords ::= ("*" expression | keyword_item) + ("," "*" expression | "," keyword_item)* + keywords_arguments ::= (keyword_item | "**" expression) + ("," keyword_item | "," "**" expression)* + keyword_item ::= identifier "=" expression + +An optional trailing comma may be present after the positional and +keyword arguments but does not affect the semantics. + +The primary must evaluate to a callable object (user-defined +functions, built-in functions, methods of built-in objects, class +objects, methods of class instances, and all objects having a +"__call__()" method are callable). 
All argument expressions are +evaluated before the call is attempted. Please refer to section +Function definitions for the syntax of formal *parameter* lists. + +If keyword arguments are present, they are first converted to +positional arguments, as follows. First, a list of unfilled slots is +created for the formal parameters. If there are N positional +arguments, they are placed in the first N slots. Next, for each +keyword argument, the identifier is used to determine the +corresponding slot (if the identifier is the same as the first formal +parameter name, the first slot is used, and so on). If the slot is +already filled, a "TypeError" exception is raised. Otherwise, the +argument is placed in the slot, filling it (even if the expression is +"None", it fills the slot). When all arguments have been processed, +the slots that are still unfilled are filled with the corresponding +default value from the function definition. (Default values are +calculated, once, when the function is defined; thus, a mutable object +such as a list or dictionary used as default value will be shared by +all calls that don’t specify an argument value for the corresponding +slot; this should usually be avoided.) If there are any unfilled +slots for which no default value is specified, a "TypeError" exception +is raised. Otherwise, the list of filled slots is used as the +argument list for the call. + +**CPython implementation detail:** An implementation may provide +built-in functions whose positional parameters do not have names, even +if they are ‘named’ for the purpose of documentation, and which +therefore cannot be supplied by keyword. In CPython, this is the case +for functions implemented in C that use "PyArg_ParseTuple()" to parse +their arguments. + +If there are more positional arguments than there are formal parameter +slots, a "TypeError" exception is raised, unless a formal parameter +using the syntax "*identifier" is present; in this case, that formal +parameter receives a tuple containing the excess positional arguments +(or an empty tuple if there were no excess positional arguments). + +If any keyword argument does not correspond to a formal parameter +name, a "TypeError" exception is raised, unless a formal parameter +using the syntax "**identifier" is present; in this case, that formal +parameter receives a dictionary containing the excess keyword +arguments (using the keywords as keys and the argument values as +corresponding values), or a (new) empty dictionary if there were no +excess keyword arguments. + +If the syntax "*expression" appears in the function call, "expression" +must evaluate to an *iterable*. Elements from these iterables are +treated as if they were additional positional arguments. For the call +"f(x1, x2, *y, x3, x4)", if *y* evaluates to a sequence *y1*, …, *yM*, +this is equivalent to a call with M+4 positional arguments *x1*, *x2*, +*y1*, …, *yM*, *x3*, *x4*. + +A consequence of this is that although the "*expression" syntax may +appear *after* explicit keyword arguments, it is processed *before* +the keyword arguments (and any "**expression" arguments – see below). +So: + + >>> def f(a, b): + ... print(a, b) + ... + >>> f(b=1, *(2,)) + 2 1 + >>> f(a=1, *(2,)) + Traceback (most recent call last): + File "", line 1, in + TypeError: f() got multiple values for keyword argument 'a' + >>> f(1, *(2,)) + 1 2 + +It is unusual for both keyword arguments and the "*expression" syntax +to be used in the same call, so in practice this confusion does not +often arise. 
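As noted above, default parameter values are evaluated only once, when
the function is defined, so a mutable default is shared between calls.
A minimal doctest-style sketch (the function name "append_to" is
invented for this example):

   >>> def append_to(item, bucket=[]):
   ...     bucket.append(item)
   ...     return bucket
   ...
   >>> append_to(1)
   [1]
   >>> append_to(2)     # the same list object is reused
   [1, 2]
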
+ +If the syntax "**expression" appears in the function call, +"expression" must evaluate to a *mapping*, the contents of which are +treated as additional keyword arguments. If a parameter matching a key +has already been given a value (by an explicit keyword argument, or +from another unpacking), a "TypeError" exception is raised. + +When "**expression" is used, each key in this mapping must be a +string. Each value from the mapping is assigned to the first formal +parameter eligible for keyword assignment whose name is equal to the +key. A key need not be a Python identifier (e.g. ""max-temp °F"" is +acceptable, although it will not match any formal parameter that could +be declared). If there is no match to a formal parameter the key-value +pair is collected by the "**" parameter, if there is one, or if there +is not, a "TypeError" exception is raised. + +Formal parameters using the syntax "*identifier" or "**identifier" +cannot be used as positional argument slots or as keyword argument +names. + +Changed in version 3.5: Function calls accept any number of "*" and +"**" unpackings, positional arguments may follow iterable unpackings +("*"), and keyword arguments may follow dictionary unpackings ("**"). +Originally proposed by **PEP 448**. + +A call always returns some value, possibly "None", unless it raises an +exception. How this value is computed depends on the type of the +callable object. + +If it is— + +a user-defined function: + The code block for the function is executed, passing it the + argument list. The first thing the code block will do is bind the + formal parameters to the arguments; this is described in section + Function definitions. When the code block executes a "return" + statement, this specifies the return value of the function call. + If execution reaches the end of the code block without executing a + "return" statement, the return value is "None". + +a built-in function or method: + The result is up to the interpreter; see Built-in Functions for the + descriptions of built-in functions and methods. + +a class object: + A new instance of that class is returned. + +a class instance method: + The corresponding user-defined function is called, with an argument + list that is one longer than the argument list of the call: the + instance becomes the first argument. + +a class instance: + The class must define a "__call__()" method; the effect is then the + same as if that method was called. +''', + 'class': r'''Class definitions +***************** + +A class definition defines a class object (see section The standard +type hierarchy): + + classdef ::= [decorators] "class" classname [type_params] [inheritance] ":" suite + inheritance ::= "(" [argument_list] ")" + classname ::= identifier + +A class definition is an executable statement. The inheritance list +usually gives a list of base classes (see Metaclasses for more +advanced uses), so each item in the list should evaluate to a class +object which allows subclassing. Classes without an inheritance list +inherit, by default, from the base class "object"; hence, + + class Foo: + pass + +is equivalent to + + class Foo(object): + pass + +The class’s suite is then executed in a new execution frame (see +Naming and binding), using a newly created local namespace and the +original global namespace. (Usually, the suite contains mostly +function definitions.) When the class’s suite finishes execution, its +execution frame is discarded but its local namespace is saved. 
[5] A +class object is then created using the inheritance list for the base +classes and the saved local namespace for the attribute dictionary. +The class name is bound to this class object in the original local +namespace. + +The order in which attributes are defined in the class body is +preserved in the new class’s "__dict__". Note that this is reliable +only right after the class is created and only for classes that were +defined using the definition syntax. + +Class creation can be customized heavily using metaclasses. + +Classes can also be decorated: just like when decorating functions, + + @f1(arg) + @f2 + class Foo: pass + +is roughly equivalent to + + class Foo: pass + Foo = f1(arg)(f2(Foo)) + +The evaluation rules for the decorator expressions are the same as for +function decorators. The result is then bound to the class name. + +Changed in version 3.9: Classes may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets immediately +after the class’s name. This indicates to static type checkers that +the class is generic. At runtime, the type parameters can be retrieved +from the class’s "__type_params__" attribute. See Generic classes for +more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +**Programmer’s note:** Variables defined in the class definition are +class attributes; they are shared by instances. Instance attributes +can be set in a method with "self.name = value". Both class and +instance attributes are accessible through the notation “"self.name"”, +and an instance attribute hides a class attribute with the same name +when accessed in this way. Class attributes can be used as defaults +for instance attributes, but using mutable values there can lead to +unexpected results. Descriptors can be used to create instance +variables with different implementation details. + +See also: + + **PEP 3115** - Metaclasses in Python 3000 + The proposal that changed the declaration of metaclasses to the + current syntax, and the semantics for how classes with + metaclasses are constructed. + + **PEP 3129** - Class Decorators + The proposal that added class decorators. Function and method + decorators were introduced in **PEP 318**. +''', + 'comparisons': r'''Comparisons +*********** + +Unlike C, all comparison operations in Python have the same priority, +which is lower than that of any arithmetic, shifting or bitwise +operation. Also unlike C, expressions like "a < b < c" have the +interpretation that is conventional in mathematics: + + comparison ::= or_expr (comp_operator or_expr)* + comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!=" + | "is" ["not"] | ["not"] "in" + +Comparisons yield boolean values: "True" or "False". Custom *rich +comparison methods* may return non-boolean values. In this case Python +will call "bool()" on such value in boolean contexts. + +Comparisons can be chained arbitrarily, e.g., "x < y <= z" is +equivalent to "x < y and y <= z", except that "y" is evaluated only +once (but in both cases "z" is not evaluated at all when "x < y" is +found to be false). + +Formally, if *a*, *b*, *c*, …, *y*, *z* are expressions and *op1*, +*op2*, …, *opN* are comparison operators, then "a op1 b op2 c ... y +opN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except +that each expression is evaluated at most once. 
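A short doctest-style illustration of the single evaluation of the
shared operand (the helper function "middle()" is invented for this
example):

   >>> def middle():
   ...     print('evaluated')
   ...     return 5
   ...
   >>> 1 < middle() <= 10     # "middle()" is evaluated only once
   evaluated
   True
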
+ +Note that "a op1 b op2 c" doesn’t imply any kind of comparison between +*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though +perhaps not pretty). + + +Value comparisons +================= + +The operators "<", ">", "==", ">=", "<=", and "!=" compare the values +of two objects. The objects do not need to have the same type. + +Chapter Objects, values and types states that objects have a value (in +addition to type and identity). The value of an object is a rather +abstract notion in Python: For example, there is no canonical access +method for an object’s value. Also, there is no requirement that the +value of an object should be constructed in a particular way, e.g. +comprised of all its data attributes. Comparison operators implement a +particular notion of what the value of an object is. One can think of +them as defining the value of an object indirectly, by means of their +comparison implementation. + +Because all types are (direct or indirect) subtypes of "object", they +inherit the default comparison behavior from "object". Types can +customize their comparison behavior by implementing *rich comparison +methods* like "__lt__()", described in Basic customization. + +The default behavior for equality comparison ("==" and "!=") is based +on the identity of the objects. Hence, equality comparison of +instances with the same identity results in equality, and equality +comparison of instances with different identities results in +inequality. A motivation for this default behavior is the desire that +all objects should be reflexive (i.e. "x is y" implies "x == y"). + +A default order comparison ("<", ">", "<=", and ">=") is not provided; +an attempt raises "TypeError". A motivation for this default behavior +is the lack of a similar invariant as for equality. + +The behavior of the default equality comparison, that instances with +different identities are always unequal, may be in contrast to what +types will need that have a sensible definition of object value and +value-based equality. Such types will need to customize their +comparison behavior, and in fact, a number of built-in types have done +that. + +The following list describes the comparison behavior of the most +important built-in types. + +* Numbers of built-in numeric types (Numeric Types — int, float, + complex) and of the standard library types "fractions.Fraction" and + "decimal.Decimal" can be compared within and across their types, + with the restriction that complex numbers do not support order + comparison. Within the limits of the types involved, they compare + mathematically (algorithmically) correct without loss of precision. + + The not-a-number values "float('NaN')" and "decimal.Decimal('NaN')" + are special. Any ordered comparison of a number to a not-a-number + value is false. A counter-intuitive implication is that not-a-number + values are not equal to themselves. For example, if "x = + float('NaN')", "3 < x", "x < 3" and "x == x" are all false, while "x + != x" is true. This behavior is compliant with IEEE 754. + +* "None" and "NotImplemented" are singletons. **PEP 8** advises that + comparisons for singletons should always be done with "is" or "is + not", never the equality operators. + +* Binary sequences (instances of "bytes" or "bytearray") can be + compared within and across their types. They compare + lexicographically using the numeric values of their elements. 
+ +* Strings (instances of "str") compare lexicographically using the + numerical Unicode code points (the result of the built-in function + "ord()") of their characters. [3] + + Strings and binary sequences cannot be directly compared. + +* Sequences (instances of "tuple", "list", or "range") can be compared + only within each of their types, with the restriction that ranges do + not support order comparison. Equality comparison across these + types results in inequality, and ordering comparison across these + types raises "TypeError". + + Sequences compare lexicographically using comparison of + corresponding elements. The built-in containers typically assume + identical objects are equal to themselves. That lets them bypass + equality tests for identical objects to improve performance and to + maintain their internal invariants. + + Lexicographical comparison between built-in collections works as + follows: + + * For two collections to compare equal, they must be of the same + type, have the same length, and each pair of corresponding + elements must compare equal (for example, "[1,2] == (1,2)" is + false because the type is not the same). + + * Collections that support order comparison are ordered the same as + their first unequal elements (for example, "[1,2,x] <= [1,2,y]" + has the same value as "x <= y"). If a corresponding element does + not exist, the shorter collection is ordered first (for example, + "[1,2] < [1,2,3]" is true). + +* Mappings (instances of "dict") compare equal if and only if they + have equal "(key, value)" pairs. Equality comparison of the keys and + values enforces reflexivity. + + Order comparisons ("<", ">", "<=", and ">=") raise "TypeError". + +* Sets (instances of "set" or "frozenset") can be compared within and + across their types. + + They define order comparison operators to mean subset and superset + tests. Those relations do not define total orderings (for example, + the two sets "{1,2}" and "{2,3}" are not equal, nor subsets of one + another, nor supersets of one another). Accordingly, sets are not + appropriate arguments for functions which depend on total ordering + (for example, "min()", "max()", and "sorted()" produce undefined + results given a list of sets as inputs). + + Comparison of sets enforces reflexivity of its elements. + +* Most other built-in types have no comparison methods implemented, so + they inherit the default comparison behavior. + +User-defined classes that customize their comparison behavior should +follow some consistency rules, if possible: + +* Equality comparison should be reflexive. In other words, identical + objects should compare equal: + + "x is y" implies "x == y" + +* Comparison should be symmetric. In other words, the following + expressions should have the same result: + + "x == y" and "y == x" + + "x != y" and "y != x" + + "x < y" and "y > x" + + "x <= y" and "y >= x" + +* Comparison should be transitive. The following (non-exhaustive) + examples illustrate that: + + "x > y and y > z" implies "x > z" + + "x < y and y <= z" implies "x < z" + +* Inverse comparison should result in the boolean negation. In other + words, the following expressions should have the same result: + + "x == y" and "not x != y" + + "x < y" and "not x >= y" (for total ordering) + + "x > y" and "not x <= y" (for total ordering) + + The last two expressions apply to totally ordered collections (e.g. + to sequences, but not to sets or mappings). See also the + "total_ordering()" decorator. 
+ +* The "hash()" result should be consistent with equality. Objects that + are equal should either have the same hash value, or be marked as + unhashable. + +Python does not enforce these consistency rules. In fact, the +not-a-number values are an example for not following these rules. + + +Membership test operations +========================== + +The operators "in" and "not in" test for membership. "x in s" +evaluates to "True" if *x* is a member of *s*, and "False" otherwise. +"x not in s" returns the negation of "x in s". All built-in sequences +and set types support this as well as dictionary, for which "in" tests +whether the dictionary has a given key. For container types such as +list, tuple, set, frozenset, dict, or collections.deque, the +expression "x in y" is equivalent to "any(x is e or x == e for e in +y)". + +For the string and bytes types, "x in y" is "True" if and only if *x* +is a substring of *y*. An equivalent test is "y.find(x) != -1". +Empty strings are always considered to be a substring of any other +string, so """ in "abc"" will return "True". + +For user-defined classes which define the "__contains__()" method, "x +in y" returns "True" if "y.__contains__(x)" returns a true value, and +"False" otherwise. + +For user-defined classes which do not define "__contains__()" but do +define "__iter__()", "x in y" is "True" if some value "z", for which +the expression "x is z or x == z" is true, is produced while iterating +over "y". If an exception is raised during the iteration, it is as if +"in" raised that exception. + +Lastly, the old-style iteration protocol is tried: if a class defines +"__getitem__()", "x in y" is "True" if and only if there is a non- +negative integer index *i* such that "x is y[i] or x == y[i]", and no +lower integer index raises the "IndexError" exception. (If any other +exception is raised, it is as if "in" raised that exception). + +The operator "not in" is defined to have the inverse truth value of +"in". + + +Identity comparisons +==================== + +The operators "is" and "is not" test for an object’s identity: "x is +y" is true if and only if *x* and *y* are the same object. An +Object’s identity is determined using the "id()" function. "x is not +y" yields the inverse truth value. [4] +''', + 'compound': r'''Compound statements +******************* + +Compound statements contain (groups of) other statements; they affect +or control the execution of those other statements in some way. In +general, compound statements span multiple lines, although in simple +incarnations a whole compound statement may be contained in one line. + +The "if", "while" and "for" statements implement traditional control +flow constructs. "try" specifies exception handlers and/or cleanup +code for a group of statements, while the "with" statement allows the +execution of initialization and finalization code around a block of +code. Function and class definitions are also syntactically compound +statements. + +A compound statement consists of one or more ‘clauses.’ A clause +consists of a header and a ‘suite.’ The clause headers of a +particular compound statement are all at the same indentation level. +Each clause header begins with a uniquely identifying keyword and ends +with a colon. A suite is a group of statements controlled by a +clause. A suite can be one or more semicolon-separated simple +statements on the same line as the header, following the header’s +colon, or it can be one or more indented statements on subsequent +lines. 
Only the latter form of a suite can contain nested compound +statements; the following is illegal, mostly because it wouldn’t be +clear to which "if" clause a following "else" clause would belong: + + if test1: if test2: print(x) + +Also note that the semicolon binds tighter than the colon in this +context, so that in the following example, either all or none of the +"print()" calls are executed: + + if x < y < z: print(x); print(y); print(z) + +Summarizing: + + compound_stmt ::= if_stmt + | while_stmt + | for_stmt + | try_stmt + | with_stmt + | match_stmt + | funcdef + | classdef + | async_with_stmt + | async_for_stmt + | async_funcdef + suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT + statement ::= stmt_list NEWLINE | compound_stmt + stmt_list ::= simple_stmt (";" simple_stmt)* [";"] + +Note that statements always end in a "NEWLINE" possibly followed by a +"DEDENT". Also note that optional continuation clauses always begin +with a keyword that cannot start a statement, thus there are no +ambiguities (the ‘dangling "else"’ problem is solved in Python by +requiring nested "if" statements to be indented). + +The formatting of the grammar rules in the following sections places +each clause on a separate line for clarity. + + +The "if" statement +================== + +The "if" statement is used for conditional execution: + + if_stmt ::= "if" assignment_expression ":" suite + ("elif" assignment_expression ":" suite)* + ["else" ":" suite] + +It selects exactly one of the suites by evaluating the expressions one +by one until one is found to be true (see section Boolean operations +for the definition of true and false); then that suite is executed +(and no other part of the "if" statement is executed or evaluated). +If all expressions are false, the suite of the "else" clause, if +present, is executed. + + +The "while" statement +===================== + +The "while" statement is used for repeated execution as long as an +expression is true: + + while_stmt ::= "while" assignment_expression ":" suite + ["else" ":" suite] + +This repeatedly tests the expression and, if it is true, executes the +first suite; if the expression is false (which may be the first time +it is tested) the suite of the "else" clause, if present, is executed +and the loop terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. A "continue" statement +executed in the first suite skips the rest of the suite and goes back +to testing the expression. + + +The "for" statement +=================== + +The "for" statement is used to iterate over the elements of a sequence +(such as a string, tuple or list) or other iterable object: + + for_stmt ::= "for" target_list "in" starred_list ":" suite + ["else" ":" suite] + +The "starred_list" expression is evaluated once; it should yield an +*iterable* object. An *iterator* is created for that iterable. The +first item provided by the iterator is then assigned to the target +list using the standard rules for assignments (see Assignment +statements), and the suite is executed. This repeats for each item +provided by the iterator. When the iterator is exhausted, the suite +in the "else" clause, if present, is executed, and the loop +terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. 
A "continue" statement +executed in the first suite skips the rest of the suite and continues +with the next item, or with the "else" clause if there is no next +item. + +The for-loop makes assignments to the variables in the target list. +This overwrites all previous assignments to those variables including +those made in the suite of the for-loop: + + for i in range(10): + print(i) + i = 5 # this will not affect the for-loop + # because i will be overwritten with the next + # index in the range + +Names in the target list are not deleted when the loop is finished, +but if the sequence is empty, they will not have been assigned to at +all by the loop. Hint: the built-in type "range()" represents +immutable arithmetic sequences of integers. For instance, iterating +"range(3)" successively yields 0, 1, and then 2. + +Changed in version 3.11: Starred elements are now allowed in the +expression list. + + +The "try" statement +=================== + +The "try" statement specifies exception handlers and/or cleanup code +for a group of statements: + + try_stmt ::= try1_stmt | try2_stmt | try3_stmt + try1_stmt ::= "try" ":" suite + ("except" [expression ["as" identifier]] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try2_stmt ::= "try" ":" suite + ("except" "*" expression ["as" identifier] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try3_stmt ::= "try" ":" suite + "finally" ":" suite + +Additional information on exceptions can be found in section +Exceptions, and information on using the "raise" statement to generate +exceptions may be found in section The raise statement. + + +"except" clause +--------------- + +The "except" clause(s) specify one or more exception handlers. When no +exception occurs in the "try" clause, no exception handler is +executed. When an exception occurs in the "try" suite, a search for an +exception handler is started. This search inspects the "except" +clauses in turn until one is found that matches the exception. An +expression-less "except" clause, if present, must be last; it matches +any exception. + +For an "except" clause with an expression, the expression must +evaluate to an exception type or a tuple of exception types. The +raised exception matches an "except" clause whose expression evaluates +to the class or a *non-virtual base class* of the exception object, or +to a tuple that contains such a class. + +If no "except" clause matches the exception, the search for an +exception handler continues in the surrounding code and on the +invocation stack. [1] + +If the evaluation of an expression in the header of an "except" clause +raises an exception, the original search for a handler is canceled and +a search starts for the new exception in the surrounding code and on +the call stack (it is treated as if the entire "try" statement raised +the exception). + +When a matching "except" clause is found, the exception is assigned to +the target specified after the "as" keyword in that "except" clause, +if present, and the "except" clause’s suite is executed. All "except" +clauses must have an executable block. When the end of this block is +reached, execution continues normally after the entire "try" +statement. (This means that if two nested handlers exist for the same +exception, and the exception occurs in the "try" clause of the inner +handler, the outer handler will not handle the exception.) + +When an exception has been assigned using "as target", it is cleared +at the end of the "except" clause. 
This is as if + + except E as N: + foo + +was translated to + + except E as N: + try: + foo + finally: + del N + +This means the exception must be assigned to a different name to be +able to refer to it after the "except" clause. Exceptions are cleared +because with the traceback attached to them, they form a reference +cycle with the stack frame, keeping all locals in that frame alive +until the next garbage collection occurs. + +Before an "except" clause’s suite is executed, the exception is stored +in the "sys" module, where it can be accessed from within the body of +the "except" clause by calling "sys.exception()". When leaving an +exception handler, the exception stored in the "sys" module is reset +to its previous value: + + >>> print(sys.exception()) + None + >>> try: + ... raise TypeError + ... except: + ... print(repr(sys.exception())) + ... try: + ... raise ValueError + ... except: + ... print(repr(sys.exception())) + ... print(repr(sys.exception())) + ... + TypeError() + ValueError() + TypeError() + >>> print(sys.exception()) + None + + +"except*" clause +---------------- + +The "except*" clause(s) are used for handling "ExceptionGroup"s. The +exception type for matching is interpreted as in the case of "except", +but in the case of exception groups we can have partial matches when +the type matches some of the exceptions in the group. This means that +multiple "except*" clauses can execute, each handling part of the +exception group. Each clause executes at most once and handles an +exception group of all matching exceptions. Each exception in the +group is handled by at most one "except*" clause, the first that +matches it. + + >>> try: + ... raise ExceptionGroup("eg", + ... [ValueError(1), TypeError(2), OSError(3), OSError(4)]) + ... except* TypeError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... except* OSError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... + caught with nested (TypeError(2),) + caught with nested (OSError(3), OSError(4)) + + Exception Group Traceback (most recent call last): + | File "", line 2, in + | ExceptionGroup: eg + +-+---------------- 1 ---------------- + | ValueError: 1 + +------------------------------------ + +Any remaining exceptions that were not handled by any "except*" clause +are re-raised at the end, along with all exceptions that were raised +from within the "except*" clauses. If this list contains more than one +exception to reraise, they are combined into an exception group. + +If the raised exception is not an exception group and its type matches +one of the "except*" clauses, it is caught and wrapped by an exception +group with an empty message string. + + >>> try: + ... raise BlockingIOError + ... except* BlockingIOError as e: + ... print(repr(e)) + ... + ExceptionGroup('', (BlockingIOError())) + +An "except*" clause must have a matching expression; it cannot be +"except*:". Furthermore, this expression cannot contain exception +group types, because that would have ambiguous semantics. + +It is not possible to mix "except" and "except*" in the same "try". +"break", "continue" and "return" cannot appear in an "except*" clause. + + +"else" clause +------------- + +The optional "else" clause is executed if the control flow leaves the +"try" suite, no exception was raised, and no "return", "continue", or +"break" statement was executed. Exceptions in the "else" clause are +not handled by the preceding "except" clauses. 
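A minimal doctest illustrating the "else" clause (the names used here
are arbitrary):

   >>> try:
   ...     value = int("10")
   ... except ValueError:
   ...     print('not a number')
   ... else:
   ...     print('parsed', value)
   ...
   parsed 10
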
+ + +"finally" clause +---------------- + +If "finally" is present, it specifies a ‘cleanup’ handler. The "try" +clause is executed, including any "except" and "else" clauses. If an +exception occurs in any of the clauses and is not handled, the +exception is temporarily saved. The "finally" clause is executed. If +there is a saved exception it is re-raised at the end of the "finally" +clause. If the "finally" clause raises another exception, the saved +exception is set as the context of the new exception. If the "finally" +clause executes a "return", "break" or "continue" statement, the saved +exception is discarded: + + >>> def f(): + ... try: + ... 1/0 + ... finally: + ... return 42 + ... + >>> f() + 42 + +The exception information is not available to the program during +execution of the "finally" clause. + +When a "return", "break" or "continue" statement is executed in the +"try" suite of a "try"…"finally" statement, the "finally" clause is +also executed ‘on the way out.’ + +The return value of a function is determined by the last "return" +statement executed. Since the "finally" clause always executes, a +"return" statement executed in the "finally" clause will always be the +last one executed: + + >>> def foo(): + ... try: + ... return 'try' + ... finally: + ... return 'finally' + ... + >>> foo() + 'finally' + +Changed in version 3.8: Prior to Python 3.8, a "continue" statement +was illegal in the "finally" clause due to a problem with the +implementation. + + +The "with" statement +==================== + +The "with" statement is used to wrap the execution of a block with +methods defined by a context manager (see section With Statement +Context Managers). This allows common "try"…"except"…"finally" usage +patterns to be encapsulated for convenient reuse. + + with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | with_stmt_contents ) ":" suite + with_stmt_contents ::= with_item ("," with_item)* + with_item ::= expression ["as" target] + +The execution of the "with" statement with one “item” proceeds as +follows: + +1. The context expression (the expression given in the "with_item") is + evaluated to obtain a context manager. + +2. The context manager’s "__enter__()" is loaded for later use. + +3. The context manager’s "__exit__()" is loaded for later use. + +4. The context manager’s "__enter__()" method is invoked. + +5. If a target was included in the "with" statement, the return value + from "__enter__()" is assigned to it. + + Note: + + The "with" statement guarantees that if the "__enter__()" method + returns without an error, then "__exit__()" will always be + called. Thus, if an error occurs during the assignment to the + target list, it will be treated the same as an error occurring + within the suite would be. See step 7 below. + +6. The suite is executed. + +7. The context manager’s "__exit__()" method is invoked. If an + exception caused the suite to be exited, its type, value, and + traceback are passed as arguments to "__exit__()". Otherwise, three + "None" arguments are supplied. + + If the suite was exited due to an exception, and the return value + from the "__exit__()" method was false, the exception is reraised. + If the return value was true, the exception is suppressed, and + execution continues with the statement following the "with" + statement. + + If the suite was exited for any reason other than an exception, the + return value from "__exit__()" is ignored, and execution proceeds + at the normal location for the kind of exit that was taken. 
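The steps above can be observed with a small sketch of a context
manager; the "Tracer" class here is invented purely for illustration:

   >>> class Tracer:
   ...     def __enter__(self):
   ...         print('enter')            # step 4
   ...         return 'resource'         # bound to the target in step 5
   ...     def __exit__(self, exc_type, exc_value, traceback):
   ...         print('exit', exc_type)   # step 7
   ...         return False              # a false value does not suppress exceptions
   ...
   >>> with Tracer() as r:
   ...     print('suite, got', r)        # step 6
   ...
   enter
   suite, got resource
   exit None
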
+ +The following code: + + with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + enter = type(manager).__enter__ + exit = type(manager).__exit__ + value = enter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not exit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + exit(manager, None, None, None) + +With more than one item, the context managers are processed as if +multiple "with" statements were nested: + + with A() as a, B() as b: + SUITE + +is semantically equivalent to: + + with A() as a: + with B() as b: + SUITE + +You can also write multi-item context managers in multiple lines if +the items are surrounded by parentheses. For example: + + with ( + A() as a, + B() as b, + ): + SUITE + +Changed in version 3.1: Support for multiple context expressions. + +Changed in version 3.10: Support for using grouping parentheses to +break the statement in multiple lines. + +See also: + + **PEP 343** - The “with” statement + The specification, background, and examples for the Python "with" + statement. + + +The "match" statement +===================== + +Added in version 3.10. + +The match statement is used for pattern matching. Syntax: + + match_stmt ::= 'match' subject_expr ":" NEWLINE INDENT case_block+ DEDENT + subject_expr ::= star_named_expression "," star_named_expressions? + | named_expression + case_block ::= 'case' patterns [guard] ":" block + +Note: + + This section uses single quotes to denote soft keywords. + +Pattern matching takes a pattern as input (following "case") and a +subject value (following "match"). The pattern (which may contain +subpatterns) is matched against the subject value. The outcomes are: + +* A match success or failure (also termed a pattern success or + failure). + +* Possible binding of matched values to a name. The prerequisites for + this are further discussed below. + +The "match" and "case" keywords are soft keywords. + +See also: + + * **PEP 634** – Structural Pattern Matching: Specification + + * **PEP 636** – Structural Pattern Matching: Tutorial + + +Overview +-------- + +Here’s an overview of the logical flow of a match statement: + +1. The subject expression "subject_expr" is evaluated and a resulting + subject value obtained. If the subject expression contains a comma, + a tuple is constructed using the standard rules. + +2. Each pattern in a "case_block" is attempted to match with the + subject value. The specific rules for success or failure are + described below. The match attempt can also bind some or all of the + standalone names within the pattern. The precise pattern binding + rules vary per pattern type and are specified below. **Name + bindings made during a successful pattern match outlive the + executed block and can be used after the match statement**. + + Note: + + During failed pattern matches, some subpatterns may succeed. Do + not rely on bindings being made for a failed match. Conversely, + do not rely on variables remaining unchanged after a failed + match. The exact behavior is dependent on implementation and may + vary. This is an intentional decision made to allow different + implementations to add optimizations. + +3. If the pattern succeeds, the corresponding guard (if present) is + evaluated. In this case all name bindings are guaranteed to have + happened. + + * If the guard evaluates as true or is missing, the "block" inside + "case_block" is executed. 
+ + * Otherwise, the next "case_block" is attempted as described above. + + * If there are no further case blocks, the match statement is + completed. + +Note: + + Users should generally never rely on a pattern being evaluated. + Depending on implementation, the interpreter may cache values or use + other optimizations which skip repeated evaluations. + +A sample match statement: + + >>> flag = False + >>> match (100, 200): + ... case (100, 300): # Mismatch: 200 != 300 + ... print('Case 1') + ... case (100, 200) if flag: # Successful match, but guard fails + ... print('Case 2') + ... case (100, y): # Matches and binds y to 200 + ... print(f'Case 3, y: {y}') + ... case _: # Pattern not attempted + ... print('Case 4, I match anything!') + ... + Case 3, y: 200 + +In this case, "if flag" is a guard. Read more about that in the next +section. + + +Guards +------ + + guard ::= "if" named_expression + +A "guard" (which is part of the "case") must succeed for code inside +the "case" block to execute. It takes the form: "if" followed by an +expression. + +The logical flow of a "case" block with a "guard" follows: + +1. Check that the pattern in the "case" block succeeded. If the + pattern failed, the "guard" is not evaluated and the next "case" + block is checked. + +2. If the pattern succeeded, evaluate the "guard". + + * If the "guard" condition evaluates as true, the case block is + selected. + + * If the "guard" condition evaluates as false, the case block is + not selected. + + * If the "guard" raises an exception during evaluation, the + exception bubbles up. + +Guards are allowed to have side effects as they are expressions. +Guard evaluation must proceed from the first to the last case block, +one at a time, skipping case blocks whose pattern(s) don’t all +succeed. (I.e., guard evaluation must happen in order.) Guard +evaluation must stop once a case block is selected. + + +Irrefutable Case Blocks +----------------------- + +An irrefutable case block is a match-all case block. A match +statement may have at most one irrefutable case block, and it must be +last. + +A case block is considered irrefutable if it has no guard and its +pattern is irrefutable. A pattern is considered irrefutable if we can +prove from its syntax alone that it will always succeed. Only the +following patterns are irrefutable: + +* AS Patterns whose left-hand side is irrefutable + +* OR Patterns containing at least one irrefutable pattern + +* Capture Patterns + +* Wildcard Patterns + +* parenthesized irrefutable patterns + + +Patterns +-------- + +Note: + + This section uses grammar notations beyond standard EBNF: + + * the notation "SEP.RULE+" is shorthand for "RULE (SEP RULE)*" + + * the notation "!RULE" is shorthand for a negative lookahead + assertion + +The top-level syntax for "patterns" is: + + patterns ::= open_sequence_pattern | pattern + pattern ::= as_pattern | or_pattern + closed_pattern ::= | literal_pattern + | capture_pattern + | wildcard_pattern + | value_pattern + | group_pattern + | sequence_pattern + | mapping_pattern + | class_pattern + +The descriptions below will include a description “in simple terms” of +what a pattern does for illustration purposes (credits to Raymond +Hettinger for a document that inspired most of the descriptions). Note +that these descriptions are purely for illustration purposes and **may +not** reflect the underlying implementation. Furthermore, they do not +cover all valid forms. 
+
+
+OR Patterns
+~~~~~~~~~~~
+
+An OR pattern is two or more patterns separated by vertical bars "|".
+Syntax:
+
+   or_pattern ::= "|".closed_pattern+
+
+Only the final subpattern may be irrefutable, and each subpattern must
+bind the same set of names to avoid ambiguity.
+
+An OR pattern matches each of its subpatterns in turn to the subject
+value, until one succeeds. The OR pattern is then considered
+successful. Otherwise, if none of the subpatterns succeed, the OR
+pattern fails.
+
+In simple terms, "P1 | P2 | ..." will try to match "P1", if it fails
+it will try to match "P2", succeeding immediately if any succeeds,
+failing otherwise.
+
+
+AS Patterns
+~~~~~~~~~~~
+
+An AS pattern matches an OR pattern on the left of the "as" keyword
+against a subject. Syntax:
+
+   as_pattern ::= or_pattern "as" capture_pattern
+
+If the OR pattern fails, the AS pattern fails. Otherwise, the AS
+pattern binds the subject to the name on the right of the as keyword
+and succeeds. "capture_pattern" cannot be a "_".
+
+In simple terms "P as NAME" will match with "P", and on success it
+will set "NAME = <subject>".
+
+
+Literal Patterns
+~~~~~~~~~~~~~~~~
+
+A literal pattern corresponds to most literals in Python. Syntax:
+
+   literal_pattern ::= signed_number
+                       | signed_number "+" NUMBER
+                       | signed_number "-" NUMBER
+                       | strings
+                       | "None"
+                       | "True"
+                       | "False"
+   signed_number   ::= ["-"] NUMBER
+
+The rule "strings" and the token "NUMBER" are defined in the standard
+Python grammar. Triple-quoted strings are supported. Raw strings and
+byte strings are supported. f-strings are not supported.
+
+The forms "signed_number '+' NUMBER" and "signed_number '-' NUMBER"
+are for expressing complex numbers; they require a real number on the
+left and an imaginary number on the right. E.g. "3 + 4j".
+
+In simple terms, "LITERAL" will succeed only if "<subject> ==
+LITERAL". For the singletons "None", "True" and "False", the "is"
+operator is used.
+
+
+Capture Patterns
+~~~~~~~~~~~~~~~~
+
+A capture pattern binds the subject value to a name. Syntax:
+
+   capture_pattern ::= !'_' NAME
+
+A single underscore "_" is not a capture pattern (this is what "!'_'"
+expresses). It is instead treated as a "wildcard_pattern".
+
+In a given pattern, a given name can only be bound once. E.g. "case
+x, x: ..." is invalid while "case [x] | x: ..." is allowed.
+
+Capture patterns always succeed. The binding follows scoping rules
+established by the assignment expression operator in **PEP 572**; the
+name becomes a local variable in the closest containing function scope
+unless there’s an applicable "global" or "nonlocal" statement.
+
+In simple terms "NAME" will always succeed and it will set "NAME =
+<subject>".
+
+
+Wildcard Patterns
+~~~~~~~~~~~~~~~~~
+
+A wildcard pattern always succeeds (matches anything) and binds no
+name. Syntax:
+
+   wildcard_pattern ::= '_'
+
+"_" is a soft keyword within any pattern, but only within patterns.
+It is an identifier, as usual, even within "match" subject
+expressions, "guard"s, and "case" blocks.
+
+In simple terms, "_" will always succeed.
+
+
+Value Patterns
+~~~~~~~~~~~~~~
+
+A value pattern represents a named value in Python. Syntax:
+
+   value_pattern ::= attr
+   attr          ::= name_or_attr "." NAME
+   name_or_attr  ::= attr | NAME
+
+The dotted name in the pattern is looked up using standard Python name
+resolution rules. The pattern succeeds if the value found compares
+equal to the subject value (using the "==" equality operator).
+
+In simple terms "NAME1.NAME2" will succeed only if "<subject> ==
+NAME1.NAME2"
+
+Note:
+
+  If the same value occurs multiple times in the same match statement,
+  the interpreter may cache the first value found and reuse it rather
+  than repeat the same lookup. This cache is strictly tied to a given
+  execution of a given match statement.
+
+
+Group Patterns
+~~~~~~~~~~~~~~
+
+A group pattern allows users to add parentheses around patterns to
+emphasize the intended grouping. Otherwise, it has no additional
+syntax. Syntax:
+
+   group_pattern ::= "(" pattern ")"
+
+In simple terms "(P)" has the same effect as "P".
+
+
+Sequence Patterns
+~~~~~~~~~~~~~~~~~
+
+A sequence pattern contains several subpatterns to be matched against
+sequence elements. The syntax is similar to the unpacking of a list or
+tuple.
+
+   sequence_pattern       ::= "[" [maybe_sequence_pattern] "]"
+                            | "(" [open_sequence_pattern] ")"
+   open_sequence_pattern  ::= maybe_star_pattern "," [maybe_sequence_pattern]
+   maybe_sequence_pattern ::= ",".maybe_star_pattern+ ","?
+   maybe_star_pattern     ::= star_pattern | pattern
+   star_pattern           ::= "*" (capture_pattern | wildcard_pattern)
+
+There is no difference if parentheses or square brackets are used for
+sequence patterns (i.e. "(...)" vs "[...]").
+
+Note:
+
+  A single pattern enclosed in parentheses without a trailing comma
+  (e.g. "(3 | 4)") is a group pattern. While a single pattern enclosed
+  in square brackets (e.g. "[3 | 4]") is still a sequence pattern.
+
+At most one star subpattern may be in a sequence pattern. The star
+subpattern may occur in any position. If no star subpattern is
+present, the sequence pattern is a fixed-length sequence pattern;
+otherwise it is a variable-length sequence pattern.
+
+The following is the logical flow for matching a sequence pattern
+against a subject value:
+
+1. If the subject value is not a sequence [2], the sequence pattern
+   fails.
+
+2. If the subject value is an instance of "str", "bytes" or
+   "bytearray" the sequence pattern fails.
+
+3. The subsequent steps depend on whether the sequence pattern is
+   fixed or variable-length.
+
+   If the sequence pattern is fixed-length:
+
+   1. If the length of the subject sequence is not equal to the number
+      of subpatterns, the sequence pattern fails.
+
+   2. Subpatterns in the sequence pattern are matched to their
+      corresponding items in the subject sequence from left to right.
+      Matching stops as soon as a subpattern fails. If all
+      subpatterns succeed in matching their corresponding item, the
+      sequence pattern succeeds.
+
+   Otherwise, if the sequence pattern is variable-length:
+
+   1. If the length of the subject sequence is less than the number of
+      non-star subpatterns, the sequence pattern fails.
+
+   2. The leading non-star subpatterns are matched to their
+      corresponding items as for fixed-length sequences.
+
+   3. If the previous step succeeds, the star subpattern matches a
+      list formed of the remaining subject items, excluding the
+      remaining items corresponding to non-star subpatterns following
+      the star subpattern.
+
+   4. Remaining non-star subpatterns are matched to their
+      corresponding subject items, as for a fixed-length sequence.
+
+   Note:
+
+     The length of the subject sequence is obtained via "len()" (i.e.
+     via the "__len__()" protocol). This length may be cached by the
+     interpreter in a similar manner as value patterns.
+
+In simple terms "[P1, P2, P3," … ", P<N>]" matches only if all the
+following happens:
+
+* check "<subject>" is a sequence
+
+* "len(subject) == <N>"
+
+* "P1" matches "<subject>[0]" (note that this match can also bind
+  names)
+
+* "P2" matches "<subject>[1]" (note that this match can also bind
+  names)
+
+* … and so on for the corresponding pattern/element.
+
+
+Mapping Patterns
+~~~~~~~~~~~~~~~~
+
+A mapping pattern contains one or more key-value patterns. The syntax
+is similar to the construction of a dictionary. Syntax:
+
+   mapping_pattern     ::= "{" [items_pattern] "}"
+   items_pattern       ::= ",".key_value_pattern+ ","?
+   key_value_pattern   ::= (literal_pattern | value_pattern) ":" pattern
+                         | double_star_pattern
+   double_star_pattern ::= "**" capture_pattern
+
+At most one double star pattern may be in a mapping pattern. The
+double star pattern must be the last subpattern in the mapping
+pattern.
+
+Duplicate keys in mapping patterns are disallowed. Duplicate literal
+keys will raise a "SyntaxError". Two keys that otherwise have the same
+value will raise a "ValueError" at runtime.
+
+The following is the logical flow for matching a mapping pattern
+against a subject value:
+
+1. If the subject value is not a mapping [3], the mapping pattern
+   fails.
+
+2. If every key given in the mapping pattern is present in the subject
+   mapping, and the pattern for each key matches the corresponding
+   item of the subject mapping, the mapping pattern succeeds.
+
+3. If duplicate keys are detected in the mapping pattern, the pattern
+   is considered invalid. A "SyntaxError" is raised for duplicate
+   literal values; or a "ValueError" for named keys of the same value.
+
+Note:
+
+  Key-value pairs are matched using the two-argument form of the
+  mapping subject’s "get()" method. Matched key-value pairs must
+  already be present in the mapping, and not created on-the-fly via
+  "__missing__()" or "__getitem__()".
+
+In simple terms "{KEY1: P1, KEY2: P2, ... }" matches only if all the
+following happens:
+
+* check "<subject>" is a mapping
+
+* "KEY1 in <subject>"
+
+* "P1" matches "<subject>[KEY1]"
+
+* … and so on for the corresponding KEY/pattern pair.
+
+
+Class Patterns
+~~~~~~~~~~~~~~
+
+A class pattern represents a class and its positional and keyword
+arguments (if any). Syntax:
+
+   class_pattern       ::= name_or_attr "(" [pattern_arguments ","?] ")"
+   pattern_arguments   ::= positional_patterns ["," keyword_patterns]
+                         | keyword_patterns
+   positional_patterns ::= ",".pattern+
+   keyword_patterns    ::= ",".keyword_pattern+
+   keyword_pattern     ::= NAME "=" pattern
+
+The same keyword should not be repeated in class patterns.
+
+The following is the logical flow for matching a class pattern against
+a subject value:
+
+1. If "name_or_attr" is not an instance of the builtin "type", raise
+   "TypeError".
+
+2. If the subject value is not an instance of "name_or_attr" (tested
+   via "isinstance()"), the class pattern fails.
+
+3. If no pattern arguments are present, the pattern succeeds.
+   Otherwise, the subsequent steps depend on whether keyword or
+   positional argument patterns are present.
+
+   For a number of built-in types (specified below), a single
+   positional subpattern is accepted which will match the entire
+   subject; for these types keyword patterns also work as for other
+   types.
+
+   If only keyword patterns are present, they are processed as
+   follows, one by one:
+
+   I. The keyword is looked up as an attribute on the subject.
+
+      * If this raises an exception other than "AttributeError", the
+        exception bubbles up.
+
+      * If this raises "AttributeError", the class pattern has failed.
+
+      * Else, the subpattern associated with the keyword pattern is
+        matched against the subject’s attribute value. If this fails,
+        the class pattern fails; if this succeeds, the match proceeds
+        to the next keyword.
+
+   II. If all keyword patterns succeed, the class pattern succeeds.
+
+   If any positional patterns are present, they are converted to
+   keyword patterns using the "__match_args__" attribute on the class
+   "name_or_attr" before matching:
+
+   I. The equivalent of "getattr(cls, "__match_args__", ())" is
+      called.
+
+      * If this raises an exception, the exception bubbles up.
+
+      * If the returned value is not a tuple, the conversion fails and
+        "TypeError" is raised.
+
+      * If there are more positional patterns than
+        "len(cls.__match_args__)", "TypeError" is raised.
+
+      * Otherwise, positional pattern "i" is converted to a keyword
+        pattern using "__match_args__[i]" as the keyword.
+        "__match_args__[i]" must be a string; if not "TypeError" is
+        raised.
+
+      * If there are duplicate keywords, "TypeError" is raised.
+
+      See also:
+
+        Customizing positional arguments in class pattern matching
+
+   II. Once all positional patterns have been converted to keyword
+       patterns, the match proceeds as if there were only keyword
+       patterns.
+
+   For the following built-in types the handling of positional
+   subpatterns is different:
+
+   * "bool"
+
+   * "bytearray"
+
+   * "bytes"
+
+   * "dict"
+
+   * "float"
+
+   * "frozenset"
+
+   * "int"
+
+   * "list"
+
+   * "set"
+
+   * "str"
+
+   * "tuple"
+
+   These classes accept a single positional argument, and the pattern
+   there is matched against the whole object rather than an attribute.
+   For example "int(0|1)" matches the value "0", but not the value
+   "0.0".
+
+In simple terms "CLS(P1, attr=P2)" matches only if the following
+happens:
+
+* "isinstance(<subject>, CLS)"
+
+* convert "P1" to a keyword pattern using "CLS.__match_args__"
+
+* For each keyword argument "attr=P2":
+
+  * "hasattr(<subject>, "attr")"
+
+  * "P2" matches "<subject>.attr"
+
+* … and so on for the corresponding keyword argument/pattern pair.
+
+See also:
+
+  * **PEP 634** – Structural Pattern Matching: Specification
+
+  * **PEP 636** – Structural Pattern Matching: Tutorial
+
+
+Function definitions
+====================
+
+A function definition defines a user-defined function object (see
+section The standard type hierarchy):
+
+   funcdef                   ::= [decorators] "def" funcname [type_params] "(" [parameter_list] ")"
+                                 ["->" expression] ":" suite
+   decorators                ::= decorator+
+   decorator                 ::= "@" assignment_expression NEWLINE
+   parameter_list            ::= defparameter ("," defparameter)* "," "/" ["," [parameter_list_no_posonly]]
+                                 | parameter_list_no_posonly
+   parameter_list_no_posonly ::= defparameter ("," defparameter)* ["," [parameter_list_starargs]]
+                                 | parameter_list_starargs
+   parameter_list_starargs   ::= "*" [star_parameter] ("," defparameter)* ["," ["**" parameter [","]]]
+                                 | "**" parameter [","]
+   parameter                 ::= identifier [":" expression]
+   star_parameter            ::= identifier [":" ["*"] expression]
+   defparameter              ::= parameter ["=" expression]
+   funcname                  ::= identifier
+
+A function definition is an executable statement. Its execution binds
+the function name in the current local namespace to a function object
+(a wrapper around the executable code for the function). This
+function object contains a reference to the current global namespace
+as the global namespace to be used when the function is called.
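+
+As a small illustration (the function name "show" is arbitrary and not
+part of the original text), the saved reference is visible through the
+function’s "__globals__" attribute:
+
+   >>> def show():
+   ...     return "defined at module level"
+   ...
+   >>> show.__globals__ is globals()
+   True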
+ +The function definition does not execute the function body; this gets +executed only when the function is called. [4] + +A function definition may be wrapped by one or more *decorator* +expressions. Decorator expressions are evaluated when the function is +defined, in the scope that contains the function definition. The +result must be a callable, which is invoked with the function object +as the only argument. The returned value is bound to the function name +instead of the function object. Multiple decorators are applied in +nested fashion. For example, the following code + + @f1(arg) + @f2 + def func(): pass + +is roughly equivalent to + + def func(): pass + func = f1(arg)(f2(func)) + +except that the original function is not temporarily bound to the name +"func". + +Changed in version 3.9: Functions may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets between the +function’s name and the opening parenthesis for its parameter list. +This indicates to static type checkers that the function is generic. +At runtime, the type parameters can be retrieved from the function’s +"__type_params__" attribute. See Generic functions for more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +When one or more *parameters* have the form *parameter* "=" +*expression*, the function is said to have “default parameter values.” +For a parameter with a default value, the corresponding *argument* may +be omitted from a call, in which case the parameter’s default value is +substituted. If a parameter has a default value, all following +parameters up until the “"*"” must also have a default value — this is +a syntactic restriction that is not expressed by the grammar. + +**Default parameter values are evaluated from left to right when the +function definition is executed.** This means that the expression is +evaluated once, when the function is defined, and that the same “pre- +computed” value is used for each call. This is especially important +to understand when a default parameter value is a mutable object, such +as a list or a dictionary: if the function modifies the object (e.g. +by appending an item to a list), the default parameter value is in +effect modified. This is generally not what was intended. A way +around this is to use "None" as the default, and explicitly test for +it in the body of the function, e.g.: + + def whats_on_the_telly(penguin=None): + if penguin is None: + penguin = [] + penguin.append("property of the zoo") + return penguin + +Function call semantics are described in more detail in section Calls. +A function call always assigns values to all parameters mentioned in +the parameter list, either from positional arguments, from keyword +arguments, or from default values. If the form “"*identifier"” is +present, it is initialized to a tuple receiving any excess positional +parameters, defaulting to the empty tuple. If the form +“"**identifier"” is present, it is initialized to a new ordered +mapping receiving any excess keyword arguments, defaulting to a new +empty mapping of the same type. Parameters after “"*"” or +“"*identifier"” are keyword-only parameters and may only be passed by +keyword arguments. Parameters before “"/"” are positional-only +parameters and may only be passed by positional arguments. 
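+
+A brief illustrative sketch (the parameter names are made up):
+"pos_only" below may only be passed positionally and "kw_only" may
+only be passed by keyword; violating either restriction raises
+"TypeError":
+
+   >>> def f(pos_only, /, standard, *, kw_only):
+   ...     return pos_only, standard, kw_only
+   ...
+   >>> f(1, 2, kw_only=3)
+   (1, 2, 3)
+   >>> f(1, standard=2, kw_only=3)
+   (1, 2, 3)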
+ +Changed in version 3.8: The "/" function parameter syntax may be used +to indicate positional-only parameters. See **PEP 570** for details. + +Parameters may have an *annotation* of the form “": expression"” +following the parameter name. Any parameter may have an annotation, +even those of the form "*identifier" or "**identifier". (As a special +case, parameters of the form "*identifier" may have an annotation “": +*expression"”.) Functions may have “return” annotation of the form +“"-> expression"” after the parameter list. These annotations can be +any valid Python expression. The presence of annotations does not +change the semantics of a function. See Annotations for more +information on annotations. + +Changed in version 3.11: Parameters of the form “"*identifier"” may +have an annotation “": *expression"”. See **PEP 646**. + +It is also possible to create anonymous functions (functions not bound +to a name), for immediate use in expressions. This uses lambda +expressions, described in section Lambdas. Note that the lambda +expression is merely a shorthand for a simplified function definition; +a function defined in a “"def"” statement can be passed around or +assigned to another name just like a function defined by a lambda +expression. The “"def"” form is actually more powerful since it +allows the execution of multiple statements and annotations. + +**Programmer’s note:** Functions are first-class objects. A “"def"” +statement executed inside a function definition defines a local +function that can be returned or passed around. Free variables used +in the nested function can access the local variables of the function +containing the def. See section Naming and binding for details. + +See also: + + **PEP 3107** - Function Annotations + The original specification for function annotations. + + **PEP 484** - Type Hints + Definition of a standard meaning for annotations: type hints. + + **PEP 526** - Syntax for Variable Annotations + Ability to type hint variable declarations, including class + variables and instance variables. + + **PEP 563** - Postponed Evaluation of Annotations + Support for forward references within annotations by preserving + annotations in a string form at runtime instead of eager + evaluation. + + **PEP 318** - Decorators for Functions and Methods + Function and method decorators were introduced. Class decorators + were introduced in **PEP 3129**. + + +Class definitions +================= + +A class definition defines a class object (see section The standard +type hierarchy): + + classdef ::= [decorators] "class" classname [type_params] [inheritance] ":" suite + inheritance ::= "(" [argument_list] ")" + classname ::= identifier + +A class definition is an executable statement. The inheritance list +usually gives a list of base classes (see Metaclasses for more +advanced uses), so each item in the list should evaluate to a class +object which allows subclassing. Classes without an inheritance list +inherit, by default, from the base class "object"; hence, + + class Foo: + pass + +is equivalent to + + class Foo(object): + pass + +The class’s suite is then executed in a new execution frame (see +Naming and binding), using a newly created local namespace and the +original global namespace. (Usually, the suite contains mostly +function definitions.) When the class’s suite finishes execution, its +execution frame is discarded but its local namespace is saved. 
[5] A +class object is then created using the inheritance list for the base +classes and the saved local namespace for the attribute dictionary. +The class name is bound to this class object in the original local +namespace. + +The order in which attributes are defined in the class body is +preserved in the new class’s "__dict__". Note that this is reliable +only right after the class is created and only for classes that were +defined using the definition syntax. + +Class creation can be customized heavily using metaclasses. + +Classes can also be decorated: just like when decorating functions, + + @f1(arg) + @f2 + class Foo: pass + +is roughly equivalent to + + class Foo: pass + Foo = f1(arg)(f2(Foo)) + +The evaluation rules for the decorator expressions are the same as for +function decorators. The result is then bound to the class name. + +Changed in version 3.9: Classes may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets immediately +after the class’s name. This indicates to static type checkers that +the class is generic. At runtime, the type parameters can be retrieved +from the class’s "__type_params__" attribute. See Generic classes for +more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +**Programmer’s note:** Variables defined in the class definition are +class attributes; they are shared by instances. Instance attributes +can be set in a method with "self.name = value". Both class and +instance attributes are accessible through the notation “"self.name"”, +and an instance attribute hides a class attribute with the same name +when accessed in this way. Class attributes can be used as defaults +for instance attributes, but using mutable values there can lead to +unexpected results. Descriptors can be used to create instance +variables with different implementation details. + +See also: + + **PEP 3115** - Metaclasses in Python 3000 + The proposal that changed the declaration of metaclasses to the + current syntax, and the semantics for how classes with + metaclasses are constructed. + + **PEP 3129** - Class Decorators + The proposal that added class decorators. Function and method + decorators were introduced in **PEP 318**. + + +Coroutines +========== + +Added in version 3.5. + + +Coroutine function definition +----------------------------- + + async_funcdef ::= [decorators] "async" "def" funcname "(" [parameter_list] ")" + ["->" expression] ":" suite + +Execution of Python coroutines can be suspended and resumed at many +points (see *coroutine*). "await" expressions, "async for" and "async +with" can only be used in the body of a coroutine function. + +Functions defined with "async def" syntax are always coroutine +functions, even if they do not contain "await" or "async" keywords. + +It is a "SyntaxError" to use a "yield from" expression inside the body +of a coroutine function. + +An example of a coroutine function: + + async def func(param1, param2): + do_stuff() + await some_coroutine() + +Changed in version 3.7: "await" and "async" are now keywords; +previously they were only treated as such inside the body of a +coroutine function. + + +The "async for" statement +------------------------- + + async_for_stmt ::= "async" for_stmt + +An *asynchronous iterable* provides an "__aiter__" method that +directly returns an *asynchronous iterator*, which can call +asynchronous code in its "__anext__" method. 
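+
+For illustration only (the class name "Countdown" is invented), a
+minimal asynchronous iterator might look as follows; it can be
+consumed with the "async for" statement described next:
+
+   import asyncio
+
+   class Countdown:
+       def __init__(self, start):
+           self.current = start
+
+       def __aiter__(self):
+           # Return the asynchronous iterator itself.
+           return self
+
+       async def __anext__(self):
+           if self.current == 0:
+               raise StopAsyncIteration
+           self.current -= 1
+           return self.current + 1
+
+   async def main():
+       async for n in Countdown(3):
+           print(n)          # prints 3, 2, 1
+
+   asyncio.run(main())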
+ +The "async for" statement allows convenient iteration over +asynchronous iterables. + +The following code: + + async for TARGET in ITER: + SUITE + else: + SUITE2 + +Is semantically equivalent to: + + iter = (ITER) + iter = type(iter).__aiter__(iter) + running = True + + while running: + try: + TARGET = await type(iter).__anext__(iter) + except StopAsyncIteration: + running = False + else: + SUITE + else: + SUITE2 + +See also "__aiter__()" and "__anext__()" for details. + +It is a "SyntaxError" to use an "async for" statement outside the body +of a coroutine function. + + +The "async with" statement +-------------------------- + + async_with_stmt ::= "async" with_stmt + +An *asynchronous context manager* is a *context manager* that is able +to suspend execution in its *enter* and *exit* methods. + +The following code: + + async with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + aenter = type(manager).__aenter__ + aexit = type(manager).__aexit__ + value = await aenter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not await aexit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + await aexit(manager, None, None, None) + +See also "__aenter__()" and "__aexit__()" for details. + +It is a "SyntaxError" to use an "async with" statement outside the +body of a coroutine function. + +See also: + + **PEP 492** - Coroutines with async and await syntax + The proposal that made coroutines a proper standalone concept in + Python, and added supporting syntax. + + +Type parameter lists +==================== + +Added in version 3.12. + +Changed in version 3.13: Support for default values was added (see +**PEP 696**). + + type_params ::= "[" type_param ("," type_param)* "]" + type_param ::= typevar | typevartuple | paramspec + typevar ::= identifier (":" expression)? ("=" expression)? + typevartuple ::= "*" identifier ("=" expression)? + paramspec ::= "**" identifier ("=" expression)? + +Functions (including coroutines), classes and type aliases may contain +a type parameter list: + + def max[T](args: list[T]) -> T: + ... + + async def amax[T](args: list[T]) -> T: + ... + + class Bag[T]: + def __iter__(self) -> Iterator[T]: + ... + + def add(self, arg: T) -> None: + ... + + type ListOrSet[T] = list[T] | set[T] + +Semantically, this indicates that the function, class, or type alias +is generic over a type variable. This information is primarily used by +static type checkers, and at runtime, generic objects behave much like +their non-generic counterparts. + +Type parameters are declared in square brackets ("[]") immediately +after the name of the function, class, or type alias. The type +parameters are accessible within the scope of the generic object, but +not elsewhere. Thus, after a declaration "def func[T](): pass", the +name "T" is not available in the module scope. Below, the semantics of +generic objects are described with more precision. The scope of type +parameters is modeled with a special function (technically, an +annotation scope) that wraps the creation of the generic object. + +Generic functions, classes, and type aliases have a "__type_params__" +attribute listing their type parameters. + +Type parameters come in three kinds: + +* "typing.TypeVar", introduced by a plain name (e.g., "T"). + Semantically, this represents a single type to a type checker. + +* "typing.TypeVarTuple", introduced by a name prefixed with a single + asterisk (e.g., "*Ts"). 
Semantically, this stands for a tuple of any + number of types. + +* "typing.ParamSpec", introduced by a name prefixed with two asterisks + (e.g., "**P"). Semantically, this stands for the parameters of a + callable. + +"typing.TypeVar" declarations can define *bounds* and *constraints* +with a colon (":") followed by an expression. A single expression +after the colon indicates a bound (e.g. "T: int"). Semantically, this +means that the "typing.TypeVar" can only represent types that are a +subtype of this bound. A parenthesized tuple of expressions after the +colon indicates a set of constraints (e.g. "T: (str, bytes)"). Each +member of the tuple should be a type (again, this is not enforced at +runtime). Constrained type variables can only take on one of the types +in the list of constraints. + +For "typing.TypeVar"s declared using the type parameter list syntax, +the bound and constraints are not evaluated when the generic object is +created, but only when the value is explicitly accessed through the +attributes "__bound__" and "__constraints__". To accomplish this, the +bounds or constraints are evaluated in a separate annotation scope. + +"typing.TypeVarTuple"s and "typing.ParamSpec"s cannot have bounds or +constraints. + +All three flavors of type parameters can also have a *default value*, +which is used when the type parameter is not explicitly provided. This +is added by appending a single equals sign ("=") followed by an +expression. Like the bounds and constraints of type variables, the +default value is not evaluated when the object is created, but only +when the type parameter’s "__default__" attribute is accessed. To this +end, the default value is evaluated in a separate annotation scope. If +no default value is specified for a type parameter, the "__default__" +attribute is set to the special sentinel object "typing.NoDefault". + +The following example indicates the full set of allowed type parameter +declarations: + + def overly_generic[ + SimpleTypeVar, + TypeVarWithDefault = int, + TypeVarWithBound: int, + TypeVarWithConstraints: (str, bytes), + *SimpleTypeVarTuple = (int, float), + **SimpleParamSpec = (str, bytearray), + ]( + a: SimpleTypeVar, + b: TypeVarWithDefault, + c: TypeVarWithBound, + d: Callable[SimpleParamSpec, TypeVarWithConstraints], + *e: SimpleTypeVarTuple, + ): ... + + +Generic functions +----------------- + +Generic functions are declared as follows: + + def func[T](arg: T): ... + +This syntax is equivalent to: + + annotation-def TYPE_PARAMS_OF_func(): + T = typing.TypeVar("T") + def func(arg: T): ... + func.__type_params__ = (T,) + return func + func = TYPE_PARAMS_OF_func() + +Here "annotation-def" indicates an annotation scope, which is not +actually bound to any name at runtime. (One other liberty is taken in +the translation: the syntax does not go through attribute access on +the "typing" module, but creates an instance of "typing.TypeVar" +directly.) + +The annotations of generic functions are evaluated within the +annotation scope used for declaring the type parameters, but the +function’s defaults and decorators are not. + +The following example illustrates the scoping rules for these cases, +as well as for additional flavors of type parameters: + + @decorator + def func[T: int, *Ts, **P](*args: *Ts, arg: Callable[P, T] = some_default): + ... 
+ +Except for the lazy evaluation of the "TypeVar" bound, this is +equivalent to: + + DEFAULT_OF_arg = some_default + + annotation-def TYPE_PARAMS_OF_func(): + + annotation-def BOUND_OF_T(): + return int + # In reality, BOUND_OF_T() is evaluated only on demand. + T = typing.TypeVar("T", bound=BOUND_OF_T()) + + Ts = typing.TypeVarTuple("Ts") + P = typing.ParamSpec("P") + + def func(*args: *Ts, arg: Callable[P, T] = DEFAULT_OF_arg): + ... + + func.__type_params__ = (T, Ts, P) + return func + func = decorator(TYPE_PARAMS_OF_func()) + +The capitalized names like "DEFAULT_OF_arg" are not actually bound at +runtime. + + +Generic classes +--------------- + +Generic classes are declared as follows: + + class Bag[T]: ... + +This syntax is equivalent to: + + annotation-def TYPE_PARAMS_OF_Bag(): + T = typing.TypeVar("T") + class Bag(typing.Generic[T]): + __type_params__ = (T,) + ... + return Bag + Bag = TYPE_PARAMS_OF_Bag() + +Here again "annotation-def" (not a real keyword) indicates an +annotation scope, and the name "TYPE_PARAMS_OF_Bag" is not actually +bound at runtime. + +Generic classes implicitly inherit from "typing.Generic". The base +classes and keyword arguments of generic classes are evaluated within +the type scope for the type parameters, and decorators are evaluated +outside that scope. This is illustrated by this example: + + @decorator + class Bag(Base[T], arg=T): ... + +This is equivalent to: + + annotation-def TYPE_PARAMS_OF_Bag(): + T = typing.TypeVar("T") + class Bag(Base[T], typing.Generic[T], arg=T): + __type_params__ = (T,) + ... + return Bag + Bag = decorator(TYPE_PARAMS_OF_Bag()) + + +Generic type aliases +-------------------- + +The "type" statement can also be used to create a generic type alias: + + type ListOrSet[T] = list[T] | set[T] + +Except for the lazy evaluation of the value, this is equivalent to: + + annotation-def TYPE_PARAMS_OF_ListOrSet(): + T = typing.TypeVar("T") + + annotation-def VALUE_OF_ListOrSet(): + return list[T] | set[T] + # In reality, the value is lazily evaluated + return typing.TypeAliasType("ListOrSet", VALUE_OF_ListOrSet(), type_params=(T,)) + ListOrSet = TYPE_PARAMS_OF_ListOrSet() + +Here, "annotation-def" (not a real keyword) indicates an annotation +scope. The capitalized names like "TYPE_PARAMS_OF_ListOrSet" are not +actually bound at runtime. + + +Annotations +=========== + +Changed in version 3.14: Annotations are now lazily evaluated by +default. + +Variables and function parameters may carry *annotations*, created by +adding a colon after the name, followed by an expression: + + x: annotation = 1 + def f(param: annotation): ... + +Functions may also carry a return annotation following an arrow: + + def f() -> annotation: ... + +Annotations are conventionally used for *type hints*, but this is not +enforced by the language, and in general annotations may contain +arbitrary expressions. The presence of annotations does not change the +runtime semantics of the code, except if some mechanism is used that +introspects and uses the annotations (such as "dataclasses" or +"functools.singledispatch()"). + +By default, annotations are lazily evaluated in a annotation scope. +This means that they are not evaluated when the code containing the +annotation is evaluated. Instead, the interpreter saves information +that can be used to evaluate the annotation later if requested. The +"annotationlib" module provides tools for evaluating annotations. 
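+
+For illustration (assuming the default lazy-evaluation semantics just
+described; the name "Widget" is arbitrary), an annotation may refer to
+a name that is defined only later, because it is not evaluated until
+the annotations are accessed:
+
+   >>> def f(param: Widget): ...   # does not evaluate "Widget" here
+   >>> class Widget: ...
+   >>> f.__annotations__["param"] is Widget   # evaluated on access
+   True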
+ +If the future statement "from __future__ import annotations" is +present, all annotations are instead stored as strings: + + >>> from __future__ import annotations + >>> def f(param: annotation): ... + >>> f.__annotations__ + {'param': 'annotation'} + +-[ Footnotes ]- + +[1] The exception is propagated to the invocation stack unless there + is a "finally" clause which happens to raise another exception. + That new exception causes the old one to be lost. + +[2] In pattern matching, a sequence is defined as one of the + following: + + * a class that inherits from "collections.abc.Sequence" + + * a Python class that has been registered as + "collections.abc.Sequence" + + * a builtin class that has its (CPython) "Py_TPFLAGS_SEQUENCE" bit + set + + * a class that inherits from any of the above + + The following standard library classes are sequences: + + * "array.array" + + * "collections.deque" + + * "list" + + * "memoryview" + + * "range" + + * "tuple" + + Note: + + Subject values of type "str", "bytes", and "bytearray" do not + match sequence patterns. + +[3] In pattern matching, a mapping is defined as one of the following: + + * a class that inherits from "collections.abc.Mapping" + + * a Python class that has been registered as + "collections.abc.Mapping" + + * a builtin class that has its (CPython) "Py_TPFLAGS_MAPPING" bit + set + + * a class that inherits from any of the above + + The standard library classes "dict" and "types.MappingProxyType" + are mappings. + +[4] A string literal appearing as the first statement in the function + body is transformed into the function’s "__doc__" attribute and + therefore the function’s *docstring*. + +[5] A string literal appearing as the first statement in the class + body is transformed into the namespace’s "__doc__" item and + therefore the class’s *docstring*. +''', + 'context-managers': r'''With Statement Context Managers +******************************* + +A *context manager* is an object that defines the runtime context to +be established when executing a "with" statement. The context manager +handles the entry into, and the exit from, the desired runtime context +for the execution of the block of code. Context managers are normally +invoked using the "with" statement (described in section The with +statement), but can also be used by directly invoking their methods. + +Typical uses of context managers include saving and restoring various +kinds of global state, locking and unlocking resources, closing opened +files, etc. + +For more information on context managers, see Context Manager Types. +The "object" class itself does not provide the context manager +methods. + +object.__enter__(self) + + Enter the runtime context related to this object. The "with" + statement will bind this method’s return value to the target(s) + specified in the "as" clause of the statement, if any. + +object.__exit__(self, exc_type, exc_value, traceback) + + Exit the runtime context related to this object. The parameters + describe the exception that caused the context to be exited. If the + context was exited without an exception, all three arguments will + be "None". + + If an exception is supplied, and the method wishes to suppress the + exception (i.e., prevent it from being propagated), it should + return a true value. Otherwise, the exception will be processed + normally upon exit from this method. + + Note that "__exit__()" methods should not reraise the passed-in + exception; this is the caller’s responsibility. 
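+
+A minimal sketch of a context manager that saves and restores global
+state (the class name "redirect_stdout_to" is invented here; the
+standard library already provides "contextlib.redirect_stdout" for
+this purpose):
+
+   import io
+   import sys
+
+   class redirect_stdout_to:
+       def __init__(self, new_target):
+           self.new_target = new_target
+
+       def __enter__(self):
+           self.saved = sys.stdout
+           sys.stdout = self.new_target
+           return self.new_target      # bound to the "as" target, if any
+
+       def __exit__(self, exc_type, exc_value, traceback):
+           sys.stdout = self.saved
+           return False                # do not suppress exceptions
+
+   buffer = io.StringIO()
+   with redirect_stdout_to(buffer) as target:
+       print("captured")               # buffer.getvalue() == "captured\n"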
+ +See also: + + **PEP 343** - The “with” statement + The specification, background, and examples for the Python "with" + statement. +''', + 'continue': r'''The "continue" statement +************************ + + continue_stmt ::= "continue" + +"continue" may only occur syntactically nested in a "for" or "while" +loop, but not nested in a function or class definition within that +loop. It continues with the next cycle of the nearest enclosing loop. + +When "continue" passes control out of a "try" statement with a +"finally" clause, that "finally" clause is executed before really +starting the next loop cycle. +''', + 'conversions': r'''Arithmetic conversions +********************** + +When a description of an arithmetic operator below uses the phrase +“the numeric arguments are converted to a common real type”, this +means that the operator implementation for built-in types works as +follows: + +* If both arguments are complex numbers, no conversion is performed; + +* if either argument is a complex or a floating-point number, the + other is converted to a floating-point number; + +* otherwise, both must be integers and no conversion is necessary. + +Some additional rules apply for certain operators (e.g., a string as a +left argument to the ‘%’ operator). Extensions must define their own +conversion behavior. +''', + 'customization': r'''Basic customization +******************* + +object.__new__(cls[, ...]) + + Called to create a new instance of class *cls*. "__new__()" is a + static method (special-cased so you need not declare it as such) + that takes the class of which an instance was requested as its + first argument. The remaining arguments are those passed to the + object constructor expression (the call to the class). The return + value of "__new__()" should be the new object instance (usually an + instance of *cls*). + + Typical implementations create a new instance of the class by + invoking the superclass’s "__new__()" method using + "super().__new__(cls[, ...])" with appropriate arguments and then + modifying the newly created instance as necessary before returning + it. + + If "__new__()" is invoked during object construction and it returns + an instance of *cls*, then the new instance’s "__init__()" method + will be invoked like "__init__(self[, ...])", where *self* is the + new instance and the remaining arguments are the same as were + passed to the object constructor. + + If "__new__()" does not return an instance of *cls*, then the new + instance’s "__init__()" method will not be invoked. + + "__new__()" is intended mainly to allow subclasses of immutable + types (like int, str, or tuple) to customize instance creation. It + is also commonly overridden in custom metaclasses in order to + customize class creation. + +object.__init__(self[, ...]) + + Called after the instance has been created (by "__new__()"), but + before it is returned to the caller. The arguments are those + passed to the class constructor expression. If a base class has an + "__init__()" method, the derived class’s "__init__()" method, if + any, must explicitly call it to ensure proper initialization of the + base class part of the instance; for example: + "super().__init__([args...])". + + Because "__new__()" and "__init__()" work together in constructing + objects ("__new__()" to create it, and "__init__()" to customize + it), no non-"None" value may be returned by "__init__()"; doing so + will cause a "TypeError" to be raised at runtime. 
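+
+A short illustrative sketch (the class name "UpperStr" is made up) of
+overriding "__new__()" to customize creation of an immutable type:
+
+   >>> class UpperStr(str):
+   ...     def __new__(cls, value):
+   ...         # str is immutable, so the value must be chosen in
+   ...         # __new__(); __init__() could not change it afterwards.
+   ...         return super().__new__(cls, str(value).upper())
+   ...
+   >>> UpperStr("spam")
+   'SPAM'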
+ +object.__del__(self) + + Called when the instance is about to be destroyed. This is also + called a finalizer or (improperly) a destructor. If a base class + has a "__del__()" method, the derived class’s "__del__()" method, + if any, must explicitly call it to ensure proper deletion of the + base class part of the instance. + + It is possible (though not recommended!) for the "__del__()" method + to postpone destruction of the instance by creating a new reference + to it. This is called object *resurrection*. It is + implementation-dependent whether "__del__()" is called a second + time when a resurrected object is about to be destroyed; the + current *CPython* implementation only calls it once. + + It is not guaranteed that "__del__()" methods are called for + objects that still exist when the interpreter exits. + "weakref.finalize" provides a straightforward way to register a + cleanup function to be called when an object is garbage collected. + + Note: + + "del x" doesn’t directly call "x.__del__()" — the former + decrements the reference count for "x" by one, and the latter is + only called when "x"’s reference count reaches zero. + + **CPython implementation detail:** It is possible for a reference + cycle to prevent the reference count of an object from going to + zero. In this case, the cycle will be later detected and deleted + by the *cyclic garbage collector*. A common cause of reference + cycles is when an exception has been caught in a local variable. + The frame’s locals then reference the exception, which references + its own traceback, which references the locals of all frames caught + in the traceback. + + See also: Documentation for the "gc" module. + + Warning: + + Due to the precarious circumstances under which "__del__()" + methods are invoked, exceptions that occur during their execution + are ignored, and a warning is printed to "sys.stderr" instead. + In particular: + + * "__del__()" can be invoked when arbitrary code is being + executed, including from any arbitrary thread. If "__del__()" + needs to take a lock or invoke any other blocking resource, it + may deadlock as the resource may already be taken by the code + that gets interrupted to execute "__del__()". + + * "__del__()" can be executed during interpreter shutdown. As a + consequence, the global variables it needs to access (including + other modules) may already have been deleted or set to "None". + Python guarantees that globals whose name begins with a single + underscore are deleted from their module before other globals + are deleted; if no other references to such globals exist, this + may help in assuring that imported modules are still available + at the time when the "__del__()" method is called. + +object.__repr__(self) + + Called by the "repr()" built-in function to compute the “official” + string representation of an object. If at all possible, this + should look like a valid Python expression that could be used to + recreate an object with the same value (given an appropriate + environment). If this is not possible, a string of the form + "<...some useful description...>" should be returned. The return + value must be a string object. If a class defines "__repr__()" but + not "__str__()", then "__repr__()" is also used when an “informal” + string representation of instances of that class is required. + + This is typically used for debugging, so it is important that the + representation is information-rich and unambiguous. A default + implementation is provided by the "object" class itself. 
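+
+An illustrative "__repr__()" that looks like a valid expression which
+could recreate the object (the class name "Point" is invented for this
+sketch):
+
+   >>> class Point:
+   ...     def __init__(self, x, y):
+   ...         self.x, self.y = x, y
+   ...     def __repr__(self):
+   ...         return f"Point({self.x!r}, {self.y!r})"
+   ...
+   >>> Point(1, 2)
+   Point(1, 2)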
+
+object.__str__(self)
+
+   Called by "str(object)", the default "__format__()" implementation,
+   and the built-in function "print()", to compute the “informal” or
+   nicely printable string representation of an object. The return
+   value must be a str object.
+
+   This method differs from "object.__repr__()" in that there is no
+   expectation that "__str__()" return a valid Python expression: a
+   more convenient or concise representation can be used.
+
+   The default implementation defined by the built-in type "object"
+   calls "object.__repr__()".
+
+object.__bytes__(self)
+
+   Called by bytes to compute a byte-string representation of an
+   object. This should return a "bytes" object. The "object" class
+   itself does not provide this method.
+
+object.__format__(self, format_spec)
+
+   Called by the "format()" built-in function, and by extension,
+   evaluation of formatted string literals and the "str.format()"
+   method, to produce a “formatted” string representation of an
+   object. The *format_spec* argument is a string that contains a
+   description of the formatting options desired. The interpretation
+   of the *format_spec* argument is up to the type implementing
+   "__format__()", however most classes will either delegate
+   formatting to one of the built-in types, or use a similar
+   formatting option syntax.
+
+   See Format Specification Mini-Language for a description of the
+   standard formatting syntax.
+
+   The return value must be a string object.
+
+   The default implementation by the "object" class should be given an
+   empty *format_spec* string. It delegates to "__str__()".
+
+   Changed in version 3.4: The __format__ method of "object" itself
+   raises a "TypeError" if passed any non-empty string.
+
+   Changed in version 3.7: "object.__format__(x, '')" is now
+   equivalent to "str(x)" rather than "format(str(x), '')".
+
+object.__lt__(self, other)
+object.__le__(self, other)
+object.__eq__(self, other)
+object.__ne__(self, other)
+object.__gt__(self, other)
+object.__ge__(self, other)
+
+   These are the so-called “rich comparison” methods. The
+   correspondence between operator symbols and method names is as
+   follows: "x<y" calls "x.__lt__(y)", "x<=y" calls "x.__le__(y)",
+   "x==y" calls "x.__eq__(y)", "x!=y" calls "x.__ne__(y)", "x>y" calls
+   "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".
+
+   A rich comparison method may return the singleton "NotImplemented"
+   if it does not implement the operation for a given pair of
+   arguments. By convention, "False" and "True" are returned for a
+   successful comparison. However, these methods can return any value,
+   so if the comparison operator is used in a Boolean context (e.g.,
+   in the condition of an "if" statement), Python will call "bool()"
+   on the value to determine if the result is true or false.
+
+   By default, "object" implements "__eq__()" by using "is", returning
+   "NotImplemented" in the case of a false comparison: "True if x is y
+   else NotImplemented". For "__ne__()", by default it delegates to
+   "__eq__()" and inverts the result unless it is "NotImplemented".
+   There are no other implied relationships among the comparison
+   operators or default implementations; for example, the truth of
+   "(x<y or x==y)" does not imply "x<=y". To automatically generate
+   ordering operations from a single root operation, see
+   "functools.total_ordering()".
+
+object.__hash__(self)
+
+   Called by built-in function "hash()" and for operations on members
+   of hashed collections including "set", "frozenset", and "dict".
+   The "__hash__()" method should return an integer. The only required
+   property is that objects which compare equal have the same hash
+   value; it is advised to mix together the hash values of the
+   components of the object that also play a part in comparison of
+   objects by packing them into a tuple and hashing the tuple.
+   Example:
+
+      def __hash__(self):
+          return hash((self.name, self.nick, self.color))
+
+   A class that overrides "__eq__()" and does not define "__hash__()"
+   will have its "__hash__()" implicitly set to "None". When the
+   "__hash__()" method of a class is "None", instances of the class
+   will raise an appropriate "TypeError" when a program attempts to
+   retrieve their hash value, and will also be correctly identified as
+   unhashable when checking "isinstance(obj,
+   collections.abc.Hashable)".
+
+   If a class that overrides "__eq__()" needs to retain the hash
+   implementation from a parent class, the interpreter must be told
+   this explicitly by setting "__hash__ = <ParentClass>.__hash__".
+
+   If a class that does not override "__eq__()" wishes to suppress
+   hash support, it should include "__hash__ = None" in the class
+   definition. A class which defines its own "__hash__()" that
+   explicitly raises a "TypeError" would be incorrectly identified as
+   hashable by an "isinstance(obj, collections.abc.Hashable)" call.
+
+   Note:
+
+     By default, the "__hash__()" values of str and bytes objects are
+     “salted” with an unpredictable random value. Although they remain
+     constant within an individual Python process, they are not
+     predictable between repeated invocations of Python. This is
+     intended to provide protection against a denial-of-service caused
+     by carefully chosen inputs that exploit the worst case
+     performance of a dict insertion, *O*(*n*^2) complexity. See
+     http://ocert.org/advisories/ocert-2011-003.html for details.
+     Changing hash values affects the iteration order of sets. Python
+     has never made guarantees about this ordering (and it typically
+     varies between 32-bit and 64-bit builds). See also
+     "PYTHONHASHSEED".
+
+   Changed in version 3.3: Hash randomization is enabled by default.
+
+object.__bool__(self)
+
+   Called to implement truth value testing and the built-in operation
+   "bool()"; should return "False" or "True". When this method is not
+   defined, "__len__()" is called, if it is defined, and the object is
+   considered true if its result is nonzero. If a class defines
+   neither "__len__()" nor "__bool__()" (which is true of the "object"
+   class itself), all its instances are considered true.
+''',
+ 'debugger': r'''"pdb" — The Python Debugger
+***************************
+
+**Source code:** Lib/pdb.py
+
+======================================================================
+
+The module "pdb" defines an interactive source code debugger for
+Python programs. It supports setting (conditional) breakpoints and
+single stepping at the source line level, inspection of stack frames,
+source code listing, and evaluation of arbitrary Python code in the
+context of any stack frame. It also supports post-mortem debugging
+and can be called under program control.
+
+The debugger is extensible – it is actually defined as the class
+"Pdb". This is currently undocumented but easily understood by reading
+the source. The extension interface uses the modules "bdb" and "cmd".
+
+See also:
+
+  Module "faulthandler"
+     Used to dump Python tracebacks explicitly, on a fault, after a
+     timeout, or on a user signal.
+
+  Module "traceback"
+     Standard interface to extract, format and print stack traces of
+     Python programs.
+
+The typical usage to break into the debugger is to insert:
+
+   import pdb; pdb.set_trace()
+
+Or:
+
+   breakpoint()
+
+at the location you want to break into the debugger, and then run the
+program. You can then step through the code following this statement,
+and continue running without the debugger using the "continue"
+command.
+
+Changed in version 3.7: The built-in "breakpoint()", when called with
+defaults, can be used instead of "import pdb; pdb.set_trace()".
+
+   def double(x):
+       breakpoint()
+       return x * 2
+   val = 3
+   print(f"{val} * 2 is {double(val)}")
+
+The debugger’s prompt is "(Pdb)", which is the indicator that you are
+in debug mode:
+
+   > ...(2)double()
+   -> breakpoint()
+   (Pdb) p x
+   3
+   (Pdb) continue
+   3 * 2 is 6
+
+Changed in version 3.3: Tab-completion via the "readline" module is
+available for commands and command arguments, e.g. the current global
+and local names are offered as arguments of the "p" command.
+
+You can also invoke "pdb" from the command line to debug other
+scripts. For example:
+
+   python -m pdb myscript.py
+
+When invoked as a module, pdb will automatically enter post-mortem
+debugging if the program being debugged exits abnormally. After post-
+mortem debugging (or after normal exit of the program), pdb will
+restart the program. Automatic restarting preserves pdb’s state (such
+as breakpoints) and in most cases is more useful than quitting the
+debugger upon program’s exit.
+
+Changed in version 3.2: Added the "-c" option to execute commands as
+if given in a ".pdbrc" file; see Debugger Commands.
+
+Changed in version 3.7: Added the "-m" option to execute modules
+similar to the way "python -m" does. As with a script, the debugger
+will pause execution just before the first line of the module.
+
+Typical usage to execute a statement under control of the debugger is:
+
+   >>> import pdb
+   >>> def f(x):
+   ...     print(1 / x)
+   >>> pdb.run("f(2)")
+   > <string>(1)<module>()
+   (Pdb) continue
+   0.5
+   >>>
+
+The typical usage to inspect a crashed program is:
+
+   >>> import pdb
+   >>> def f(x):
+   ...     print(1 / x)
+   ...
+   >>> f(0)
+   Traceback (most recent call last):
+     File "<stdin>", line 1, in <module>
+     File "<stdin>", line 2, in f
+   ZeroDivisionError: division by zero
+   >>> pdb.pm()
+   > <stdin>(2)f()
+   (Pdb) p x
+   0
+   (Pdb)
+
+Changed in version 3.13: The implementation of **PEP 667** means that
+name assignments made via "pdb" will immediately affect the active
+scope, even when running inside an *optimized scope*.
+
+The module defines the following functions; each enters the debugger
+in a slightly different way:
+
+pdb.run(statement, globals=None, locals=None)
+
+   Execute the *statement* (given as a string or a code object) under
+   debugger control. The debugger prompt appears before any code is
+   executed; you can set breakpoints and type "continue", or you can
+   step through the statement using "step" or "next" (all these
+   commands are explained below). The optional *globals* and *locals*
+   arguments specify the environment in which the code is executed; by
+   default the dictionary of the module "__main__" is used. (See the
+   explanation of the built-in "exec()" or "eval()" functions.)
+
+pdb.runeval(expression, globals=None, locals=None)
+
+   Evaluate the *expression* (given as a string or a code object)
+   under debugger control. When "runeval()" returns, it returns the
+   value of the *expression*. Otherwise this function is similar to
+   "run()".
+
+pdb.runcall(function, *args, **kwds)
+
+   Call the *function* (a function or method object, not a string)
+   with the given arguments. When "runcall()" returns, it returns
+   whatever the function call returned. The debugger prompt appears
+   as soon as the function is entered.
+
+pdb.set_trace(*, header=None, commands=None)
+
+   Enter the debugger at the calling stack frame. This is useful to
+   hard-code a breakpoint at a given point in a program, even if the
+   code is not otherwise being debugged (e.g. when an assertion
+   fails). If given, *header* is printed to the console just before
+   debugging begins. The *commands* argument, if given, is a list of
+   commands to execute when the debugger starts.
+
+   Changed in version 3.7: The keyword-only argument *header*.
+
+   Changed in version 3.13: "set_trace()" will enter the debugger
+   immediately, rather than on the next line of code to be executed.
+
+   Added in version 3.14: The *commands* argument.
+
+pdb.post_mortem(t=None)
+
+   Enter post-mortem debugging of the given exception or traceback
+   object. If no value is given, it uses the exception that is
+   currently being handled, or raises "ValueError" if there isn’t one.
+
+   Changed in version 3.13: Support for exception objects was added.
+
+pdb.pm()
+
+   Enter post-mortem debugging of the exception found in
+   "sys.last_exc".
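+
+For illustration (the function "divide" and its arguments are made
+up), a call can be debugged by handing a callable to "runcall()":
+
+   import pdb
+
+   def divide(a, b):
+       return a / b
+
+   # The debugger prompt appears as soon as divide() is entered.
+   pdb.runcall(divide, 6, 3)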
+ +The "run*" functions and "set_trace()" are aliases for instantiating +the "Pdb" class and calling the method of the same name. If you want +to access further features, you have to do this yourself: + +class pdb.Pdb(completekey='tab', stdin=None, stdout=None, skip=None, nosigint=False, readrc=True, mode=None) + + "Pdb" is the debugger class. + + The *completekey*, *stdin* and *stdout* arguments are passed to the + underlying "cmd.Cmd" class; see the description there. + + The *skip* argument, if given, must be an iterable of glob-style + module name patterns. The debugger will not step into frames that + originate in a module that matches one of these patterns. [1] + + By default, Pdb sets a handler for the SIGINT signal (which is sent + when the user presses "Ctrl"-"C" on the console) when you give a + "continue" command. This allows you to break into the debugger + again by pressing "Ctrl"-"C". If you want Pdb not to touch the + SIGINT handler, set *nosigint* to true. + + The *readrc* argument defaults to true and controls whether Pdb + will load .pdbrc files from the filesystem. + + The *mode* argument specifies how the debugger was invoked. It + impacts the workings of some debugger commands. Valid values are + "'inline'" (used by the breakpoint() builtin), "'cli'" (used by the + command line invocation) or "None" (for backwards compatible + behaviour, as before the *mode* argument was added). + + Example call to enable tracing with *skip*: + + import pdb; pdb.Pdb(skip=['django.*']).set_trace() + + Raises an auditing event "pdb.Pdb" with no arguments. + + Changed in version 3.1: Added the *skip* parameter. + + Changed in version 3.2: Added the *nosigint* parameter. Previously, + a SIGINT handler was never set by Pdb. + + Changed in version 3.6: The *readrc* argument. + + Added in version 3.14: Added the *mode* argument. + + run(statement, globals=None, locals=None) + runeval(expression, globals=None, locals=None) + runcall(function, *args, **kwds) + set_trace() + + See the documentation for the functions explained above. + + +Debugger Commands +================= + +The commands recognized by the debugger are listed below. Most +commands can be abbreviated to one or two letters as indicated; e.g. +"h(elp)" means that either "h" or "help" can be used to enter the help +command (but not "he" or "hel", nor "H" or "Help" or "HELP"). +Arguments to commands must be separated by whitespace (spaces or +tabs). Optional arguments are enclosed in square brackets ("[]") in +the command syntax; the square brackets must not be typed. +Alternatives in the command syntax are separated by a vertical bar +("|"). + +Entering a blank line repeats the last command entered. Exception: if +the last command was a "list" command, the next 11 lines are listed. + +Commands that the debugger doesn’t recognize are assumed to be Python +statements and are executed in the context of the program being +debugged. Python statements can also be prefixed with an exclamation +point ("!"). This is a powerful way to inspect the program being +debugged; it is even possible to change a variable or call a function. +When an exception occurs in such a statement, the exception name is +printed but the debugger’s state is not changed. + +Changed in version 3.13: Expressions/Statements whose prefix is a pdb +command are now correctly identified and executed. + +The debugger supports aliases. Aliases can have parameters which +allows one a certain level of adaptability to the context under +examination. 
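+
+For instance, a hypothetical alias "sz" that prints the length of its
+argument could be defined and used as follows (the "alias" command
+itself is documented below):
+
+   (Pdb) alias sz p len(%1)
+   (Pdb) data = list(range(5))
+   (Pdb) sz data
+   5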
+ +Multiple commands may be entered on a single line, separated by ";;". +(A single ";" is not used as it is the separator for multiple commands +in a line that is passed to the Python parser.) No intelligence is +applied to separating the commands; the input is split at the first +";;" pair, even if it is in the middle of a quoted string. A +workaround for strings with double semicolons is to use implicit +string concatenation "';'';'" or "";"";"". + +To set a temporary global variable, use a *convenience variable*. A +*convenience variable* is a variable whose name starts with "$". For +example, "$foo = 1" sets a global variable "$foo" which you can use in +the debugger session. The *convenience variables* are cleared when +the program resumes execution so it’s less likely to interfere with +your program compared to using normal variables like "foo = 1". + +There are three preset *convenience variables*: + +* "$_frame": the current frame you are debugging + +* "$_retval": the return value if the frame is returning + +* "$_exception": the exception if the frame is raising an exception + +Added in version 3.12: Added the *convenience variable* feature. + +If a file ".pdbrc" exists in the user’s home directory or in the +current directory, it is read with "'utf-8'" encoding and executed as +if it had been typed at the debugger prompt, with the exception that +empty lines and lines starting with "#" are ignored. This is +particularly useful for aliases. If both files exist, the one in the +home directory is read first and aliases defined there can be +overridden by the local file. + +Changed in version 3.2: ".pdbrc" can now contain commands that +continue debugging, such as "continue" or "next". Previously, these +commands had no effect. + +Changed in version 3.11: ".pdbrc" is now read with "'utf-8'" encoding. +Previously, it was read with the system locale encoding. + +h(elp) [command] + + Without argument, print the list of available commands. With a + *command* as argument, print help about that command. "help pdb" + displays the full documentation (the docstring of the "pdb" + module). Since the *command* argument must be an identifier, "help + exec" must be entered to get help on the "!" command. + +w(here) [count] + + Print a stack trace, with the most recent frame at the bottom. if + *count* is 0, print the current frame entry. If *count* is + negative, print the least recent - *count* frames. If *count* is + positive, print the most recent *count* frames. An arrow (">") + indicates the current frame, which determines the context of most + commands. + + Changed in version 3.14: *count* argument is added. + +d(own) [count] + + Move the current frame *count* (default one) levels down in the + stack trace (to a newer frame). + +u(p) [count] + + Move the current frame *count* (default one) levels up in the stack + trace (to an older frame). + +b(reak) [([filename:]lineno | function) [, condition]] + + With a *lineno* argument, set a break at line *lineno* in the + current file. The line number may be prefixed with a *filename* and + a colon, to specify a breakpoint in another file (possibly one that + hasn’t been loaded yet). The file is searched on "sys.path". + Acceptable forms of *filename* are "/abspath/to/file.py", + "relpath/file.py", "module" and "package.module". + + With a *function* argument, set a break at the first executable + statement within that function. *function* can be any expression + that evaluates to a function in the current namespace. 
+ + If a second argument is present, it is an expression which must + evaluate to true before the breakpoint is honored. + + Without argument, list all breaks, including for each breakpoint, + the number of times that breakpoint has been hit, the current + ignore count, and the associated condition if any. + + Each breakpoint is assigned a number to which all the other + breakpoint commands refer. + +tbreak [([filename:]lineno | function) [, condition]] + + Temporary breakpoint, which is removed automatically when it is + first hit. The arguments are the same as for "break". + +cl(ear) [filename:lineno | bpnumber ...] + + With a *filename:lineno* argument, clear all the breakpoints at + this line. With a space separated list of breakpoint numbers, clear + those breakpoints. Without argument, clear all breaks (but first + ask confirmation). + +disable bpnumber [bpnumber ...] + + Disable the breakpoints given as a space separated list of + breakpoint numbers. Disabling a breakpoint means it cannot cause + the program to stop execution, but unlike clearing a breakpoint, it + remains in the list of breakpoints and can be (re-)enabled. + +enable bpnumber [bpnumber ...] + + Enable the breakpoints specified. + +ignore bpnumber [count] + + Set the ignore count for the given breakpoint number. If *count* + is omitted, the ignore count is set to 0. A breakpoint becomes + active when the ignore count is zero. When non-zero, the *count* + is decremented each time the breakpoint is reached and the + breakpoint is not disabled and any associated condition evaluates + to true. + +condition bpnumber [condition] + + Set a new *condition* for the breakpoint, an expression which must + evaluate to true before the breakpoint is honored. If *condition* + is absent, any existing condition is removed; i.e., the breakpoint + is made unconditional. + +commands [bpnumber] + + Specify a list of commands for breakpoint number *bpnumber*. The + commands themselves appear on the following lines. Type a line + containing just "end" to terminate the commands. An example: + + (Pdb) commands 1 + (com) p some_variable + (com) end + (Pdb) + + To remove all commands from a breakpoint, type "commands" and + follow it immediately with "end"; that is, give no commands. + + With no *bpnumber* argument, "commands" refers to the last + breakpoint set. + + You can use breakpoint commands to start your program up again. + Simply use the "continue" command, or "step", or any other command + that resumes execution. + + Specifying any command resuming execution (currently "continue", + "step", "next", "return", "until", "jump", "quit" and their + abbreviations) terminates the command list (as if that command was + immediately followed by end). This is because any time you resume + execution (even with a simple next or step), you may encounter + another breakpoint—which could have its own command list, leading + to ambiguities about which list to execute. + + If the list of commands contains the "silent" command, or a command + that resumes execution, then the breakpoint message containing + information about the frame is not displayed. + + Changed in version 3.14: Frame information will not be displayed if + a command that resumes execution is present in the command list. + +s(tep) + + Execute the current line, stop at the first possible occasion + (either in a function that is called or on the next line in the + current function). + +n(ext) + + Continue execution until the next line in the current function is + reached or it returns. 
(The difference between "next" and "step" + is that "step" stops inside a called function, while "next" + executes called functions at (nearly) full speed, only stopping at + the next line in the current function.) + +unt(il) [lineno] + + Without argument, continue execution until the line with a number + greater than the current one is reached. + + With *lineno*, continue execution until a line with a number + greater or equal to *lineno* is reached. In both cases, also stop + when the current frame returns. + + Changed in version 3.2: Allow giving an explicit line number. + +r(eturn) + + Continue execution until the current function returns. + +c(ont(inue)) + + Continue execution, only stop when a breakpoint is encountered. + +j(ump) lineno + + Set the next line that will be executed. Only available in the + bottom-most frame. This lets you jump back and execute code again, + or jump forward to skip code that you don’t want to run. + + It should be noted that not all jumps are allowed – for instance it + is not possible to jump into the middle of a "for" loop or out of a + "finally" clause. + +l(ist) [first[, last]] + + List source code for the current file. Without arguments, list 11 + lines around the current line or continue the previous listing. + With "." as argument, list 11 lines around the current line. With + one argument, list 11 lines around at that line. With two + arguments, list the given range; if the second argument is less + than the first, it is interpreted as a count. + + The current line in the current frame is indicated by "->". If an + exception is being debugged, the line where the exception was + originally raised or propagated is indicated by ">>", if it differs + from the current line. + + Changed in version 3.2: Added the ">>" marker. + +ll | longlist + + List all source code for the current function or frame. + Interesting lines are marked as for "list". + + Added in version 3.2. + +a(rgs) + + Print the arguments of the current function and their current + values. + +p expression + + Evaluate *expression* in the current context and print its value. + + Note: + + "print()" can also be used, but is not a debugger command — this + executes the Python "print()" function. + +pp expression + + Like the "p" command, except the value of *expression* is pretty- + printed using the "pprint" module. + +whatis expression + + Print the type of *expression*. + +source expression + + Try to get source code of *expression* and display it. + + Added in version 3.2. + +display [expression] + + Display the value of *expression* if it changed, each time + execution stops in the current frame. + + Without *expression*, list all display expressions for the current + frame. + + Note: + + Display evaluates *expression* and compares to the result of the + previous evaluation of *expression*, so when the result is + mutable, display may not be able to pick up the changes. 
+ + Example: + + lst = [] + breakpoint() + pass + lst.append(1) + print(lst) + + Display won’t realize "lst" has been changed because the result of + evaluation is modified in place by "lst.append(1)" before being + compared: + + > example.py(3)() + -> pass + (Pdb) display lst + display lst: [] + (Pdb) n + > example.py(4)() + -> lst.append(1) + (Pdb) n + > example.py(5)() + -> print(lst) + (Pdb) + + You can do some tricks with copy mechanism to make it work: + + > example.py(3)() + -> pass + (Pdb) display lst[:] + display lst[:]: [] + (Pdb) n + > example.py(4)() + -> lst.append(1) + (Pdb) n + > example.py(5)() + -> print(lst) + display lst[:]: [1] [old: []] + (Pdb) + + Added in version 3.2. + +undisplay [expression] + + Do not display *expression* anymore in the current frame. Without + *expression*, clear all display expressions for the current frame. + + Added in version 3.2. + +interact + + Start an interactive interpreter (using the "code" module) in a new + global namespace initialised from the local and global namespaces + for the current scope. Use "exit()" or "quit()" to exit the + interpreter and return to the debugger. + + Note: + + As "interact" creates a new dedicated namespace for code + execution, assignments to variables will not affect the original + namespaces. However, modifications to any referenced mutable + objects will be reflected in the original namespaces as usual. + + Added in version 3.2. + + Changed in version 3.13: "exit()" and "quit()" can be used to exit + the "interact" command. + + Changed in version 3.13: "interact" directs its output to the + debugger’s output channel rather than "sys.stderr". + +alias [name [command]] + + Create an alias called *name* that executes *command*. The + *command* must *not* be enclosed in quotes. Replaceable parameters + can be indicated by "%1", "%2", … and "%9", while "%*" is replaced + by all the parameters. If *command* is omitted, the current alias + for *name* is shown. If no arguments are given, all aliases are + listed. + + Aliases may be nested and can contain anything that can be legally + typed at the pdb prompt. Note that internal pdb commands *can* be + overridden by aliases. Such a command is then hidden until the + alias is removed. Aliasing is recursively applied to the first + word of the command line; all other words in the line are left + alone. + + As an example, here are two useful aliases (especially when placed + in the ".pdbrc" file): + + # Print instance variables (usage "pi classInst") + alias pi for k in %1.__dict__.keys(): print(f"%1.{k} = {%1.__dict__[k]}") + # Print instance variables in self + alias ps pi self + +unalias name + + Delete the specified alias *name*. + +! statement + + Execute the (one-line) *statement* in the context of the current + stack frame. The exclamation point can be omitted unless the first + word of the statement resembles a debugger command, e.g.: + + (Pdb) ! n=42 + (Pdb) + + To set a global variable, you can prefix the assignment command + with a "global" statement on the same line, e.g.: + + (Pdb) global list_options; list_options = ['-l'] + (Pdb) + +run [args ...] +restart [args ...] + + Restart the debugged Python program. If *args* is supplied, it is + split with "shlex" and the result is used as the new "sys.argv". + History, breakpoints, actions and debugger options are preserved. + "restart" is an alias for "run". + + Changed in version 3.14: "run" and "restart" commands are disabled + when the debugger is invoked in "'inline'" mode. 
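+
+   For example, to restart the program so that it sees a new set of
+   command line arguments in "sys.argv" (an illustrative invocation;
+   the file name is made up):
+
+      (Pdb) run data.txt --verbose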
+ +q(uit) + + Quit from the debugger. The program being executed is aborted. + +debug code + + Enter a recursive debugger that steps through *code* (which is an + arbitrary expression or statement to be executed in the current + environment). + +retval + + Print the return value for the last return of the current function. + +exceptions [excnumber] + + List or jump between chained exceptions. + + When using "pdb.pm()" or "Pdb.post_mortem(...)" with a chained + exception instead of a traceback, it allows the user to move + between the chained exceptions using "exceptions" command to list + exceptions, and "exception " to switch to that exception. + + Example: + + def out(): + try: + middle() + except Exception as e: + raise ValueError("reraise middle() error") from e + + def middle(): + try: + return inner(0) + except Exception as e: + raise ValueError("Middle fail") + + def inner(x): + 1 / x + + out() + + calling "pdb.pm()" will allow to move between exceptions: + + > example.py(5)out() + -> raise ValueError("reraise middle() error") from e + + (Pdb) exceptions + 0 ZeroDivisionError('division by zero') + 1 ValueError('Middle fail') + > 2 ValueError('reraise middle() error') + + (Pdb) exceptions 0 + > example.py(16)inner() + -> 1 / x + + (Pdb) up + > example.py(10)middle() + -> return inner(0) + + Added in version 3.13. + +-[ Footnotes ]- + +[1] Whether a frame is considered to originate in a certain module is + determined by the "__name__" in the frame globals. +''', + 'del': r'''The "del" statement +******************* + + del_stmt ::= "del" target_list + +Deletion is recursively defined very similar to the way assignment is +defined. Rather than spelling it out in full details, here are some +hints. + +Deletion of a target list recursively deletes each target, from left +to right. + +Deletion of a name removes the binding of that name from the local or +global namespace, depending on whether the name occurs in a "global" +statement in the same code block. If the name is unbound, a +"NameError" exception will be raised. + +Deletion of attribute references, subscriptions and slicings is passed +to the primary object involved; deletion of a slicing is in general +equivalent to assignment of an empty slice of the right type (but even +this is determined by the sliced object). + +Changed in version 3.2: Previously it was illegal to delete a name +from the local namespace if it occurs as a free variable in a nested +block. +''', + 'dict': r'''Dictionary displays +******************* + +A dictionary display is a possibly empty series of dict items +(key/value pairs) enclosed in curly braces: + + dict_display ::= "{" [dict_item_list | dict_comprehension] "}" + dict_item_list ::= dict_item ("," dict_item)* [","] + dict_item ::= expression ":" expression | "**" or_expr + dict_comprehension ::= expression ":" expression comp_for + +A dictionary display yields a new dictionary object. + +If a comma-separated sequence of dict items is given, they are +evaluated from left to right to define the entries of the dictionary: +each key object is used as a key into the dictionary to store the +corresponding value. This means that you can specify the same key +multiple times in the dict item list, and the final dictionary’s value +for that key will be the last one given. + +A double asterisk "**" denotes *dictionary unpacking*. Its operand +must be a *mapping*. Each mapping item is added to the new +dictionary. Later values replace values already set by earlier dict +items and earlier dictionary unpackings. 
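+
+For example (a small illustration of duplicate keys and unpacking):
+
+   >>> {'x': 1, 'x': 2}
+   {'x': 2}
+   >>> defaults = {'colour': 'red', 'size': 10}
+   >>> {**defaults, 'size': 20}
+   {'colour': 'red', 'size': 20}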
+ +Added in version 3.5: Unpacking into dictionary displays, originally +proposed by **PEP 448**. + +A dict comprehension, in contrast to list and set comprehensions, +needs two expressions separated with a colon followed by the usual +“for” and “if” clauses. When the comprehension is run, the resulting +key and value elements are inserted in the new dictionary in the order +they are produced. + +Restrictions on the types of the key values are listed earlier in +section The standard type hierarchy. (To summarize, the key type +should be *hashable*, which excludes all mutable objects.) Clashes +between duplicate keys are not detected; the last value (textually +rightmost in the display) stored for a given key value prevails. + +Changed in version 3.8: Prior to Python 3.8, in dict comprehensions, +the evaluation order of key and value was not well-defined. In +CPython, the value was evaluated before the key. Starting with 3.8, +the key is evaluated before the value, as proposed by **PEP 572**. +''', + 'dynamic-features': r'''Interaction with dynamic features +********************************* + +Name resolution of free variables occurs at runtime, not at compile +time. This means that the following code will print 42: + + i = 10 + def f(): + print(i) + i = 42 + f() + +The "eval()" and "exec()" functions do not have access to the full +environment for resolving names. Names may be resolved in the local +and global namespaces of the caller. Free variables are not resolved +in the nearest enclosing namespace, but in the global namespace. [1] +The "exec()" and "eval()" functions have optional arguments to +override the global and local namespace. If only one namespace is +specified, it is used for both. +''', + 'else': r'''The "if" statement +****************** + +The "if" statement is used for conditional execution: + + if_stmt ::= "if" assignment_expression ":" suite + ("elif" assignment_expression ":" suite)* + ["else" ":" suite] + +It selects exactly one of the suites by evaluating the expressions one +by one until one is found to be true (see section Boolean operations +for the definition of true and false); then that suite is executed +(and no other part of the "if" statement is executed or evaluated). +If all expressions are false, the suite of the "else" clause, if +present, is executed. +''', + 'exceptions': r'''Exceptions +********** + +Exceptions are a means of breaking out of the normal flow of control +of a code block in order to handle errors or other exceptional +conditions. An exception is *raised* at the point where the error is +detected; it may be *handled* by the surrounding code block or by any +code block that directly or indirectly invoked the code block where +the error occurred. + +The Python interpreter raises an exception when it detects a run-time +error (such as division by zero). A Python program can also +explicitly raise an exception with the "raise" statement. Exception +handlers are specified with the "try" … "except" statement. The +"finally" clause of such a statement can be used to specify cleanup +code which does not handle the exception, but is executed whether an +exception occurred or not in the preceding code. + +Python uses the “termination” model of error handling: an exception +handler can find out what happened and continue execution at an outer +level, but it cannot repair the cause of the error and retry the +failing operation (except by re-entering the offending piece of code +from the top). 
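+
+A minimal sketch of an exception being handled by an outer code block
+so that execution can continue there:
+
+   def parse_int(text):        # illustrative helper
+       return int(text)
+
+   try:
+       value = parse_int("not a number")
+   except ValueError:
+       value = 0               # handle the error and carry on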
+ +When an exception is not handled at all, the interpreter terminates +execution of the program, or returns to its interactive main loop. In +either case, it prints a stack traceback, except when the exception is +"SystemExit". + +Exceptions are identified by class instances. The "except" clause is +selected depending on the class of the instance: it must reference the +class of the instance or a *non-virtual base class* thereof. The +instance can be received by the handler and can carry additional +information about the exceptional condition. + +Note: + + Exception messages are not part of the Python API. Their contents + may change from one version of Python to the next without warning + and should not be relied on by code which will run under multiple + versions of the interpreter. + +See also the description of the "try" statement in section The try +statement and "raise" statement in section The raise statement. + +-[ Footnotes ]- + +[1] This limitation occurs because the code that is executed by these + operations is not available at the time the module is compiled. +''', + 'execmodel': r'''Execution model +*************** + + +Structure of a program +====================== + +A Python program is constructed from code blocks. A *block* is a piece +of Python program text that is executed as a unit. The following are +blocks: a module, a function body, and a class definition. Each +command typed interactively is a block. A script file (a file given +as standard input to the interpreter or specified as a command line +argument to the interpreter) is a code block. A script command (a +command specified on the interpreter command line with the "-c" +option) is a code block. A module run as a top level script (as module +"__main__") from the command line using a "-m" argument is also a code +block. The string argument passed to the built-in functions "eval()" +and "exec()" is a code block. + +A code block is executed in an *execution frame*. A frame contains +some administrative information (used for debugging) and determines +where and how execution continues after the code block’s execution has +completed. + + +Naming and binding +================== + + +Binding of names +---------------- + +*Names* refer to objects. Names are introduced by name binding +operations. + +The following constructs bind names: + +* formal parameters to functions, + +* class definitions, + +* function definitions, + +* assignment expressions, + +* targets that are identifiers if occurring in an assignment: + + * "for" loop header, + + * after "as" in a "with" statement, "except" clause, "except*" + clause, or in the as-pattern in structural pattern matching, + + * in a capture pattern in structural pattern matching + +* "import" statements. + +* "type" statements. + +* type parameter lists. + +The "import" statement of the form "from ... import *" binds all names +defined in the imported module, except those beginning with an +underscore. This form may only be used at the module level. + +A target occurring in a "del" statement is also considered bound for +this purpose (though the actual semantics are to unbind the name). + +Each assignment or import statement occurs within a block defined by a +class or function definition or at the module level (the top-level +code block). + +If a name is bound in a block, it is a local variable of that block, +unless declared as "nonlocal" or "global". If a name is bound at the +module level, it is a global variable. 
(The variables of the module +code block are local and global.) If a variable is used in a code +block but not defined there, it is a *free variable*. + +Each occurrence of a name in the program text refers to the *binding* +of that name established by the following name resolution rules. + + +Resolution of names +------------------- + +A *scope* defines the visibility of a name within a block. If a local +variable is defined in a block, its scope includes that block. If the +definition occurs in a function block, the scope extends to any blocks +contained within the defining one, unless a contained block introduces +a different binding for the name. + +When a name is used in a code block, it is resolved using the nearest +enclosing scope. The set of all such scopes visible to a code block +is called the block’s *environment*. + +When a name is not found at all, a "NameError" exception is raised. If +the current scope is a function scope, and the name refers to a local +variable that has not yet been bound to a value at the point where the +name is used, an "UnboundLocalError" exception is raised. +"UnboundLocalError" is a subclass of "NameError". + +If a name binding operation occurs anywhere within a code block, all +uses of the name within the block are treated as references to the +current block. This can lead to errors when a name is used within a +block before it is bound. This rule is subtle. Python lacks +declarations and allows name binding operations to occur anywhere +within a code block. The local variables of a code block can be +determined by scanning the entire text of the block for name binding +operations. See the FAQ entry on UnboundLocalError for examples. + +If the "global" statement occurs within a block, all uses of the names +specified in the statement refer to the bindings of those names in the +top-level namespace. Names are resolved in the top-level namespace by +searching the global namespace, i.e. the namespace of the module +containing the code block, and the builtins namespace, the namespace +of the module "builtins". The global namespace is searched first. If +the names are not found there, the builtins namespace is searched +next. If the names are also not found in the builtins namespace, new +variables are created in the global namespace. The global statement +must precede all uses of the listed names. + +The "global" statement has the same scope as a name binding operation +in the same block. If the nearest enclosing scope for a free variable +contains a global statement, the free variable is treated as a global. + +The "nonlocal" statement causes corresponding names to refer to +previously bound variables in the nearest enclosing function scope. +"SyntaxError" is raised at compile time if the given name does not +exist in any enclosing function scope. Type parameters cannot be +rebound with the "nonlocal" statement. + +The namespace for a module is automatically created the first time a +module is imported. The main module for a script is always called +"__main__". + +Class definition blocks and arguments to "exec()" and "eval()" are +special in the context of name resolution. A class definition is an +executable statement that may use and define names. These references +follow the normal rules for name resolution with an exception that +unbound local variables are looked up in the global namespace. The +namespace of the class definition becomes the attribute dictionary of +the class. 
The scope of names defined in a class block is limited to +the class block; it does not extend to the code blocks of methods. +This includes comprehensions and generator expressions, but it does +not include annotation scopes, which have access to their enclosing +class scopes. This means that the following will fail: + + class A: + a = 42 + b = list(a + i for i in range(10)) + +However, the following will succeed: + + class A: + type Alias = Nested + class Nested: pass + + print(A.Alias.__value__) # + + +Annotation scopes +----------------- + +*Annotations*, type parameter lists and "type" statements introduce +*annotation scopes*, which behave mostly like function scopes, but +with some exceptions discussed below. + +Annotation scopes are used in the following contexts: + +* *Function annotations*. + +* *Variable annotations*. + +* Type parameter lists for generic type aliases. + +* Type parameter lists for generic functions. A generic function’s + annotations are executed within the annotation scope, but its + defaults and decorators are not. + +* Type parameter lists for generic classes. A generic class’s base + classes and keyword arguments are executed within the annotation + scope, but its decorators are not. + +* The bounds, constraints, and default values for type parameters + (lazily evaluated). + +* The value of type aliases (lazily evaluated). + +Annotation scopes differ from function scopes in the following ways: + +* Annotation scopes have access to their enclosing class namespace. If + an annotation scope is immediately within a class scope, or within + another annotation scope that is immediately within a class scope, + the code in the annotation scope can use names defined in the class + scope as if it were executed directly within the class body. This + contrasts with regular functions defined within classes, which + cannot access names defined in the class scope. + +* Expressions in annotation scopes cannot contain "yield", "yield + from", "await", or ":=" expressions. (These expressions are allowed + in other scopes contained within the annotation scope.) + +* Names defined in annotation scopes cannot be rebound with "nonlocal" + statements in inner scopes. This includes only type parameters, as + no other syntactic elements that can appear within annotation scopes + can introduce new names. + +* While annotation scopes have an internal name, that name is not + reflected in the *qualified name* of objects defined within the + scope. Instead, the "__qualname__" of such objects is as if the + object were defined in the enclosing scope. + +Added in version 3.12: Annotation scopes were introduced in Python +3.12 as part of **PEP 695**. + +Changed in version 3.13: Annotation scopes are also used for type +parameter defaults, as introduced by **PEP 696**. + +Changed in version 3.14: Annotation scopes are now also used for +annotations, as specified in **PEP 649** and **PEP 749**. + + +Lazy evaluation +--------------- + +Most annotation scopes are *lazily evaluated*. This includes +annotations, the values of type aliases created through the "type" +statement, and the bounds, constraints, and default values of type +variables created through the type parameter syntax. This means that +they are not evaluated when the type alias or type variable is +created, or when the object carrying annotations is created. Instead, +they are only evaluated when necessary, for example when the +"__value__" attribute on a type alias is accessed. 
+ +Example: + + >>> type Alias = 1/0 + >>> Alias.__value__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + >>> def func[T: 1/0](): pass + >>> T = func.__type_params__[0] + >>> T.__bound__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + +Here the exception is raised only when the "__value__" attribute of +the type alias or the "__bound__" attribute of the type variable is +accessed. + +This behavior is primarily useful for references to types that have +not yet been defined when the type alias or type variable is created. +For example, lazy evaluation enables creation of mutually recursive +type aliases: + + from typing import Literal + + type SimpleExpr = int | Parenthesized + type Parenthesized = tuple[Literal["("], Expr, Literal[")"]] + type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", "-"], Expr] + +Lazily evaluated values are evaluated in annotation scope, which means +that names that appear inside the lazily evaluated value are looked up +as if they were used in the immediately enclosing scope. + +Added in version 3.12. + + +Builtins and restricted execution +--------------------------------- + +**CPython implementation detail:** Users should not touch +"__builtins__"; it is strictly an implementation detail. Users +wanting to override values in the builtins namespace should "import" +the "builtins" module and modify its attributes appropriately. + +The builtins namespace associated with the execution of a code block +is actually found by looking up the name "__builtins__" in its global +namespace; this should be a dictionary or a module (in the latter case +the module’s dictionary is used). By default, when in the "__main__" +module, "__builtins__" is the built-in module "builtins"; when in any +other module, "__builtins__" is an alias for the dictionary of the +"builtins" module itself. + + +Interaction with dynamic features +--------------------------------- + +Name resolution of free variables occurs at runtime, not at compile +time. This means that the following code will print 42: + + i = 10 + def f(): + print(i) + i = 42 + f() + +The "eval()" and "exec()" functions do not have access to the full +environment for resolving names. Names may be resolved in the local +and global namespaces of the caller. Free variables are not resolved +in the nearest enclosing namespace, but in the global namespace. [1] +The "exec()" and "eval()" functions have optional arguments to +override the global and local namespace. If only one namespace is +specified, it is used for both. + + +Exceptions +========== + +Exceptions are a means of breaking out of the normal flow of control +of a code block in order to handle errors or other exceptional +conditions. An exception is *raised* at the point where the error is +detected; it may be *handled* by the surrounding code block or by any +code block that directly or indirectly invoked the code block where +the error occurred. + +The Python interpreter raises an exception when it detects a run-time +error (such as division by zero). A Python program can also +explicitly raise an exception with the "raise" statement. Exception +handlers are specified with the "try" … "except" statement. The +"finally" clause of such a statement can be used to specify cleanup +code which does not handle the exception, but is executed whether an +exception occurred or not in the preceding code. 
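+
+For instance (a minimal sketch), the cleanup in the "finally" clause
+below runs whether or not "readline()" raises an exception:
+
+   def first_line(path):       # illustrative helper
+       f = open(path)
+       try:
+           return f.readline()
+       finally:
+           f.close()           # executed on success and on error alike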
+ +Python uses the “termination” model of error handling: an exception +handler can find out what happened and continue execution at an outer +level, but it cannot repair the cause of the error and retry the +failing operation (except by re-entering the offending piece of code +from the top). + +When an exception is not handled at all, the interpreter terminates +execution of the program, or returns to its interactive main loop. In +either case, it prints a stack traceback, except when the exception is +"SystemExit". + +Exceptions are identified by class instances. The "except" clause is +selected depending on the class of the instance: it must reference the +class of the instance or a *non-virtual base class* thereof. The +instance can be received by the handler and can carry additional +information about the exceptional condition. + +Note: + + Exception messages are not part of the Python API. Their contents + may change from one version of Python to the next without warning + and should not be relied on by code which will run under multiple + versions of the interpreter. + +See also the description of the "try" statement in section The try +statement and "raise" statement in section The raise statement. + +-[ Footnotes ]- + +[1] This limitation occurs because the code that is executed by these + operations is not available at the time the module is compiled. +''', + 'exprlists': r'''Expression lists +**************** + + starred_expression ::= ["*"] or_expr + flexible_expression ::= assignment_expression | starred_expression + flexible_expression_list ::= flexible_expression ("," flexible_expression)* [","] + starred_expression_list ::= starred_expression ("," starred_expression)* [","] + expression_list ::= expression ("," expression)* [","] + yield_list ::= expression_list | starred_expression "," [starred_expression_list] + +Except when part of a list or set display, an expression list +containing at least one comma yields a tuple. The length of the tuple +is the number of expressions in the list. The expressions are +evaluated from left to right. + +An asterisk "*" denotes *iterable unpacking*. Its operand must be an +*iterable*. The iterable is expanded into a sequence of items, which +are included in the new tuple, list, or set, at the site of the +unpacking. + +Added in version 3.5: Iterable unpacking in expression lists, +originally proposed by **PEP 448**. + +Added in version 3.11: Any item in an expression list may be starred. +See **PEP 646**. + +A trailing comma is required only to create a one-item tuple, such as +"1,"; it is optional in all other cases. A single expression without a +trailing comma doesn’t create a tuple, but rather yields the value of +that expression. (To create an empty tuple, use an empty pair of +parentheses: "()".) +''', + 'floating': r'''Floating-point literals +*********************** + +Floating-point literals are described by the following lexical +definitions: + + floatnumber ::= pointfloat | exponentfloat + pointfloat ::= [digitpart] fraction | digitpart "." + exponentfloat ::= (digitpart | pointfloat) exponent + digitpart ::= digit (["_"] digit)* + fraction ::= "." digitpart + exponent ::= ("e" | "E") ["+" | "-"] digitpart + +Note that the integer and exponent parts are always interpreted using +radix 10. For example, "077e010" is legal, and denotes the same number +as "77e10". The allowed range of floating-point literals is +implementation-dependent. As in integer literals, underscores are +supported for digit grouping. 
+ +Some examples of floating-point literals: + + 3.14 10. .001 1e100 3.14e-10 0e0 3.14_15_93 + +Changed in version 3.6: Underscores are now allowed for grouping +purposes in literals. +''', + 'for': r'''The "for" statement +******************* + +The "for" statement is used to iterate over the elements of a sequence +(such as a string, tuple or list) or other iterable object: + + for_stmt ::= "for" target_list "in" starred_list ":" suite + ["else" ":" suite] + +The "starred_list" expression is evaluated once; it should yield an +*iterable* object. An *iterator* is created for that iterable. The +first item provided by the iterator is then assigned to the target +list using the standard rules for assignments (see Assignment +statements), and the suite is executed. This repeats for each item +provided by the iterator. When the iterator is exhausted, the suite +in the "else" clause, if present, is executed, and the loop +terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. A "continue" statement +executed in the first suite skips the rest of the suite and continues +with the next item, or with the "else" clause if there is no next +item. + +The for-loop makes assignments to the variables in the target list. +This overwrites all previous assignments to those variables including +those made in the suite of the for-loop: + + for i in range(10): + print(i) + i = 5 # this will not affect the for-loop + # because i will be overwritten with the next + # index in the range + +Names in the target list are not deleted when the loop is finished, +but if the sequence is empty, they will not have been assigned to at +all by the loop. Hint: the built-in type "range()" represents +immutable arithmetic sequences of integers. For instance, iterating +"range(3)" successively yields 0, 1, and then 2. + +Changed in version 3.11: Starred elements are now allowed in the +expression list. +''', + 'formatstrings': r'''Format String Syntax +******************** + +The "str.format()" method and the "Formatter" class share the same +syntax for format strings (although in the case of "Formatter", +subclasses can define their own format string syntax). The syntax is +related to that of formatted string literals, but it is less +sophisticated and, in particular, does not support arbitrary +expressions. + +Format strings contain “replacement fields” surrounded by curly braces +"{}". Anything that is not contained in braces is considered literal +text, which is copied unchanged to the output. If you need to include +a brace character in the literal text, it can be escaped by doubling: +"{{" and "}}". + +The grammar for a replacement field is as follows: + + replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}" + field_name ::= arg_name ("." attribute_name | "[" element_index "]")* + arg_name ::= [identifier | digit+] + attribute_name ::= identifier + element_index ::= digit+ | index_string + index_string ::= + + conversion ::= "r" | "s" | "a" + format_spec ::= format-spec:format_spec + +In less formal terms, the replacement field can start with a +*field_name* that specifies the object whose value is to be formatted +and inserted into the output instead of the replacement field. The +*field_name* is optionally followed by a *conversion* field, which is +preceded by an exclamation point "'!'", and a *format_spec*, which is +preceded by a colon "':'". These specify a non-default format for the +replacement value. 
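+
+For example, the following replacement field names the keyword
+argument "name", applies the "!r" conversion and right-aligns the
+result in a field of width 12 (a small, illustrative combination):
+
+   >>> "{name!r:>12}".format(name="spam")
+   "      'spam'"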
+ +See also the Format Specification Mini-Language section. + +The *field_name* itself begins with an *arg_name* that is either a +number or a keyword. If it’s a number, it refers to a positional +argument, and if it’s a keyword, it refers to a named keyword +argument. An *arg_name* is treated as a number if a call to +"str.isdecimal()" on the string would return true. If the numerical +arg_names in a format string are 0, 1, 2, … in sequence, they can all +be omitted (not just some) and the numbers 0, 1, 2, … will be +automatically inserted in that order. Because *arg_name* is not quote- +delimited, it is not possible to specify arbitrary dictionary keys +(e.g., the strings "'10'" or "':-]'") within a format string. The +*arg_name* can be followed by any number of index or attribute +expressions. An expression of the form "'.name'" selects the named +attribute using "getattr()", while an expression of the form +"'[index]'" does an index lookup using "__getitem__()". + +Changed in version 3.1: The positional argument specifiers can be +omitted for "str.format()", so "'{} {}'.format(a, b)" is equivalent to +"'{0} {1}'.format(a, b)". + +Changed in version 3.4: The positional argument specifiers can be +omitted for "Formatter". + +Some simple format string examples: + + "First, thou shalt count to {0}" # References first positional argument + "Bring me a {}" # Implicitly references the first positional argument + "From {} to {}" # Same as "From {0} to {1}" + "My quest is {name}" # References keyword argument 'name' + "Weight in tons {0.weight}" # 'weight' attribute of first positional arg + "Units destroyed: {players[0]}" # First element of keyword argument 'players'. + +The *conversion* field causes a type coercion before formatting. +Normally, the job of formatting a value is done by the "__format__()" +method of the value itself. However, in some cases it is desirable to +force a type to be formatted as a string, overriding its own +definition of formatting. By converting the value to a string before +calling "__format__()", the normal formatting logic is bypassed. + +Three conversion flags are currently supported: "'!s'" which calls +"str()" on the value, "'!r'" which calls "repr()" and "'!a'" which +calls "ascii()". + +Some examples: + + "Harold's a clever {0!s}" # Calls str() on the argument first + "Bring out the holy {name!r}" # Calls repr() on the argument first + "More {!a}" # Calls ascii() on the argument first + +The *format_spec* field contains a specification of how the value +should be presented, including such details as field width, alignment, +padding, decimal precision and so on. Each value type can define its +own “formatting mini-language” or interpretation of the *format_spec*. + +Most built-in types support a common formatting mini-language, which +is described in the next section. + +A *format_spec* field can also include nested replacement fields +within it. These nested replacement fields may contain a field name, +conversion flag and format specification, but deeper nesting is not +allowed. The replacement fields within the format_spec are +substituted before the *format_spec* string is interpreted. This +allows the formatting of a value to be dynamically specified. + +See the Format examples section for some examples. 
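+
+As a quick illustration, a nested replacement field can supply the
+width and precision at run time:
+
+   >>> '{:{width}.{prec}f}'.format(3.14159, width=10, prec=2)
+   '      3.14'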
+ + +Format Specification Mini-Language +================================== + +“Format specifications” are used within replacement fields contained +within a format string to define how individual values are presented +(see Format String Syntax and f-strings). They can also be passed +directly to the built-in "format()" function. Each formattable type +may define how the format specification is to be interpreted. + +Most built-in types implement the following options for format +specifications, although some of the formatting options are only +supported by the numeric types. + +A general convention is that an empty format specification produces +the same result as if you had called "str()" on the value. A non-empty +format specification typically modifies the result. + +The general form of a *standard format specifier* is: + + format_spec ::= [[fill]align][sign]["z"]["#"]["0"][width][grouping_option]["." precision][type] + fill ::= + align ::= "<" | ">" | "=" | "^" + sign ::= "+" | "-" | " " + width ::= digit+ + grouping_option ::= "_" | "," + precision ::= digit+ + type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%" + +If a valid *align* value is specified, it can be preceded by a *fill* +character that can be any character and defaults to a space if +omitted. It is not possible to use a literal curly brace (”"{"” or +“"}"”) as the *fill* character in a formatted string literal or when +using the "str.format()" method. However, it is possible to insert a +curly brace with a nested replacement field. This limitation doesn’t +affect the "format()" function. + +The meaning of the various alignment options is as follows: + ++-----------+------------------------------------------------------------+ +| Option | Meaning | +|===========|============================================================| +| "'<'" | Forces the field to be left-aligned within the available | +| | space (this is the default for most objects). | ++-----------+------------------------------------------------------------+ +| "'>'" | Forces the field to be right-aligned within the available | +| | space (this is the default for numbers). | ++-----------+------------------------------------------------------------+ +| "'='" | Forces the padding to be placed after the sign (if any) | +| | but before the digits. This is used for printing fields | +| | in the form ‘+000000120’. This alignment option is only | +| | valid for numeric types, excluding "complex". It becomes | +| | the default for numbers when ‘0’ immediately precedes the | +| | field width. | ++-----------+------------------------------------------------------------+ +| "'^'" | Forces the field to be centered within the available | +| | space. | ++-----------+------------------------------------------------------------+ + +Note that unless a minimum field width is defined, the field width +will always be the same size as the data to fill it, so that the +alignment option has no meaning in this case. + +The *sign* option is only valid for number types, and can be one of +the following: + ++-----------+------------------------------------------------------------+ +| Option | Meaning | +|===========|============================================================| +| "'+'" | indicates that a sign should be used for both positive as | +| | well as negative numbers. 
| ++-----------+------------------------------------------------------------+ +| "'-'" | indicates that a sign should be used only for negative | +| | numbers (this is the default behavior). | ++-----------+------------------------------------------------------------+ +| space | indicates that a leading space should be used on positive | +| | numbers, and a minus sign on negative numbers. | ++-----------+------------------------------------------------------------+ + +The "'z'" option coerces negative zero floating-point values to +positive zero after rounding to the format precision. This option is +only valid for floating-point presentation types. + +Changed in version 3.11: Added the "'z'" option (see also **PEP +682**). + +The "'#'" option causes the “alternate form” to be used for the +conversion. The alternate form is defined differently for different +types. This option is only valid for integer, float and complex +types. For integers, when binary, octal, or hexadecimal output is +used, this option adds the respective prefix "'0b'", "'0o'", "'0x'", +or "'0X'" to the output value. For float and complex the alternate +form causes the result of the conversion to always contain a decimal- +point character, even if no digits follow it. Normally, a decimal- +point character appears in the result of these conversions only if a +digit follows it. In addition, for "'g'" and "'G'" conversions, +trailing zeros are not removed from the result. + +The "','" option signals the use of a comma for a thousands separator +for floating-point presentation types and for integer presentation +type "'d'". For other presentation types, this option is an error. For +a locale aware separator, use the "'n'" integer presentation type +instead. + +Changed in version 3.1: Added the "','" option (see also **PEP 378**). + +The "'_'" option signals the use of an underscore for a thousands +separator for floating-point presentation types and for integer +presentation type "'d'". For integer presentation types "'b'", "'o'", +"'x'", and "'X'", underscores will be inserted every 4 digits. For +other presentation types, specifying this option is an error. + +Changed in version 3.6: Added the "'_'" option (see also **PEP 515**). + +*width* is a decimal integer defining the minimum total field width, +including any prefixes, separators, and other formatting characters. +If not specified, then the field width will be determined by the +content. + +When no explicit alignment is given, preceding the *width* field by a +zero ("'0'") character enables sign-aware zero-padding for numeric +types, excluding "complex". This is equivalent to a *fill* character +of "'0'" with an *alignment* type of "'='". + +Changed in version 3.10: Preceding the *width* field by "'0'" no +longer affects the default alignment for strings. + +The *precision* is a decimal integer indicating how many digits should +be displayed after the decimal point for presentation types "'f'" and +"'F'", or before and after the decimal point for presentation types +"'g'" or "'G'". For string presentation types the field indicates the +maximum field size - in other words, how many characters will be used +from the field content. The *precision* is not allowed for integer +presentation types. + +Finally, the *type* determines how the data should be presented. 
+ +The available string presentation types are: + + +-----------+------------------------------------------------------------+ + | Type | Meaning | + |===========|============================================================| + | "'s'" | String format. This is the default type for strings and | + | | may be omitted. | + +-----------+------------------------------------------------------------+ + | None | The same as "'s'". | + +-----------+------------------------------------------------------------+ + +The available integer presentation types are: + + +-----------+------------------------------------------------------------+ + | Type | Meaning | + |===========|============================================================| + | "'b'" | Binary format. Outputs the number in base 2. | + +-----------+------------------------------------------------------------+ + | "'c'" | Character. Converts the integer to the corresponding | + | | unicode character before printing. | + +-----------+------------------------------------------------------------+ + | "'d'" | Decimal Integer. Outputs the number in base 10. | + +-----------+------------------------------------------------------------+ + | "'o'" | Octal format. Outputs the number in base 8. | + +-----------+------------------------------------------------------------+ + | "'x'" | Hex format. Outputs the number in base 16, using lower- | + | | case letters for the digits above 9. | + +-----------+------------------------------------------------------------+ + | "'X'" | Hex format. Outputs the number in base 16, using upper- | + | | case letters for the digits above 9. In case "'#'" is | + | | specified, the prefix "'0x'" will be upper-cased to "'0X'" | + | | as well. | + +-----------+------------------------------------------------------------+ + | "'n'" | Number. This is the same as "'d'", except that it uses the | + | | current locale setting to insert the appropriate number | + | | separator characters. | + +-----------+------------------------------------------------------------+ + | None | The same as "'d'". | + +-----------+------------------------------------------------------------+ + +In addition to the above presentation types, integers can be formatted +with the floating-point presentation types listed below (except "'n'" +and "None"). When doing so, "float()" is used to convert the integer +to a floating-point number before formatting. + +The available presentation types for "float" and "Decimal" values are: + + +-----------+------------------------------------------------------------+ + | Type | Meaning | + |===========|============================================================| + | "'e'" | Scientific notation. For a given precision "p", formats | + | | the number in scientific notation with the letter ‘e’ | + | | separating the coefficient from the exponent. The | + | | coefficient has one digit before and "p" digits after the | + | | decimal point, for a total of "p + 1" significant digits. | + | | With no precision given, uses a precision of "6" digits | + | | after the decimal point for "float", and shows all | + | | coefficient digits for "Decimal". If "p=0", the decimal | + | | point is omitted unless the "#" option is used. | + +-----------+------------------------------------------------------------+ + | "'E'" | Scientific notation. Same as "'e'" except it uses an upper | + | | case ‘E’ as the separator character. | + +-----------+------------------------------------------------------------+ + | "'f'" | Fixed-point notation. 
For a given precision "p", formats | + | | the number as a decimal number with exactly "p" digits | + | | following the decimal point. With no precision given, uses | + | | a precision of "6" digits after the decimal point for | + | | "float", and uses a precision large enough to show all | + | | coefficient digits for "Decimal". If "p=0", the decimal | + | | point is omitted unless the "#" option is used. | + +-----------+------------------------------------------------------------+ + | "'F'" | Fixed-point notation. Same as "'f'", but converts "nan" to | + | | "NAN" and "inf" to "INF". | + +-----------+------------------------------------------------------------+ + | "'g'" | General format. For a given precision "p >= 1", this | + | | rounds the number to "p" significant digits and then | + | | formats the result in either fixed-point format or in | + | | scientific notation, depending on its magnitude. A | + | | precision of "0" is treated as equivalent to a precision | + | | of "1". The precise rules are as follows: suppose that | + | | the result formatted with presentation type "'e'" and | + | | precision "p-1" would have exponent "exp". Then, if "m <= | + | | exp < p", where "m" is -4 for floats and -6 for | + | | "Decimals", the number is formatted with presentation type | + | | "'f'" and precision "p-1-exp". Otherwise, the number is | + | | formatted with presentation type "'e'" and precision | + | | "p-1". In both cases insignificant trailing zeros are | + | | removed from the significand, and the decimal point is | + | | also removed if there are no remaining digits following | + | | it, unless the "'#'" option is used. With no precision | + | | given, uses a precision of "6" significant digits for | + | | "float". For "Decimal", the coefficient of the result is | + | | formed from the coefficient digits of the value; | + | | scientific notation is used for values smaller than "1e-6" | + | | in absolute value and values where the place value of the | + | | least significant digit is larger than 1, and fixed-point | + | | notation is used otherwise. Positive and negative | + | | infinity, positive and negative zero, and nans, are | + | | formatted as "inf", "-inf", "0", "-0" and "nan" | + | | respectively, regardless of the precision. | + +-----------+------------------------------------------------------------+ + | "'G'" | General format. Same as "'g'" except switches to "'E'" if | + | | the number gets too large. The representations of infinity | + | | and NaN are uppercased, too. | + +-----------+------------------------------------------------------------+ + | "'n'" | Number. This is the same as "'g'", except that it uses the | + | | current locale setting to insert the appropriate number | + | | separator characters. | + +-----------+------------------------------------------------------------+ + | "'%'" | Percentage. Multiplies the number by 100 and displays in | + | | fixed ("'f'") format, followed by a percent sign. | + +-----------+------------------------------------------------------------+ + | None | For "float" this is like the "'g'" type, except that when | + | | fixed- point notation is used to format the result, it | + | | always includes at least one digit past the decimal point, | + | | and switches to the scientific notation when "exp >= p - | + | | 1". When the precision is not specified, the latter will | + | | be as large as needed to represent the given value | + | | faithfully. 
For "Decimal", this is the same as either | + | | "'g'" or "'G'" depending on the value of | + | | "context.capitals" for the current decimal context. The | + | | overall effect is to match the output of "str()" as | + | | altered by the other format modifiers. | + +-----------+------------------------------------------------------------+ + +The result should be correctly rounded to a given precision "p" of +digits after the decimal point. The rounding mode for "float" matches +that of the "round()" builtin. For "Decimal", the rounding mode of +the current context will be used. + +The available presentation types for "complex" are the same as those +for "float" ("'%'" is not allowed). Both the real and imaginary +components of a complex number are formatted as floating-point +numbers, according to the specified presentation type. They are +separated by the mandatory sign of the imaginary part, the latter +being terminated by a "j" suffix. If the presentation type is +missing, the result will match the output of "str()" (complex numbers +with a non-zero real part are also surrounded by parentheses), +possibly altered by other format modifiers. + + +Format examples +=============== + +This section contains examples of the "str.format()" syntax and +comparison with the old "%"-formatting. + +In most of the cases the syntax is similar to the old "%"-formatting, +with the addition of the "{}" and with ":" used instead of "%". For +example, "'%03.2f'" can be translated to "'{:03.2f}'". + +The new format syntax also supports new and different options, shown +in the following examples. + +Accessing arguments by position: + + >>> '{0}, {1}, {2}'.format('a', 'b', 'c') + 'a, b, c' + >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only + 'a, b, c' + >>> '{2}, {1}, {0}'.format('a', 'b', 'c') + 'c, b, a' + >>> '{2}, {1}, {0}'.format(*'abc') # unpacking argument sequence + 'c, b, a' + >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' indices can be repeated + 'abracadabra' + +Accessing arguments by name: + + >>> 'Coordinates: {latitude}, {longitude}'.format(latitude='37.24N', longitude='-115.81W') + 'Coordinates: 37.24N, -115.81W' + >>> coord = {'latitude': '37.24N', 'longitude': '-115.81W'} + >>> 'Coordinates: {latitude}, {longitude}'.format(**coord) + 'Coordinates: 37.24N, -115.81W' + +Accessing arguments’ attributes: + + >>> c = 3-5j + >>> ('The complex number {0} is formed from the real part {0.real} ' + ... 'and the imaginary part {0.imag}.').format(c) + 'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.' + >>> class Point: + ... def __init__(self, x, y): + ... self.x, self.y = x, y + ... def __str__(self): + ... return 'Point({self.x}, {self.y})'.format(self=self) + ... 
+ >>> str(Point(4, 2)) + 'Point(4, 2)' + +Accessing arguments’ items: + + >>> coord = (3, 5) + >>> 'X: {0[0]}; Y: {0[1]}'.format(coord) + 'X: 3; Y: 5' + +Replacing "%s" and "%r": + + >>> "repr() shows quotes: {!r}; str() doesn't: {!s}".format('test1', 'test2') + "repr() shows quotes: 'test1'; str() doesn't: test2" + +Aligning the text and specifying a width: + + >>> '{:<30}'.format('left aligned') + 'left aligned ' + >>> '{:>30}'.format('right aligned') + ' right aligned' + >>> '{:^30}'.format('centered') + ' centered ' + >>> '{:*^30}'.format('centered') # use '*' as a fill char + '***********centered***********' + +Replacing "%+f", "%-f", and "% f" and specifying a sign: + + >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it always + '+3.140000; -3.140000' + >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space for positive numbers + ' 3.140000; -3.140000' + >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only the minus -- same as '{:f}; {:f}' + '3.140000; -3.140000' + +Replacing "%x" and "%o" and converting the value to different bases: + + >>> # format also supports binary numbers + >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42) + 'int: 42; hex: 2a; oct: 52; bin: 101010' + >>> # with 0x, 0o, or 0b as prefix: + >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42) + 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010' + +Using the comma as a thousands separator: + + >>> '{:,}'.format(1234567890) + '1,234,567,890' + +Expressing a percentage: + + >>> points = 19 + >>> total = 22 + >>> 'Correct answers: {:.2%}'.format(points/total) + 'Correct answers: 86.36%' + +Using type-specific formatting: + + >>> import datetime + >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58) + >>> '{:%Y-%m-%d %H:%M:%S}'.format(d) + '2010-07-04 12:15:58' + +Nesting arguments and more complex examples: + + >>> for align, text in zip('<^>', ['left', 'center', 'right']): + ... '{0:{fill}{align}16}'.format(text, fill=align, align=align) + ... + 'left<<<<<<<<<<<<' + '^^^^^center^^^^^' + '>>>>>>>>>>>right' + >>> + >>> octets = [192, 168, 0, 1] + >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets) + 'C0A80001' + >>> int(_, 16) + 3232235521 + >>> + >>> width = 5 + >>> for num in range(5,12): + ... for base in 'dXob': + ... print('{0:{width}{base}}'.format(num, base=base, width=width), end=' ') + ... print() + ... + 5 5 5 101 + 6 6 6 110 + 7 7 7 111 + 8 8 10 1000 + 9 9 11 1001 + 10 A 12 1010 + 11 B 13 1011 +''', + 'function': r'''Function definitions +******************** + +A function definition defines a user-defined function object (see +section The standard type hierarchy): + + funcdef ::= [decorators] "def" funcname [type_params] "(" [parameter_list] ")" + ["->" expression] ":" suite + decorators ::= decorator+ + decorator ::= "@" assignment_expression NEWLINE + parameter_list ::= defparameter ("," defparameter)* "," "/" ["," [parameter_list_no_posonly]] + | parameter_list_no_posonly + parameter_list_no_posonly ::= defparameter ("," defparameter)* ["," [parameter_list_starargs]] + | parameter_list_starargs + parameter_list_starargs ::= "*" [star_parameter] ("," defparameter)* ["," ["**" parameter [","]]] + | "**" parameter [","] + parameter ::= identifier [":" expression] + star_parameter ::= identifier [":" ["*"] expression] + defparameter ::= parameter ["=" expression] + funcname ::= identifier + +A function definition is an executable statement. Its execution binds +the function name in the current local namespace to a function object +(a wrapper around the executable code for the function). 
This +function object contains a reference to the current global namespace +as the global namespace to be used when the function is called. + +The function definition does not execute the function body; this gets +executed only when the function is called. [4] + +A function definition may be wrapped by one or more *decorator* +expressions. Decorator expressions are evaluated when the function is +defined, in the scope that contains the function definition. The +result must be a callable, which is invoked with the function object +as the only argument. The returned value is bound to the function name +instead of the function object. Multiple decorators are applied in +nested fashion. For example, the following code + + @f1(arg) + @f2 + def func(): pass + +is roughly equivalent to + + def func(): pass + func = f1(arg)(f2(func)) + +except that the original function is not temporarily bound to the name +"func". + +Changed in version 3.9: Functions may be decorated with any valid +"assignment_expression". Previously, the grammar was much more +restrictive; see **PEP 614** for details. + +A list of type parameters may be given in square brackets between the +function’s name and the opening parenthesis for its parameter list. +This indicates to static type checkers that the function is generic. +At runtime, the type parameters can be retrieved from the function’s +"__type_params__" attribute. See Generic functions for more. + +Changed in version 3.12: Type parameter lists are new in Python 3.12. + +When one or more *parameters* have the form *parameter* "=" +*expression*, the function is said to have “default parameter values.” +For a parameter with a default value, the corresponding *argument* may +be omitted from a call, in which case the parameter’s default value is +substituted. If a parameter has a default value, all following +parameters up until the “"*"” must also have a default value — this is +a syntactic restriction that is not expressed by the grammar. + +**Default parameter values are evaluated from left to right when the +function definition is executed.** This means that the expression is +evaluated once, when the function is defined, and that the same “pre- +computed” value is used for each call. This is especially important +to understand when a default parameter value is a mutable object, such +as a list or a dictionary: if the function modifies the object (e.g. +by appending an item to a list), the default parameter value is in +effect modified. This is generally not what was intended. A way +around this is to use "None" as the default, and explicitly test for +it in the body of the function, e.g.: + + def whats_on_the_telly(penguin=None): + if penguin is None: + penguin = [] + penguin.append("property of the zoo") + return penguin + +Function call semantics are described in more detail in section Calls. +A function call always assigns values to all parameters mentioned in +the parameter list, either from positional arguments, from keyword +arguments, or from default values. If the form “"*identifier"” is +present, it is initialized to a tuple receiving any excess positional +parameters, defaulting to the empty tuple. If the form +“"**identifier"” is present, it is initialized to a new ordered +mapping receiving any excess keyword arguments, defaulting to a new +empty mapping of the same type. Parameters after “"*"” or +“"*identifier"” are keyword-only parameters and may only be passed by +keyword arguments. 
Parameters before “"/"” are positional-only +parameters and may only be passed by positional arguments. + +Changed in version 3.8: The "/" function parameter syntax may be used +to indicate positional-only parameters. See **PEP 570** for details. + +Parameters may have an *annotation* of the form “": expression"” +following the parameter name. Any parameter may have an annotation, +even those of the form "*identifier" or "**identifier". (As a special +case, parameters of the form "*identifier" may have an annotation “": +*expression"”.) Functions may have “return” annotation of the form +“"-> expression"” after the parameter list. These annotations can be +any valid Python expression. The presence of annotations does not +change the semantics of a function. See Annotations for more +information on annotations. + +Changed in version 3.11: Parameters of the form “"*identifier"” may +have an annotation “": *expression"”. See **PEP 646**. + +It is also possible to create anonymous functions (functions not bound +to a name), for immediate use in expressions. This uses lambda +expressions, described in section Lambdas. Note that the lambda +expression is merely a shorthand for a simplified function definition; +a function defined in a “"def"” statement can be passed around or +assigned to another name just like a function defined by a lambda +expression. The “"def"” form is actually more powerful since it +allows the execution of multiple statements and annotations. + +**Programmer’s note:** Functions are first-class objects. A “"def"” +statement executed inside a function definition defines a local +function that can be returned or passed around. Free variables used +in the nested function can access the local variables of the function +containing the def. See section Naming and binding for details. + +See also: + + **PEP 3107** - Function Annotations + The original specification for function annotations. + + **PEP 484** - Type Hints + Definition of a standard meaning for annotations: type hints. + + **PEP 526** - Syntax for Variable Annotations + Ability to type hint variable declarations, including class + variables and instance variables. + + **PEP 563** - Postponed Evaluation of Annotations + Support for forward references within annotations by preserving + annotations in a string form at runtime instead of eager + evaluation. + + **PEP 318** - Decorators for Functions and Methods + Function and method decorators were introduced. Class decorators + were introduced in **PEP 3129**. +''', + 'global': r'''The "global" statement +********************** + + global_stmt ::= "global" identifier ("," identifier)* + +The "global" statement causes the listed identifiers to be interpreted +as globals. It would be impossible to assign to a global variable +without "global", although free variables may refer to globals without +being declared global. + +The "global" statement applies to the entire scope of a function or +class body. A "SyntaxError" is raised if a variable is used or +assigned to prior to its global declaration in the scope. + +**Programmer’s note:** "global" is a directive to the parser. It +applies only to code parsed at the same time as the "global" +statement. In particular, a "global" statement contained in a string +or code object supplied to the built-in "exec()" function does not +affect the code block *containing* the function call, and code +contained in such a string is unaffected by "global" statements in the +code containing the function call. 
The same applies to the "eval()" +and "compile()" functions. +''', + 'id-classes': r'''Reserved classes of identifiers +******************************* + +Certain classes of identifiers (besides keywords) have special +meanings. These classes are identified by the patterns of leading and +trailing underscore characters: + +"_*" + Not imported by "from module import *". + +"_" + In a "case" pattern within a "match" statement, "_" is a soft + keyword that denotes a wildcard. + + Separately, the interactive interpreter makes the result of the + last evaluation available in the variable "_". (It is stored in the + "builtins" module, alongside built-in functions like "print".) + + Elsewhere, "_" is a regular identifier. It is often used to name + “special” items, but it is not special to Python itself. + + Note: + + The name "_" is often used in conjunction with + internationalization; refer to the documentation for the + "gettext" module for more information on this convention.It is + also commonly used for unused variables. + +"__*__" + System-defined names, informally known as “dunder” names. These + names are defined by the interpreter and its implementation + (including the standard library). Current system names are + discussed in the Special method names section and elsewhere. More + will likely be defined in future versions of Python. *Any* use of + "__*__" names, in any context, that does not follow explicitly + documented use, is subject to breakage without warning. + +"__*" + Class-private names. Names in this category, when used within the + context of a class definition, are re-written to use a mangled form + to help avoid name clashes between “private” attributes of base and + derived classes. See section Identifiers (Names). +''', + 'identifiers': r'''Identifiers and keywords +************************ + +Identifiers (also referred to as *names*) are described by the +following lexical definitions. + +The syntax of identifiers in Python is based on the Unicode standard +annex UAX-31, with elaboration and changes as defined below; see also +**PEP 3131** for further details. + +Within the ASCII range (U+0001..U+007F), the valid characters for +identifiers include the uppercase and lowercase letters "A" through +"Z", the underscore "_" and, except for the first character, the +digits "0" through "9". Python 3.0 introduced additional characters +from outside the ASCII range (see **PEP 3131**). For these +characters, the classification uses the version of the Unicode +Character Database as included in the "unicodedata" module. + +Identifiers are unlimited in length. Case is significant. + + identifier ::= xid_start xid_continue* + id_start ::= + id_continue ::= + xid_start ::= + xid_continue ::= + +The Unicode category codes mentioned above stand for: + +* *Lu* - uppercase letters + +* *Ll* - lowercase letters + +* *Lt* - titlecase letters + +* *Lm* - modifier letters + +* *Lo* - other letters + +* *Nl* - letter numbers + +* *Mn* - nonspacing marks + +* *Mc* - spacing combining marks + +* *Nd* - decimal numbers + +* *Pc* - connector punctuations + +* *Other_ID_Start* - explicit list of characters in PropList.txt to + support backwards compatibility + +* *Other_ID_Continue* - likewise + +All identifiers are converted into the normal form NFKC while parsing; +comparison of identifiers is based on NFKC. 
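+
+For example (an editor's sketch; "unicodedata" is used here only to
+make the normalization visible):
+
+   >>> import unicodedata
+   >>> unicodedata.normalize("NFKC", "ﬁle")   # first character is U+FB01
+   'file'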
+ +A non-normative HTML file listing all valid identifier characters for +Unicode 16.0.0 can be found at +https://www.unicode.org/Public/16.0.0/ucd/DerivedCoreProperties.txt + + +Keywords +======== + +The following identifiers are used as reserved words, or *keywords* of +the language, and cannot be used as ordinary identifiers. They must +be spelled exactly as written here: + + False await else import pass + None break except in raise + True class finally is return + and continue for lambda try + as def from nonlocal while + assert del global not with + async elif if or yield + + +Soft Keywords +============= + +Added in version 3.10. + +Some identifiers are only reserved under specific contexts. These are +known as *soft keywords*. The identifiers "match", "case", "type" and +"_" can syntactically act as keywords in certain contexts, but this +distinction is done at the parser level, not when tokenizing. + +As soft keywords, their use in the grammar is possible while still +preserving compatibility with existing code that uses these names as +identifier names. + +"match", "case", and "_" are used in the "match" statement. "type" is +used in the "type" statement. + +Changed in version 3.12: "type" is now a soft keyword. + + +Reserved classes of identifiers +=============================== + +Certain classes of identifiers (besides keywords) have special +meanings. These classes are identified by the patterns of leading and +trailing underscore characters: + +"_*" + Not imported by "from module import *". + +"_" + In a "case" pattern within a "match" statement, "_" is a soft + keyword that denotes a wildcard. + + Separately, the interactive interpreter makes the result of the + last evaluation available in the variable "_". (It is stored in the + "builtins" module, alongside built-in functions like "print".) + + Elsewhere, "_" is a regular identifier. It is often used to name + “special” items, but it is not special to Python itself. + + Note: + + The name "_" is often used in conjunction with + internationalization; refer to the documentation for the + "gettext" module for more information on this convention.It is + also commonly used for unused variables. + +"__*__" + System-defined names, informally known as “dunder” names. These + names are defined by the interpreter and its implementation + (including the standard library). Current system names are + discussed in the Special method names section and elsewhere. More + will likely be defined in future versions of Python. *Any* use of + "__*__" names, in any context, that does not follow explicitly + documented use, is subject to breakage without warning. + +"__*" + Class-private names. Names in this category, when used within the + context of a class definition, are re-written to use a mangled form + to help avoid name clashes between “private” attributes of base and + derived classes. See section Identifiers (Names). +''', + 'if': r'''The "if" statement +****************** + +The "if" statement is used for conditional execution: + + if_stmt ::= "if" assignment_expression ":" suite + ("elif" assignment_expression ":" suite)* + ["else" ":" suite] + +It selects exactly one of the suites by evaluating the expressions one +by one until one is found to be true (see section Boolean operations +for the definition of true and false); then that suite is executed +(and no other part of the "if" statement is executed or evaluated). +If all expressions are false, the suite of the "else" clause, if +present, is executed. 
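+
+For example (an editor's sketch, assuming some value "x" is already
+bound):
+
+   if x < 0:
+       result = "negative"
+   elif x == 0:
+       result = "zero"
+   else:
+       result = "positive"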
+''', + 'imaginary': r'''Imaginary literals +****************** + +Imaginary literals are described by the following lexical definitions: + + imagnumber ::= (floatnumber | digitpart) ("j" | "J") + +An imaginary literal yields a complex number with a real part of 0.0. +Complex numbers are represented as a pair of floating-point numbers +and have the same restrictions on their range. To create a complex +number with a nonzero real part, add a floating-point number to it, +e.g., "(3+4j)". Some examples of imaginary literals: + + 3.14j 10.j 10j .001j 1e100j 3.14e-10j 3.14_15_93j +''', + 'import': r'''The "import" statement +********************** + + import_stmt ::= "import" module ["as" identifier] ("," module ["as" identifier])* + | "from" relative_module "import" identifier ["as" identifier] + ("," identifier ["as" identifier])* + | "from" relative_module "import" "(" identifier ["as" identifier] + ("," identifier ["as" identifier])* [","] ")" + | "from" relative_module "import" "*" + module ::= (identifier ".")* identifier + relative_module ::= "."* module | "."+ + +The basic import statement (no "from" clause) is executed in two +steps: + +1. find a module, loading and initializing it if necessary + +2. define a name or names in the local namespace for the scope where + the "import" statement occurs. + +When the statement contains multiple clauses (separated by commas) the +two steps are carried out separately for each clause, just as though +the clauses had been separated out into individual import statements. + +The details of the first step, finding and loading modules, are +described in greater detail in the section on the import system, which +also describes the various types of packages and modules that can be +imported, as well as all the hooks that can be used to customize the +import system. Note that failures in this step may indicate either +that the module could not be located, *or* that an error occurred +while initializing the module, which includes execution of the +module’s code. + +If the requested module is retrieved successfully, it will be made +available in the local namespace in one of three ways: + +* If the module name is followed by "as", then the name following "as" + is bound directly to the imported module. + +* If no other name is specified, and the module being imported is a + top level module, the module’s name is bound in the local namespace + as a reference to the imported module + +* If the module being imported is *not* a top level module, then the + name of the top level package that contains the module is bound in + the local namespace as a reference to the top level package. The + imported module must be accessed using its full qualified name + rather than directly + +The "from" form uses a slightly more complex process: + +1. find the module specified in the "from" clause, loading and + initializing it if necessary; + +2. for each of the identifiers specified in the "import" clauses: + + 1. check if the imported module has an attribute by that name + + 2. if not, attempt to import a submodule with that name and then + check the imported module again for that attribute + + 3. if the attribute is not found, "ImportError" is raised. + + 4. 
otherwise, a reference to that value is stored in the local + namespace, using the name in the "as" clause if it is present, + otherwise using the attribute name + +Examples: + + import foo # foo imported and bound locally + import foo.bar.baz # foo, foo.bar, and foo.bar.baz imported, foo bound locally + import foo.bar.baz as fbb # foo, foo.bar, and foo.bar.baz imported, foo.bar.baz bound as fbb + from foo.bar import baz # foo, foo.bar, and foo.bar.baz imported, foo.bar.baz bound as baz + from foo import attr # foo imported and foo.attr bound as attr + +If the list of identifiers is replaced by a star ("'*'"), all public +names defined in the module are bound in the local namespace for the +scope where the "import" statement occurs. + +The *public names* defined by a module are determined by checking the +module’s namespace for a variable named "__all__"; if defined, it must +be a sequence of strings which are names defined or imported by that +module. The names given in "__all__" are all considered public and +are required to exist. If "__all__" is not defined, the set of public +names includes all names found in the module’s namespace which do not +begin with an underscore character ("'_'"). "__all__" should contain +the entire public API. It is intended to avoid accidentally exporting +items that are not part of the API (such as library modules which were +imported and used within the module). + +The wild card form of import — "from module import *" — is only +allowed at the module level. Attempting to use it in class or +function definitions will raise a "SyntaxError". + +When specifying what module to import you do not have to specify the +absolute name of the module. When a module or package is contained +within another package it is possible to make a relative import within +the same top package without having to mention the package name. By +using leading dots in the specified module or package after "from" you +can specify how high to traverse up the current package hierarchy +without specifying exact names. One leading dot means the current +package where the module making the import exists. Two dots means up +one package level. Three dots is up two levels, etc. So if you execute +"from . import mod" from a module in the "pkg" package then you will +end up importing "pkg.mod". If you execute "from ..subpkg2 import mod" +from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The +specification for relative imports is contained in the Package +Relative Imports section. + +"importlib.import_module()" is provided to support applications that +determine dynamically the modules to be loaded. + +Raises an auditing event "import" with arguments "module", "filename", +"sys.path", "sys.meta_path", "sys.path_hooks". + + +Future statements +================= + +A *future statement* is a directive to the compiler that a particular +module should be compiled using syntax or semantics that will be +available in a specified future release of Python where the feature +becomes standard. + +The future statement is intended to ease migration to future versions +of Python that introduce incompatible changes to the language. It +allows use of the new features on a per-module basis before the +release in which the feature becomes standard. 
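+
+For example (an editor's sketch), a module can opt in to the **PEP
+563** handling of annotations with:
+
+   from __future__ import annotations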
+ + future_stmt ::= "from" "__future__" "import" feature ["as" identifier] + ("," feature ["as" identifier])* + | "from" "__future__" "import" "(" feature ["as" identifier] + ("," feature ["as" identifier])* [","] ")" + feature ::= identifier + +A future statement must appear near the top of the module. The only +lines that can appear before a future statement are: + +* the module docstring (if any), + +* comments, + +* blank lines, and + +* other future statements. + +The only feature that requires using the future statement is +"annotations" (see **PEP 563**). + +All historical features enabled by the future statement are still +recognized by Python 3. The list includes "absolute_import", +"division", "generators", "generator_stop", "unicode_literals", +"print_function", "nested_scopes" and "with_statement". They are all +redundant because they are always enabled, and only kept for backwards +compatibility. + +A future statement is recognized and treated specially at compile +time: Changes to the semantics of core constructs are often +implemented by generating different code. It may even be the case +that a new feature introduces new incompatible syntax (such as a new +reserved word), in which case the compiler may need to parse the +module differently. Such decisions cannot be pushed off until +runtime. + +For any given release, the compiler knows which feature names have +been defined, and raises a compile-time error if a future statement +contains a feature not known to it. + +The direct runtime semantics are the same as for any import statement: +there is a standard module "__future__", described later, and it will +be imported in the usual way at the time the future statement is +executed. + +The interesting runtime semantics depend on the specific feature +enabled by the future statement. + +Note that there is nothing special about the statement: + + import __future__ [as name] + +That is not a future statement; it’s an ordinary import statement with +no special semantics or syntax restrictions. + +Code compiled by calls to the built-in functions "exec()" and +"compile()" that occur in a module "M" containing a future statement +will, by default, use the new syntax or semantics associated with the +future statement. This can be controlled by optional arguments to +"compile()" — see the documentation of that function for details. + +A future statement typed at an interactive interpreter prompt will +take effect for the rest of the interpreter session. If an +interpreter is started with the "-i" option, is passed a script name +to execute, and the script includes a future statement, it will be in +effect in the interactive session started after the script is +executed. + +See also: + + **PEP 236** - Back to the __future__ + The original proposal for the __future__ mechanism. +''', + 'in': r'''Membership test operations +************************** + +The operators "in" and "not in" test for membership. "x in s" +evaluates to "True" if *x* is a member of *s*, and "False" otherwise. +"x not in s" returns the negation of "x in s". All built-in sequences +and set types support this as well as dictionary, for which "in" tests +whether the dictionary has a given key. For container types such as +list, tuple, set, frozenset, dict, or collections.deque, the +expression "x in y" is equivalent to "any(x is e or x == e for e in +y)". + +For the string and bytes types, "x in y" is "True" if and only if *x* +is a substring of *y*. An equivalent test is "y.find(x) != -1". 
+Empty strings are always considered to be a substring of any other +string, so """ in "abc"" will return "True". + +For user-defined classes which define the "__contains__()" method, "x +in y" returns "True" if "y.__contains__(x)" returns a true value, and +"False" otherwise. + +For user-defined classes which do not define "__contains__()" but do +define "__iter__()", "x in y" is "True" if some value "z", for which +the expression "x is z or x == z" is true, is produced while iterating +over "y". If an exception is raised during the iteration, it is as if +"in" raised that exception. + +Lastly, the old-style iteration protocol is tried: if a class defines +"__getitem__()", "x in y" is "True" if and only if there is a non- +negative integer index *i* such that "x is y[i] or x == y[i]", and no +lower integer index raises the "IndexError" exception. (If any other +exception is raised, it is as if "in" raised that exception). + +The operator "not in" is defined to have the inverse truth value of +"in". +''', + 'integers': r'''Integer literals +**************** + +Integer literals are described by the following lexical definitions: + + integer ::= decinteger | bininteger | octinteger | hexinteger + decinteger ::= nonzerodigit (["_"] digit)* | "0"+ (["_"] "0")* + bininteger ::= "0" ("b" | "B") (["_"] bindigit)+ + octinteger ::= "0" ("o" | "O") (["_"] octdigit)+ + hexinteger ::= "0" ("x" | "X") (["_"] hexdigit)+ + nonzerodigit ::= "1"..."9" + digit ::= "0"..."9" + bindigit ::= "0" | "1" + octdigit ::= "0"..."7" + hexdigit ::= digit | "a"..."f" | "A"..."F" + +There is no limit for the length of integer literals apart from what +can be stored in available memory. + +Underscores are ignored for determining the numeric value of the +literal. They can be used to group digits for enhanced readability. +One underscore can occur between digits, and after base specifiers +like "0x". + +Note that leading zeros in a non-zero decimal number are not allowed. +This is for disambiguation with C-style octal literals, which Python +used before version 3.0. + +Some examples of integer literals: + + 7 2147483647 0o177 0b100110111 + 3 79228162514264337593543950336 0o377 0xdeadbeef + 100_000_000_000 0b_1110_0101 + +Changed in version 3.6: Underscores are now allowed for grouping +purposes in literals. +''', + 'lambda': r'''Lambdas +******* + + lambda_expr ::= "lambda" [parameter_list] ":" expression + +Lambda expressions (sometimes called lambda forms) are used to create +anonymous functions. The expression "lambda parameters: expression" +yields a function object. The unnamed object behaves like a function +object defined with: + + def (parameters): + return expression + +See section Function definitions for the syntax of parameter lists. +Note that functions created with lambda expressions cannot contain +statements or annotations. +''', + 'lists': r'''List displays +************* + +A list display is a possibly empty series of expressions enclosed in +square brackets: + + list_display ::= "[" [flexible_expression_list | comprehension] "]" + +A list display yields a new list object, the contents being specified +by either a list of expressions or a comprehension. When a comma- +separated list of expressions is supplied, its elements are evaluated +from left to right and placed into the list object in that order. +When a comprehension is supplied, the list is constructed from the +elements resulting from the comprehension. 
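+
+For example (an editor's sketch):
+
+   >>> [1, 2, 3]
+   [1, 2, 3]
+   >>> [x * x for x in range(5)]
+   [0, 1, 4, 9, 16]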
+''', + 'naming': r'''Naming and binding +****************** + + +Binding of names +================ + +*Names* refer to objects. Names are introduced by name binding +operations. + +The following constructs bind names: + +* formal parameters to functions, + +* class definitions, + +* function definitions, + +* assignment expressions, + +* targets that are identifiers if occurring in an assignment: + + * "for" loop header, + + * after "as" in a "with" statement, "except" clause, "except*" + clause, or in the as-pattern in structural pattern matching, + + * in a capture pattern in structural pattern matching + +* "import" statements. + +* "type" statements. + +* type parameter lists. + +The "import" statement of the form "from ... import *" binds all names +defined in the imported module, except those beginning with an +underscore. This form may only be used at the module level. + +A target occurring in a "del" statement is also considered bound for +this purpose (though the actual semantics are to unbind the name). + +Each assignment or import statement occurs within a block defined by a +class or function definition or at the module level (the top-level +code block). + +If a name is bound in a block, it is a local variable of that block, +unless declared as "nonlocal" or "global". If a name is bound at the +module level, it is a global variable. (The variables of the module +code block are local and global.) If a variable is used in a code +block but not defined there, it is a *free variable*. + +Each occurrence of a name in the program text refers to the *binding* +of that name established by the following name resolution rules. + + +Resolution of names +=================== + +A *scope* defines the visibility of a name within a block. If a local +variable is defined in a block, its scope includes that block. If the +definition occurs in a function block, the scope extends to any blocks +contained within the defining one, unless a contained block introduces +a different binding for the name. + +When a name is used in a code block, it is resolved using the nearest +enclosing scope. The set of all such scopes visible to a code block +is called the block’s *environment*. + +When a name is not found at all, a "NameError" exception is raised. If +the current scope is a function scope, and the name refers to a local +variable that has not yet been bound to a value at the point where the +name is used, an "UnboundLocalError" exception is raised. +"UnboundLocalError" is a subclass of "NameError". + +If a name binding operation occurs anywhere within a code block, all +uses of the name within the block are treated as references to the +current block. This can lead to errors when a name is used within a +block before it is bound. This rule is subtle. Python lacks +declarations and allows name binding operations to occur anywhere +within a code block. The local variables of a code block can be +determined by scanning the entire text of the block for name binding +operations. See the FAQ entry on UnboundLocalError for examples. + +If the "global" statement occurs within a block, all uses of the names +specified in the statement refer to the bindings of those names in the +top-level namespace. Names are resolved in the top-level namespace by +searching the global namespace, i.e. the namespace of the module +containing the code block, and the builtins namespace, the namespace +of the module "builtins". The global namespace is searched first. 
If +the names are not found there, the builtins namespace is searched +next. If the names are also not found in the builtins namespace, new +variables are created in the global namespace. The global statement +must precede all uses of the listed names. + +The "global" statement has the same scope as a name binding operation +in the same block. If the nearest enclosing scope for a free variable +contains a global statement, the free variable is treated as a global. + +The "nonlocal" statement causes corresponding names to refer to +previously bound variables in the nearest enclosing function scope. +"SyntaxError" is raised at compile time if the given name does not +exist in any enclosing function scope. Type parameters cannot be +rebound with the "nonlocal" statement. + +The namespace for a module is automatically created the first time a +module is imported. The main module for a script is always called +"__main__". + +Class definition blocks and arguments to "exec()" and "eval()" are +special in the context of name resolution. A class definition is an +executable statement that may use and define names. These references +follow the normal rules for name resolution with an exception that +unbound local variables are looked up in the global namespace. The +namespace of the class definition becomes the attribute dictionary of +the class. The scope of names defined in a class block is limited to +the class block; it does not extend to the code blocks of methods. +This includes comprehensions and generator expressions, but it does +not include annotation scopes, which have access to their enclosing +class scopes. This means that the following will fail: + + class A: + a = 42 + b = list(a + i for i in range(10)) + +However, the following will succeed: + + class A: + type Alias = Nested + class Nested: pass + + print(A.Alias.__value__) # + + +Annotation scopes +================= + +*Annotations*, type parameter lists and "type" statements introduce +*annotation scopes*, which behave mostly like function scopes, but +with some exceptions discussed below. + +Annotation scopes are used in the following contexts: + +* *Function annotations*. + +* *Variable annotations*. + +* Type parameter lists for generic type aliases. + +* Type parameter lists for generic functions. A generic function’s + annotations are executed within the annotation scope, but its + defaults and decorators are not. + +* Type parameter lists for generic classes. A generic class’s base + classes and keyword arguments are executed within the annotation + scope, but its decorators are not. + +* The bounds, constraints, and default values for type parameters + (lazily evaluated). + +* The value of type aliases (lazily evaluated). + +Annotation scopes differ from function scopes in the following ways: + +* Annotation scopes have access to their enclosing class namespace. If + an annotation scope is immediately within a class scope, or within + another annotation scope that is immediately within a class scope, + the code in the annotation scope can use names defined in the class + scope as if it were executed directly within the class body. This + contrasts with regular functions defined within classes, which + cannot access names defined in the class scope. + +* Expressions in annotation scopes cannot contain "yield", "yield + from", "await", or ":=" expressions. (These expressions are allowed + in other scopes contained within the annotation scope.) 
+ +* Names defined in annotation scopes cannot be rebound with "nonlocal" + statements in inner scopes. This includes only type parameters, as + no other syntactic elements that can appear within annotation scopes + can introduce new names. + +* While annotation scopes have an internal name, that name is not + reflected in the *qualified name* of objects defined within the + scope. Instead, the "__qualname__" of such objects is as if the + object were defined in the enclosing scope. + +Added in version 3.12: Annotation scopes were introduced in Python +3.12 as part of **PEP 695**. + +Changed in version 3.13: Annotation scopes are also used for type +parameter defaults, as introduced by **PEP 696**. + +Changed in version 3.14: Annotation scopes are now also used for +annotations, as specified in **PEP 649** and **PEP 749**. + + +Lazy evaluation +=============== + +Most annotation scopes are *lazily evaluated*. This includes +annotations, the values of type aliases created through the "type" +statement, and the bounds, constraints, and default values of type +variables created through the type parameter syntax. This means that +they are not evaluated when the type alias or type variable is +created, or when the object carrying annotations is created. Instead, +they are only evaluated when necessary, for example when the +"__value__" attribute on a type alias is accessed. + +Example: + + >>> type Alias = 1/0 + >>> Alias.__value__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + >>> def func[T: 1/0](): pass + >>> T = func.__type_params__[0] + >>> T.__bound__ + Traceback (most recent call last): + ... + ZeroDivisionError: division by zero + +Here the exception is raised only when the "__value__" attribute of +the type alias or the "__bound__" attribute of the type variable is +accessed. + +This behavior is primarily useful for references to types that have +not yet been defined when the type alias or type variable is created. +For example, lazy evaluation enables creation of mutually recursive +type aliases: + + from typing import Literal + + type SimpleExpr = int | Parenthesized + type Parenthesized = tuple[Literal["("], Expr, Literal[")"]] + type Expr = SimpleExpr | tuple[SimpleExpr, Literal["+", "-"], Expr] + +Lazily evaluated values are evaluated in annotation scope, which means +that names that appear inside the lazily evaluated value are looked up +as if they were used in the immediately enclosing scope. + +Added in version 3.12. + + +Builtins and restricted execution +================================= + +**CPython implementation detail:** Users should not touch +"__builtins__"; it is strictly an implementation detail. Users +wanting to override values in the builtins namespace should "import" +the "builtins" module and modify its attributes appropriately. + +The builtins namespace associated with the execution of a code block +is actually found by looking up the name "__builtins__" in its global +namespace; this should be a dictionary or a module (in the latter case +the module’s dictionary is used). By default, when in the "__main__" +module, "__builtins__" is the built-in module "builtins"; when in any +other module, "__builtins__" is an alias for the dictionary of the +"builtins" module itself. + + +Interaction with dynamic features +================================= + +Name resolution of free variables occurs at runtime, not at compile +time. 
This means that the following code will print 42: + + i = 10 + def f(): + print(i) + i = 42 + f() + +The "eval()" and "exec()" functions do not have access to the full +environment for resolving names. Names may be resolved in the local +and global namespaces of the caller. Free variables are not resolved +in the nearest enclosing namespace, but in the global namespace. [1] +The "exec()" and "eval()" functions have optional arguments to +override the global and local namespace. If only one namespace is +specified, it is used for both. +''', + 'nonlocal': r'''The "nonlocal" statement +************************ + + nonlocal_stmt ::= "nonlocal" identifier ("," identifier)* + +When the definition of a function or class is nested (enclosed) within +the definitions of other functions, its nonlocal scopes are the local +scopes of the enclosing functions. The "nonlocal" statement causes the +listed identifiers to refer to names previously bound in nonlocal +scopes. It allows encapsulated code to rebind such nonlocal +identifiers. If a name is bound in more than one nonlocal scope, the +nearest binding is used. If a name is not bound in any nonlocal scope, +or if there is no nonlocal scope, a "SyntaxError" is raised. + +The "nonlocal" statement applies to the entire scope of a function or +class body. A "SyntaxError" is raised if a variable is used or +assigned to prior to its nonlocal declaration in the scope. + +See also: + + **PEP 3104** - Access to Names in Outer Scopes + The specification for the "nonlocal" statement. + +**Programmer’s note:** "nonlocal" is a directive to the parser and +applies only to code parsed along with it. See the note for the +"global" statement. +''', + 'numbers': r'''Numeric literals +**************** + +There are three types of numeric literals: integers, floating-point +numbers, and imaginary numbers. There are no complex literals +(complex numbers can be formed by adding a real number and an +imaginary number). + +Note that numeric literals do not include a sign; a phrase like "-1" +is actually an expression composed of the unary operator ‘"-"’ and the +literal "1". +''', + 'numeric-types': r'''Emulating numeric types +*********************** + +The following methods can be defined to emulate numeric objects. +Methods corresponding to operations that are not supported by the +particular kind of number implemented (e.g., bitwise operations for +non-integral numbers) should be left undefined. + +object.__add__(self, other) +object.__sub__(self, other) +object.__mul__(self, other) +object.__matmul__(self, other) +object.__truediv__(self, other) +object.__floordiv__(self, other) +object.__mod__(self, other) +object.__divmod__(self, other) +object.__pow__(self, other[, modulo]) +object.__lshift__(self, other) +object.__rshift__(self, other) +object.__and__(self, other) +object.__xor__(self, other) +object.__or__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, to + evaluate the expression "x + y", where *x* is an instance of a + class that has an "__add__()" method, "type(x).__add__(x, y)" is + called. The "__divmod__()" method should be the equivalent to + using "__floordiv__()" and "__mod__()"; it should not be related to + "__truediv__()". Note that "__pow__()" should be defined to accept + an optional third argument if the ternary version of the built-in + "pow()" function is to be supported. 
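+
+   As a minimal sketch (editor's illustration; the class and its name
+   are hypothetical):
+
+      class Metres:
+          def __init__(self, value):
+              self.value = value
+          def __add__(self, other):
+              if isinstance(other, Metres):
+                  return Metres(self.value + other.value)
+              return NotImplemented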
+ + If one of those methods does not support the operation with the + supplied arguments, it should return "NotImplemented". + +object.__radd__(self, other) +object.__rsub__(self, other) +object.__rmul__(self, other) +object.__rmatmul__(self, other) +object.__rtruediv__(self, other) +object.__rfloordiv__(self, other) +object.__rmod__(self, other) +object.__rdivmod__(self, other) +object.__rpow__(self, other[, modulo]) +object.__rlshift__(self, other) +object.__rrshift__(self, other) +object.__rand__(self, other) +object.__rxor__(self, other) +object.__ror__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|") with reflected (swapped) + operands. These functions are only called if the operands are of + different types, when the left operand does not support the + corresponding operation [3], or the right operand’s class is + derived from the left operand’s class. [4] For instance, to + evaluate the expression "x - y", where *y* is an instance of a + class that has an "__rsub__()" method, "type(y).__rsub__(y, x)" is + called if "type(x).__sub__(x, y)" returns "NotImplemented" or + "type(y)" is a subclass of "type(x)". [5] + + Note that ternary "pow()" will not try calling "__rpow__()" (the + coercion rules would become too complicated). + + Note: + + If the right operand’s type is a subclass of the left operand’s + type and that subclass provides a different implementation of the + reflected method for the operation, this method will be called + before the left operand’s non-reflected method. This behavior + allows subclasses to override their ancestors’ operations. + +object.__iadd__(self, other) +object.__isub__(self, other) +object.__imul__(self, other) +object.__imatmul__(self, other) +object.__itruediv__(self, other) +object.__ifloordiv__(self, other) +object.__imod__(self, other) +object.__ipow__(self, other[, modulo]) +object.__ilshift__(self, other) +object.__irshift__(self, other) +object.__iand__(self, other) +object.__ixor__(self, other) +object.__ior__(self, other) + + These methods are called to implement the augmented arithmetic + assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", "**=", + "<<=", ">>=", "&=", "^=", "|="). These methods should attempt to + do the operation in-place (modifying *self*) and return the result + (which could be, but does not have to be, *self*). If a specific + method is not defined, or if that method returns "NotImplemented", + the augmented assignment falls back to the normal methods. For + instance, if *x* is an instance of a class with an "__iadd__()" + method, "x += y" is equivalent to "x = x.__iadd__(y)" . If + "__iadd__()" does not exist, or if "x.__iadd__(y)" returns + "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are + considered, as with the evaluation of "x + y". In certain + situations, augmented assignment can result in unexpected errors + (see Why does a_tuple[i] += [‘item’] raise an exception when the + addition works?), but this behavior is in fact part of the data + model. + +object.__neg__(self) +object.__pos__(self) +object.__abs__(self) +object.__invert__(self) + + Called to implement the unary arithmetic operations ("-", "+", + "abs()" and "~"). + +object.__complex__(self) +object.__int__(self) +object.__float__(self) + + Called to implement the built-in functions "complex()", "int()" and + "float()". Should return a value of the appropriate type. 
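+
+   A minimal sketch (editor's illustration; the class is hypothetical):
+
+      class Celsius:
+          def __init__(self, degrees):
+              self.degrees = degrees
+          def __float__(self):
+              return float(self.degrees)
+
+      float(Celsius(21.5))   # 21.5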
+ +object.__index__(self) + + Called to implement "operator.index()", and whenever Python needs + to losslessly convert the numeric object to an integer object (such + as in slicing, or in the built-in "bin()", "hex()" and "oct()" + functions). Presence of this method indicates that the numeric + object is an integer type. Must return an integer. + + If "__int__()", "__float__()" and "__complex__()" are not defined + then corresponding built-in functions "int()", "float()" and + "complex()" fall back to "__index__()". + +object.__round__(self[, ndigits]) +object.__trunc__(self) +object.__floor__(self) +object.__ceil__(self) + + Called to implement the built-in function "round()" and "math" + functions "trunc()", "floor()" and "ceil()". Unless *ndigits* is + passed to "__round__()" all these methods should return the value + of the object truncated to an "Integral" (typically an "int"). + + Changed in version 3.14: "int()" no longer delegates to the + "__trunc__()" method. +''', + 'objects': r'''Objects, values and types +************************* + +*Objects* are Python’s abstraction for data. All data in a Python +program is represented by objects or by relations between objects. (In +a sense, and in conformance to Von Neumann’s model of a “stored +program computer”, code is also represented by objects.) + +Every object has an identity, a type and a value. An object’s +*identity* never changes once it has been created; you may think of it +as the object’s address in memory. The "is" operator compares the +identity of two objects; the "id()" function returns an integer +representing its identity. + +**CPython implementation detail:** For CPython, "id(x)" is the memory +address where "x" is stored. + +An object’s type determines the operations that the object supports +(e.g., “does it have a length?”) and also defines the possible values +for objects of that type. The "type()" function returns an object’s +type (which is an object itself). Like its identity, an object’s +*type* is also unchangeable. [1] + +The *value* of some objects can change. Objects whose value can +change are said to be *mutable*; objects whose value is unchangeable +once they are created are called *immutable*. (The value of an +immutable container object that contains a reference to a mutable +object can change when the latter’s value is changed; however the +container is still considered immutable, because the collection of +objects it contains cannot be changed. So, immutability is not +strictly the same as having an unchangeable value, it is more subtle.) +An object’s mutability is determined by its type; for instance, +numbers, strings and tuples are immutable, while dictionaries and +lists are mutable. + +Objects are never explicitly destroyed; however, when they become +unreachable they may be garbage-collected. An implementation is +allowed to postpone garbage collection or omit it altogether — it is a +matter of implementation quality how garbage collection is +implemented, as long as no objects are collected that are still +reachable. + +**CPython implementation detail:** CPython currently uses a reference- +counting scheme with (optional) delayed detection of cyclically linked +garbage, which collects most objects as soon as they become +unreachable, but is not guaranteed to collect garbage containing +circular references. See the documentation of the "gc" module for +information on controlling the collection of cyclic garbage. Other +implementations act differently and CPython may change. 
Do not depend +on immediate finalization of objects when they become unreachable (so +you should always close files explicitly). + +Note that the use of the implementation’s tracing or debugging +facilities may keep objects alive that would normally be collectable. +Also note that catching an exception with a "try"…"except" statement +may keep objects alive. + +Some objects contain references to “external” resources such as open +files or windows. It is understood that these resources are freed +when the object is garbage-collected, but since garbage collection is +not guaranteed to happen, such objects also provide an explicit way to +release the external resource, usually a "close()" method. Programs +are strongly recommended to explicitly close such objects. The +"try"…"finally" statement and the "with" statement provide convenient +ways to do this. + +Some objects contain references to other objects; these are called +*containers*. Examples of containers are tuples, lists and +dictionaries. The references are part of a container’s value. In +most cases, when we talk about the value of a container, we imply the +values, not the identities of the contained objects; however, when we +talk about the mutability of a container, only the identities of the +immediately contained objects are implied. So, if an immutable +container (like a tuple) contains a reference to a mutable object, its +value changes if that mutable object is changed. + +Types affect almost all aspects of object behavior. Even the +importance of object identity is affected in some sense: for immutable +types, operations that compute new values may actually return a +reference to any existing object with the same type and value, while +for mutable objects this is not allowed. For example, after "a = 1; b += 1", *a* and *b* may or may not refer to the same object with the +value one, depending on the implementation. This is because "int" is +an immutable type, so the reference to "1" can be reused. This +behaviour depends on the implementation used, so should not be relied +upon, but is something to be aware of when making use of object +identity tests. However, after "c = []; d = []", *c* and *d* are +guaranteed to refer to two different, unique, newly created empty +lists. (Note that "e = f = []" assigns the *same* object to both *e* +and *f*.) +''', + 'operator-summary': r'''Operator precedence +******************* + +The following table summarizes the operator precedence in Python, from +highest precedence (most binding) to lowest precedence (least +binding). Operators in the same box have the same precedence. Unless +the syntax is explicitly given, operators are binary. Operators in +the same box group left to right (except for exponentiation and +conditional expressions, which group from right to left). + +Note that comparisons, membership tests, and identity tests, all have +the same precedence and have a left-to-right chaining feature as +described in the Comparisons section. 
+ ++-------------------------------------------------+---------------------------------------+ +| Operator | Description | +|=================================================|=======================================| +| "(expressions...)", "[expressions...]", "{key: | Binding or parenthesized expression, | +| value...}", "{expressions...}" | list display, dictionary display, set | +| | display | ++-------------------------------------------------+---------------------------------------+ +| "x[index]", "x[index:index]", | Subscription, slicing, call, | +| "x(arguments...)", "x.attribute" | attribute reference | ++-------------------------------------------------+---------------------------------------+ +| "await x" | Await expression | ++-------------------------------------------------+---------------------------------------+ +| "**" | Exponentiation [5] | ++-------------------------------------------------+---------------------------------------+ +| "+x", "-x", "~x" | Positive, negative, bitwise NOT | ++-------------------------------------------------+---------------------------------------+ +| "*", "@", "/", "//", "%" | Multiplication, matrix | +| | multiplication, division, floor | +| | division, remainder [6] | ++-------------------------------------------------+---------------------------------------+ +| "+", "-" | Addition and subtraction | ++-------------------------------------------------+---------------------------------------+ +| "<<", ">>" | Shifts | ++-------------------------------------------------+---------------------------------------+ +| "&" | Bitwise AND | ++-------------------------------------------------+---------------------------------------+ +| "^" | Bitwise XOR | ++-------------------------------------------------+---------------------------------------+ +| "|" | Bitwise OR | ++-------------------------------------------------+---------------------------------------+ +| "in", "not in", "is", "is not", "<", "<=", ">", | Comparisons, including membership | +| ">=", "!=", "==" | tests and identity tests | ++-------------------------------------------------+---------------------------------------+ +| "not x" | Boolean NOT | ++-------------------------------------------------+---------------------------------------+ +| "and" | Boolean AND | ++-------------------------------------------------+---------------------------------------+ +| "or" | Boolean OR | ++-------------------------------------------------+---------------------------------------+ +| "if" – "else" | Conditional expression | ++-------------------------------------------------+---------------------------------------+ +| "lambda" | Lambda expression | ++-------------------------------------------------+---------------------------------------+ +| ":=" | Assignment expression | ++-------------------------------------------------+---------------------------------------+ + +-[ Footnotes ]- + +[1] While "abs(x%y) < abs(y)" is true mathematically, for floats it + may not be true numerically due to roundoff. For example, and + assuming a platform on which a Python float is an IEEE 754 double- + precision number, in order that "-1e-100 % 1e100" have the same + sign as "1e100", the computed result is "-1e-100 + 1e100", which + is numerically exactly equal to "1e100". The function + "math.fmod()" returns a result whose sign matches the sign of the + first argument instead, and so returns "-1e-100" in this case. + Which approach is more appropriate depends on the application. 
+ +[2] If x is very close to an exact integer multiple of y, it’s + possible for "x//y" to be one larger than "(x-x%y)//y" due to + rounding. In such cases, Python returns the latter result, in + order to preserve that "divmod(x,y)[0] * y + x % y" be very close + to "x". + +[3] The Unicode standard distinguishes between *code points* (e.g. + U+0041) and *abstract characters* (e.g. “LATIN CAPITAL LETTER A”). + While most abstract characters in Unicode are only represented + using one code point, there is a number of abstract characters + that can in addition be represented using a sequence of more than + one code point. For example, the abstract character “LATIN + CAPITAL LETTER C WITH CEDILLA” can be represented as a single + *precomposed character* at code position U+00C7, or as a sequence + of a *base character* at code position U+0043 (LATIN CAPITAL + LETTER C), followed by a *combining character* at code position + U+0327 (COMBINING CEDILLA). + + The comparison operators on strings compare at the level of + Unicode code points. This may be counter-intuitive to humans. For + example, ""\u00C7" == "\u0043\u0327"" is "False", even though both + strings represent the same abstract character “LATIN CAPITAL + LETTER C WITH CEDILLA”. + + To compare strings at the level of abstract characters (that is, + in a way intuitive to humans), use "unicodedata.normalize()". + +[4] Due to automatic garbage-collection, free lists, and the dynamic + nature of descriptors, you may notice seemingly unusual behaviour + in certain uses of the "is" operator, like those involving + comparisons between instance methods, or constants. Check their + documentation for more info. + +[5] The power operator "**" binds less tightly than an arithmetic or + bitwise unary operator on its right, that is, "2**-1" is "0.5". + +[6] The "%" operator is also used for string formatting; the same + precedence applies. +''', + 'pass': r'''The "pass" statement +******************** + + pass_stmt ::= "pass" + +"pass" is a null operation — when it is executed, nothing happens. It +is useful as a placeholder when a statement is required syntactically, +but no code needs to be executed, for example: + + def f(arg): pass # a function that does nothing (yet) + + class C: pass # a class with no methods (yet) +''', + 'power': r'''The power operator +****************** + +The power operator binds more tightly than unary operators on its +left; it binds less tightly than unary operators on its right. The +syntax is: + + power ::= (await_expr | primary) ["**" u_expr] + +Thus, in an unparenthesized sequence of power and unary operators, the +operators are evaluated from right to left (this does not constrain +the evaluation order for the operands): "-1**2" results in "-1". + +The power operator has the same semantics as the built-in "pow()" +function, when called with two arguments: it yields its left argument +raised to the power of its right argument. The numeric arguments are +first converted to a common type, and the result is of that type. + +For int operands, the result has the same type as the operands unless +the second argument is negative; in that case, all arguments are +converted to float and a float result is delivered. For example, +"10**2" returns "100", but "10**-2" returns "0.01". + +Raising "0.0" to a negative power results in a "ZeroDivisionError". +Raising a negative number to a fractional power results in a "complex" +number. (In earlier versions it raised a "ValueError".) 
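+
+These rules can be checked interactively; a short, purely
+illustrative session:
+
+   >>> 10 ** 2, 10 ** -2
+   (100, 0.01)
+   >>> -1 ** 2             # parsed as -(1 ** 2)
+   -1
+   >>> type((-1) ** 0.5)   # negative base, fractional exponent
+   <class 'complex'>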
+
+This operation can be customized using the special "__pow__()" and
+"__rpow__()" methods.
+''',
+ 'raise': r'''The "raise" statement
+*********************
+
+   raise_stmt ::= "raise" [expression ["from" expression]]
+
+If no expressions are present, "raise" re-raises the exception that is
+currently being handled, which is also known as the *active
+exception*. If there isn’t currently an active exception, a
+"RuntimeError" exception is raised indicating that this is an error.
+
+Otherwise, "raise" evaluates the first expression as the exception
+object. It must be either a subclass or an instance of
+"BaseException". If it is a class, the exception instance will be
+obtained when needed by instantiating the class with no arguments.
+
+The *type* of the exception is the exception instance’s class, the
+*value* is the instance itself.
+
+A traceback object is normally created automatically when an exception
+is raised and attached to it as the "__traceback__" attribute. You can
+create an exception and set your own traceback in one step using the
+"with_traceback()" exception method (which returns the same exception
+instance, with its traceback set to its argument), like so:
+
+   raise Exception("foo occurred").with_traceback(tracebackobj)
+
+The "from" clause is used for exception chaining: if given, the second
+*expression* must be another exception class or instance. If the
+second expression is an exception instance, it will be attached to the
+raised exception as the "__cause__" attribute (which is writable). If
+the expression is an exception class, the class will be instantiated
+and the resulting exception instance will be attached to the raised
+exception as the "__cause__" attribute. If the raised exception is not
+handled, both exceptions will be printed:
+
+   >>> try:
+   ...     print(1 / 0)
+   ... except Exception as exc:
+   ...     raise RuntimeError("Something bad happened") from exc
+   ...
+   Traceback (most recent call last):
+     File "<stdin>", line 2, in <module>
+       print(1 / 0)
+             ~~^~~
+   ZeroDivisionError: division by zero
+
+   The above exception was the direct cause of the following exception:
+
+   Traceback (most recent call last):
+     File "<stdin>", line 4, in <module>
+       raise RuntimeError("Something bad happened") from exc
+   RuntimeError: Something bad happened
+
+A similar mechanism works implicitly if a new exception is raised when
+an exception is already being handled. An exception may be handled
+when an "except" or "finally" clause, or a "with" statement, is used.
+The previous exception is then attached as the new exception’s
+"__context__" attribute:
+
+   >>> try:
+   ...     print(1 / 0)
+   ... except:
+   ...     raise RuntimeError("Something bad happened")
+   ...
+   Traceback (most recent call last):
+     File "<stdin>", line 2, in <module>
+       print(1 / 0)
+             ~~^~~
+   ZeroDivisionError: division by zero
+
+   During handling of the above exception, another exception occurred:
+
+   Traceback (most recent call last):
+     File "<stdin>", line 4, in <module>
+       raise RuntimeError("Something bad happened")
+   RuntimeError: Something bad happened
+
+Exception chaining can be explicitly suppressed by specifying "None"
+in the "from" clause:
+
+   >>> try:
+   ...     print(1 / 0)
+   ... except:
+   ...     raise RuntimeError("Something bad happened") from None
+   ...
+   Traceback (most recent call last):
+     File "<stdin>", line 4, in <module>
+   RuntimeError: Something bad happened
+
+Additional information on exceptions can be found in section
+Exceptions, and information about handling exceptions is in section
+The try statement.
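+
+As an illustrative aside (not part of the reference text itself), the
+chaining attributes described above can also be inspected from code:
+
+   try:
+       try:
+           print(1 / 0)
+       except Exception as exc:
+           raise RuntimeError("Something bad happened") from exc
+   except RuntimeError as err:
+       # "from exc" sets __cause__ and suppresses display of
+       # __context__, which still records the handled exception.
+       assert isinstance(err.__cause__, ZeroDivisionError)
+       assert err.__context__ is err.__cause__
+       assert err.__suppress_context__ is True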
+ +Changed in version 3.3: "None" is now permitted as "Y" in "raise X +from Y".Added the "__suppress_context__" attribute to suppress +automatic display of the exception context. + +Changed in version 3.11: If the traceback of the active exception is +modified in an "except" clause, a subsequent "raise" statement re- +raises the exception with the modified traceback. Previously, the +exception was re-raised with the traceback it had when it was caught. +''', + 'return': r'''The "return" statement +********************** + + return_stmt ::= "return" [expression_list] + +"return" may only occur syntactically nested in a function definition, +not within a nested class definition. + +If an expression list is present, it is evaluated, else "None" is +substituted. + +"return" leaves the current function call with the expression list (or +"None") as return value. + +When "return" passes control out of a "try" statement with a "finally" +clause, that "finally" clause is executed before really leaving the +function. + +In a generator function, the "return" statement indicates that the +generator is done and will cause "StopIteration" to be raised. The +returned value (if any) is used as an argument to construct +"StopIteration" and becomes the "StopIteration.value" attribute. + +In an asynchronous generator function, an empty "return" statement +indicates that the asynchronous generator is done and will cause +"StopAsyncIteration" to be raised. A non-empty "return" statement is +a syntax error in an asynchronous generator function. +''', + 'sequence-types': r'''Emulating container types +************************* + +The following methods can be defined to implement container objects. +None of them are provided by the "object" class itself. Containers +usually are *sequences* (such as "lists" or "tuples") or *mappings* +(like *dictionaries*), but can represent other containers as well. +The first set of methods is used either to emulate a sequence or to +emulate a mapping; the difference is that for a sequence, the +allowable keys should be the integers *k* for which "0 <= k < N" where +*N* is the length of the sequence, or "slice" objects, which define a +range of items. It is also recommended that mappings provide the +methods "keys()", "values()", "items()", "get()", "clear()", +"setdefault()", "pop()", "popitem()", "copy()", and "update()" +behaving similar to those for Python’s standard "dictionary" objects. +The "collections.abc" module provides a "MutableMapping" *abstract +base class* to help create those methods from a base set of +"__getitem__()", "__setitem__()", "__delitem__()", and "keys()". +Mutable sequences should provide methods "append()", "count()", +"index()", "extend()", "insert()", "pop()", "remove()", "reverse()" +and "sort()", like Python standard "list" objects. Finally, sequence +types should implement addition (meaning concatenation) and +multiplication (meaning repetition) by defining the methods +"__add__()", "__radd__()", "__iadd__()", "__mul__()", "__rmul__()" and +"__imul__()" described below; they should not define other numerical +operators. It is recommended that both mappings and sequences +implement the "__contains__()" method to allow efficient use of the +"in" operator; for mappings, "in" should search the mapping’s keys; +for sequences, it should search through the values. 
It is further +recommended that both mappings and sequences implement the +"__iter__()" method to allow efficient iteration through the +container; for mappings, "__iter__()" should iterate through the +object’s keys; for sequences, it should iterate through the values. + +object.__len__(self) + + Called to implement the built-in function "len()". Should return + the length of the object, an integer ">=" 0. Also, an object that + doesn’t define a "__bool__()" method and whose "__len__()" method + returns zero is considered to be false in a Boolean context. + + **CPython implementation detail:** In CPython, the length is + required to be at most "sys.maxsize". If the length is larger than + "sys.maxsize" some features (such as "len()") may raise + "OverflowError". To prevent raising "OverflowError" by truth value + testing, an object must define a "__bool__()" method. + +object.__length_hint__(self) + + Called to implement "operator.length_hint()". Should return an + estimated length for the object (which may be greater or less than + the actual length). The length must be an integer ">=" 0. The + return value may also be "NotImplemented", which is treated the + same as if the "__length_hint__" method didn’t exist at all. This + method is purely an optimization and is never required for + correctness. + + Added in version 3.4. + +Note: + + Slicing is done exclusively with the following three methods. A + call like + + a[1:2] = b + + is translated to + + a[slice(1, 2, None)] = b + + and so forth. Missing slice items are always filled in with "None". + +object.__getitem__(self, key) + + Called to implement evaluation of "self[key]". For *sequence* + types, the accepted keys should be integers. Optionally, they may + support "slice" objects as well. Negative index support is also + optional. If *key* is of an inappropriate type, "TypeError" may be + raised; if *key* is a value outside the set of indexes for the + sequence (after any special interpretation of negative values), + "IndexError" should be raised. For *mapping* types, if *key* is + missing (not in the container), "KeyError" should be raised. + + Note: + + "for" loops expect that an "IndexError" will be raised for + illegal indexes to allow proper detection of the end of the + sequence. + + Note: + + When subscripting a *class*, the special class method + "__class_getitem__()" may be called instead of "__getitem__()". + See __class_getitem__ versus __getitem__ for more details. + +object.__setitem__(self, key, value) + + Called to implement assignment to "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support changes to the values for keys, or if new keys + can be added, or for sequences if elements can be replaced. The + same exceptions should be raised for improper *key* values as for + the "__getitem__()" method. + +object.__delitem__(self, key) + + Called to implement deletion of "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support removal of keys, or for sequences if elements + can be removed from the sequence. The same exceptions should be + raised for improper *key* values as for the "__getitem__()" method. + +object.__missing__(self, key) + + Called by "dict"."__getitem__()" to implement "self[key]" for dict + subclasses when key is not in the dictionary. + +object.__iter__(self) + + This method is called when an *iterator* is required for a + container. 
This method should return a new iterator object that can + iterate over all the objects in the container. For mappings, it + should iterate over the keys of the container. + +object.__reversed__(self) + + Called (if present) by the "reversed()" built-in to implement + reverse iteration. It should return a new iterator object that + iterates over all the objects in the container in reverse order. + + If the "__reversed__()" method is not provided, the "reversed()" + built-in will fall back to using the sequence protocol ("__len__()" + and "__getitem__()"). Objects that support the sequence protocol + should only provide "__reversed__()" if they can provide an + implementation that is more efficient than the one provided by + "reversed()". + +The membership test operators ("in" and "not in") are normally +implemented as an iteration through a container. However, container +objects can supply the following special method with a more efficient +implementation, which also does not require the object be iterable. + +object.__contains__(self, item) + + Called to implement membership test operators. Should return true + if *item* is in *self*, false otherwise. For mapping objects, this + should consider the keys of the mapping rather than the values or + the key-item pairs. + + For objects that don’t define "__contains__()", the membership test + first tries iteration via "__iter__()", then the old sequence + iteration protocol via "__getitem__()", see this section in the + language reference. +''', + 'shifting': r'''Shifting operations +******************* + +The shifting operations have lower priority than the arithmetic +operations: + + shift_expr ::= a_expr | shift_expr ("<<" | ">>") a_expr + +These operators accept integers as arguments. They shift the first +argument to the left or right by the number of bits given by the +second argument. + +The left shift operation can be customized using the special +"__lshift__()" and "__rlshift__()" methods. The right shift operation +can be customized using the special "__rshift__()" and "__rrshift__()" +methods. + +A right shift by *n* bits is defined as floor division by "pow(2,n)". +A left shift by *n* bits is defined as multiplication with "pow(2,n)". +''', + 'slicings': r'''Slicings +******** + +A slicing selects a range of items in a sequence object (e.g., a +string, tuple or list). Slicings may be used as expressions or as +targets in assignment or "del" statements. The syntax for a slicing: + + slicing ::= primary "[" slice_list "]" + slice_list ::= slice_item ("," slice_item)* [","] + slice_item ::= expression | proper_slice + proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" [stride] ] + lower_bound ::= expression + upper_bound ::= expression + stride ::= expression + +There is ambiguity in the formal syntax here: anything that looks like +an expression list also looks like a slice list, so any subscription +can be interpreted as a slicing. Rather than further complicating the +syntax, this is disambiguated by defining that in this case the +interpretation as a subscription takes priority over the +interpretation as a slicing (this is the case if the slice list +contains no proper slice). + +The semantics for a slicing are as follows. The primary is indexed +(using the same "__getitem__()" method as normal subscription) with a +key that is constructed from the slice list, as follows. 
If the slice +list contains at least one comma, the key is a tuple containing the +conversion of the slice items; otherwise, the conversion of the lone +slice item is the key. The conversion of a slice item that is an +expression is that expression. The conversion of a proper slice is a +slice object (see section The standard type hierarchy) whose "start", +"stop" and "step" attributes are the values of the expressions given +as lower bound, upper bound and stride, respectively, substituting +"None" for missing expressions. +''', + 'specialattrs': r'''Special Attributes +****************** + +The implementation adds a few special read-only attributes to several +object types, where they are relevant. Some of these are not reported +by the "dir()" built-in function. + +definition.__name__ + + The name of the class, function, method, descriptor, or generator + instance. + +definition.__qualname__ + + The *qualified name* of the class, function, method, descriptor, or + generator instance. + + Added in version 3.3. + +definition.__module__ + + The name of the module in which a class or function was defined. + +definition.__doc__ + + The documentation string of a class or function, or "None" if + undefined. + +definition.__type_params__ + + The type parameters of generic classes, functions, and type + aliases. For classes and functions that are not generic, this will + be an empty tuple. + + Added in version 3.12. +''', + 'specialnames': r'''Special method names +******************** + +A class can implement certain operations that are invoked by special +syntax (such as arithmetic operations or subscripting and slicing) by +defining methods with special names. This is Python’s approach to +*operator overloading*, allowing classes to define their own behavior +with respect to language operators. For instance, if a class defines +a method named "__getitem__()", and "x" is an instance of this class, +then "x[i]" is roughly equivalent to "type(x).__getitem__(x, i)". +Except where mentioned, attempts to execute an operation raise an +exception when no appropriate method is defined (typically +"AttributeError" or "TypeError"). + +Setting a special method to "None" indicates that the corresponding +operation is not available. For example, if a class sets "__iter__()" +to "None", the class is not iterable, so calling "iter()" on its +instances will raise a "TypeError" (without falling back to +"__getitem__()"). [2] + +When implementing a class that emulates any built-in type, it is +important that the emulation only be implemented to the degree that it +makes sense for the object being modelled. For example, some +sequences may work well with retrieval of individual elements, but +extracting a slice may not make sense. (One example of this is the +"NodeList" interface in the W3C’s Document Object Model.) + + +Basic customization +=================== + +object.__new__(cls[, ...]) + + Called to create a new instance of class *cls*. "__new__()" is a + static method (special-cased so you need not declare it as such) + that takes the class of which an instance was requested as its + first argument. The remaining arguments are those passed to the + object constructor expression (the call to the class). The return + value of "__new__()" should be the new object instance (usually an + instance of *cls*). 
+ + Typical implementations create a new instance of the class by + invoking the superclass’s "__new__()" method using + "super().__new__(cls[, ...])" with appropriate arguments and then + modifying the newly created instance as necessary before returning + it. + + If "__new__()" is invoked during object construction and it returns + an instance of *cls*, then the new instance’s "__init__()" method + will be invoked like "__init__(self[, ...])", where *self* is the + new instance and the remaining arguments are the same as were + passed to the object constructor. + + If "__new__()" does not return an instance of *cls*, then the new + instance’s "__init__()" method will not be invoked. + + "__new__()" is intended mainly to allow subclasses of immutable + types (like int, str, or tuple) to customize instance creation. It + is also commonly overridden in custom metaclasses in order to + customize class creation. + +object.__init__(self[, ...]) + + Called after the instance has been created (by "__new__()"), but + before it is returned to the caller. The arguments are those + passed to the class constructor expression. If a base class has an + "__init__()" method, the derived class’s "__init__()" method, if + any, must explicitly call it to ensure proper initialization of the + base class part of the instance; for example: + "super().__init__([args...])". + + Because "__new__()" and "__init__()" work together in constructing + objects ("__new__()" to create it, and "__init__()" to customize + it), no non-"None" value may be returned by "__init__()"; doing so + will cause a "TypeError" to be raised at runtime. + +object.__del__(self) + + Called when the instance is about to be destroyed. This is also + called a finalizer or (improperly) a destructor. If a base class + has a "__del__()" method, the derived class’s "__del__()" method, + if any, must explicitly call it to ensure proper deletion of the + base class part of the instance. + + It is possible (though not recommended!) for the "__del__()" method + to postpone destruction of the instance by creating a new reference + to it. This is called object *resurrection*. It is + implementation-dependent whether "__del__()" is called a second + time when a resurrected object is about to be destroyed; the + current *CPython* implementation only calls it once. + + It is not guaranteed that "__del__()" methods are called for + objects that still exist when the interpreter exits. + "weakref.finalize" provides a straightforward way to register a + cleanup function to be called when an object is garbage collected. + + Note: + + "del x" doesn’t directly call "x.__del__()" — the former + decrements the reference count for "x" by one, and the latter is + only called when "x"’s reference count reaches zero. + + **CPython implementation detail:** It is possible for a reference + cycle to prevent the reference count of an object from going to + zero. In this case, the cycle will be later detected and deleted + by the *cyclic garbage collector*. A common cause of reference + cycles is when an exception has been caught in a local variable. + The frame’s locals then reference the exception, which references + its own traceback, which references the locals of all frames caught + in the traceback. + + See also: Documentation for the "gc" module. + + Warning: + + Due to the precarious circumstances under which "__del__()" + methods are invoked, exceptions that occur during their execution + are ignored, and a warning is printed to "sys.stderr" instead. 
+ In particular: + + * "__del__()" can be invoked when arbitrary code is being + executed, including from any arbitrary thread. If "__del__()" + needs to take a lock or invoke any other blocking resource, it + may deadlock as the resource may already be taken by the code + that gets interrupted to execute "__del__()". + + * "__del__()" can be executed during interpreter shutdown. As a + consequence, the global variables it needs to access (including + other modules) may already have been deleted or set to "None". + Python guarantees that globals whose name begins with a single + underscore are deleted from their module before other globals + are deleted; if no other references to such globals exist, this + may help in assuring that imported modules are still available + at the time when the "__del__()" method is called. + +object.__repr__(self) + + Called by the "repr()" built-in function to compute the “official” + string representation of an object. If at all possible, this + should look like a valid Python expression that could be used to + recreate an object with the same value (given an appropriate + environment). If this is not possible, a string of the form + "<...some useful description...>" should be returned. The return + value must be a string object. If a class defines "__repr__()" but + not "__str__()", then "__repr__()" is also used when an “informal” + string representation of instances of that class is required. + + This is typically used for debugging, so it is important that the + representation is information-rich and unambiguous. A default + implementation is provided by the "object" class itself. + +object.__str__(self) + + Called by "str(object)", the default "__format__()" implementation, + and the built-in function "print()", to compute the “informal” or + nicely printable string representation of an object. The return + value must be a str object. + + This method differs from "object.__repr__()" in that there is no + expectation that "__str__()" return a valid Python expression: a + more convenient or concise representation can be used. + + The default implementation defined by the built-in type "object" + calls "object.__repr__()". + +object.__bytes__(self) + + Called by bytes to compute a byte-string representation of an + object. This should return a "bytes" object. The "object" class + itself does not provide this method. + +object.__format__(self, format_spec) + + Called by the "format()" built-in function, and by extension, + evaluation of formatted string literals and the "str.format()" + method, to produce a “formatted” string representation of an + object. The *format_spec* argument is a string that contains a + description of the formatting options desired. The interpretation + of the *format_spec* argument is up to the type implementing + "__format__()", however most classes will either delegate + formatting to one of the built-in types, or use a similar + formatting option syntax. + + See Format Specification Mini-Language for a description of the + standard formatting syntax. + + The return value must be a string object. + + The default implementation by the "object" class should be given an + empty *format_spec* string. It delegates to "__str__()". + + Changed in version 3.4: The __format__ method of "object" itself + raises a "TypeError" if passed any non-empty string. + + Changed in version 3.7: "object.__format__(x, '')" is now + equivalent to "str(x)" rather than "format(str(x), '')". 
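+
+A purely illustrative sketch combining the string-conversion hooks
+above (the "Celsius" class is invented for this example):
+
+   class Celsius:
+       def __init__(self, degrees):
+           self.degrees = degrees
+
+       def __repr__(self):
+           # Unambiguous, debugger-friendly form.
+           return f"Celsius({self.degrees!r})"
+
+       def __str__(self):
+           # Friendly form used by print() and str().
+           return f"{self.degrees} degrees Celsius"
+
+       def __format__(self, format_spec):
+           # Delegate the numeric part to float formatting; an empty
+           # spec falls back to str(self), mirroring the default.
+           if not format_spec:
+               return str(self)
+           return format(self.degrees, format_spec) + " degrees Celsius"
+
+With this, "repr(Celsius(21.5))" is "'Celsius(21.5)'" and
+"format(Celsius(21.5), '.2f')" is "'21.50 degrees Celsius'".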
+
+object.__lt__(self, other)
+object.__le__(self, other)
+object.__eq__(self, other)
+object.__ne__(self, other)
+object.__gt__(self, other)
+object.__ge__(self, other)
+
+   These are the so-called “rich comparison” methods. The
+   correspondence between operator symbols and method names is as
+   follows: "x<y" calls "x.__lt__(y)", "x<=y" calls "x.__le__(y)",
+   "x==y" calls "x.__eq__(y)", "x!=y" calls "x.__ne__(y)", "x>y" calls
+   "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".
+
+   A rich comparison method may return the singleton "NotImplemented"
+   if it does not implement the operation for a given pair of
+   arguments. By convention, "False" and "True" are returned for a
+   successful comparison. However, these methods can return any value,
+   so if the comparison operator is used in a Boolean context (e.g.,
+   in the condition of an "if" statement), Python will call "bool()"
+   on the value to determine if the result is true or false.
+
+   By default, "object" implements "__eq__()" by using "is", returning
+   "NotImplemented" in the case of a false comparison: "True if x is y
+   else NotImplemented". For "__ne__()", by default it delegates to
+   "__eq__()" and inverts the result unless it is "NotImplemented".
+   There are no other implied relationships among the comparison
+   operators or default implementations; for example, the truth of
+   "(x<y or x==y)" does not imply "x<=y". To automatically generate
+   ordering operations from a single root operation, see
+   "functools.total_ordering()".
+
+   There are no swapped-argument versions of these methods (to be used
+   when the left argument does not support the operation but the right
+   argument does); rather, "__lt__()" and "__gt__()" are each other’s
+   reflection, and so are "__le__()" and "__ge__()". There is no
+   reflection for "__eq__()" or "__ne__()".
+
+   Arguments to rich comparison methods are never coerced.
+
+object.__hash__(self)
+
+   Called by built-in function "hash()" and for operations on members
+   of hashed collections including "set", "frozenset", and "dict".
+   The "__hash__()" method should return an integer. The only required
+   property is that objects which compare equal have the same hash
+   value; it is advised to mix together the hash values of the
+   components of the object that also play a part in comparison of
+   objects by packing them into a tuple and hashing the tuple.
+   Example:
+
+      def __hash__(self):
+          return hash((self.name, self.nick, self.color))
+
+   If a class does not define an "__eq__()" method it should not
+   define a "__hash__()" operation either; if it defines "__eq__()"
+   but not "__hash__()", its instances will not be usable as items in
+   hashable collections. If a class defines mutable objects and
+   implements an "__eq__()" method, it should not implement
+   "__hash__()", since the implementation of *hashable* collections
+   requires that a key’s hash value is immutable (if the object’s hash
+   value changes, it will be in the wrong hash bucket).
+
+   User-defined classes have "__eq__()" and "__hash__()" methods by
+   default; with them, all objects compare unequal (except with
+   themselves) and "x.__hash__()" returns an appropriate value such
+   that "x == y" implies both that "x is y" and "hash(x) == hash(y)".
+
+   A class that overrides "__eq__()" and does not define "__hash__()"
+   will have its "__hash__()" implicitly set to "None". When the
+   "__hash__()" method of a class is set to "None", instances of the
+   class will raise an appropriate "TypeError" when a program attempts
+   to retrieve their hash value, and will also be correctly identified
+   as unhashable when checking "isinstance(obj,
+   collections.abc.Hashable)".
+
+   If a class that overrides "__eq__()" needs to retain the hash
+   implementation from a parent class, the interpreter must be told
+   this explicitly by setting "__hash__ = <ParentClass>.__hash__".
+
+   If a class that does not override "__eq__()" wishes to suppress
+   hash support, it should include "__hash__ = None" in the class
+   definition. A class which defines its own "__hash__()" that
+   explicitly raises a "TypeError" would be incorrectly identified as
+   hashable by an "isinstance(obj, collections.abc.Hashable)" call.
+
+   Note:
+
+     By default, the "__hash__()" values of str and bytes objects are
+     “salted” with an unpredictable random value. Although they
+     remain constant within an individual Python process, they are not
+     predictable between repeated invocations of Python.
+
+     This is intended to provide protection against a
+     denial-of-service caused by carefully chosen inputs that exploit
+     the worst case performance of a dict insertion, *O*(*n*^2)
+     complexity. See http://ocert.org/advisories/ocert-2011-003.html
+     for details.
+
+     Changing hash values affects the iteration order of sets. Python
+     has never made guarantees about this ordering (and it typically
+     varies between 32-bit and 64-bit builds).
+
+     See also "PYTHONHASHSEED".
+
+   Changed in version 3.3: Hash randomization is enabled by default.
+
+object.__bool__(self)
+
+   Called to implement truth value testing and the built-in operation
+   "bool()"; should return "False" or "True". When this method is not
+   defined, "__len__()" is called, if it is defined, and the object is
+   considered true if its result is nonzero. If a class defines
+   neither "__len__()" nor "__bool__()" (which is true of the "object"
+   class itself), all its instances are considered true.
+
+
+Customizing attribute access
+============================
+
+The following methods can be defined to customize the meaning of
+attribute access (use of, assignment to, or deletion of "x.name") for
+class instances.
+
+object.__getattr__(self, name)
+
+   Called when the default attribute access fails with an
+   "AttributeError" (either "__getattribute__()" raises an
+   "AttributeError" because *name* is not an instance attribute or an
+   attribute in the class tree for "self"; or "__get__()" of a *name*
+   property raises "AttributeError"). This method should either
+   return the (computed) attribute value or raise an "AttributeError"
+   exception. The "object" class itself does not provide this method.
+ + Note that if the attribute is found through the normal mechanism, + "__getattr__()" is not called. (This is an intentional asymmetry + between "__getattr__()" and "__setattr__()".) This is done both for + efficiency reasons and because otherwise "__getattr__()" would have + no way to access other attributes of the instance. Note that at + least for instance variables, you can take total control by not + inserting any values in the instance attribute dictionary (but + instead inserting them in another object). See the + "__getattribute__()" method below for a way to actually get total + control over attribute access. + +object.__getattribute__(self, name) + + Called unconditionally to implement attribute accesses for + instances of the class. If the class also defines "__getattr__()", + the latter will not be called unless "__getattribute__()" either + calls it explicitly or raises an "AttributeError". This method + should return the (computed) attribute value or raise an + "AttributeError" exception. In order to avoid infinite recursion in + this method, its implementation should always call the base class + method with the same name to access any attributes it needs, for + example, "object.__getattribute__(self, name)". + + Note: + + This method may still be bypassed when looking up special methods + as the result of implicit invocation via language syntax or + built-in functions. See Special method lookup. + + For certain sensitive attribute accesses, raises an auditing event + "object.__getattr__" with arguments "obj" and "name". + +object.__setattr__(self, name, value) + + Called when an attribute assignment is attempted. This is called + instead of the normal mechanism (i.e. store the value in the + instance dictionary). *name* is the attribute name, *value* is the + value to be assigned to it. + + If "__setattr__()" wants to assign to an instance attribute, it + should call the base class method with the same name, for example, + "object.__setattr__(self, name, value)". + + For certain sensitive attribute assignments, raises an auditing + event "object.__setattr__" with arguments "obj", "name", "value". + +object.__delattr__(self, name) + + Like "__setattr__()" but for attribute deletion instead of + assignment. This should only be implemented if "del obj.name" is + meaningful for the object. + + For certain sensitive attribute deletions, raises an auditing event + "object.__delattr__" with arguments "obj" and "name". + +object.__dir__(self) + + Called when "dir()" is called on the object. An iterable must be + returned. "dir()" converts the returned iterable to a list and + sorts it. + + +Customizing module attribute access +----------------------------------- + +Special names "__getattr__" and "__dir__" can be also used to +customize access to module attributes. The "__getattr__" function at +the module level should accept one argument which is the name of an +attribute and return the computed value or raise an "AttributeError". +If an attribute is not found on a module object through the normal +lookup, i.e. "object.__getattribute__()", then "__getattr__" is +searched in the module "__dict__" before raising an "AttributeError". +If found, it is called with the attribute name and the result is +returned. + +The "__dir__" function should accept no arguments, and return an +iterable of strings that represents the names accessible on module. If +present, this function overrides the standard "dir()" search on a +module. 
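+
+A minimal, illustrative sketch of module-level "__getattr__" and
+"__dir__" (the module and its contents are invented for this
+example):
+
+   # contents of a hypothetical module "shapes.py"
+   import math
+   import warnings
+
+   def circle_area(radius):
+       return math.pi * radius ** 2
+
+   _aliases = {"area_of_circle": circle_area}  # deprecated names
+
+   def __getattr__(name):
+       # Called only after normal lookup in the module fails.
+       if name in _aliases:
+           warnings.warn(f"{name} is deprecated", DeprecationWarning,
+                         stacklevel=2)
+           return _aliases[name]
+       raise AttributeError(
+           f"module {__name__!r} has no attribute {name!r}")
+
+   def __dir__():
+       # Controls what dir() reports for this module.
+       return sorted([*globals(), *_aliases])
+
+Accessing "shapes.area_of_circle" then emits a "DeprecationWarning"
+but still resolves, while names that never existed raise
+"AttributeError" as usual.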
+ +For a more fine grained customization of the module behavior (setting +attributes, properties, etc.), one can set the "__class__" attribute +of a module object to a subclass of "types.ModuleType". For example: + + import sys + from types import ModuleType + + class VerboseModule(ModuleType): + def __repr__(self): + return f'Verbose {self.__name__}' + + def __setattr__(self, attr, value): + print(f'Setting {attr}...') + super().__setattr__(attr, value) + + sys.modules[__name__].__class__ = VerboseModule + +Note: + + Defining module "__getattr__" and setting module "__class__" only + affect lookups made using the attribute access syntax – directly + accessing the module globals (whether by code within the module, or + via a reference to the module’s globals dictionary) is unaffected. + +Changed in version 3.5: "__class__" module attribute is now writable. + +Added in version 3.7: "__getattr__" and "__dir__" module attributes. + +See also: + + **PEP 562** - Module __getattr__ and __dir__ + Describes the "__getattr__" and "__dir__" functions on modules. + + +Implementing Descriptors +------------------------ + +The following methods only apply when an instance of the class +containing the method (a so-called *descriptor* class) appears in an +*owner* class (the descriptor must be in either the owner’s class +dictionary or in the class dictionary for one of its parents). In the +examples below, “the attribute” refers to the attribute whose name is +the key of the property in the owner class’ "__dict__". The "object" +class itself does not implement any of these protocols. + +object.__get__(self, instance, owner=None) + + Called to get the attribute of the owner class (class attribute + access) or of an instance of that class (instance attribute + access). The optional *owner* argument is the owner class, while + *instance* is the instance that the attribute was accessed through, + or "None" when the attribute is accessed through the *owner*. + + This method should return the computed attribute value or raise an + "AttributeError" exception. + + **PEP 252** specifies that "__get__()" is callable with one or two + arguments. Python’s own built-in descriptors support this + specification; however, it is likely that some third-party tools + have descriptors that require both arguments. Python’s own + "__getattribute__()" implementation always passes in both arguments + whether they are required or not. + +object.__set__(self, instance, value) + + Called to set the attribute on an instance *instance* of the owner + class to a new value, *value*. + + Note, adding "__set__()" or "__delete__()" changes the kind of + descriptor to a “data descriptor”. See Invoking Descriptors for + more details. + +object.__delete__(self, instance) + + Called to delete the attribute on an instance *instance* of the + owner class. + +Instances of descriptors may also have the "__objclass__" attribute +present: + +object.__objclass__ + + The attribute "__objclass__" is interpreted by the "inspect" module + as specifying the class where this object was defined (setting this + appropriately can assist in runtime introspection of dynamic class + attributes). For callables, it may indicate that an instance of the + given type (or a subclass) is expected or required as the first + positional argument (for example, CPython sets this attribute for + unbound methods that are implemented in C). 
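+
+As a purely illustrative sketch of this protocol (the names are
+invented for this example), a small data descriptor that validates
+assignments, using the "__set_name__()" hook covered later in this
+section:
+
+   class PositiveNumber:
+       # A data descriptor: it defines __set__ as well as __get__.
+
+       def __set_name__(self, owner, name):
+           # Remember where to store the value on each instance.
+           self._name = "_" + name
+
+       def __get__(self, instance, owner=None):
+           if instance is None:
+               return self  # accessed on the owner class itself
+           return getattr(instance, self._name)
+
+       def __set__(self, instance, value):
+           if value <= 0:
+               raise ValueError(f"{self._name[1:]} must be positive")
+           setattr(instance, self._name, value)
+
+   class Order:
+       quantity = PositiveNumber()
+
+An "Order" instance accepts "order.quantity = 5" but raises
+"ValueError" for "order.quantity = 0"; because "__set__()" is
+defined, the descriptor takes precedence over the instance
+dictionary.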
+ + +Invoking Descriptors +-------------------- + +In general, a descriptor is an object attribute with “binding +behavior”, one whose attribute access has been overridden by methods +in the descriptor protocol: "__get__()", "__set__()", and +"__delete__()". If any of those methods are defined for an object, it +is said to be a descriptor. + +The default behavior for attribute access is to get, set, or delete +the attribute from an object’s dictionary. For instance, "a.x" has a +lookup chain starting with "a.__dict__['x']", then +"type(a).__dict__['x']", and continuing through the base classes of +"type(a)" excluding metaclasses. + +However, if the looked-up value is an object defining one of the +descriptor methods, then Python may override the default behavior and +invoke the descriptor method instead. Where this occurs in the +precedence chain depends on which descriptor methods were defined and +how they were called. + +The starting point for descriptor invocation is a binding, "a.x". How +the arguments are assembled depends on "a": + +Direct Call + The simplest and least common call is when user code directly + invokes a descriptor method: "x.__get__(a)". + +Instance Binding + If binding to an object instance, "a.x" is transformed into the + call: "type(a).__dict__['x'].__get__(a, type(a))". + +Class Binding + If binding to a class, "A.x" is transformed into the call: + "A.__dict__['x'].__get__(None, A)". + +Super Binding + A dotted lookup such as "super(A, a).x" searches + "a.__class__.__mro__" for a base class "B" following "A" and then + returns "B.__dict__['x'].__get__(a, A)". If not a descriptor, "x" + is returned unchanged. + +For instance bindings, the precedence of descriptor invocation depends +on which descriptor methods are defined. A descriptor can define any +combination of "__get__()", "__set__()" and "__delete__()". If it +does not define "__get__()", then accessing the attribute will return +the descriptor object itself unless there is a value in the object’s +instance dictionary. If the descriptor defines "__set__()" and/or +"__delete__()", it is a data descriptor; if it defines neither, it is +a non-data descriptor. Normally, data descriptors define both +"__get__()" and "__set__()", while non-data descriptors have just the +"__get__()" method. Data descriptors with "__get__()" and "__set__()" +(and/or "__delete__()") defined always override a redefinition in an +instance dictionary. In contrast, non-data descriptors can be +overridden by instances. + +Python methods (including those decorated with "@staticmethod" and +"@classmethod") are implemented as non-data descriptors. Accordingly, +instances can redefine and override methods. This allows individual +instances to acquire behaviors that differ from other instances of the +same class. + +The "property()" function is implemented as a data descriptor. +Accordingly, instances cannot override the behavior of a property. + + +__slots__ +--------- + +*__slots__* allow us to explicitly declare data members (like +properties) and deny the creation of "__dict__" and *__weakref__* +(unless explicitly declared in *__slots__* or available in a parent.) + +The space saved over using "__dict__" can be significant. Attribute +lookup speed can be significantly improved as well. + +object.__slots__ + + This class variable can be assigned a string, iterable, or sequence + of strings with variable names used by instances. 
*__slots__* + reserves space for the declared variables and prevents the + automatic creation of "__dict__" and *__weakref__* for each + instance. + +Notes on using *__slots__*: + +* When inheriting from a class without *__slots__*, the "__dict__" and + *__weakref__* attribute of the instances will always be accessible. + +* Without a "__dict__" variable, instances cannot be assigned new + variables not listed in the *__slots__* definition. Attempts to + assign to an unlisted variable name raises "AttributeError". If + dynamic assignment of new variables is desired, then add + "'__dict__'" to the sequence of strings in the *__slots__* + declaration. + +* Without a *__weakref__* variable for each instance, classes defining + *__slots__* do not support "weak references" to its instances. If + weak reference support is needed, then add "'__weakref__'" to the + sequence of strings in the *__slots__* declaration. + +* *__slots__* are implemented at the class level by creating + descriptors for each variable name. As a result, class attributes + cannot be used to set default values for instance variables defined + by *__slots__*; otherwise, the class attribute would overwrite the + descriptor assignment. + +* The action of a *__slots__* declaration is not limited to the class + where it is defined. *__slots__* declared in parents are available + in child classes. However, instances of a child subclass will get a + "__dict__" and *__weakref__* unless the subclass also defines + *__slots__* (which should only contain names of any *additional* + slots). + +* If a class defines a slot also defined in a base class, the instance + variable defined by the base class slot is inaccessible (except by + retrieving its descriptor directly from the base class). This + renders the meaning of the program undefined. In the future, a + check may be added to prevent this. + +* "TypeError" will be raised if nonempty *__slots__* are defined for a + class derived from a ""variable-length" built-in type" such as + "int", "bytes", and "tuple". + +* Any non-string *iterable* may be assigned to *__slots__*. + +* If a "dictionary" is used to assign *__slots__*, the dictionary keys + will be used as the slot names. The values of the dictionary can be + used to provide per-attribute docstrings that will be recognised by + "inspect.getdoc()" and displayed in the output of "help()". + +* "__class__" assignment works only if both classes have the same + *__slots__*. + +* Multiple inheritance with multiple slotted parent classes can be + used, but only one parent is allowed to have attributes created by + slots (the other bases must have empty slot layouts) - violations + raise "TypeError". + +* If an *iterator* is used for *__slots__* then a *descriptor* is + created for each of the iterator’s values. However, the *__slots__* + attribute will be an empty iterator. + + +Customizing class creation +========================== + +Whenever a class inherits from another class, "__init_subclass__()" is +called on the parent class. This way, it is possible to write classes +which change the behavior of subclasses. This is closely related to +class decorators, but where class decorators only affect the specific +class they’re applied to, "__init_subclass__" solely applies to future +subclasses of the class defining the method. + +classmethod object.__init_subclass__(cls) + + This method is called whenever the containing class is subclassed. + *cls* is then the new subclass. 
If defined as a normal instance + method, this method is implicitly converted to a class method. + + Keyword arguments which are given to a new class are passed to the + parent class’s "__init_subclass__". For compatibility with other + classes using "__init_subclass__", one should take out the needed + keyword arguments and pass the others over to the base class, as + in: + + class Philosopher: + def __init_subclass__(cls, /, default_name, **kwargs): + super().__init_subclass__(**kwargs) + cls.default_name = default_name + + class AustralianPhilosopher(Philosopher, default_name="Bruce"): + pass + + The default implementation "object.__init_subclass__" does nothing, + but raises an error if it is called with any arguments. + + Note: + + The metaclass hint "metaclass" is consumed by the rest of the + type machinery, and is never passed to "__init_subclass__" + implementations. The actual metaclass (rather than the explicit + hint) can be accessed as "type(cls)". + + Added in version 3.6. + +When a class is created, "type.__new__()" scans the class variables +and makes callbacks to those with a "__set_name__()" hook. + +object.__set_name__(self, owner, name) + + Automatically called at the time the owning class *owner* is + created. The object has been assigned to *name* in that class: + + class A: + x = C() # Automatically calls: x.__set_name__(A, 'x') + + If the class variable is assigned after the class is created, + "__set_name__()" will not be called automatically. If needed, + "__set_name__()" can be called directly: + + class A: + pass + + c = C() + A.x = c # The hook is not called + c.__set_name__(A, 'x') # Manually invoke the hook + + See Creating the class object for more details. + + Added in version 3.6. + + +Metaclasses +----------- + +By default, classes are constructed using "type()". The class body is +executed in a new namespace and the class name is bound locally to the +result of "type(name, bases, namespace)". + +The class creation process can be customized by passing the +"metaclass" keyword argument in the class definition line, or by +inheriting from an existing class that included such an argument. In +the following example, both "MyClass" and "MySubclass" are instances +of "Meta": + + class Meta(type): + pass + + class MyClass(metaclass=Meta): + pass + + class MySubclass(MyClass): + pass + +Any other keyword arguments that are specified in the class definition +are passed through to all metaclass operations described below. + +When a class definition is executed, the following steps occur: + +* MRO entries are resolved; + +* the appropriate metaclass is determined; + +* the class namespace is prepared; + +* the class body is executed; + +* the class object is created. + + +Resolving MRO entries +--------------------- + +object.__mro_entries__(self, bases) + + If a base that appears in a class definition is not an instance of + "type", then an "__mro_entries__()" method is searched on the base. + If an "__mro_entries__()" method is found, the base is substituted + with the result of a call to "__mro_entries__()" when creating the + class. The method is called with the original bases tuple passed to + the *bases* parameter, and must return a tuple of classes that will + be used instead of the base. The returned tuple may be empty: in + these cases, the original base is ignored. + +See also: + + "types.resolve_bases()" + Dynamically resolve bases that are not instances of "type". 
+ + "types.get_original_bases()" + Retrieve a class’s “original bases” prior to modifications by + "__mro_entries__()". + + **PEP 560** + Core support for typing module and generic types. + + +Determining the appropriate metaclass +------------------------------------- + +The appropriate metaclass for a class definition is determined as +follows: + +* if no bases and no explicit metaclass are given, then "type()" is + used; + +* if an explicit metaclass is given and it is *not* an instance of + "type()", then it is used directly as the metaclass; + +* if an instance of "type()" is given as the explicit metaclass, or + bases are defined, then the most derived metaclass is used. + +The most derived metaclass is selected from the explicitly specified +metaclass (if any) and the metaclasses (i.e. "type(cls)") of all +specified base classes. The most derived metaclass is one which is a +subtype of *all* of these candidate metaclasses. If none of the +candidate metaclasses meets that criterion, then the class definition +will fail with "TypeError". + + +Preparing the class namespace +----------------------------- + +Once the appropriate metaclass has been identified, then the class +namespace is prepared. If the metaclass has a "__prepare__" attribute, +it is called as "namespace = metaclass.__prepare__(name, bases, +**kwds)" (where the additional keyword arguments, if any, come from +the class definition). The "__prepare__" method should be implemented +as a "classmethod". The namespace returned by "__prepare__" is passed +in to "__new__", but when the final class object is created the +namespace is copied into a new "dict". + +If the metaclass has no "__prepare__" attribute, then the class +namespace is initialised as an empty ordered mapping. + +See also: + + **PEP 3115** - Metaclasses in Python 3000 + Introduced the "__prepare__" namespace hook + + +Executing the class body +------------------------ + +The class body is executed (approximately) as "exec(body, globals(), +namespace)". The key difference from a normal call to "exec()" is that +lexical scoping allows the class body (including any methods) to +reference names from the current and outer scopes when the class +definition occurs inside a function. + +However, even when the class definition occurs inside the function, +methods defined inside the class still cannot see names defined at the +class scope. Class variables must be accessed through the first +parameter of instance or class methods, or through the implicit +lexically scoped "__class__" reference described in the next section. + + +Creating the class object +------------------------- + +Once the class namespace has been populated by executing the class +body, the class object is created by calling "metaclass(name, bases, +namespace, **kwds)" (the additional keywords passed here are the same +as those passed to "__prepare__"). + +This class object is the one that will be referenced by the zero- +argument form of "super()". "__class__" is an implicit closure +reference created by the compiler if any methods in a class body refer +to either "__class__" or "super". This allows the zero argument form +of "super()" to correctly identify the class being defined based on +lexical scoping, while the class or instance that was used to make the +current call is identified based on the first argument passed to the +method. + +**CPython implementation detail:** In CPython 3.6 and later, the +"__class__" cell is passed to the metaclass as a "__classcell__" entry +in the class namespace. 
If present, this must be propagated up to the +"type.__new__" call in order for the class to be initialised +correctly. Failing to do so will result in a "RuntimeError" in Python +3.8. + +When using the default metaclass "type", or any metaclass that +ultimately calls "type.__new__", the following additional +customization steps are invoked after creating the class object: + +1. The "type.__new__" method collects all of the attributes in the + class namespace that define a "__set_name__()" method; + +2. Those "__set_name__" methods are called with the class being + defined and the assigned name of that particular attribute; + +3. The "__init_subclass__()" hook is called on the immediate parent of + the new class in its method resolution order. + +After the class object is created, it is passed to the class +decorators included in the class definition (if any) and the resulting +object is bound in the local namespace as the defined class. + +When a new class is created by "type.__new__", the object provided as +the namespace parameter is copied to a new ordered mapping and the +original object is discarded. The new copy is wrapped in a read-only +proxy, which becomes the "__dict__" attribute of the class object. + +See also: + + **PEP 3135** - New super + Describes the implicit "__class__" closure reference + + +Uses for metaclasses +-------------------- + +The potential uses for metaclasses are boundless. Some ideas that have +been explored include enum, logging, interface checking, automatic +delegation, automatic property creation, proxies, frameworks, and +automatic resource locking/synchronization. + + +Customizing instance and subclass checks +======================================== + +The following methods are used to override the default behavior of the +"isinstance()" and "issubclass()" built-in functions. + +In particular, the metaclass "abc.ABCMeta" implements these methods in +order to allow the addition of Abstract Base Classes (ABCs) as +“virtual base classes” to any class or type (including built-in +types), including other ABCs. + +type.__instancecheck__(self, instance) + + Return true if *instance* should be considered a (direct or + indirect) instance of *class*. If defined, called to implement + "isinstance(instance, class)". + +type.__subclasscheck__(self, subclass) + + Return true if *subclass* should be considered a (direct or + indirect) subclass of *class*. If defined, called to implement + "issubclass(subclass, class)". + +Note that these methods are looked up on the type (metaclass) of a +class. They cannot be defined as class methods in the actual class. +This is consistent with the lookup of special methods that are called +on instances, only in this case the instance is itself a class. + +See also: + + **PEP 3119** - Introducing Abstract Base Classes + Includes the specification for customizing "isinstance()" and + "issubclass()" behavior through "__instancecheck__()" and + "__subclasscheck__()", with motivation for this functionality in + the context of adding Abstract Base Classes (see the "abc" + module) to the language. + + +Emulating generic types +======================= + +When using *type annotations*, it is often useful to *parameterize* a +*generic type* using Python’s square-brackets notation. For example, +the annotation "list[int]" might be used to signify a "list" in which +all the elements are of type "int". 
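+
+For instance, such a parameterized annotation can be attached to a
+function parameter and then inspected at runtime (a small
+illustrative sketch; the function "average" is not part of the
+standard library):
+
+   >>> def average(values: list[int]) -> float:
+   ...     return sum(values) / len(values)
+   ...
+   >>> average.__annotations__['values']
+   list[int]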
+ +See also: + + **PEP 484** - Type Hints + Introducing Python’s framework for type annotations + + Generic Alias Types + Documentation for objects representing parameterized generic + classes + + Generics, user-defined generics and "typing.Generic" + Documentation on how to implement generic classes that can be + parameterized at runtime and understood by static type-checkers. + +A class can *generally* only be parameterized if it defines the +special class method "__class_getitem__()". + +classmethod object.__class_getitem__(cls, key) + + Return an object representing the specialization of a generic class + by type arguments found in *key*. + + When defined on a class, "__class_getitem__()" is automatically a + class method. As such, there is no need for it to be decorated with + "@classmethod" when it is defined. + + +The purpose of *__class_getitem__* +---------------------------------- + +The purpose of "__class_getitem__()" is to allow runtime +parameterization of standard-library generic classes in order to more +easily apply *type hints* to these classes. + +To implement custom generic classes that can be parameterized at +runtime and understood by static type-checkers, users should either +inherit from a standard library class that already implements +"__class_getitem__()", or inherit from "typing.Generic", which has its +own implementation of "__class_getitem__()". + +Custom implementations of "__class_getitem__()" on classes defined +outside of the standard library may not be understood by third-party +type-checkers such as mypy. Using "__class_getitem__()" on any class +for purposes other than type hinting is discouraged. + + +*__class_getitem__* versus *__getitem__* +---------------------------------------- + +Usually, the subscription of an object using square brackets will call +the "__getitem__()" instance method defined on the object’s class. +However, if the object being subscribed is itself a class, the class +method "__class_getitem__()" may be called instead. +"__class_getitem__()" should return a GenericAlias object if it is +properly defined. + +Presented with the *expression* "obj[x]", the Python interpreter +follows something like the following process to decide whether +"__getitem__()" or "__class_getitem__()" should be called: + + from inspect import isclass + + def subscribe(obj, x): + """Return the result of the expression 'obj[x]'""" + + class_of_obj = type(obj) + + # If the class of obj defines __getitem__, + # call class_of_obj.__getitem__(obj, x) + if hasattr(class_of_obj, '__getitem__'): + return class_of_obj.__getitem__(obj, x) + + # Else, if obj is a class and defines __class_getitem__, + # call obj.__class_getitem__(x) + elif isclass(obj) and hasattr(obj, '__class_getitem__'): + return obj.__class_getitem__(x) + + # Else, raise an exception + else: + raise TypeError( + f"'{class_of_obj.__name__}' object is not subscriptable" + ) + +In Python, all classes are themselves instances of other classes. The +class of a class is known as that class’s *metaclass*, and most +classes have the "type" class as their metaclass. 
"type" does not +define "__getitem__()", meaning that expressions such as "list[int]", +"dict[str, float]" and "tuple[str, bytes]" all result in +"__class_getitem__()" being called: + + >>> # list has class "type" as its metaclass, like most classes: + >>> type(list) + + >>> type(dict) == type(list) == type(tuple) == type(str) == type(bytes) + True + >>> # "list[int]" calls "list.__class_getitem__(int)" + >>> list[int] + list[int] + >>> # list.__class_getitem__ returns a GenericAlias object: + >>> type(list[int]) + + +However, if a class has a custom metaclass that defines +"__getitem__()", subscribing the class may result in different +behaviour. An example of this can be found in the "enum" module: + + >>> from enum import Enum + >>> class Menu(Enum): + ... """A breakfast menu""" + ... SPAM = 'spam' + ... BACON = 'bacon' + ... + >>> # Enum classes have a custom metaclass: + >>> type(Menu) + + >>> # EnumMeta defines __getitem__, + >>> # so __class_getitem__ is not called, + >>> # and the result is not a GenericAlias object: + >>> Menu['SPAM'] + + >>> type(Menu['SPAM']) + + +See also: + + **PEP 560** - Core Support for typing module and generic types + Introducing "__class_getitem__()", and outlining when a + subscription results in "__class_getitem__()" being called + instead of "__getitem__()" + + +Emulating callable objects +========================== + +object.__call__(self[, args...]) + + Called when the instance is “called” as a function; if this method + is defined, "x(arg1, arg2, ...)" roughly translates to + "type(x).__call__(x, arg1, ...)". The "object" class itself does + not provide this method. + + +Emulating container types +========================= + +The following methods can be defined to implement container objects. +None of them are provided by the "object" class itself. Containers +usually are *sequences* (such as "lists" or "tuples") or *mappings* +(like *dictionaries*), but can represent other containers as well. +The first set of methods is used either to emulate a sequence or to +emulate a mapping; the difference is that for a sequence, the +allowable keys should be the integers *k* for which "0 <= k < N" where +*N* is the length of the sequence, or "slice" objects, which define a +range of items. It is also recommended that mappings provide the +methods "keys()", "values()", "items()", "get()", "clear()", +"setdefault()", "pop()", "popitem()", "copy()", and "update()" +behaving similar to those for Python’s standard "dictionary" objects. +The "collections.abc" module provides a "MutableMapping" *abstract +base class* to help create those methods from a base set of +"__getitem__()", "__setitem__()", "__delitem__()", and "keys()". +Mutable sequences should provide methods "append()", "count()", +"index()", "extend()", "insert()", "pop()", "remove()", "reverse()" +and "sort()", like Python standard "list" objects. Finally, sequence +types should implement addition (meaning concatenation) and +multiplication (meaning repetition) by defining the methods +"__add__()", "__radd__()", "__iadd__()", "__mul__()", "__rmul__()" and +"__imul__()" described below; they should not define other numerical +operators. It is recommended that both mappings and sequences +implement the "__contains__()" method to allow efficient use of the +"in" operator; for mappings, "in" should search the mapping’s keys; +for sequences, it should search through the values. 
It is further +recommended that both mappings and sequences implement the +"__iter__()" method to allow efficient iteration through the +container; for mappings, "__iter__()" should iterate through the +object’s keys; for sequences, it should iterate through the values. + +object.__len__(self) + + Called to implement the built-in function "len()". Should return + the length of the object, an integer ">=" 0. Also, an object that + doesn’t define a "__bool__()" method and whose "__len__()" method + returns zero is considered to be false in a Boolean context. + + **CPython implementation detail:** In CPython, the length is + required to be at most "sys.maxsize". If the length is larger than + "sys.maxsize" some features (such as "len()") may raise + "OverflowError". To prevent raising "OverflowError" by truth value + testing, an object must define a "__bool__()" method. + +object.__length_hint__(self) + + Called to implement "operator.length_hint()". Should return an + estimated length for the object (which may be greater or less than + the actual length). The length must be an integer ">=" 0. The + return value may also be "NotImplemented", which is treated the + same as if the "__length_hint__" method didn’t exist at all. This + method is purely an optimization and is never required for + correctness. + + Added in version 3.4. + +Note: + + Slicing is done exclusively with the following three methods. A + call like + + a[1:2] = b + + is translated to + + a[slice(1, 2, None)] = b + + and so forth. Missing slice items are always filled in with "None". + +object.__getitem__(self, key) + + Called to implement evaluation of "self[key]". For *sequence* + types, the accepted keys should be integers. Optionally, they may + support "slice" objects as well. Negative index support is also + optional. If *key* is of an inappropriate type, "TypeError" may be + raised; if *key* is a value outside the set of indexes for the + sequence (after any special interpretation of negative values), + "IndexError" should be raised. For *mapping* types, if *key* is + missing (not in the container), "KeyError" should be raised. + + Note: + + "for" loops expect that an "IndexError" will be raised for + illegal indexes to allow proper detection of the end of the + sequence. + + Note: + + When subscripting a *class*, the special class method + "__class_getitem__()" may be called instead of "__getitem__()". + See __class_getitem__ versus __getitem__ for more details. + +object.__setitem__(self, key, value) + + Called to implement assignment to "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support changes to the values for keys, or if new keys + can be added, or for sequences if elements can be replaced. The + same exceptions should be raised for improper *key* values as for + the "__getitem__()" method. + +object.__delitem__(self, key) + + Called to implement deletion of "self[key]". Same note as for + "__getitem__()". This should only be implemented for mappings if + the objects support removal of keys, or for sequences if elements + can be removed from the sequence. The same exceptions should be + raised for improper *key* values as for the "__getitem__()" method. + +object.__missing__(self, key) + + Called by "dict"."__getitem__()" to implement "self[key]" for dict + subclasses when key is not in the dictionary. + +object.__iter__(self) + + This method is called when an *iterator* is required for a + container. 
This method should return a new iterator object that can + iterate over all the objects in the container. For mappings, it + should iterate over the keys of the container. + +object.__reversed__(self) + + Called (if present) by the "reversed()" built-in to implement + reverse iteration. It should return a new iterator object that + iterates over all the objects in the container in reverse order. + + If the "__reversed__()" method is not provided, the "reversed()" + built-in will fall back to using the sequence protocol ("__len__()" + and "__getitem__()"). Objects that support the sequence protocol + should only provide "__reversed__()" if they can provide an + implementation that is more efficient than the one provided by + "reversed()". + +The membership test operators ("in" and "not in") are normally +implemented as an iteration through a container. However, container +objects can supply the following special method with a more efficient +implementation, which also does not require the object be iterable. + +object.__contains__(self, item) + + Called to implement membership test operators. Should return true + if *item* is in *self*, false otherwise. For mapping objects, this + should consider the keys of the mapping rather than the values or + the key-item pairs. + + For objects that don’t define "__contains__()", the membership test + first tries iteration via "__iter__()", then the old sequence + iteration protocol via "__getitem__()", see this section in the + language reference. + + +Emulating numeric types +======================= + +The following methods can be defined to emulate numeric objects. +Methods corresponding to operations that are not supported by the +particular kind of number implemented (e.g., bitwise operations for +non-integral numbers) should be left undefined. + +object.__add__(self, other) +object.__sub__(self, other) +object.__mul__(self, other) +object.__matmul__(self, other) +object.__truediv__(self, other) +object.__floordiv__(self, other) +object.__mod__(self, other) +object.__divmod__(self, other) +object.__pow__(self, other[, modulo]) +object.__lshift__(self, other) +object.__rshift__(self, other) +object.__and__(self, other) +object.__xor__(self, other) +object.__or__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|"). For instance, to + evaluate the expression "x + y", where *x* is an instance of a + class that has an "__add__()" method, "type(x).__add__(x, y)" is + called. The "__divmod__()" method should be the equivalent to + using "__floordiv__()" and "__mod__()"; it should not be related to + "__truediv__()". Note that "__pow__()" should be defined to accept + an optional third argument if the ternary version of the built-in + "pow()" function is to be supported. + + If one of those methods does not support the operation with the + supplied arguments, it should return "NotImplemented". 
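+
+   For illustration, a class might support "+" only with instances of
+   its own type, declining anything else so that Python can go on to
+   try the other operand’s reflected method (a minimal sketch; the
+   "Vector2D" class below is not part of the standard library):
+
+      >>> class Vector2D:
+      ...     def __init__(self, x, y):
+      ...         self.x, self.y = x, y
+      ...     def __add__(self, other):
+      ...         if not isinstance(other, Vector2D):
+      ...             return NotImplemented
+      ...         return Vector2D(self.x + other.x, self.y + other.y)
+      ...
+      >>> v = Vector2D(1, 2) + Vector2D(3, 4)
+      >>> (v.x, v.y)
+      (4, 6)
+      >>> Vector2D(1, 2) + 'text'
+      Traceback (most recent call last):
+        ...
+      TypeError: unsupported operand type(s) for +: 'Vector2D' and 'str'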
+ +object.__radd__(self, other) +object.__rsub__(self, other) +object.__rmul__(self, other) +object.__rmatmul__(self, other) +object.__rtruediv__(self, other) +object.__rfloordiv__(self, other) +object.__rmod__(self, other) +object.__rdivmod__(self, other) +object.__rpow__(self, other[, modulo]) +object.__rlshift__(self, other) +object.__rrshift__(self, other) +object.__rand__(self, other) +object.__rxor__(self, other) +object.__ror__(self, other) + + These methods are called to implement the binary arithmetic + operations ("+", "-", "*", "@", "/", "//", "%", "divmod()", + "pow()", "**", "<<", ">>", "&", "^", "|") with reflected (swapped) + operands. These functions are only called if the operands are of + different types, when the left operand does not support the + corresponding operation [3], or the right operand’s class is + derived from the left operand’s class. [4] For instance, to + evaluate the expression "x - y", where *y* is an instance of a + class that has an "__rsub__()" method, "type(y).__rsub__(y, x)" is + called if "type(x).__sub__(x, y)" returns "NotImplemented" or + "type(y)" is a subclass of "type(x)". [5] + + Note that ternary "pow()" will not try calling "__rpow__()" (the + coercion rules would become too complicated). + + Note: + + If the right operand’s type is a subclass of the left operand’s + type and that subclass provides a different implementation of the + reflected method for the operation, this method will be called + before the left operand’s non-reflected method. This behavior + allows subclasses to override their ancestors’ operations. + +object.__iadd__(self, other) +object.__isub__(self, other) +object.__imul__(self, other) +object.__imatmul__(self, other) +object.__itruediv__(self, other) +object.__ifloordiv__(self, other) +object.__imod__(self, other) +object.__ipow__(self, other[, modulo]) +object.__ilshift__(self, other) +object.__irshift__(self, other) +object.__iand__(self, other) +object.__ixor__(self, other) +object.__ior__(self, other) + + These methods are called to implement the augmented arithmetic + assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", "**=", + "<<=", ">>=", "&=", "^=", "|="). These methods should attempt to + do the operation in-place (modifying *self*) and return the result + (which could be, but does not have to be, *self*). If a specific + method is not defined, or if that method returns "NotImplemented", + the augmented assignment falls back to the normal methods. For + instance, if *x* is an instance of a class with an "__iadd__()" + method, "x += y" is equivalent to "x = x.__iadd__(y)" . If + "__iadd__()" does not exist, or if "x.__iadd__(y)" returns + "NotImplemented", "x.__add__(y)" and "y.__radd__(x)" are + considered, as with the evaluation of "x + y". In certain + situations, augmented assignment can result in unexpected errors + (see Why does a_tuple[i] += [‘item’] raise an exception when the + addition works?), but this behavior is in fact part of the data + model. + +object.__neg__(self) +object.__pos__(self) +object.__abs__(self) +object.__invert__(self) + + Called to implement the unary arithmetic operations ("-", "+", + "abs()" and "~"). + +object.__complex__(self) +object.__int__(self) +object.__float__(self) + + Called to implement the built-in functions "complex()", "int()" and + "float()". Should return a value of the appropriate type. 
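+
+   For illustration, a simple ratio type might support these
+   conversions as follows (an informal sketch; "Ratio" is not a
+   standard-library class):
+
+      >>> class Ratio:
+      ...     def __init__(self, num, den):
+      ...         self.num, self.den = num, den
+      ...     def __int__(self):
+      ...         return self.num // self.den
+      ...     def __float__(self):
+      ...         return self.num / self.den
+      ...
+      >>> int(Ratio(7, 2)), float(Ratio(7, 2))
+      (3, 3.5)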
+ +object.__index__(self) + + Called to implement "operator.index()", and whenever Python needs + to losslessly convert the numeric object to an integer object (such + as in slicing, or in the built-in "bin()", "hex()" and "oct()" + functions). Presence of this method indicates that the numeric + object is an integer type. Must return an integer. + + If "__int__()", "__float__()" and "__complex__()" are not defined + then corresponding built-in functions "int()", "float()" and + "complex()" fall back to "__index__()". + +object.__round__(self[, ndigits]) +object.__trunc__(self) +object.__floor__(self) +object.__ceil__(self) + + Called to implement the built-in function "round()" and "math" + functions "trunc()", "floor()" and "ceil()". Unless *ndigits* is + passed to "__round__()" all these methods should return the value + of the object truncated to an "Integral" (typically an "int"). + + Changed in version 3.14: "int()" no longer delegates to the + "__trunc__()" method. + + +With Statement Context Managers +=============================== + +A *context manager* is an object that defines the runtime context to +be established when executing a "with" statement. The context manager +handles the entry into, and the exit from, the desired runtime context +for the execution of the block of code. Context managers are normally +invoked using the "with" statement (described in section The with +statement), but can also be used by directly invoking their methods. + +Typical uses of context managers include saving and restoring various +kinds of global state, locking and unlocking resources, closing opened +files, etc. + +For more information on context managers, see Context Manager Types. +The "object" class itself does not provide the context manager +methods. + +object.__enter__(self) + + Enter the runtime context related to this object. The "with" + statement will bind this method’s return value to the target(s) + specified in the "as" clause of the statement, if any. + +object.__exit__(self, exc_type, exc_value, traceback) + + Exit the runtime context related to this object. The parameters + describe the exception that caused the context to be exited. If the + context was exited without an exception, all three arguments will + be "None". + + If an exception is supplied, and the method wishes to suppress the + exception (i.e., prevent it from being propagated), it should + return a true value. Otherwise, the exception will be processed + normally upon exit from this method. + + Note that "__exit__()" methods should not reraise the passed-in + exception; this is the caller’s responsibility. + +See also: + + **PEP 343** - The “with” statement + The specification, background, and examples for the Python "with" + statement. + + +Customizing positional arguments in class pattern matching +========================================================== + +When using a class name in a pattern, positional arguments in the +pattern are not allowed by default, i.e. "case MyClass(x, y)" is +typically invalid without special support in "MyClass". To be able to +use that kind of pattern, the class needs to define a *__match_args__* +attribute. + +object.__match_args__ + + This class variable can be assigned a tuple of strings. When this + class is used in a class pattern with positional arguments, each + positional argument will be converted into a keyword argument, + using the corresponding value in *__match_args__* as the keyword. + The absence of this attribute is equivalent to setting it to "()". 
+ +For example, if "MyClass.__match_args__" is "("left", "center", +"right")" that means that "case MyClass(x, y)" is equivalent to "case +MyClass(left=x, center=y)". Note that the number of arguments in the +pattern must be smaller than or equal to the number of elements in +*__match_args__*; if it is larger, the pattern match attempt will +raise a "TypeError". + +Added in version 3.10. + +See also: + + **PEP 634** - Structural Pattern Matching + The specification for the Python "match" statement. + + +Emulating buffer types +====================== + +The buffer protocol provides a way for Python objects to expose +efficient access to a low-level memory array. This protocol is +implemented by builtin types such as "bytes" and "memoryview", and +third-party libraries may define additional buffer types. + +While buffer types are usually implemented in C, it is also possible +to implement the protocol in Python. + +object.__buffer__(self, flags) + + Called when a buffer is requested from *self* (for example, by the + "memoryview" constructor). The *flags* argument is an integer + representing the kind of buffer requested, affecting for example + whether the returned buffer is read-only or writable. + "inspect.BufferFlags" provides a convenient way to interpret the + flags. The method must return a "memoryview" object. + +object.__release_buffer__(self, buffer) + + Called when a buffer is no longer needed. The *buffer* argument is + a "memoryview" object that was previously returned by + "__buffer__()". The method must release any resources associated + with the buffer. This method should return "None". Buffer objects + that do not need to perform any cleanup are not required to + implement this method. + +Added in version 3.12. + +See also: + + **PEP 688** - Making the buffer protocol accessible in Python + Introduces the Python "__buffer__" and "__release_buffer__" + methods. + + "collections.abc.Buffer" + ABC for buffer types. + + +Annotations +=========== + +Functions, classes, and modules may contain *annotations*, which are a +way to associate information (usually *type hints*) with a symbol. + +object.__annotations__ + + This attribute contains the annotations for an object. It is lazily + evaluated, so accessing the attribute may execute arbitrary code + and raise exceptions. If evaluation is successful, the attribute is + set to a dictionary mapping from variable names to annotations. + + Changed in version 3.14: Annotations are now lazily evaluated. + +object.__annotate__(format) + + An *annotate function*. Returns a new dictionary object mapping + attribute/parameter names to their annotation values. + + Takes a format parameter specifying the format in which annotations + values should be provided. It must be a member of the + "annotationlib.Format" enum, or an integer with a value + corresponding to a member of the enum. + + If an annotate function doesn’t support the requested format, it + must raise "NotImplementedError". Annotate functions must always + support "VALUE" format; they must not raise "NotImplementedError()" + when called with this format. + + When called with "VALUE" format, an annotate function may raise + "NameError"; it must not raise "NameError" when called requesting + any other format. + + If an object does not have any annotations, "__annotate__" should + preferably be set to "None" (it can’t be deleted), rather than set + to a function that returns an empty dict. + + Added in version 3.14. 
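+
+   For illustration, a hand-written annotate function could follow
+   this pattern (a rough sketch; annotate functions are normally
+   created by the interpreter, and the annotated names "x" and "y"
+   here are arbitrary):
+
+      from annotationlib import Format
+
+      def __annotate__(format):
+          # Only the mandatory VALUE format is handled in this sketch.
+          if format != Format.VALUE:
+              raise NotImplementedError(format)
+          return {'x': int, 'y': str}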
+ +See also: + + **PEP 649** — Deferred evaluation of annotation using descriptors + Introduces lazy evaluation of annotations and the "__annotate__" + function. + + +Special method lookup +===================== + +For custom classes, implicit invocations of special methods are only +guaranteed to work correctly if defined on an object’s type, not in +the object’s instance dictionary. That behaviour is the reason why +the following code raises an exception: + + >>> class C: + ... pass + ... + >>> c = C() + >>> c.__len__ = lambda: 5 + >>> len(c) + Traceback (most recent call last): + File "", line 1, in + TypeError: object of type 'C' has no len() + +The rationale behind this behaviour lies with a number of special +methods such as "__hash__()" and "__repr__()" that are implemented by +all objects, including type objects. If the implicit lookup of these +methods used the conventional lookup process, they would fail when +invoked on the type object itself: + + >>> 1 .__hash__() == hash(1) + True + >>> int.__hash__() == hash(int) + Traceback (most recent call last): + File "", line 1, in + TypeError: descriptor '__hash__' of 'int' object needs an argument + +Incorrectly attempting to invoke an unbound method of a class in this +way is sometimes referred to as ‘metaclass confusion’, and is avoided +by bypassing the instance when looking up special methods: + + >>> type(1).__hash__(1) == hash(1) + True + >>> type(int).__hash__(int) == hash(int) + True + +In addition to bypassing any instance attributes in the interest of +correctness, implicit special method lookup generally also bypasses +the "__getattribute__()" method even of the object’s metaclass: + + >>> class Meta(type): + ... def __getattribute__(*args): + ... print("Metaclass getattribute invoked") + ... return type.__getattribute__(*args) + ... + >>> class C(object, metaclass=Meta): + ... def __len__(self): + ... return 10 + ... def __getattribute__(*args): + ... print("Class getattribute invoked") + ... return object.__getattribute__(*args) + ... + >>> c = C() + >>> c.__len__() # Explicit lookup via instance + Class getattribute invoked + 10 + >>> type(c).__len__(c) # Explicit lookup via type + Metaclass getattribute invoked + 10 + >>> len(c) # Implicit lookup + 10 + +Bypassing the "__getattribute__()" machinery in this fashion provides +significant scope for speed optimisations within the interpreter, at +the cost of some flexibility in the handling of special methods (the +special method *must* be set on the class object itself in order to be +consistently invoked by the interpreter). +''', + 'string-methods': r'''String Methods +************** + +Strings implement all of the common sequence operations, along with +the additional methods described below. + +Strings also support two styles of string formatting, one providing a +large degree of flexibility and customization (see "str.format()", +Format String Syntax and Custom String Formatting) and the other based +on C "printf" style formatting that handles a narrower range of types +and is slightly harder to use correctly, but is often faster for the +cases it can handle (printf-style String Formatting). + +The Text Processing Services section of the standard library covers a +number of other modules that provide various text related utilities +(including regular expression support in the "re" module). + +str.capitalize() + + Return a copy of the string with its first character capitalized + and the rest lowercased. 
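+
+   For example:
+
+      >>> 'hello WORLD'.capitalize()
+      'Hello world'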
+ + Changed in version 3.8: The first character is now put into + titlecase rather than uppercase. This means that characters like + digraphs will only have their first letter capitalized, instead of + the full character. + +str.casefold() + + Return a casefolded copy of the string. Casefolded strings may be + used for caseless matching. + + Casefolding is similar to lowercasing but more aggressive because + it is intended to remove all case distinctions in a string. For + example, the German lowercase letter "'ß'" is equivalent to ""ss"". + Since it is already lowercase, "lower()" would do nothing to "'ß'"; + "casefold()" converts it to ""ss"". + + The casefolding algorithm is described in section 3.13 ‘Default + Case Folding’ of the Unicode Standard. + + Added in version 3.3. + +str.center(width[, fillchar]) + + Return centered in a string of length *width*. Padding is done + using the specified *fillchar* (default is an ASCII space). The + original string is returned if *width* is less than or equal to + "len(s)". + +str.count(sub[, start[, end]]) + + Return the number of non-overlapping occurrences of substring *sub* + in the range [*start*, *end*]. Optional arguments *start* and + *end* are interpreted as in slice notation. + + If *sub* is empty, returns the number of empty strings between + characters which is the length of the string plus one. + +str.encode(encoding='utf-8', errors='strict') + + Return the string encoded to "bytes". + + *encoding* defaults to "'utf-8'"; see Standard Encodings for + possible values. + + *errors* controls how encoding errors are handled. If "'strict'" + (the default), a "UnicodeError" exception is raised. Other possible + values are "'ignore'", "'replace'", "'xmlcharrefreplace'", + "'backslashreplace'" and any other name registered via + "codecs.register_error()". See Error Handlers for details. + + For performance reasons, the value of *errors* is not checked for + validity unless an encoding error actually occurs, Python + Development Mode is enabled or a debug build is used. + + Changed in version 3.1: Added support for keyword arguments. + + Changed in version 3.9: The value of the *errors* argument is now + checked in Python Development Mode and in debug mode. + +str.endswith(suffix[, start[, end]]) + + Return "True" if the string ends with the specified *suffix*, + otherwise return "False". *suffix* can also be a tuple of suffixes + to look for. With optional *start*, test beginning at that + position. With optional *end*, stop comparing at that position. + +str.expandtabs(tabsize=8) + + Return a copy of the string where all tab characters are replaced + by one or more spaces, depending on the current column and the + given tab size. Tab positions occur every *tabsize* characters + (default is 8, giving tab positions at columns 0, 8, 16 and so on). + To expand the string, the current column is set to zero and the + string is examined character by character. If the character is a + tab ("\t"), one or more space characters are inserted in the result + until the current column is equal to the next tab position. (The + tab character itself is not copied.) If the character is a newline + ("\n") or return ("\r"), it is copied and the current column is + reset to zero. Any other character is copied unchanged and the + current column is incremented by one regardless of how the + character is represented when printed. 
+ + >>> '01\t012\t0123\t01234'.expandtabs() + '01 012 0123 01234' + >>> '01\t012\t0123\t01234'.expandtabs(4) + '01 012 0123 01234' + +str.find(sub[, start[, end]]) + + Return the lowest index in the string where substring *sub* is + found within the slice "s[start:end]". Optional arguments *start* + and *end* are interpreted as in slice notation. Return "-1" if + *sub* is not found. + + Note: + + The "find()" method should be used only if you need to know the + position of *sub*. To check if *sub* is a substring or not, use + the "in" operator: + + >>> 'Py' in 'Python' + True + +str.format(*args, **kwargs) + + Perform a string formatting operation. The string on which this + method is called can contain literal text or replacement fields + delimited by braces "{}". Each replacement field contains either + the numeric index of a positional argument, or the name of a + keyword argument. Returns a copy of the string where each + replacement field is replaced with the string value of the + corresponding argument. + + >>> "The sum of 1 + 2 is {0}".format(1+2) + 'The sum of 1 + 2 is 3' + + See Format String Syntax for a description of the various + formatting options that can be specified in format strings. + + Note: + + When formatting a number ("int", "float", "complex", + "decimal.Decimal" and subclasses) with the "n" type (ex: + "'{:n}'.format(1234)"), the function temporarily sets the + "LC_CTYPE" locale to the "LC_NUMERIC" locale to decode + "decimal_point" and "thousands_sep" fields of "localeconv()" if + they are non-ASCII or longer than 1 byte, and the "LC_NUMERIC" + locale is different than the "LC_CTYPE" locale. This temporary + change affects other threads. + + Changed in version 3.7: When formatting a number with the "n" type, + the function sets temporarily the "LC_CTYPE" locale to the + "LC_NUMERIC" locale in some cases. + +str.format_map(mapping, /) + + Similar to "str.format(**mapping)", except that "mapping" is used + directly and not copied to a "dict". This is useful if for example + "mapping" is a dict subclass: + + >>> class Default(dict): + ... def __missing__(self, key): + ... return key + ... + >>> '{name} was born in {country}'.format_map(Default(name='Guido')) + 'Guido was born in country' + + Added in version 3.2. + +str.index(sub[, start[, end]]) + + Like "find()", but raise "ValueError" when the substring is not + found. + +str.isalnum() + + Return "True" if all characters in the string are alphanumeric and + there is at least one character, "False" otherwise. A character + "c" is alphanumeric if one of the following returns "True": + "c.isalpha()", "c.isdecimal()", "c.isdigit()", or "c.isnumeric()". + +str.isalpha() + + Return "True" if all characters in the string are alphabetic and + there is at least one character, "False" otherwise. Alphabetic + characters are those characters defined in the Unicode character + database as “Letter”, i.e., those with general category property + being one of “Lm”, “Lt”, “Lu”, “Ll”, or “Lo”. Note that this is + different from the Alphabetic property defined in the section 4.10 + ‘Letters, Alphabetic, and Ideographic’ of the Unicode Standard. + +str.isascii() + + Return "True" if the string is empty or all characters in the + string are ASCII, "False" otherwise. ASCII characters have code + points in the range U+0000-U+007F. + + Added in version 3.7. + +str.isdecimal() + + Return "True" if all characters in the string are decimal + characters and there is at least one character, "False" otherwise. 
+ Decimal characters are those that can be used to form numbers in + base 10, e.g. U+0660, ARABIC-INDIC DIGIT ZERO. Formally a decimal + character is a character in the Unicode General Category “Nd”. + +str.isdigit() + + Return "True" if all characters in the string are digits and there + is at least one character, "False" otherwise. Digits include + decimal characters and digits that need special handling, such as + the compatibility superscript digits. This covers digits which + cannot be used to form numbers in base 10, like the Kharosthi + numbers. Formally, a digit is a character that has the property + value Numeric_Type=Digit or Numeric_Type=Decimal. + +str.isidentifier() + + Return "True" if the string is a valid identifier according to the + language definition, section Identifiers and keywords. + + "keyword.iskeyword()" can be used to test whether string "s" is a + reserved identifier, such as "def" and "class". + + Example: + + >>> from keyword import iskeyword + + >>> 'hello'.isidentifier(), iskeyword('hello') + (True, False) + >>> 'def'.isidentifier(), iskeyword('def') + (True, True) + +str.islower() + + Return "True" if all cased characters [4] in the string are + lowercase and there is at least one cased character, "False" + otherwise. + +str.isnumeric() + + Return "True" if all characters in the string are numeric + characters, and there is at least one character, "False" otherwise. + Numeric characters include digit characters, and all characters + that have the Unicode numeric value property, e.g. U+2155, VULGAR + FRACTION ONE FIFTH. Formally, numeric characters are those with + the property value Numeric_Type=Digit, Numeric_Type=Decimal or + Numeric_Type=Numeric. + +str.isprintable() + + Return "True" if all characters in the string are printable or the + string is empty, "False" otherwise. Nonprintable characters are + those characters defined in the Unicode character database as + “Other” or “Separator”, excepting the ASCII space (0x20) which is + considered printable. (Note that printable characters in this + context are those which should not be escaped when "repr()" is + invoked on a string. It has no bearing on the handling of strings + written to "sys.stdout" or "sys.stderr".) + +str.isspace() + + Return "True" if there are only whitespace characters in the string + and there is at least one character, "False" otherwise. + + A character is *whitespace* if in the Unicode character database + (see "unicodedata"), either its general category is "Zs" + (“Separator, space”), or its bidirectional class is one of "WS", + "B", or "S". + +str.istitle() + + Return "True" if the string is a titlecased string and there is at + least one character, for example uppercase characters may only + follow uncased characters and lowercase characters only cased ones. + Return "False" otherwise. + +str.isupper() + + Return "True" if all cased characters [4] in the string are + uppercase and there is at least one cased character, "False" + otherwise. + + >>> 'BANANA'.isupper() + True + >>> 'banana'.isupper() + False + >>> 'baNana'.isupper() + False + >>> ' '.isupper() + False + +str.join(iterable) + + Return a string which is the concatenation of the strings in + *iterable*. A "TypeError" will be raised if there are any non- + string values in *iterable*, including "bytes" objects. The + separator between elements is the string providing this method. + +str.ljust(width[, fillchar]) + + Return the string left justified in a string of length *width*. 
+ Padding is done using the specified *fillchar* (default is an ASCII + space). The original string is returned if *width* is less than or + equal to "len(s)". + +str.lower() + + Return a copy of the string with all the cased characters [4] + converted to lowercase. + + The lowercasing algorithm used is described in section 3.13 + ‘Default Case Folding’ of the Unicode Standard. + +str.lstrip([chars]) + + Return a copy of the string with leading characters removed. The + *chars* argument is a string specifying the set of characters to be + removed. If omitted or "None", the *chars* argument defaults to + removing whitespace. The *chars* argument is not a prefix; rather, + all combinations of its values are stripped: + + >>> ' spacious '.lstrip() + 'spacious ' + >>> 'www.example.com'.lstrip('cmowz.') + 'example.com' + + See "str.removeprefix()" for a method that will remove a single + prefix string rather than all of a set of characters. For example: + + >>> 'Arthur: three!'.lstrip('Arthur: ') + 'ee!' + >>> 'Arthur: three!'.removeprefix('Arthur: ') + 'three!' + +static str.maketrans(x[, y[, z]]) + + This static method returns a translation table usable for + "str.translate()". + + If there is only one argument, it must be a dictionary mapping + Unicode ordinals (integers) or characters (strings of length 1) to + Unicode ordinals, strings (of arbitrary lengths) or "None". + Character keys will then be converted to ordinals. + + If there are two arguments, they must be strings of equal length, + and in the resulting dictionary, each character in x will be mapped + to the character at the same position in y. If there is a third + argument, it must be a string, whose characters will be mapped to + "None" in the result. + +str.partition(sep) + + Split the string at the first occurrence of *sep*, and return a + 3-tuple containing the part before the separator, the separator + itself, and the part after the separator. If the separator is not + found, return a 3-tuple containing the string itself, followed by + two empty strings. + +str.removeprefix(prefix, /) + + If the string starts with the *prefix* string, return + "string[len(prefix):]". Otherwise, return a copy of the original + string: + + >>> 'TestHook'.removeprefix('Test') + 'Hook' + >>> 'BaseTestCase'.removeprefix('Test') + 'BaseTestCase' + + Added in version 3.9. + +str.removesuffix(suffix, /) + + If the string ends with the *suffix* string and that *suffix* is + not empty, return "string[:-len(suffix)]". Otherwise, return a copy + of the original string: + + >>> 'MiscTests'.removesuffix('Tests') + 'Misc' + >>> 'TmpDirMixin'.removesuffix('Tests') + 'TmpDirMixin' + + Added in version 3.9. + +str.replace(old, new, count=-1) + + Return a copy of the string with all occurrences of substring *old* + replaced by *new*. If *count* is given, only the first *count* + occurrences are replaced. If *count* is not specified or "-1", then + all occurrences are replaced. + + Changed in version 3.13: *count* is now supported as a keyword + argument. + +str.rfind(sub[, start[, end]]) + + Return the highest index in the string where substring *sub* is + found, such that *sub* is contained within "s[start:end]". + Optional arguments *start* and *end* are interpreted as in slice + notation. Return "-1" on failure. + +str.rindex(sub[, start[, end]]) + + Like "rfind()" but raises "ValueError" when the substring *sub* is + not found. + +str.rjust(width[, fillchar]) + + Return the string right justified in a string of length *width*. 
+ Padding is done using the specified *fillchar* (default is an ASCII + space). The original string is returned if *width* is less than or + equal to "len(s)". + +str.rpartition(sep) + + Split the string at the last occurrence of *sep*, and return a + 3-tuple containing the part before the separator, the separator + itself, and the part after the separator. If the separator is not + found, return a 3-tuple containing two empty strings, followed by + the string itself. + +str.rsplit(sep=None, maxsplit=-1) + + Return a list of the words in the string, using *sep* as the + delimiter string. If *maxsplit* is given, at most *maxsplit* splits + are done, the *rightmost* ones. If *sep* is not specified or + "None", any whitespace string is a separator. Except for splitting + from the right, "rsplit()" behaves like "split()" which is + described in detail below. + +str.rstrip([chars]) + + Return a copy of the string with trailing characters removed. The + *chars* argument is a string specifying the set of characters to be + removed. If omitted or "None", the *chars* argument defaults to + removing whitespace. The *chars* argument is not a suffix; rather, + all combinations of its values are stripped: + + >>> ' spacious '.rstrip() + ' spacious' + >>> 'mississippi'.rstrip('ipz') + 'mississ' + + See "str.removesuffix()" for a method that will remove a single + suffix string rather than all of a set of characters. For example: + + >>> 'Monty Python'.rstrip(' Python') + 'M' + >>> 'Monty Python'.removesuffix(' Python') + 'Monty' + +str.split(sep=None, maxsplit=-1) + + Return a list of the words in the string, using *sep* as the + delimiter string. If *maxsplit* is given, at most *maxsplit* + splits are done (thus, the list will have at most "maxsplit+1" + elements). If *maxsplit* is not specified or "-1", then there is + no limit on the number of splits (all possible splits are made). + + If *sep* is given, consecutive delimiters are not grouped together + and are deemed to delimit empty strings (for example, + "'1,,2'.split(',')" returns "['1', '', '2']"). The *sep* argument + may consist of multiple characters as a single delimiter (to split + with multiple delimiters, use "re.split()"). Splitting an empty + string with a specified separator returns "['']". + + For example: + + >>> '1,2,3'.split(',') + ['1', '2', '3'] + >>> '1,2,3'.split(',', maxsplit=1) + ['1', '2,3'] + >>> '1,2,,3,'.split(',') + ['1', '2', '', '3', ''] + >>> '1<>2<>3<4'.split('<>') + ['1', '2', '3<4'] + + If *sep* is not specified or is "None", a different splitting + algorithm is applied: runs of consecutive whitespace are regarded + as a single separator, and the result will contain no empty strings + at the start or end if the string has leading or trailing + whitespace. Consequently, splitting an empty string or a string + consisting of just whitespace with a "None" separator returns "[]". + + For example: + + >>> '1 2 3'.split() + ['1', '2', '3'] + >>> '1 2 3'.split(maxsplit=1) + ['1', '2 3'] + >>> ' 1 2 3 '.split() + ['1', '2', '3'] + +str.splitlines(keepends=False) + + Return a list of the lines in the string, breaking at line + boundaries. Line breaks are not included in the resulting list + unless *keepends* is given and true. + + This method splits on the following line boundaries. In + particular, the boundaries are a superset of *universal newlines*. 
+ + +-------------------------+-------------------------------+ + | Representation | Description | + |=========================|===============================| + | "\n" | Line Feed | + +-------------------------+-------------------------------+ + | "\r" | Carriage Return | + +-------------------------+-------------------------------+ + | "\r\n" | Carriage Return + Line Feed | + +-------------------------+-------------------------------+ + | "\v" or "\x0b" | Line Tabulation | + +-------------------------+-------------------------------+ + | "\f" or "\x0c" | Form Feed | + +-------------------------+-------------------------------+ + | "\x1c" | File Separator | + +-------------------------+-------------------------------+ + | "\x1d" | Group Separator | + +-------------------------+-------------------------------+ + | "\x1e" | Record Separator | + +-------------------------+-------------------------------+ + | "\x85" | Next Line (C1 Control Code) | + +-------------------------+-------------------------------+ + | "\u2028" | Line Separator | + +-------------------------+-------------------------------+ + | "\u2029" | Paragraph Separator | + +-------------------------+-------------------------------+ + + Changed in version 3.2: "\v" and "\f" added to list of line + boundaries. + + For example: + + >>> 'ab c\n\nde fg\rkl\r\n'.splitlines() + ['ab c', '', 'de fg', 'kl'] + >>> 'ab c\n\nde fg\rkl\r\n'.splitlines(keepends=True) + ['ab c\n', '\n', 'de fg\r', 'kl\r\n'] + + Unlike "split()" when a delimiter string *sep* is given, this + method returns an empty list for the empty string, and a terminal + line break does not result in an extra line: + + >>> "".splitlines() + [] + >>> "One line\n".splitlines() + ['One line'] + + For comparison, "split('\n')" gives: + + >>> ''.split('\n') + [''] + >>> 'Two lines\n'.split('\n') + ['Two lines', ''] + +str.startswith(prefix[, start[, end]]) + + Return "True" if string starts with the *prefix*, otherwise return + "False". *prefix* can also be a tuple of prefixes to look for. + With optional *start*, test string beginning at that position. + With optional *end*, stop comparing string at that position. + +str.strip([chars]) + + Return a copy of the string with the leading and trailing + characters removed. The *chars* argument is a string specifying the + set of characters to be removed. If omitted or "None", the *chars* + argument defaults to removing whitespace. The *chars* argument is + not a prefix or suffix; rather, all combinations of its values are + stripped: + + >>> ' spacious '.strip() + 'spacious' + >>> 'www.example.com'.strip('cmowz.') + 'example' + + The outermost leading and trailing *chars* argument values are + stripped from the string. Characters are removed from the leading + end until reaching a string character that is not contained in the + set of characters in *chars*. A similar action takes place on the + trailing end. For example: + + >>> comment_string = '#....... Section 3.2.1 Issue #32 .......' + >>> comment_string.strip('.#! ') + 'Section 3.2.1 Issue #32' + +str.swapcase() + + Return a copy of the string with uppercase characters converted to + lowercase and vice versa. Note that it is not necessarily true that + "s.swapcase().swapcase() == s". + +str.title() + + Return a titlecased version of the string where words start with an + uppercase character and the remaining characters are lowercase. 
+ + For example: + + >>> 'Hello world'.title() + 'Hello World' + + The algorithm uses a simple language-independent definition of a + word as groups of consecutive letters. The definition works in + many contexts but it means that apostrophes in contractions and + possessives form word boundaries, which may not be the desired + result: + + >>> "they're bill's friends from the UK".title() + "They'Re Bill'S Friends From The Uk" + + The "string.capwords()" function does not have this problem, as it + splits words on spaces only. + + Alternatively, a workaround for apostrophes can be constructed + using regular expressions: + + >>> import re + >>> def titlecase(s): + ... return re.sub(r"[A-Za-z]+('[A-Za-z]+)?", + ... lambda mo: mo.group(0).capitalize(), + ... s) + ... + >>> titlecase("they're bill's friends.") + "They're Bill's Friends." + +str.translate(table) + + Return a copy of the string in which each character has been mapped + through the given translation table. The table must be an object + that implements indexing via "__getitem__()", typically a *mapping* + or *sequence*. When indexed by a Unicode ordinal (an integer), the + table object can do any of the following: return a Unicode ordinal + or a string, to map the character to one or more other characters; + return "None", to delete the character from the return string; or + raise a "LookupError" exception, to map the character to itself. + + You can use "str.maketrans()" to create a translation map from + character-to-character mappings in different formats. + + See also the "codecs" module for a more flexible approach to custom + character mappings. + +str.upper() + + Return a copy of the string with all the cased characters [4] + converted to uppercase. Note that "s.upper().isupper()" might be + "False" if "s" contains uncased characters or if the Unicode + category of the resulting character(s) is not “Lu” (Letter, + uppercase), but e.g. “Lt” (Letter, titlecase). + + The uppercasing algorithm used is described in section 3.13 + ‘Default Case Folding’ of the Unicode Standard. + +str.zfill(width) + + Return a copy of the string left filled with ASCII "'0'" digits to + make a string of length *width*. A leading sign prefix + ("'+'"/"'-'") is handled by inserting the padding *after* the sign + character rather than before. The original string is returned if + *width* is less than or equal to "len(s)". 
+
+   For example:
+
+      >>> "42".zfill(5)
+      '00042'
+      >>> "-42".zfill(5)
+      '-0042'
+''',
+ 'strings': '''String and Bytes literals
+*************************
+
+String literals are described by the following lexical definitions:
+
+   stringliteral   ::= [stringprefix](shortstring | longstring)
+   stringprefix    ::= "r" | "u" | "R" | "U" | "f" | "F"
+                    | "fr" | "Fr" | "fR" | "FR" | "rf" | "rF" | "Rf" | "RF"
+   shortstring     ::= "'" shortstringitem* "'" | '"' shortstringitem* '"'
+   longstring      ::= "\'\'\'" longstringitem* "\'\'\'" | '"""' longstringitem* '"""'
+   shortstringitem ::= shortstringchar | stringescapeseq
+   longstringitem  ::= longstringchar | stringescapeseq
+   shortstringchar ::= <any source character except "\\" or newline or the quote>
+   longstringchar  ::= <any source character except "\\">
+   stringescapeseq ::= "\\" <any source character>
+
+   bytesliteral   ::= bytesprefix(shortbytes | longbytes)
+   bytesprefix    ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"
+   shortbytes     ::= "'" shortbytesitem* "'" | '"' shortbytesitem* '"'
+   longbytes      ::= "\'\'\'" longbytesitem* "\'\'\'" | '"""' longbytesitem* '"""'
+   shortbytesitem ::= shortbyteschar | bytesescapeseq
+   longbytesitem  ::= longbyteschar | bytesescapeseq
+   shortbyteschar ::= <any ASCII character except "\\" or newline or the quote>
+   longbyteschar  ::= <any ASCII character except "\\">
+   bytesescapeseq ::= "\\" <any ASCII character>
+
+One syntactic restriction not indicated by these productions is that
+whitespace is not allowed between the "stringprefix" or "bytesprefix"
+and the rest of the literal. The source character set is defined by
+the encoding declaration; it is UTF-8 if no encoding declaration is
+given in the source file; see section Encoding declarations.
+
+In plain English: Both types of literals can be enclosed in matching
+single quotes ("'") or double quotes ("""). They can also be enclosed
+in matching groups of three single or double quotes (these are
+generally referred to as *triple-quoted strings*). The backslash ("\\")
+character is used to give special meaning to otherwise ordinary
+characters like "n", which means ‘newline’ when escaped ("\\n"). It can
+also be used to escape characters that otherwise have a special
+meaning, such as newline, backslash itself, or the quote character.
+See escape sequences below for examples.
+
+Bytes literals are always prefixed with "'b'" or "'B'"; they produce
+an instance of the "bytes" type instead of the "str" type. They may
+only contain ASCII characters; bytes with a numeric value of 128 or
+greater must be expressed with escapes.
+
+Both string and bytes literals may optionally be prefixed with a
+letter "'r'" or "'R'"; such constructs are called *raw string
+literals* and *raw bytes literals* respectively and treat backslashes
+as literal characters. As a result, in raw string literals, "'\\U'"
+and "'\\u'" escapes are not treated specially.
+
+Added in version 3.3: The "'rb'" prefix of raw bytes literals has been
+added as a synonym of "'br'".
+
+Added in version 3.3: Support for the unicode legacy literal
+("u'value'") was reintroduced to simplify the maintenance of dual
+Python 2.x and 3.x codebases. See **PEP 414** for more information.
+
+A string literal with "'f'" or "'F'" in its prefix is a *formatted
+string literal*; see f-strings. The "'f'" may be combined with "'r'",
+but not with "'b'" or "'u'", therefore raw formatted strings are
+possible, but formatted bytes literals are not.
+
+In triple-quoted literals, unescaped newlines and quotes are allowed
+(and are retained), except that three unescaped quotes in a row
+terminate the literal. (A “quote” is the character used to open the
+literal, i.e. either "'" or """.)
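+
+For example, prefix characters may be written in either case and
+combined with either quoting style (an informal illustration):
+
+   >>> 'spam' == "spam" == u'spam' == U"spam"
+   True
+   >>> b'spam' == B"spam" == rb'spam'
+   True
+   >>> f'sp{"am"}'
+   'spam'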
+ + +Escape sequences +================ + +Unless an "'r'" or "'R'" prefix is present, escape sequences in string +and bytes literals are interpreted according to rules similar to those +used by Standard C. The recognized escape sequences are: + ++---------------------------+-----------------------------------+---------+ +| Escape Sequence | Meaning | Notes | +|===========================|===================================|=========| +| "\\" | Backslash and newline ignored | (1) | ++---------------------------+-----------------------------------+---------+ +| "\\\\" | Backslash ("\\") | | ++---------------------------+-----------------------------------+---------+ +| "\\'" | Single quote ("'") | | ++---------------------------+-----------------------------------+---------+ +| "\\"" | Double quote (""") | | ++---------------------------+-----------------------------------+---------+ +| "\\a" | ASCII Bell (BEL) | | ++---------------------------+-----------------------------------+---------+ +| "\\b" | ASCII Backspace (BS) | | ++---------------------------+-----------------------------------+---------+ +| "\\f" | ASCII Formfeed (FF) | | ++---------------------------+-----------------------------------+---------+ +| "\\n" | ASCII Linefeed (LF) | | ++---------------------------+-----------------------------------+---------+ +| "\\r" | ASCII Carriage Return (CR) | | ++---------------------------+-----------------------------------+---------+ +| "\\t" | ASCII Horizontal Tab (TAB) | | ++---------------------------+-----------------------------------+---------+ +| "\\v" | ASCII Vertical Tab (VT) | | ++---------------------------+-----------------------------------+---------+ +| "\\*ooo*" | Character with octal value *ooo* | (2,4) | ++---------------------------+-----------------------------------+---------+ +| "\\x*hh*" | Character with hex value *hh* | (3,4) | ++---------------------------+-----------------------------------+---------+ + +Escape sequences only recognized in string literals are: + ++---------------------------+-----------------------------------+---------+ +| Escape Sequence | Meaning | Notes | +|===========================|===================================|=========| +| "\\N{*name*}" | Character named *name* in the | (5) | +| | Unicode database | | ++---------------------------+-----------------------------------+---------+ +| "\\u*xxxx*" | Character with 16-bit hex value | (6) | +| | *xxxx* | | ++---------------------------+-----------------------------------+---------+ +| "\\U*xxxxxxxx*" | Character with 32-bit hex value | (7) | +| | *xxxxxxxx* | | ++---------------------------+-----------------------------------+---------+ + +Notes: + +1. A backslash can be added at the end of a line to ignore the + newline: + + >>> 'This string will not include \\ + ... backslashes or newline characters.' + 'This string will not include backslashes or newline characters.' + + The same result can be achieved using triple-quoted strings, or + parentheses and string literal concatenation. + +2. As in Standard C, up to three octal digits are accepted. + + Changed in version 3.11: Octal escapes with value larger than + "0o377" produce a "DeprecationWarning". + + Changed in version 3.12: Octal escapes with value larger than + "0o377" produce a "SyntaxWarning". In a future Python version they + will be eventually a "SyntaxError". + +3. Unlike in Standard C, exactly two hex digits are required. + +4. In a bytes literal, hexadecimal and octal escapes denote the byte + with the given value. 
In a string literal, these escapes denote a + Unicode character with the given value. + +5. Changed in version 3.3: Support for name aliases [1] has been + added. + +6. Exactly four hex digits are required. + +7. Any Unicode character can be encoded this way. Exactly eight hex + digits are required. + +Unlike Standard C, all unrecognized escape sequences are left in the +string unchanged, i.e., *the backslash is left in the result*. (This +behavior is useful when debugging: if an escape sequence is mistyped, +the resulting output is more easily recognized as broken.) It is also +important to note that the escape sequences only recognized in string +literals fall into the category of unrecognized escapes for bytes +literals. + +Changed in version 3.6: Unrecognized escape sequences produce a +"DeprecationWarning". + +Changed in version 3.12: Unrecognized escape sequences produce a +"SyntaxWarning". In a future Python version they will be eventually a +"SyntaxError". + +Even in a raw literal, quotes can be escaped with a backslash, but the +backslash remains in the result; for example, "r"\\""" is a valid +string literal consisting of two characters: a backslash and a double +quote; "r"\\"" is not a valid string literal (even a raw string cannot +end in an odd number of backslashes). Specifically, *a raw literal +cannot end in a single backslash* (since the backslash would escape +the following quote character). Note also that a single backslash +followed by a newline is interpreted as those two characters as part +of the literal, *not* as a line continuation. +''', + 'subscriptions': r'''Subscriptions +************* + +The subscription of an instance of a container class will generally +select an element from the container. The subscription of a *generic +class* will generally return a GenericAlias object. + + subscription ::= primary "[" flexible_expression_list "]" + +When an object is subscripted, the interpreter will evaluate the +primary and the expression list. + +The primary must evaluate to an object that supports subscription. An +object may support subscription through defining one or both of +"__getitem__()" and "__class_getitem__()". When the primary is +subscripted, the evaluated result of the expression list will be +passed to one of these methods. For more details on when +"__class_getitem__" is called instead of "__getitem__", see +__class_getitem__ versus __getitem__. + +If the expression list contains at least one comma, or if any of the +expressions are starred, the expression list will evaluate to a +"tuple" containing the items of the expression list. Otherwise, the +expression list will evaluate to the value of the list’s sole member. + +Changed in version 3.11: Expressions in an expression list may be +starred. See **PEP 646**. + +For built-in objects, there are two types of objects that support +subscription via "__getitem__()": + +1. Mappings. If the primary is a *mapping*, the expression list must + evaluate to an object whose value is one of the keys of the + mapping, and the subscription selects the value in the mapping that + corresponds to that key. An example of a builtin mapping class is + the "dict" class. + +2. Sequences. If the primary is a *sequence*, the expression list must + evaluate to an "int" or a "slice" (as discussed in the following + section). Examples of builtin sequence classes include the "str", + "list" and "tuple" classes. + +The formal syntax makes no special provision for negative indices in +*sequences*. 
However, built-in sequences all provide a "__getitem__()" +method that interprets negative indices by adding the length of the +sequence to the index so that, for example, "x[-1]" selects the last +item of "x". The resulting value must be a nonnegative integer less +than the number of items in the sequence, and the subscription selects +the item whose index is that value (counting from zero). Since the +support for negative indices and slicing occurs in the object’s +"__getitem__()" method, subclasses overriding this method will need to +explicitly add that support. + +A "string" is a special kind of sequence whose items are *characters*. +A character is not a separate data type but a string of exactly one +character. +''', + 'truth': r'''Truth Value Testing +******************* + +Any object can be tested for truth value, for use in an "if" or +"while" condition or as operand of the Boolean operations below. + +By default, an object is considered true unless its class defines +either a "__bool__()" method that returns "False" or a "__len__()" +method that returns zero, when called with the object. [1] Here are +most of the built-in objects considered false: + +* constants defined to be false: "None" and "False" + +* zero of any numeric type: "0", "0.0", "0j", "Decimal(0)", + "Fraction(0, 1)" + +* empty sequences and collections: "''", "()", "[]", "{}", "set()", + "range(0)" + +Operations and built-in functions that have a Boolean result always +return "0" or "False" for false and "1" or "True" for true, unless +otherwise stated. (Important exception: the Boolean operations "or" +and "and" always return one of their operands.) +''', + 'try': r'''The "try" statement +******************* + +The "try" statement specifies exception handlers and/or cleanup code +for a group of statements: + + try_stmt ::= try1_stmt | try2_stmt | try3_stmt + try1_stmt ::= "try" ":" suite + ("except" [expression ["as" identifier]] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try2_stmt ::= "try" ":" suite + ("except" "*" expression ["as" identifier] ":" suite)+ + ["else" ":" suite] + ["finally" ":" suite] + try3_stmt ::= "try" ":" suite + "finally" ":" suite + +Additional information on exceptions can be found in section +Exceptions, and information on using the "raise" statement to generate +exceptions may be found in section The raise statement. + + +"except" clause +=============== + +The "except" clause(s) specify one or more exception handlers. When no +exception occurs in the "try" clause, no exception handler is +executed. When an exception occurs in the "try" suite, a search for an +exception handler is started. This search inspects the "except" +clauses in turn until one is found that matches the exception. An +expression-less "except" clause, if present, must be last; it matches +any exception. + +For an "except" clause with an expression, the expression must +evaluate to an exception type or a tuple of exception types. The +raised exception matches an "except" clause whose expression evaluates +to the class or a *non-virtual base class* of the exception object, or +to a tuple that contains such a class. + +If no "except" clause matches the exception, the search for an +exception handler continues in the surrounding code and on the +invocation stack. 
[1] + +If the evaluation of an expression in the header of an "except" clause +raises an exception, the original search for a handler is canceled and +a search starts for the new exception in the surrounding code and on +the call stack (it is treated as if the entire "try" statement raised +the exception). + +When a matching "except" clause is found, the exception is assigned to +the target specified after the "as" keyword in that "except" clause, +if present, and the "except" clause’s suite is executed. All "except" +clauses must have an executable block. When the end of this block is +reached, execution continues normally after the entire "try" +statement. (This means that if two nested handlers exist for the same +exception, and the exception occurs in the "try" clause of the inner +handler, the outer handler will not handle the exception.) + +When an exception has been assigned using "as target", it is cleared +at the end of the "except" clause. This is as if + + except E as N: + foo + +was translated to + + except E as N: + try: + foo + finally: + del N + +This means the exception must be assigned to a different name to be +able to refer to it after the "except" clause. Exceptions are cleared +because with the traceback attached to them, they form a reference +cycle with the stack frame, keeping all locals in that frame alive +until the next garbage collection occurs. + +Before an "except" clause’s suite is executed, the exception is stored +in the "sys" module, where it can be accessed from within the body of +the "except" clause by calling "sys.exception()". When leaving an +exception handler, the exception stored in the "sys" module is reset +to its previous value: + + >>> print(sys.exception()) + None + >>> try: + ... raise TypeError + ... except: + ... print(repr(sys.exception())) + ... try: + ... raise ValueError + ... except: + ... print(repr(sys.exception())) + ... print(repr(sys.exception())) + ... + TypeError() + ValueError() + TypeError() + >>> print(sys.exception()) + None + + +"except*" clause +================ + +The "except*" clause(s) are used for handling "ExceptionGroup"s. The +exception type for matching is interpreted as in the case of "except", +but in the case of exception groups we can have partial matches when +the type matches some of the exceptions in the group. This means that +multiple "except*" clauses can execute, each handling part of the +exception group. Each clause executes at most once and handles an +exception group of all matching exceptions. Each exception in the +group is handled by at most one "except*" clause, the first that +matches it. + + >>> try: + ... raise ExceptionGroup("eg", + ... [ValueError(1), TypeError(2), OSError(3), OSError(4)]) + ... except* TypeError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... except* OSError as e: + ... print(f'caught {type(e)} with nested {e.exceptions}') + ... + caught with nested (TypeError(2),) + caught with nested (OSError(3), OSError(4)) + + Exception Group Traceback (most recent call last): + | File "", line 2, in + | ExceptionGroup: eg + +-+---------------- 1 ---------------- + | ValueError: 1 + +------------------------------------ + +Any remaining exceptions that were not handled by any "except*" clause +are re-raised at the end, along with all exceptions that were raised +from within the "except*" clauses. If this list contains more than one +exception to reraise, they are combined into an exception group. 
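+
+For example, in the following illustrative session the "TypeError" is
+handled by the inner "except*" clause, while the unmatched
+"ValueError" is re-raised as an exception group and caught by the
+enclosing handler:
+
+   >>> try:
+   ...     try:
+   ...         raise ExceptionGroup("eg", [ValueError(1), TypeError(2)])
+   ...     except* TypeError:
+   ...         pass
+   ... except* ValueError as e:
+   ...     print(f're-raised: {e.exceptions!r}')
+   ...
+   re-raised: (ValueError(1),)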
+ +If the raised exception is not an exception group and its type matches +one of the "except*" clauses, it is caught and wrapped by an exception +group with an empty message string. + + >>> try: + ... raise BlockingIOError + ... except* BlockingIOError as e: + ... print(repr(e)) + ... + ExceptionGroup('', (BlockingIOError())) + +An "except*" clause must have a matching expression; it cannot be +"except*:". Furthermore, this expression cannot contain exception +group types, because that would have ambiguous semantics. + +It is not possible to mix "except" and "except*" in the same "try". +"break", "continue" and "return" cannot appear in an "except*" clause. + + +"else" clause +============= + +The optional "else" clause is executed if the control flow leaves the +"try" suite, no exception was raised, and no "return", "continue", or +"break" statement was executed. Exceptions in the "else" clause are +not handled by the preceding "except" clauses. + + +"finally" clause +================ + +If "finally" is present, it specifies a ‘cleanup’ handler. The "try" +clause is executed, including any "except" and "else" clauses. If an +exception occurs in any of the clauses and is not handled, the +exception is temporarily saved. The "finally" clause is executed. If +there is a saved exception it is re-raised at the end of the "finally" +clause. If the "finally" clause raises another exception, the saved +exception is set as the context of the new exception. If the "finally" +clause executes a "return", "break" or "continue" statement, the saved +exception is discarded: + + >>> def f(): + ... try: + ... 1/0 + ... finally: + ... return 42 + ... + >>> f() + 42 + +The exception information is not available to the program during +execution of the "finally" clause. + +When a "return", "break" or "continue" statement is executed in the +"try" suite of a "try"…"finally" statement, the "finally" clause is +also executed ‘on the way out.’ + +The return value of a function is determined by the last "return" +statement executed. Since the "finally" clause always executes, a +"return" statement executed in the "finally" clause will always be the +last one executed: + + >>> def foo(): + ... try: + ... return 'try' + ... finally: + ... return 'finally' + ... + >>> foo() + 'finally' + +Changed in version 3.8: Prior to Python 3.8, a "continue" statement +was illegal in the "finally" clause due to a problem with the +implementation. +''', + 'types': r'''The standard type hierarchy +*************************** + +Below is a list of the types that are built into Python. Extension +modules (written in C, Java, or other languages, depending on the +implementation) can define additional types. Future versions of +Python may add types to the type hierarchy (e.g., rational numbers, +efficiently stored arrays of integers, etc.), although such additions +will often be provided via the standard library instead. + +Some of the type descriptions below contain a paragraph listing +‘special attributes.’ These are attributes that provide access to the +implementation and are not intended for general use. Their definition +may change in the future. + + +None +==== + +This type has a single value. There is a single object with this +value. This object is accessed through the built-in name "None". It is +used to signify the absence of a value in many situations, e.g., it is +returned from functions that don’t explicitly return anything. Its +truth value is false. + + +NotImplemented +============== + +This type has a single value. 
There is a single object with this +value. This object is accessed through the built-in name +"NotImplemented". Numeric methods and rich comparison methods should +return this value if they do not implement the operation for the +operands provided. (The interpreter will then try the reflected +operation, or some other fallback, depending on the operator.) It +should not be evaluated in a boolean context. + +See Implementing the arithmetic operations for more details. + +Changed in version 3.9: Evaluating "NotImplemented" in a boolean +context was deprecated. + +Changed in version 3.14: Evaluating "NotImplemented" in a boolean +context now raises a "TypeError". It previously evaluated to "True" +and emitted a "DeprecationWarning" since Python 3.9. + + +Ellipsis +======== + +This type has a single value. There is a single object with this +value. This object is accessed through the literal "..." or the built- +in name "Ellipsis". Its truth value is true. + + +"numbers.Number" +================ + +These are created by numeric literals and returned as results by +arithmetic operators and arithmetic built-in functions. Numeric +objects are immutable; once created their value never changes. Python +numbers are of course strongly related to mathematical numbers, but +subject to the limitations of numerical representation in computers. + +The string representations of the numeric classes, computed by +"__repr__()" and "__str__()", have the following properties: + +* They are valid numeric literals which, when passed to their class + constructor, produce an object having the value of the original + numeric. + +* The representation is in base 10, when possible. + +* Leading zeros, possibly excepting a single zero before a decimal + point, are not shown. + +* Trailing zeros, possibly excepting a single zero after a decimal + point, are not shown. + +* A sign is shown only when the number is negative. + +Python distinguishes between integers, floating-point numbers, and +complex numbers: + + +"numbers.Integral" +------------------ + +These represent elements from the mathematical set of integers +(positive and negative). + +Note: + + The rules for integer representation are intended to give the most + meaningful interpretation of shift and mask operations involving + negative integers. + +There are two types of integers: + +Integers ("int") + These represent numbers in an unlimited range, subject to available + (virtual) memory only. For the purpose of shift and mask + operations, a binary representation is assumed, and negative + numbers are represented in a variant of 2’s complement which gives + the illusion of an infinite string of sign bits extending to the + left. + +Booleans ("bool") + These represent the truth values False and True. The two objects + representing the values "False" and "True" are the only Boolean + objects. The Boolean type is a subtype of the integer type, and + Boolean values behave like the values 0 and 1, respectively, in + almost all contexts, the exception being that when converted to a + string, the strings ""False"" or ""True"" are returned, + respectively. + + +"numbers.Real" ("float") +------------------------ + +These represent machine-level double precision floating-point numbers. +You are at the mercy of the underlying machine architecture (and C or +Java implementation) for the accepted range and handling of overflow. 
+Python does not support single-precision floating-point numbers; the +savings in processor and memory usage that are usually the reason for +using these are dwarfed by the overhead of using objects in Python, so +there is no reason to complicate the language with two kinds of +floating-point numbers. + + +"numbers.Complex" ("complex") +----------------------------- + +These represent complex numbers as a pair of machine-level double +precision floating-point numbers. The same caveats apply as for +floating-point numbers. The real and imaginary parts of a complex +number "z" can be retrieved through the read-only attributes "z.real" +and "z.imag". + + +Sequences +========= + +These represent finite ordered sets indexed by non-negative numbers. +The built-in function "len()" returns the number of items of a +sequence. When the length of a sequence is *n*, the index set contains +the numbers 0, 1, …, *n*-1. Item *i* of sequence *a* is selected by +"a[i]". Some sequences, including built-in sequences, interpret +negative subscripts by adding the sequence length. For example, +"a[-2]" equals "a[n-2]", the second to last item of sequence a with +length "n". + +Sequences also support slicing: "a[i:j]" selects all items with index +*k* such that *i* "<=" *k* "<" *j*. When used as an expression, a +slice is a sequence of the same type. The comment above about negative +indexes also applies to negative slice positions. + +Some sequences also support “extended slicing” with a third “step” +parameter: "a[i:j:k]" selects all items of *a* with index *x* where "x += i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*. + +Sequences are distinguished according to their mutability: + + +Immutable sequences +------------------- + +An object of an immutable sequence type cannot change once it is +created. (If the object contains references to other objects, these +other objects may be mutable and may be changed; however, the +collection of objects directly referenced by an immutable object +cannot change.) + +The following types are immutable sequences: + +Strings + A string is a sequence of values that represent Unicode code + points. All the code points in the range "U+0000 - U+10FFFF" can be + represented in a string. Python doesn’t have a char type; instead, + every code point in the string is represented as a string object + with length "1". The built-in function "ord()" converts a code + point from its string form to an integer in the range "0 - 10FFFF"; + "chr()" converts an integer in the range "0 - 10FFFF" to the + corresponding length "1" string object. "str.encode()" can be used + to convert a "str" to "bytes" using the given text encoding, and + "bytes.decode()" can be used to achieve the opposite. + +Tuples + The items of a tuple are arbitrary Python objects. Tuples of two or + more items are formed by comma-separated lists of expressions. A + tuple of one item (a ‘singleton’) can be formed by affixing a comma + to an expression (an expression by itself does not create a tuple, + since parentheses must be usable for grouping of expressions). An + empty tuple can be formed by an empty pair of parentheses. + +Bytes + A bytes object is an immutable array. The items are 8-bit bytes, + represented by integers in the range 0 <= x < 256. Bytes literals + (like "b'abc'") and the built-in "bytes()" constructor can be used + to create bytes objects. Also, bytes objects can be decoded to + strings via the "decode()" method. 
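+
+For example (an illustrative session; the particular values are
+arbitrary):
+
+   >>> ord('a'), chr(8364)           # code point <-> length-1 string
+   (97, '€')
+   >>> 'café'.encode('utf-8').decode('utf-8')
+   'café'
+   >>> (42,)                         # a one-item tuple needs the comma
+   (42,)
+   >>> b'abc'[0]                     # bytes items are small integers
+   97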
+ + +Mutable sequences +----------------- + +Mutable sequences can be changed after they are created. The +subscription and slicing notations can be used as the target of +assignment and "del" (delete) statements. + +Note: + + The "collections" and "array" module provide additional examples of + mutable sequence types. + +There are currently two intrinsic mutable sequence types: + +Lists + The items of a list are arbitrary Python objects. Lists are formed + by placing a comma-separated list of expressions in square + brackets. (Note that there are no special cases needed to form + lists of length 0 or 1.) + +Byte Arrays + A bytearray object is a mutable array. They are created by the + built-in "bytearray()" constructor. Aside from being mutable (and + hence unhashable), byte arrays otherwise provide the same interface + and functionality as immutable "bytes" objects. + + +Set types +========= + +These represent unordered, finite sets of unique, immutable objects. +As such, they cannot be indexed by any subscript. However, they can be +iterated over, and the built-in function "len()" returns the number of +items in a set. Common uses for sets are fast membership testing, +removing duplicates from a sequence, and computing mathematical +operations such as intersection, union, difference, and symmetric +difference. + +For set elements, the same immutability rules apply as for dictionary +keys. Note that numeric types obey the normal rules for numeric +comparison: if two numbers compare equal (e.g., "1" and "1.0"), only +one of them can be contained in a set. + +There are currently two intrinsic set types: + +Sets + These represent a mutable set. They are created by the built-in + "set()" constructor and can be modified afterwards by several + methods, such as "add()". + +Frozen sets + These represent an immutable set. They are created by the built-in + "frozenset()" constructor. As a frozenset is immutable and + *hashable*, it can be used again as an element of another set, or + as a dictionary key. + + +Mappings +======== + +These represent finite sets of objects indexed by arbitrary index +sets. The subscript notation "a[k]" selects the item indexed by "k" +from the mapping "a"; this can be used in expressions and as the +target of assignments or "del" statements. The built-in function +"len()" returns the number of items in a mapping. + +There is currently a single intrinsic mapping type: + + +Dictionaries +------------ + +These represent finite sets of objects indexed by nearly arbitrary +values. The only types of values not acceptable as keys are values +containing lists or dictionaries or other mutable types that are +compared by value rather than by object identity, the reason being +that the efficient implementation of dictionaries requires a key’s +hash value to remain constant. Numeric types used for keys obey the +normal rules for numeric comparison: if two numbers compare equal +(e.g., "1" and "1.0") then they can be used interchangeably to index +the same dictionary entry. + +Dictionaries preserve insertion order, meaning that keys will be +produced in the same order they were added sequentially over the +dictionary. Replacing an existing key does not change the order, +however removing a key and re-inserting it will add it to the end +instead of keeping its old place. + +Dictionaries are mutable; they can be created by the "{}" notation +(see section Dictionary displays). 
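+
+For example, insertion order and the equal-numeric-keys rule can be
+observed directly (an illustrative session):
+
+   >>> d = {'b': 1, 'a': 2}
+   >>> d['c'] = 3
+   >>> list(d)                       # keys come back in insertion order
+   ['b', 'a', 'c']
+   >>> d[1] = 'one'
+   >>> d[1.0]                        # 1 and 1.0 select the same entry
+   'one'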
+ +The extension modules "dbm.ndbm" and "dbm.gnu" provide additional +examples of mapping types, as does the "collections" module. + +Changed in version 3.7: Dictionaries did not preserve insertion order +in versions of Python before 3.6. In CPython 3.6, insertion order was +preserved, but it was considered an implementation detail at that time +rather than a language guarantee. + + +Callable types +============== + +These are the types to which the function call operation (see section +Calls) can be applied: + + +User-defined functions +---------------------- + +A user-defined function object is created by a function definition +(see section Function definitions). It should be called with an +argument list containing the same number of items as the function’s +formal parameter list. + + +Special read-only attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| Attribute | Meaning | +|====================================================|====================================================| +| function.__globals__ | A reference to the "dictionary" that holds the | +| | function’s global variables – the global namespace | +| | of the module in which the function was defined. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__closure__ | "None" or a "tuple" of cells that contain bindings | +| | for the names specified in the "co_freevars" | +| | attribute of the function’s "code object". A cell | +| | object has the attribute "cell_contents". This can | +| | be used to get the value of the cell, as well as | +| | set the value. | ++----------------------------------------------------+----------------------------------------------------+ + + +Special writable attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Most of these attributes check the type of the assigned value: + ++----------------------------------------------------+----------------------------------------------------+ +| Attribute | Meaning | +|====================================================|====================================================| +| function.__doc__ | The function’s documentation string, or "None" if | +| | unavailable. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__name__ | The function’s name. See also: "__name__ | +| | attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| function.__qualname__ | The function’s *qualified name*. See also: | +| | "__qualname__ attributes". Added in version 3.3. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__module__ | The name of the module the function was defined | +| | in, or "None" if unavailable. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__defaults__ | A "tuple" containing default *parameter* values | +| | for those parameters that have defaults, or "None" | +| | if no parameters have a default value. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__code__ | The code object representing the compiled function | +| | body. 
| ++----------------------------------------------------+----------------------------------------------------+ +| function.__dict__ | The namespace supporting arbitrary function | +| | attributes. See also: "__dict__ attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| function.__annotations__ | A "dictionary" containing annotations of | +| | *parameters*. The keys of the dictionary are the | +| | parameter names, and "'return'" for the return | +| | annotation, if provided. See also: | +| | "object.__annotations__". Changed in version | +| | 3.14: Annotations are now lazily evaluated. See | +| | **PEP 649**. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__annotate__ | The *annotate function* for this function, or | +| | "None" if the function has no annotations. See | +| | "object.__annotate__". Added in version 3.14. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__kwdefaults__ | A "dictionary" containing defaults for keyword- | +| | only *parameters*. | ++----------------------------------------------------+----------------------------------------------------+ +| function.__type_params__ | A "tuple" containing the type parameters of a | +| | generic function. Added in version 3.12. | ++----------------------------------------------------+----------------------------------------------------+ + +Function objects also support getting and setting arbitrary +attributes, which can be used, for example, to attach metadata to +functions. Regular attribute dot-notation is used to get and set such +attributes. + +**CPython implementation detail:** CPython’s current implementation +only supports function attributes on user-defined functions. Function +attributes on built-in functions may be supported in the future. + +Additional information about a function’s definition can be retrieved +from its code object (accessible via the "__code__" attribute). + + +Instance methods +---------------- + +An instance method object combines a class, a class instance and any +callable object (normally a user-defined function). + +Special read-only attributes: + ++----------------------------------------------------+----------------------------------------------------+ +| method.__self__ | Refers to the class instance object to which the | +| | method is bound | ++----------------------------------------------------+----------------------------------------------------+ +| method.__func__ | Refers to the original function object | ++----------------------------------------------------+----------------------------------------------------+ +| method.__doc__ | The method’s documentation (same as | +| | "method.__func__.__doc__"). A "string" if the | +| | original function had a docstring, else "None". | ++----------------------------------------------------+----------------------------------------------------+ +| method.__name__ | The name of the method (same as | +| | "method.__func__.__name__") | ++----------------------------------------------------+----------------------------------------------------+ +| method.__module__ | The name of the module the method was defined in, | +| | or "None" if unavailable. 
| ++----------------------------------------------------+----------------------------------------------------+ + +Methods also support accessing (but not setting) the arbitrary +function attributes on the underlying function object. + +User-defined method objects may be created when getting an attribute +of a class (perhaps via an instance of that class), if that attribute +is a user-defined function object or a "classmethod" object. + +When an instance method object is created by retrieving a user-defined +function object from a class via one of its instances, its "__self__" +attribute is the instance, and the method object is said to be +*bound*. The new method’s "__func__" attribute is the original +function object. + +When an instance method object is created by retrieving a +"classmethod" object from a class or instance, its "__self__" +attribute is the class itself, and its "__func__" attribute is the +function object underlying the class method. + +When an instance method object is called, the underlying function +("__func__") is called, inserting the class instance ("__self__") in +front of the argument list. For instance, when "C" is a class which +contains a definition for a function "f()", and "x" is an instance of +"C", calling "x.f(1)" is equivalent to calling "C.f(x, 1)". + +When an instance method object is derived from a "classmethod" object, +the “class instance” stored in "__self__" will actually be the class +itself, so that calling either "x.f(1)" or "C.f(1)" is equivalent to +calling "f(C,1)" where "f" is the underlying function. + +It is important to note that user-defined functions which are +attributes of a class instance are not converted to bound methods; +this *only* happens when the function is an attribute of the class. + + +Generator functions +------------------- + +A function or method which uses the "yield" statement (see section The +yield statement) is called a *generator function*. Such a function, +when called, always returns an *iterator* object which can be used to +execute the body of the function: calling the iterator’s +"iterator.__next__()" method will cause the function to execute until +it provides a value using the "yield" statement. When the function +executes a "return" statement or falls off the end, a "StopIteration" +exception is raised and the iterator will have reached the end of the +set of values to be returned. + + +Coroutine functions +------------------- + +A function or method which is defined using "async def" is called a +*coroutine function*. Such a function, when called, returns a +*coroutine* object. It may contain "await" expressions, as well as +"async with" and "async for" statements. See also the Coroutine +Objects section. + + +Asynchronous generator functions +-------------------------------- + +A function or method which is defined using "async def" and which uses +the "yield" statement is called a *asynchronous generator function*. +Such a function, when called, returns an *asynchronous iterator* +object which can be used in an "async for" statement to execute the +body of the function. + +Calling the asynchronous iterator’s "aiterator.__anext__" method will +return an *awaitable* which when awaited will execute until it +provides a value using the "yield" expression. When the function +executes an empty "return" statement or falls off the end, a +"StopAsyncIteration" exception is raised and the asynchronous iterator +will have reached the end of the set of values to be yielded. 
+ + +Built-in functions +------------------ + +A built-in function object is a wrapper around a C function. Examples +of built-in functions are "len()" and "math.sin()" ("math" is a +standard built-in module). The number and type of the arguments are +determined by the C function. Special read-only attributes: + +* "__doc__" is the function’s documentation string, or "None" if + unavailable. See "function.__doc__". + +* "__name__" is the function’s name. See "function.__name__". + +* "__self__" is set to "None" (but see the next item). + +* "__module__" is the name of the module the function was defined in + or "None" if unavailable. See "function.__module__". + + +Built-in methods +---------------- + +This is really a different disguise of a built-in function, this time +containing an object passed to the C function as an implicit extra +argument. An example of a built-in method is "alist.append()", +assuming *alist* is a list object. In this case, the special read-only +attribute "__self__" is set to the object denoted by *alist*. (The +attribute has the same semantics as it does with "other instance +methods".) + + +Classes +------- + +Classes are callable. These objects normally act as factories for new +instances of themselves, but variations are possible for class types +that override "__new__()". The arguments of the call are passed to +"__new__()" and, in the typical case, to "__init__()" to initialize +the new instance. + + +Class Instances +--------------- + +Instances of arbitrary classes can be made callable by defining a +"__call__()" method in their class. + + +Modules +======= + +Modules are a basic organizational unit of Python code, and are +created by the import system as invoked either by the "import" +statement, or by calling functions such as "importlib.import_module()" +and built-in "__import__()". A module object has a namespace +implemented by a "dictionary" object (this is the dictionary +referenced by the "__globals__" attribute of functions defined in the +module). Attribute references are translated to lookups in this +dictionary, e.g., "m.x" is equivalent to "m.__dict__["x"]". A module +object does not contain the code object used to initialize the module +(since it isn’t needed once the initialization is done). + +Attribute assignment updates the module’s namespace dictionary, e.g., +"m.x = 1" is equivalent to "m.__dict__["x"] = 1". + + +Import-related attributes on module objects +------------------------------------------- + +Module objects have the following attributes that relate to the import +system. When a module is created using the machinery associated with +the import system, these attributes are filled in based on the +module’s *spec*, before the *loader* executes and loads the module. + +To create a module dynamically rather than using the import system, +it’s recommended to use "importlib.util.module_from_spec()", which +will set the various import-controlled attributes to appropriate +values. It’s also possible to use the "types.ModuleType" constructor +to create modules directly, but this technique is more error-prone, as +most attributes must be manually set on the module object after it has +been created when using this approach. + +Caution: + + With the exception of "__name__", it is **strongly** recommended + that you rely on "__spec__" and its attributes instead of any of the + other individual attributes listed in this subsection. 
Note that + updating an attribute on "__spec__" will not update the + corresponding attribute on the module itself: + + >>> import typing + >>> typing.__name__, typing.__spec__.name + ('typing', 'typing') + >>> typing.__spec__.name = 'spelling' + >>> typing.__name__, typing.__spec__.name + ('typing', 'spelling') + >>> typing.__name__ = 'keyboard_smashing' + >>> typing.__name__, typing.__spec__.name + ('keyboard_smashing', 'spelling') + +module.__name__ + + The name used to uniquely identify the module in the import system. + For a directly executed module, this will be set to ""__main__"". + + This attribute must be set to the fully qualified name of the + module. It is expected to match the value of + "module.__spec__.name". + +module.__spec__ + + A record of the module’s import-system-related state. + + Set to the "module spec" that was used when importing the module. + See Module specs for more details. + + Added in version 3.4. + +module.__package__ + + The *package* a module belongs to. + + If the module is top-level (that is, not a part of any specific + package) then the attribute should be set to "''" (the empty + string). Otherwise, it should be set to the name of the module’s + package (which can be equal to "module.__name__" if the module + itself is a package). See **PEP 366** for further details. + + This attribute is used instead of "__name__" to calculate explicit + relative imports for main modules. It defaults to "None" for + modules created dynamically using the "types.ModuleType" + constructor; use "importlib.util.module_from_spec()" instead to + ensure the attribute is set to a "str". + + It is **strongly** recommended that you use + "module.__spec__.parent" instead of "module.__package__". + "__package__" is now only used as a fallback if "__spec__.parent" + is not set, and this fallback path is deprecated. + + Changed in version 3.4: This attribute now defaults to "None" for + modules created dynamically using the "types.ModuleType" + constructor. Previously the attribute was optional. + + Changed in version 3.6: The value of "__package__" is expected to + be the same as "__spec__.parent". "__package__" is now only used as + a fallback during import resolution if "__spec__.parent" is not + defined. + + Changed in version 3.10: "ImportWarning" is raised if an import + resolution falls back to "__package__" instead of + "__spec__.parent". + + Changed in version 3.12: Raise "DeprecationWarning" instead of + "ImportWarning" when falling back to "__package__" during import + resolution. + + Deprecated since version 3.13, will be removed in version 3.15: + "__package__" will cease to be set or taken into consideration by + the import system or standard library. + +module.__loader__ + + The *loader* object that the import machinery used to load the + module. + + This attribute is mostly useful for introspection, but can be used + for additional loader-specific functionality, for example getting + data associated with a loader. + + "__loader__" defaults to "None" for modules created dynamically + using the "types.ModuleType" constructor; use + "importlib.util.module_from_spec()" instead to ensure the attribute + is set to a *loader* object. + + It is **strongly** recommended that you use + "module.__spec__.loader" instead of "module.__loader__". + + Changed in version 3.4: This attribute now defaults to "None" for + modules created dynamically using the "types.ModuleType" + constructor. Previously the attribute was optional. 
+ + Deprecated since version 3.12, will be removed in version 3.16: + Setting "__loader__" on a module while failing to set + "__spec__.loader" is deprecated. In Python 3.16, "__loader__" will + cease to be set or taken into consideration by the import system or + the standard library. + +module.__path__ + + A (possibly empty) *sequence* of strings enumerating the locations + where the package’s submodules will be found. Non-package modules + should not have a "__path__" attribute. See __path__ attributes on + modules for more details. + + It is **strongly** recommended that you use + "module.__spec__.submodule_search_locations" instead of + "module.__path__". + +module.__file__ + +module.__cached__ + + "__file__" and "__cached__" are both optional attributes that may + or may not be set. Both attributes should be a "str" when they are + available. + + "__file__" indicates the pathname of the file from which the module + was loaded (if loaded from a file), or the pathname of the shared + library file for extension modules loaded dynamically from a shared + library. It might be missing for certain types of modules, such as + C modules that are statically linked into the interpreter, and the + import system may opt to leave it unset if it has no semantic + meaning (for example, a module loaded from a database). + + If "__file__" is set then the "__cached__" attribute might also be + set, which is the path to any compiled version of the code (for + example, a byte-compiled file). The file does not need to exist to + set this attribute; the path can simply point to where the compiled + file *would* exist (see **PEP 3147**). + + Note that "__cached__" may be set even if "__file__" is not set. + However, that scenario is quite atypical. Ultimately, the *loader* + is what makes use of the module spec provided by the *finder* (from + which "__file__" and "__cached__" are derived). So if a loader can + load from a cached module but otherwise does not load from a file, + that atypical scenario may be appropriate. + + It is **strongly** recommended that you use + "module.__spec__.cached" instead of "module.__cached__". + + Deprecated since version 3.13, will be removed in version 3.15: + Setting "__cached__" on a module while failing to set + "__spec__.cached" is deprecated. In Python 3.15, "__cached__" will + cease to be set or taken into consideration by the import system or + standard library. + + +Other writable attributes on module objects +------------------------------------------- + +As well as the import-related attributes listed above, module objects +also have the following writable attributes: + +module.__doc__ + + The module’s documentation string, or "None" if unavailable. See + also: "__doc__ attributes". + +module.__annotations__ + + A dictionary containing *variable annotations* collected during + module body execution. For best practices on working with + "__annotations__", see "annotationlib". + + Changed in version 3.14: Annotations are now lazily evaluated. See + **PEP 649**. + +module.__annotate__ + + The *annotate function* for this module, or "None" if the module + has no annotations. See also: "__annotate__" attributes. + + Added in version 3.14. + + +Module dictionaries +------------------- + +Module objects also have the following special read-only attribute: + +module.__dict__ + + The module’s namespace as a dictionary object. 
Uniquely among the + attributes listed here, "__dict__" cannot be accessed as a global + variable from within a module; it can only be accessed as an + attribute on module objects. + + **CPython implementation detail:** Because of the way CPython + clears module dictionaries, the module dictionary will be cleared + when the module falls out of scope even if the dictionary still has + live references. To avoid this, copy the dictionary or keep the + module around while using its dictionary directly. + + +Custom classes +============== + +Custom class types are typically created by class definitions (see +section Class definitions). A class has a namespace implemented by a +dictionary object. Class attribute references are translated to +lookups in this dictionary, e.g., "C.x" is translated to +"C.__dict__["x"]" (although there are a number of hooks which allow +for other means of locating attributes). When the attribute name is +not found there, the attribute search continues in the base classes. +This search of the base classes uses the C3 method resolution order +which behaves correctly even in the presence of ‘diamond’ inheritance +structures where there are multiple inheritance paths leading back to +a common ancestor. Additional details on the C3 MRO used by Python can +be found at The Python 2.3 Method Resolution Order. + +When a class attribute reference (for class "C", say) would yield a +class method object, it is transformed into an instance method object +whose "__self__" attribute is "C". When it would yield a +"staticmethod" object, it is transformed into the object wrapped by +the static method object. See section Implementing Descriptors for +another way in which attributes retrieved from a class may differ from +those actually contained in its "__dict__". + +Class attribute assignments update the class’s dictionary, never the +dictionary of a base class. + +A class object can be called (see above) to yield a class instance +(see below). + + +Special attributes +------------------ + ++----------------------------------------------------+----------------------------------------------------+ +| Attribute | Meaning | +|====================================================|====================================================| +| type.__name__ | The class’s name. See also: "__name__ attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__qualname__ | The class’s *qualified name*. See also: | +| | "__qualname__ attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__module__ | The name of the module in which the class was | +| | defined. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__dict__ | A "mapping proxy" providing a read-only view of | +| | the class’s namespace. See also: "__dict__ | +| | attributes". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__bases__ | A "tuple" containing the class’s bases. In most | +| | cases, for a class defined as "class X(A, B, C)", | +| | "X.__bases__" will be exactly equal to "(A, B, | +| | C)". | ++----------------------------------------------------+----------------------------------------------------+ +| type.__doc__ | The class’s documentation string, or "None" if | +| | undefined. Not inherited by subclasses. 
| ++----------------------------------------------------+----------------------------------------------------+ +| type.__annotations__ | A dictionary containing *variable annotations* | +| | collected during class body execution. See also: | +| | "__annotations__ attributes". For best practices | +| | on working with "__annotations__", please see | +| | "annotationlib". Caution: Accessing the | +| | "__annotations__" attribute of a class object | +| | directly may yield incorrect results in the | +| | presence of metaclasses. In addition, the | +| | attribute may not exist for some classes. Use | +| | "annotationlib.get_annotations()" to retrieve | +| | class annotations safely. Changed in version | +| | 3.14: Annotations are now lazily evaluated. See | +| | **PEP 649**. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__annotate__() | The *annotate function* for this class, or "None" | +| | if the class has no annotations. See also: | +| | "__annotate__ attributes". Caution: Accessing | +| | the "__annotate__" attribute of a class object | +| | directly may yield incorrect results in the | +| | presence of metaclasses. Use | +| | "annotationlib.get_annotate_function()" to | +| | retrieve the annotate function safely. Added in | +| | version 3.14. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__type_params__ | A "tuple" containing the type parameters of a | +| | generic class. Added in version 3.12. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__static_attributes__ | A "tuple" containing names of attributes of this | +| | class which are assigned through "self.X" from any | +| | function in its body. Added in version 3.13. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__firstlineno__ | The line number of the first line of the class | +| | definition, including decorators. Setting the | +| | "__module__" attribute removes the | +| | "__firstlineno__" item from the type’s dictionary. | +| | Added in version 3.13. | ++----------------------------------------------------+----------------------------------------------------+ +| type.__mro__ | The "tuple" of classes that are considered when | +| | looking for base classes during method resolution. | ++----------------------------------------------------+----------------------------------------------------+ + + +Special methods +--------------- + +In addition to the special attributes described above, all Python +classes also have the following two methods available: + +type.mro() + + This method can be overridden by a metaclass to customize the + method resolution order for its instances. It is called at class + instantiation, and its result is stored in "__mro__". + +type.__subclasses__() + + Each class keeps a list of weak references to its immediate + subclasses. This method returns a list of all those references + still alive. The list is in definition order. Example: + + >>> class A: pass + >>> class B(A): pass + >>> A.__subclasses__() + [] + + +Class instances +=============== + +A class instance is created by calling a class object (see above). A +class instance has a namespace implemented as a dictionary which is +the first place in which attribute references are searched. 
When an +attribute is not found there, and the instance’s class has an +attribute by that name, the search continues with the class +attributes. If a class attribute is found that is a user-defined +function object, it is transformed into an instance method object +whose "__self__" attribute is the instance. Static method and class +method objects are also transformed; see above under “Classes”. See +section Implementing Descriptors for another way in which attributes +of a class retrieved via its instances may differ from the objects +actually stored in the class’s "__dict__". If no class attribute is +found, and the object’s class has a "__getattr__()" method, that is +called to satisfy the lookup. + +Attribute assignments and deletions update the instance’s dictionary, +never a class’s dictionary. If the class has a "__setattr__()" or +"__delattr__()" method, this is called instead of updating the +instance dictionary directly. + +Class instances can pretend to be numbers, sequences, or mappings if +they have methods with certain special names. See section Special +method names. + + +Special attributes +------------------ + +object.__class__ + + The class to which a class instance belongs. + +object.__dict__ + + A dictionary or other mapping object used to store an object’s + (writable) attributes. Not all instances have a "__dict__" + attribute; see the section on __slots__ for more details. + + +I/O objects (also known as file objects) +======================================== + +A *file object* represents an open file. Various shortcuts are +available to create file objects: the "open()" built-in function, and +also "os.popen()", "os.fdopen()", and the "makefile()" method of +socket objects (and perhaps by other functions or methods provided by +extension modules). + +The objects "sys.stdin", "sys.stdout" and "sys.stderr" are initialized +to file objects corresponding to the interpreter’s standard input, +output and error streams; they are all open in text mode and therefore +follow the interface defined by the "io.TextIOBase" abstract class. + + +Internal types +============== + +A few types used internally by the interpreter are exposed to the +user. Their definitions may change with future versions of the +interpreter, but they are mentioned here for completeness. + + +Code objects +------------ + +Code objects represent *byte-compiled* executable Python code, or +*bytecode*. The difference between a code object and a function object +is that the function object contains an explicit reference to the +function’s globals (the module in which it was defined), while a code +object contains no context; also the default argument values are +stored in the function object, not in the code object (because they +represent values calculated at run-time). Unlike function objects, +code objects are immutable and contain no references (directly or +indirectly) to mutable objects. + + +Special read-only attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_name | The function name | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_qualname | The fully qualified function name Added in | +| | version 3.11. 
| ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_argcount | The total number of positional *parameters* | +| | (including positional-only parameters and | +| | parameters with default values) that the function | +| | has | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_posonlyargcount | The number of positional-only *parameters* | +| | (including arguments with default values) that the | +| | function has | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_kwonlyargcount | The number of keyword-only *parameters* (including | +| | arguments with default values) that the function | +| | has | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_nlocals | The number of local variables used by the function | +| | (including parameters) | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_varnames | A "tuple" containing the names of the local | +| | variables in the function (starting with the | +| | parameter names) | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_cellvars | A "tuple" containing the names of local variables | +| | that are referenced from at least one *nested | +| | scope* inside the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_freevars | A "tuple" containing the names of *free (closure) | +| | variables* that a *nested scope* references in an | +| | outer scope. See also "function.__closure__". | +| | Note: references to global and builtin names are | +| | *not* included. | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_code | A string representing the sequence of *bytecode* | +| | instructions in the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_consts | A "tuple" containing the literals used by the | +| | *bytecode* in the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_names | A "tuple" containing the names used by the | +| | *bytecode* in the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_filename | The name of the file from which the code was | +| | compiled | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_firstlineno | The line number of the first line of the function | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_lnotab | A string encoding the mapping from *bytecode* | +| | offsets to line numbers. For details, see the | +| | source code of the interpreter. Deprecated since | +| | version 3.12: This attribute of code objects is | +| | deprecated, and may be removed in Python 3.15. 
| ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_stacksize | The required stack size of the code object | ++----------------------------------------------------+----------------------------------------------------+ +| codeobject.co_flags | An "integer" encoding a number of flags for the | +| | interpreter. | ++----------------------------------------------------+----------------------------------------------------+ + +The following flag bits are defined for "co_flags": bit "0x04" is set +if the function uses the "*arguments" syntax to accept an arbitrary +number of positional arguments; bit "0x08" is set if the function uses +the "**keywords" syntax to accept arbitrary keyword arguments; bit +"0x20" is set if the function is a generator. See Code Objects Bit +Flags for details on the semantics of each flags that might be +present. + +Future feature declarations ("from __future__ import division") also +use bits in "co_flags" to indicate whether a code object was compiled +with a particular feature enabled: bit "0x2000" is set if the function +was compiled with future division enabled; bits "0x10" and "0x1000" +were used in earlier versions of Python. + +Other bits in "co_flags" are reserved for internal use. + +If a code object represents a function and has a docstring, the first +item in "co_consts" is the docstring of the function. + + +Methods on code objects +~~~~~~~~~~~~~~~~~~~~~~~ + +codeobject.co_positions() + + Returns an iterable over the source code positions of each + *bytecode* instruction in the code object. + + The iterator returns "tuple"s containing the "(start_line, + end_line, start_column, end_column)". The *i-th* tuple corresponds + to the position of the source code that compiled to the *i-th* code + unit. Column information is 0-indexed utf-8 byte offsets on the + given source line. + + This positional information can be missing. A non-exhaustive lists + of cases where this may happen: + + * Running the interpreter with "-X" "no_debug_ranges". + + * Loading a pyc file compiled while using "-X" "no_debug_ranges". + + * Position tuples corresponding to artificial instructions. + + * Line and column numbers that can’t be represented due to + implementation specific limitations. + + When this occurs, some or all of the tuple elements can be "None". + + Added in version 3.11. + + Note: + + This feature requires storing column positions in code objects + which may result in a small increase of disk usage of compiled + Python files or interpreter memory usage. To avoid storing the + extra information and/or deactivate printing the extra traceback + information, the "-X" "no_debug_ranges" command line flag or the + "PYTHONNODEBUGRANGES" environment variable can be used. + +codeobject.co_lines() + + Returns an iterator that yields information about successive ranges + of *bytecode*s. Each item yielded is a "(start, end, lineno)" + "tuple": + + * "start" (an "int") represents the offset (inclusive) of the start + of the *bytecode* range + + * "end" (an "int") represents the offset (exclusive) of the end of + the *bytecode* range + + * "lineno" is an "int" representing the line number of the + *bytecode* range, or "None" if the bytecodes in the given range + have no line number + + The items yielded will have the following properties: + + * The first range yielded will have a "start" of 0. + + * The "(start, end)" ranges will be non-decreasing and consecutive. 
+ That is, for any pair of "tuple"s, the "start" of the second will + be equal to the "end" of the first. + + * No range will be backwards: "end >= start" for all triples. + + * The last "tuple" yielded will have "end" equal to the size of the + *bytecode*. + + Zero-width ranges, where "start == end", are allowed. Zero-width + ranges are used for lines that are present in the source code, but + have been eliminated by the *bytecode* compiler. + + Added in version 3.10. + + See also: + + **PEP 626** - Precise line numbers for debugging and other tools. + The PEP that introduced the "co_lines()" method. + +codeobject.replace(**kwargs) + + Return a copy of the code object with new values for the specified + fields. + + Code objects are also supported by the generic function + "copy.replace()". + + Added in version 3.8. + + +Frame objects +------------- + +Frame objects represent execution frames. They may occur in traceback +objects, and are also passed to registered trace functions. + + +Special read-only attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_back | Points to the previous stack frame (towards the | +| | caller), or "None" if this is the bottom stack | +| | frame | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_code | The code object being executed in this frame. | +| | Accessing this attribute raises an auditing event | +| | "object.__getattr__" with arguments "obj" and | +| | ""f_code"". | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_locals | The mapping used by the frame to look up local | +| | variables. If the frame refers to an *optimized | +| | scope*, this may return a write-through proxy | +| | object. Changed in version 3.13: Return a proxy | +| | for optimized scopes. | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_globals | The dictionary used by the frame to look up global | +| | variables | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_builtins | The dictionary used by the frame to look up built- | +| | in (intrinsic) names | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_lasti | The “precise instruction” of the frame object | +| | (this is an index into the *bytecode* string of | +| | the code object) | ++----------------------------------------------------+----------------------------------------------------+ + + +Special writable attributes +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_trace | If not "None", this is a function called for | +| | various events during code execution (this is used | +| | by debuggers). Normally an event is triggered for | +| | each new source line (see "f_trace_lines"). | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_trace_lines | Set this attribute to "False" to disable | +| | triggering a tracing event for each source line. 
| ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_trace_opcodes | Set this attribute to "True" to allow per-opcode | +| | events to be requested. Note that this may lead to | +| | undefined interpreter behaviour if exceptions | +| | raised by the trace function escape to the | +| | function being traced. | ++----------------------------------------------------+----------------------------------------------------+ +| frame.f_lineno | The current line number of the frame – writing to | +| | this from within a trace function jumps to the | +| | given line (only for the bottom-most frame). A | +| | debugger can implement a Jump command (aka Set | +| | Next Statement) by writing to this attribute. | ++----------------------------------------------------+----------------------------------------------------+ + + +Frame object methods +~~~~~~~~~~~~~~~~~~~~ + +Frame objects support one method: + +frame.clear() + + This method clears all references to local variables held by the + frame. Also, if the frame belonged to a *generator*, the generator + is finalized. This helps break reference cycles involving frame + objects (for example when catching an exception and storing its + traceback for later use). + + "RuntimeError" is raised if the frame is currently executing or + suspended. + + Added in version 3.4. + + Changed in version 3.13: Attempting to clear a suspended frame + raises "RuntimeError" (as has always been the case for executing + frames). + + +Traceback objects +----------------- + +Traceback objects represent the stack trace of an exception. A +traceback object is implicitly created when an exception occurs, and +may also be explicitly created by calling "types.TracebackType". + +Changed in version 3.7: Traceback objects can now be explicitly +instantiated from Python code. + +For implicitly created tracebacks, when the search for an exception +handler unwinds the execution stack, at each unwound level a traceback +object is inserted in front of the current traceback. When an +exception handler is entered, the stack trace is made available to the +program. (See section The try statement.) It is accessible as the +third item of the tuple returned by "sys.exc_info()", and as the +"__traceback__" attribute of the caught exception. + +When the program contains no suitable handler, the stack trace is +written (nicely formatted) to the standard error stream; if the +interpreter is interactive, it is also made available to the user as +"sys.last_traceback". + +For explicitly created tracebacks, it is up to the creator of the +traceback to determine how the "tb_next" attributes should be linked +to form a full stack trace. + +Special read-only attributes: + ++----------------------------------------------------+----------------------------------------------------+ +| traceback.tb_frame | Points to the execution frame of the current | +| | level. Accessing this attribute raises an | +| | auditing event "object.__getattr__" with arguments | +| | "obj" and ""tb_frame"". | ++----------------------------------------------------+----------------------------------------------------+ +| traceback.tb_lineno | Gives the line number where the exception occurred | ++----------------------------------------------------+----------------------------------------------------+ +| traceback.tb_lasti | Indicates the “precise instruction”. 
| ++----------------------------------------------------+----------------------------------------------------+ + +The line number and last instruction in the traceback may differ from +the line number of its frame object if the exception occurred in a +"try" statement with no matching except clause or with a "finally" +clause. + +traceback.tb_next + + The special writable attribute "tb_next" is the next level in the + stack trace (towards the frame where the exception occurred), or + "None" if there is no next level. + + Changed in version 3.7: This attribute is now writable + + +Slice objects +------------- + +Slice objects are used to represent slices for "__getitem__()" +methods. They are also created by the built-in "slice()" function. + +Special read-only attributes: "start" is the lower bound; "stop" is +the upper bound; "step" is the step value; each is "None" if omitted. +These attributes can have any type. + +Slice objects support one method: + +slice.indices(self, length) + + This method takes a single integer argument *length* and computes + information about the slice that the slice object would describe if + applied to a sequence of *length* items. It returns a tuple of + three integers; respectively these are the *start* and *stop* + indices and the *step* or stride length of the slice. Missing or + out-of-bounds indices are handled in a manner consistent with + regular slices. + + +Static method objects +--------------------- + +Static method objects provide a way of defeating the transformation of +function objects to method objects described above. A static method +object is a wrapper around any other object, usually a user-defined +method object. When a static method object is retrieved from a class +or a class instance, the object actually returned is the wrapped +object, which is not subject to any further transformation. Static +method objects are also callable. Static method objects are created by +the built-in "staticmethod()" constructor. + + +Class method objects +-------------------- + +A class method object, like a static method object, is a wrapper +around another object that alters the way in which that object is +retrieved from classes and class instances. The behaviour of class +method objects upon such retrieval is described above, under “instance +methods”. Class method objects are created by the built-in +"classmethod()" constructor. +''', + 'typesfunctions': r'''Functions +********* + +Function objects are created by function definitions. The only +operation on a function object is to call it: "func(argument-list)". + +There are really two flavors of function objects: built-in functions +and user-defined functions. Both support the same operation (to call +the function), but the implementation is different, hence the +different object types. + +See Function definitions for more information. +''', + 'typesmapping': r'''Mapping Types — "dict" +********************** + +A *mapping* object maps *hashable* values to arbitrary objects. +Mappings are mutable objects. There is currently only one standard +mapping type, the *dictionary*. (For other containers see the built- +in "list", "set", and "tuple" classes, and the "collections" module.) + +A dictionary’s keys are *almost* arbitrary values. Values that are +not *hashable*, that is, values containing lists, dictionaries or +other mutable types (that are compared by value rather than by object +identity) may not be used as keys. 
Values that compare equal (such as +"1", "1.0", and "True") can be used interchangeably to index the same +dictionary entry. + +class dict(**kwargs) +class dict(mapping, **kwargs) +class dict(iterable, **kwargs) + + Return a new dictionary initialized from an optional positional + argument and a possibly empty set of keyword arguments. + + Dictionaries can be created by several means: + + * Use a comma-separated list of "key: value" pairs within braces: + "{'jack': 4098, 'sjoerd': 4127}" or "{4098: 'jack', 4127: + 'sjoerd'}" + + * Use a dict comprehension: "{}", "{x: x ** 2 for x in range(10)}" + + * Use the type constructor: "dict()", "dict([('foo', 100), ('bar', + 200)])", "dict(foo=100, bar=200)" + + If no positional argument is given, an empty dictionary is created. + If a positional argument is given and it defines a "keys()" method, + a dictionary is created by calling "__getitem__()" on the argument + with each returned key from the method. Otherwise, the positional + argument must be an *iterable* object. Each item in the iterable + must itself be an iterable with exactly two elements. The first + element of each item becomes a key in the new dictionary, and the + second element the corresponding value. If a key occurs more than + once, the last value for that key becomes the corresponding value + in the new dictionary. + + If keyword arguments are given, the keyword arguments and their + values are added to the dictionary created from the positional + argument. If a key being added is already present, the value from + the keyword argument replaces the value from the positional + argument. + + To illustrate, the following examples all return a dictionary equal + to "{"one": 1, "two": 2, "three": 3}": + + >>> a = dict(one=1, two=2, three=3) + >>> b = {'one': 1, 'two': 2, 'three': 3} + >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3])) + >>> d = dict([('two', 2), ('one', 1), ('three', 3)]) + >>> e = dict({'three': 3, 'one': 1, 'two': 2}) + >>> f = dict({'one': 1, 'three': 3}, two=2) + >>> a == b == c == d == e == f + True + + Providing keyword arguments as in the first example only works for + keys that are valid Python identifiers. Otherwise, any valid keys + can be used. + + These are the operations that dictionaries support (and therefore, + custom mapping types should support too): + + list(d) + + Return a list of all the keys used in the dictionary *d*. + + len(d) + + Return the number of items in the dictionary *d*. + + d[key] + + Return the item of *d* with key *key*. Raises a "KeyError" if + *key* is not in the map. + + If a subclass of dict defines a method "__missing__()" and *key* + is not present, the "d[key]" operation calls that method with + the key *key* as argument. The "d[key]" operation then returns + or raises whatever is returned or raised by the + "__missing__(key)" call. No other operations or methods invoke + "__missing__()". If "__missing__()" is not defined, "KeyError" + is raised. "__missing__()" must be a method; it cannot be an + instance variable: + + >>> class Counter(dict): + ... def __missing__(self, key): + ... return 0 + ... + >>> c = Counter() + >>> c['red'] + 0 + >>> c['red'] += 1 + >>> c['red'] + 1 + + The example above shows part of the implementation of + "collections.Counter". A different "__missing__" method is used + by "collections.defaultdict". + + d[key] = value + + Set "d[key]" to *value*. + + del d[key] + + Remove "d[key]" from *d*. Raises a "KeyError" if *key* is not + in the map. 
+ + key in d + + Return "True" if *d* has a key *key*, else "False". + + key not in d + + Equivalent to "not key in d". + + iter(d) + + Return an iterator over the keys of the dictionary. This is a + shortcut for "iter(d.keys())". + + clear() + + Remove all items from the dictionary. + + copy() + + Return a shallow copy of the dictionary. + + classmethod fromkeys(iterable, value=None, /) + + Create a new dictionary with keys from *iterable* and values set + to *value*. + + "fromkeys()" is a class method that returns a new dictionary. + *value* defaults to "None". All of the values refer to just a + single instance, so it generally doesn’t make sense for *value* + to be a mutable object such as an empty list. To get distinct + values, use a dict comprehension instead. + + get(key, default=None) + + Return the value for *key* if *key* is in the dictionary, else + *default*. If *default* is not given, it defaults to "None", so + that this method never raises a "KeyError". + + items() + + Return a new view of the dictionary’s items ("(key, value)" + pairs). See the documentation of view objects. + + keys() + + Return a new view of the dictionary’s keys. See the + documentation of view objects. + + pop(key[, default]) + + If *key* is in the dictionary, remove it and return its value, + else return *default*. If *default* is not given and *key* is + not in the dictionary, a "KeyError" is raised. + + popitem() + + Remove and return a "(key, value)" pair from the dictionary. + Pairs are returned in LIFO (last-in, first-out) order. + + "popitem()" is useful to destructively iterate over a + dictionary, as often used in set algorithms. If the dictionary + is empty, calling "popitem()" raises a "KeyError". + + Changed in version 3.7: LIFO order is now guaranteed. In prior + versions, "popitem()" would return an arbitrary key/value pair. + + reversed(d) + + Return a reverse iterator over the keys of the dictionary. This + is a shortcut for "reversed(d.keys())". + + Added in version 3.8. + + setdefault(key, default=None) + + If *key* is in the dictionary, return its value. If not, insert + *key* with a value of *default* and return *default*. *default* + defaults to "None". + + update([other]) + + Update the dictionary with the key/value pairs from *other*, + overwriting existing keys. Return "None". + + "update()" accepts either another object with a "keys()" method + (in which case "__getitem__()" is called with every key returned + from the method) or an iterable of key/value pairs (as tuples or + other iterables of length two). If keyword arguments are + specified, the dictionary is then updated with those key/value + pairs: "d.update(red=1, blue=2)". + + values() + + Return a new view of the dictionary’s values. See the + documentation of view objects. + + An equality comparison between one "dict.values()" view and + another will always return "False". This also applies when + comparing "dict.values()" to itself: + + >>> d = {'a': 1} + >>> d.values() == d.values() + False + + d | other + + Create a new dictionary with the merged keys and values of *d* + and *other*, which must both be dictionaries. The values of + *other* take priority when *d* and *other* share keys. + + Added in version 3.9. + + d |= other + + Update the dictionary *d* with keys and values from *other*, + which may be either a *mapping* or an *iterable* of key/value + pairs. The values of *other* take priority when *d* and *other* + share keys. + + Added in version 3.9. 
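+
+      As a brief illustration of the merge ("|") and update ("|=")
+      operators described above (the dictionaries and values here are
+      arbitrary examples):
+
+      >>> d = {'spam': 1, 'eggs': 2}
+      >>> other = {'eggs': 3, 'ham': 4}
+      >>> d | other
+      {'spam': 1, 'eggs': 3, 'ham': 4}
+      >>> d |= other
+      >>> d
+      {'spam': 1, 'eggs': 3, 'ham': 4}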
+ + Dictionaries compare equal if and only if they have the same "(key, + value)" pairs (regardless of ordering). Order comparisons (‘<’, + ‘<=’, ‘>=’, ‘>’) raise "TypeError". + + Dictionaries preserve insertion order. Note that updating a key + does not affect the order. Keys added after deletion are inserted + at the end. + + >>> d = {"one": 1, "two": 2, "three": 3, "four": 4} + >>> d + {'one': 1, 'two': 2, 'three': 3, 'four': 4} + >>> list(d) + ['one', 'two', 'three', 'four'] + >>> list(d.values()) + [1, 2, 3, 4] + >>> d["one"] = 42 + >>> d + {'one': 42, 'two': 2, 'three': 3, 'four': 4} + >>> del d["two"] + >>> d["two"] = None + >>> d + {'one': 42, 'three': 3, 'four': 4, 'two': None} + + Changed in version 3.7: Dictionary order is guaranteed to be + insertion order. This behavior was an implementation detail of + CPython from 3.6. + + Dictionaries and dictionary views are reversible. + + >>> d = {"one": 1, "two": 2, "three": 3, "four": 4} + >>> d + {'one': 1, 'two': 2, 'three': 3, 'four': 4} + >>> list(reversed(d)) + ['four', 'three', 'two', 'one'] + >>> list(reversed(d.values())) + [4, 3, 2, 1] + >>> list(reversed(d.items())) + [('four', 4), ('three', 3), ('two', 2), ('one', 1)] + + Changed in version 3.8: Dictionaries are now reversible. + +See also: + + "types.MappingProxyType" can be used to create a read-only view of a + "dict". + + +Dictionary view objects +======================= + +The objects returned by "dict.keys()", "dict.values()" and +"dict.items()" are *view objects*. They provide a dynamic view on the +dictionary’s entries, which means that when the dictionary changes, +the view reflects these changes. + +Dictionary views can be iterated over to yield their respective data, +and support membership tests: + +len(dictview) + + Return the number of entries in the dictionary. + +iter(dictview) + + Return an iterator over the keys, values or items (represented as + tuples of "(key, value)") in the dictionary. + + Keys and values are iterated over in insertion order. This allows + the creation of "(value, key)" pairs using "zip()": "pairs = + zip(d.values(), d.keys())". Another way to create the same list is + "pairs = [(v, k) for (k, v) in d.items()]". + + Iterating views while adding or deleting entries in the dictionary + may raise a "RuntimeError" or fail to iterate over all entries. + + Changed in version 3.7: Dictionary order is guaranteed to be + insertion order. + +x in dictview + + Return "True" if *x* is in the underlying dictionary’s keys, values + or items (in the latter case, *x* should be a "(key, value)" + tuple). + +reversed(dictview) + + Return a reverse iterator over the keys, values or items of the + dictionary. The view will be iterated in reverse order of the + insertion. + + Changed in version 3.8: Dictionary views are now reversible. + +dictview.mapping + + Return a "types.MappingProxyType" that wraps the original + dictionary to which the view refers. + + Added in version 3.10. + +Keys views are set-like since their entries are unique and *hashable*. +Items views also have set-like operations since the (key, value) pairs +are unique and the keys are hashable. If all values in an items view +are hashable as well, then the items view can interoperate with other +sets. (Values views are not treated as set-like since the entries are +generally not unique.) For set-like views, all of the operations +defined for the abstract base class "collections.abc.Set" are +available (for example, "==", "<", or "^"). 
While using set +operators, set-like views accept any iterable as the other operand, +unlike sets which only accept sets as the input. + +An example of dictionary view usage: + + >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, 'spam': 500} + >>> keys = dishes.keys() + >>> values = dishes.values() + + >>> # iteration + >>> n = 0 + >>> for val in values: + ... n += val + ... + >>> print(n) + 504 + + >>> # keys and values are iterated over in the same order (insertion order) + >>> list(keys) + ['eggs', 'sausage', 'bacon', 'spam'] + >>> list(values) + [2, 1, 1, 500] + + >>> # view objects are dynamic and reflect dict changes + >>> del dishes['eggs'] + >>> del dishes['sausage'] + >>> list(keys) + ['bacon', 'spam'] + + >>> # set operations + >>> keys & {'eggs', 'bacon', 'salad'} + {'bacon'} + >>> keys ^ {'sausage', 'juice'} == {'juice', 'sausage', 'bacon', 'spam'} + True + >>> keys | ['juice', 'juice', 'juice'] == {'bacon', 'spam', 'juice'} + True + + >>> # get back a read-only proxy for the original dictionary + >>> values.mapping + mappingproxy({'bacon': 1, 'spam': 500}) + >>> values.mapping['spam'] + 500 +''', + 'typesmethods': r'''Methods +******* + +Methods are functions that are called using the attribute notation. +There are two flavors: built-in methods (such as "append()" on lists) +and class instance method. Built-in methods are described with the +types that support them. + +If you access a method (a function defined in a class namespace) +through an instance, you get a special object: a *bound method* (also +called instance method) object. When called, it will add the "self" +argument to the argument list. Bound methods have two special read- +only attributes: "m.__self__" is the object on which the method +operates, and "m.__func__" is the function implementing the method. +Calling "m(arg-1, arg-2, ..., arg-n)" is completely equivalent to +calling "m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)". + +Like function objects, bound method objects support getting arbitrary +attributes. However, since method attributes are actually stored on +the underlying function object ("method.__func__"), setting method +attributes on bound methods is disallowed. Attempting to set an +attribute on a method results in an "AttributeError" being raised. In +order to set a method attribute, you need to explicitly set it on the +underlying function object: + + >>> class C: + ... def method(self): + ... pass + ... + >>> c = C() + >>> c.method.whoami = 'my name is method' # can't set on the method + Traceback (most recent call last): + File "", line 1, in + AttributeError: 'method' object has no attribute 'whoami' + >>> c.method.__func__.whoami = 'my name is method' + >>> c.method.whoami + 'my name is method' + +See Instance methods for more information. +''', + 'typesmodules': r'''Modules +******* + +The only special operation on a module is attribute access: "m.name", +where *m* is a module and *name* accesses a name defined in *m*’s +symbol table. Module attributes can be assigned to. (Note that the +"import" statement is not, strictly speaking, an operation on a module +object; "import foo" does not require a module object named *foo* to +exist, rather it requires an (external) *definition* for a module +named *foo* somewhere.) + +A special attribute of every module is "__dict__". This is the +dictionary containing the module’s symbol table. 
Modifying this +dictionary will actually change the module’s symbol table, but direct +assignment to the "__dict__" attribute is not possible (you can write +"m.__dict__['a'] = 1", which defines "m.a" to be "1", but you can’t +write "m.__dict__ = {}"). Modifying "__dict__" directly is not +recommended. + +Modules built into the interpreter are written like this: "". If loaded from a file, they are written as +"". +''', + 'typesseq': r'''Sequence Types — "list", "tuple", "range" +***************************************** + +There are three basic sequence types: lists, tuples, and range +objects. Additional sequence types tailored for processing of binary +data and text strings are described in dedicated sections. + + +Common Sequence Operations +========================== + +The operations in the following table are supported by most sequence +types, both mutable and immutable. The "collections.abc.Sequence" ABC +is provided to make it easier to correctly implement these operations +on custom sequence types. + +This table lists the sequence operations sorted in ascending priority. +In the table, *s* and *t* are sequences of the same type, *n*, *i*, +*j* and *k* are integers and *x* is an arbitrary object that meets any +type and value restrictions imposed by *s*. + +The "in" and "not in" operations have the same priorities as the +comparison operations. The "+" (concatenation) and "*" (repetition) +operations have the same priority as the corresponding numeric +operations. [3] + ++----------------------------+----------------------------------+------------+ +| Operation | Result | Notes | +|============================|==================================|============| +| "x in s" | "True" if an item of *s* is | (1) | +| | equal to *x*, else "False" | | ++----------------------------+----------------------------------+------------+ +| "x not in s" | "False" if an item of *s* is | (1) | +| | equal to *x*, else "True" | | ++----------------------------+----------------------------------+------------+ +| "s + t" | the concatenation of *s* and *t* | (6)(7) | ++----------------------------+----------------------------------+------------+ +| "s * n" or "n * s" | equivalent to adding *s* to | (2)(7) | +| | itself *n* times | | ++----------------------------+----------------------------------+------------+ +| "s[i]" | *i*th item of *s*, origin 0 | (3) | ++----------------------------+----------------------------------+------------+ +| "s[i:j]" | slice of *s* from *i* to *j* | (3)(4) | ++----------------------------+----------------------------------+------------+ +| "s[i:j:k]" | slice of *s* from *i* to *j* | (3)(5) | +| | with step *k* | | ++----------------------------+----------------------------------+------------+ +| "len(s)" | length of *s* | | ++----------------------------+----------------------------------+------------+ +| "min(s)" | smallest item of *s* | | ++----------------------------+----------------------------------+------------+ +| "max(s)" | largest item of *s* | | ++----------------------------+----------------------------------+------------+ +| "s.index(x[, i[, j]])" | index of the first occurrence of | (8) | +| | *x* in *s* (at or after index | | +| | *i* and before index *j*) | | ++----------------------------+----------------------------------+------------+ +| "s.count(x)" | total number of occurrences of | | +| | *x* in *s* | | ++----------------------------+----------------------------------+------------+ + +Sequences of the same type also support comparisons. 
In particular, +tuples and lists are compared lexicographically by comparing +corresponding elements. This means that to compare equal, every +element must compare equal and the two sequences must be of the same +type and have the same length. (For full details see Comparisons in +the language reference.) + +Forward and reversed iterators over mutable sequences access values +using an index. That index will continue to march forward (or +backward) even if the underlying sequence is mutated. The iterator +terminates only when an "IndexError" or a "StopIteration" is +encountered (or when the index drops below zero). + +Notes: + +1. While the "in" and "not in" operations are used only for simple + containment testing in the general case, some specialised sequences + (such as "str", "bytes" and "bytearray") also use them for + subsequence testing: + + >>> "gg" in "eggs" + True + +2. Values of *n* less than "0" are treated as "0" (which yields an + empty sequence of the same type as *s*). Note that items in the + sequence *s* are not copied; they are referenced multiple times. + This often haunts new Python programmers; consider: + + >>> lists = [[]] * 3 + >>> lists + [[], [], []] + >>> lists[0].append(3) + >>> lists + [[3], [3], [3]] + + What has happened is that "[[]]" is a one-element list containing + an empty list, so all three elements of "[[]] * 3" are references + to this single empty list. Modifying any of the elements of + "lists" modifies this single list. You can create a list of + different lists this way: + + >>> lists = [[] for i in range(3)] + >>> lists[0].append(3) + >>> lists[1].append(5) + >>> lists[2].append(7) + >>> lists + [[3], [5], [7]] + + Further explanation is available in the FAQ entry How do I create a + multidimensional list?. + +3. If *i* or *j* is negative, the index is relative to the end of + sequence *s*: "len(s) + i" or "len(s) + j" is substituted. But + note that "-0" is still "0". + +4. The slice of *s* from *i* to *j* is defined as the sequence of + items with index *k* such that "i <= k < j". If *i* or *j* is + greater than "len(s)", use "len(s)". If *i* is omitted or "None", + use "0". If *j* is omitted or "None", use "len(s)". If *i* is + greater than or equal to *j*, the slice is empty. + +5. The slice of *s* from *i* to *j* with step *k* is defined as the + sequence of items with index "x = i + n*k" such that "0 <= n < + (j-i)/k". In other words, the indices are "i", "i+k", "i+2*k", + "i+3*k" and so on, stopping when *j* is reached (but never + including *j*). When *k* is positive, *i* and *j* are reduced to + "len(s)" if they are greater. When *k* is negative, *i* and *j* are + reduced to "len(s) - 1" if they are greater. If *i* or *j* are + omitted or "None", they become “end” values (which end depends on + the sign of *k*). Note, *k* cannot be zero. If *k* is "None", it + is treated like "1". + +6. Concatenating immutable sequences always results in a new object. + This means that building up a sequence by repeated concatenation + will have a quadratic runtime cost in the total sequence length. + To get a linear runtime cost, you must switch to one of the + alternatives below: + + * if concatenating "str" objects, you can build a list and use + "str.join()" at the end or else write to an "io.StringIO" + instance and retrieve its value when complete + + * if concatenating "bytes" objects, you can similarly use + "bytes.join()" or "io.BytesIO", or you can do in-place + concatenation with a "bytearray" object. 
"bytearray" objects are + mutable and have an efficient overallocation mechanism + + * if concatenating "tuple" objects, extend a "list" instead + + * for other types, investigate the relevant class documentation + +7. Some sequence types (such as "range") only support item sequences + that follow specific patterns, and hence don’t support sequence + concatenation or repetition. + +8. "index" raises "ValueError" when *x* is not found in *s*. Not all + implementations support passing the additional arguments *i* and + *j*. These arguments allow efficient searching of subsections of + the sequence. Passing the extra arguments is roughly equivalent to + using "s[i:j].index(x)", only without copying any data and with the + returned index being relative to the start of the sequence rather + than the start of the slice. + + +Immutable Sequence Types +======================== + +The only operation that immutable sequence types generally implement +that is not also implemented by mutable sequence types is support for +the "hash()" built-in. + +This support allows immutable sequences, such as "tuple" instances, to +be used as "dict" keys and stored in "set" and "frozenset" instances. + +Attempting to hash an immutable sequence that contains unhashable +values will result in "TypeError". + + +Mutable Sequence Types +====================== + +The operations in the following table are defined on mutable sequence +types. The "collections.abc.MutableSequence" ABC is provided to make +it easier to correctly implement these operations on custom sequence +types. + +In the table *s* is an instance of a mutable sequence type, *t* is any +iterable object and *x* is an arbitrary object that meets any type and +value restrictions imposed by *s* (for example, "bytearray" only +accepts integers that meet the value restriction "0 <= x <= 255"). 
+ ++--------------------------------+----------------------------------+-----------------------+ +| Operation | Result | Notes | +|================================|==================================|=======================| +| "s[i] = x" | item *i* of *s* is replaced by | | +| | *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j] = t" | slice of *s* from *i* to *j* is | | +| | replaced by the contents of the | | +| | iterable *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j]" | same as "s[i:j] = []" | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j:k] = t" | the elements of "s[i:j:k]" are | (1) | +| | replaced by those of *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j:k]" | removes the elements of | | +| | "s[i:j:k]" from the list | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.append(x)" | appends *x* to the end of the | | +| | sequence (same as | | +| | "s[len(s):len(s)] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.clear()" | removes all items from *s* (same | (5) | +| | as "del s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.copy()" | creates a shallow copy of *s* | (5) | +| | (same as "s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.extend(t)" or "s += t" | extends *s* with the contents of | | +| | *t* (for the most part the same | | +| | as "s[len(s):len(s)] = t") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s *= n" | updates *s* with its contents | (6) | +| | repeated *n* times | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.insert(i, x)" | inserts *x* into *s* at the | | +| | index given by *i* (same as | | +| | "s[i:i] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.pop()" or "s.pop(i)" | retrieves the item at *i* and | (2) | +| | also removes it from *s* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.remove(x)" | removes the first item from *s* | (3) | +| | where "s[i]" is equal to *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.reverse()" | reverses the items of *s* in | (4) | +| | place | | ++--------------------------------+----------------------------------+-----------------------+ + +Notes: + +1. If *k* is not equal to "1", *t* must have the same length as the + slice it is replacing. + +2. The optional argument *i* defaults to "-1", so that by default the + last item is removed and returned. + +3. "remove()" raises "ValueError" when *x* is not found in *s*. + +4. The "reverse()" method modifies the sequence in place for economy + of space when reversing a large sequence. To remind users that it + operates by side effect, it does not return the reversed sequence. + +5. "clear()" and "copy()" are included for consistency with the + interfaces of mutable containers that don’t support slicing + operations (such as "dict" and "set"). 
"copy()" is not part of the + "collections.abc.MutableSequence" ABC, but most concrete mutable + sequence classes provide it. + + Added in version 3.3: "clear()" and "copy()" methods. + +6. The value *n* is an integer, or an object implementing + "__index__()". Zero and negative values of *n* clear the sequence. + Items in the sequence are not copied; they are referenced multiple + times, as explained for "s * n" under Common Sequence Operations. + + +Lists +===== + +Lists are mutable sequences, typically used to store collections of +homogeneous items (where the precise degree of similarity will vary by +application). + +class list([iterable]) + + Lists may be constructed in several ways: + + * Using a pair of square brackets to denote the empty list: "[]" + + * Using square brackets, separating items with commas: "[a]", "[a, + b, c]" + + * Using a list comprehension: "[x for x in iterable]" + + * Using the type constructor: "list()" or "list(iterable)" + + The constructor builds a list whose items are the same and in the + same order as *iterable*’s items. *iterable* may be either a + sequence, a container that supports iteration, or an iterator + object. If *iterable* is already a list, a copy is made and + returned, similar to "iterable[:]". For example, "list('abc')" + returns "['a', 'b', 'c']" and "list( (1, 2, 3) )" returns "[1, 2, + 3]". If no argument is given, the constructor creates a new empty + list, "[]". + + Many other operations also produce lists, including the "sorted()" + built-in. + + Lists implement all of the common and mutable sequence operations. + Lists also provide the following additional method: + + sort(*, key=None, reverse=False) + + This method sorts the list in place, using only "<" comparisons + between items. Exceptions are not suppressed - if any comparison + operations fail, the entire sort operation will fail (and the + list will likely be left in a partially modified state). + + "sort()" accepts two arguments that can only be passed by + keyword (keyword-only arguments): + + *key* specifies a function of one argument that is used to + extract a comparison key from each list element (for example, + "key=str.lower"). The key corresponding to each item in the list + is calculated once and then used for the entire sorting process. + The default value of "None" means that list items are sorted + directly without calculating a separate key value. + + The "functools.cmp_to_key()" utility is available to convert a + 2.x style *cmp* function to a *key* function. + + *reverse* is a boolean value. If set to "True", then the list + elements are sorted as if each comparison were reversed. + + This method modifies the sequence in place for economy of space + when sorting a large sequence. To remind users that it operates + by side effect, it does not return the sorted sequence (use + "sorted()" to explicitly request a new sorted list instance). + + The "sort()" method is guaranteed to be stable. A sort is + stable if it guarantees not to change the relative order of + elements that compare equal — this is helpful for sorting in + multiple passes (for example, sort by department, then by salary + grade). + + For sorting examples and a brief sorting tutorial, see Sorting + Techniques. + + **CPython implementation detail:** While a list is being sorted, + the effect of attempting to mutate, or even inspect, the list is + undefined. 
The C implementation of Python makes the list appear + empty for the duration, and raises "ValueError" if it can detect + that the list has been mutated during a sort. + + +Tuples +====== + +Tuples are immutable sequences, typically used to store collections of +heterogeneous data (such as the 2-tuples produced by the "enumerate()" +built-in). Tuples are also used for cases where an immutable sequence +of homogeneous data is needed (such as allowing storage in a "set" or +"dict" instance). + +class tuple([iterable]) + + Tuples may be constructed in a number of ways: + + * Using a pair of parentheses to denote the empty tuple: "()" + + * Using a trailing comma for a singleton tuple: "a," or "(a,)" + + * Separating items with commas: "a, b, c" or "(a, b, c)" + + * Using the "tuple()" built-in: "tuple()" or "tuple(iterable)" + + The constructor builds a tuple whose items are the same and in the + same order as *iterable*’s items. *iterable* may be either a + sequence, a container that supports iteration, or an iterator + object. If *iterable* is already a tuple, it is returned + unchanged. For example, "tuple('abc')" returns "('a', 'b', 'c')" + and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". If no argument is + given, the constructor creates a new empty tuple, "()". + + Note that it is actually the comma which makes a tuple, not the + parentheses. The parentheses are optional, except in the empty + tuple case, or when they are needed to avoid syntactic ambiguity. + For example, "f(a, b, c)" is a function call with three arguments, + while "f((a, b, c))" is a function call with a 3-tuple as the sole + argument. + + Tuples implement all of the common sequence operations. + +For heterogeneous collections of data where access by name is clearer +than access by index, "collections.namedtuple()" may be a more +appropriate choice than a simple tuple object. + + +Ranges +====== + +The "range" type represents an immutable sequence of numbers and is +commonly used for looping a specific number of times in "for" loops. + +class range(stop) +class range(start, stop[, step]) + + The arguments to the range constructor must be integers (either + built-in "int" or any object that implements the "__index__()" + special method). If the *step* argument is omitted, it defaults to + "1". If the *start* argument is omitted, it defaults to "0". If + *step* is zero, "ValueError" is raised. + + For a positive *step*, the contents of a range "r" are determined + by the formula "r[i] = start + step*i" where "i >= 0" and "r[i] < + stop". + + For a negative *step*, the contents of the range are still + determined by the formula "r[i] = start + step*i", but the + constraints are "i >= 0" and "r[i] > stop". + + A range object will be empty if "r[0]" does not meet the value + constraint. Ranges do support negative indices, but these are + interpreted as indexing from the end of the sequence determined by + the positive indices. + + Ranges containing absolute values larger than "sys.maxsize" are + permitted but some features (such as "len()") may raise + "OverflowError". 
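+
+   For instance, such an over-sized range can still be indexed, since
+   items are computed on demand, even though "len()" fails (an
+   illustrative sketch; the exact exception message may differ between
+   platforms):
+
+      >>> big = range(2**100)
+      >>> big[-1] == 2**100 - 1
+      True
+      >>> len(big)
+      Traceback (most recent call last):
+        ...
+      OverflowError: Python int too large to convert to C ssize_t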
+ + Range examples: + + >>> list(range(10)) + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] + >>> list(range(1, 11)) + [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + >>> list(range(0, 30, 5)) + [0, 5, 10, 15, 20, 25] + >>> list(range(0, 10, 3)) + [0, 3, 6, 9] + >>> list(range(0, -10, -1)) + [0, -1, -2, -3, -4, -5, -6, -7, -8, -9] + >>> list(range(0)) + [] + >>> list(range(1, 0)) + [] + + Ranges implement all of the common sequence operations except + concatenation and repetition (due to the fact that range objects + can only represent sequences that follow a strict pattern and + repetition and concatenation will usually violate that pattern). + + start + + The value of the *start* parameter (or "0" if the parameter was + not supplied) + + stop + + The value of the *stop* parameter + + step + + The value of the *step* parameter (or "1" if the parameter was + not supplied) + +The advantage of the "range" type over a regular "list" or "tuple" is +that a "range" object will always take the same (small) amount of +memory, no matter the size of the range it represents (as it only +stores the "start", "stop" and "step" values, calculating individual +items and subranges as needed). + +Range objects implement the "collections.abc.Sequence" ABC, and +provide features such as containment tests, element index lookup, +slicing and support for negative indices (see Sequence Types — list, +tuple, range): + +>>> r = range(0, 20, 2) +>>> r +range(0, 20, 2) +>>> 11 in r +False +>>> 10 in r +True +>>> r.index(10) +5 +>>> r[5] +10 +>>> r[:5] +range(0, 10, 2) +>>> r[-1] +18 + +Testing range objects for equality with "==" and "!=" compares them as +sequences. That is, two range objects are considered equal if they +represent the same sequence of values. (Note that two range objects +that compare equal might have different "start", "stop" and "step" +attributes, for example "range(0) == range(2, 1, 3)" or "range(0, 3, +2) == range(0, 4, 2)".) + +Changed in version 3.2: Implement the Sequence ABC. Support slicing +and negative indices. Test "int" objects for membership in constant +time instead of iterating through all items. + +Changed in version 3.3: Define ‘==’ and ‘!=’ to compare range objects +based on the sequence of values they define (instead of comparing +based on object identity).Added the "start", "stop" and "step" +attributes. + +See also: + + * The linspace recipe shows how to implement a lazy version of range + suitable for floating-point applications. +''', + 'typesseq-mutable': r'''Mutable Sequence Types +********************** + +The operations in the following table are defined on mutable sequence +types. The "collections.abc.MutableSequence" ABC is provided to make +it easier to correctly implement these operations on custom sequence +types. + +In the table *s* is an instance of a mutable sequence type, *t* is any +iterable object and *x* is an arbitrary object that meets any type and +value restrictions imposed by *s* (for example, "bytearray" only +accepts integers that meet the value restriction "0 <= x <= 255"). 
+ ++--------------------------------+----------------------------------+-----------------------+ +| Operation | Result | Notes | +|================================|==================================|=======================| +| "s[i] = x" | item *i* of *s* is replaced by | | +| | *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j] = t" | slice of *s* from *i* to *j* is | | +| | replaced by the contents of the | | +| | iterable *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j]" | same as "s[i:j] = []" | | ++--------------------------------+----------------------------------+-----------------------+ +| "s[i:j:k] = t" | the elements of "s[i:j:k]" are | (1) | +| | replaced by those of *t* | | ++--------------------------------+----------------------------------+-----------------------+ +| "del s[i:j:k]" | removes the elements of | | +| | "s[i:j:k]" from the list | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.append(x)" | appends *x* to the end of the | | +| | sequence (same as | | +| | "s[len(s):len(s)] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.clear()" | removes all items from *s* (same | (5) | +| | as "del s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.copy()" | creates a shallow copy of *s* | (5) | +| | (same as "s[:]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.extend(t)" or "s += t" | extends *s* with the contents of | | +| | *t* (for the most part the same | | +| | as "s[len(s):len(s)] = t") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s *= n" | updates *s* with its contents | (6) | +| | repeated *n* times | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.insert(i, x)" | inserts *x* into *s* at the | | +| | index given by *i* (same as | | +| | "s[i:i] = [x]") | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.pop()" or "s.pop(i)" | retrieves the item at *i* and | (2) | +| | also removes it from *s* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.remove(x)" | removes the first item from *s* | (3) | +| | where "s[i]" is equal to *x* | | ++--------------------------------+----------------------------------+-----------------------+ +| "s.reverse()" | reverses the items of *s* in | (4) | +| | place | | ++--------------------------------+----------------------------------+-----------------------+ + +Notes: + +1. If *k* is not equal to "1", *t* must have the same length as the + slice it is replacing. + +2. The optional argument *i* defaults to "-1", so that by default the + last item is removed and returned. + +3. "remove()" raises "ValueError" when *x* is not found in *s*. + +4. The "reverse()" method modifies the sequence in place for economy + of space when reversing a large sequence. To remind users that it + operates by side effect, it does not return the reversed sequence. + +5. "clear()" and "copy()" are included for consistency with the + interfaces of mutable containers that don’t support slicing + operations (such as "dict" and "set"). 
"copy()" is not part of the + "collections.abc.MutableSequence" ABC, but most concrete mutable + sequence classes provide it. + + Added in version 3.3: "clear()" and "copy()" methods. + +6. The value *n* is an integer, or an object implementing + "__index__()". Zero and negative values of *n* clear the sequence. + Items in the sequence are not copied; they are referenced multiple + times, as explained for "s * n" under Common Sequence Operations. +''', + 'unary': r'''Unary arithmetic and bitwise operations +*************************************** + +All unary arithmetic and bitwise operations have the same priority: + + u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr + +The unary "-" (minus) operator yields the negation of its numeric +argument; the operation can be overridden with the "__neg__()" special +method. + +The unary "+" (plus) operator yields its numeric argument unchanged; +the operation can be overridden with the "__pos__()" special method. + +The unary "~" (invert) operator yields the bitwise inversion of its +integer argument. The bitwise inversion of "x" is defined as +"-(x+1)". It only applies to integral numbers or to custom objects +that override the "__invert__()" special method. + +In all three cases, if the argument does not have the proper type, a +"TypeError" exception is raised. +''', + 'while': r'''The "while" statement +********************* + +The "while" statement is used for repeated execution as long as an +expression is true: + + while_stmt ::= "while" assignment_expression ":" suite + ["else" ":" suite] + +This repeatedly tests the expression and, if it is true, executes the +first suite; if the expression is false (which may be the first time +it is tested) the suite of the "else" clause, if present, is executed +and the loop terminates. + +A "break" statement executed in the first suite terminates the loop +without executing the "else" clause’s suite. A "continue" statement +executed in the first suite skips the rest of the suite and goes back +to testing the expression. +''', + 'with': r'''The "with" statement +******************** + +The "with" statement is used to wrap the execution of a block with +methods defined by a context manager (see section With Statement +Context Managers). This allows common "try"…"except"…"finally" usage +patterns to be encapsulated for convenient reuse. + + with_stmt ::= "with" ( "(" with_stmt_contents ","? ")" | with_stmt_contents ) ":" suite + with_stmt_contents ::= with_item ("," with_item)* + with_item ::= expression ["as" target] + +The execution of the "with" statement with one “item” proceeds as +follows: + +1. The context expression (the expression given in the "with_item") is + evaluated to obtain a context manager. + +2. The context manager’s "__enter__()" is loaded for later use. + +3. The context manager’s "__exit__()" is loaded for later use. + +4. The context manager’s "__enter__()" method is invoked. + +5. If a target was included in the "with" statement, the return value + from "__enter__()" is assigned to it. + + Note: + + The "with" statement guarantees that if the "__enter__()" method + returns without an error, then "__exit__()" will always be + called. Thus, if an error occurs during the assignment to the + target list, it will be treated the same as an error occurring + within the suite would be. See step 7 below. + +6. The suite is executed. + +7. The context manager’s "__exit__()" method is invoked. 
If an + exception caused the suite to be exited, its type, value, and + traceback are passed as arguments to "__exit__()". Otherwise, three + "None" arguments are supplied. + + If the suite was exited due to an exception, and the return value + from the "__exit__()" method was false, the exception is reraised. + If the return value was true, the exception is suppressed, and + execution continues with the statement following the "with" + statement. + + If the suite was exited for any reason other than an exception, the + return value from "__exit__()" is ignored, and execution proceeds + at the normal location for the kind of exit that was taken. + +The following code: + + with EXPRESSION as TARGET: + SUITE + +is semantically equivalent to: + + manager = (EXPRESSION) + enter = type(manager).__enter__ + exit = type(manager).__exit__ + value = enter(manager) + hit_except = False + + try: + TARGET = value + SUITE + except: + hit_except = True + if not exit(manager, *sys.exc_info()): + raise + finally: + if not hit_except: + exit(manager, None, None, None) + +With more than one item, the context managers are processed as if +multiple "with" statements were nested: + + with A() as a, B() as b: + SUITE + +is semantically equivalent to: + + with A() as a: + with B() as b: + SUITE + +You can also write multi-item context managers in multiple lines if +the items are surrounded by parentheses. For example: + + with ( + A() as a, + B() as b, + ): + SUITE + +Changed in version 3.1: Support for multiple context expressions. + +Changed in version 3.10: Support for using grouping parentheses to +break the statement in multiple lines. + +See also: + + **PEP 343** - The “with” statement + The specification, background, and examples for the Python "with" + statement. +''', + 'yield': r'''The "yield" statement +********************* + + yield_stmt ::= yield_expression + +A "yield" statement is semantically equivalent to a yield expression. +The "yield" statement can be used to omit the parentheses that would +otherwise be required in the equivalent yield expression statement. +For example, the yield statements + + yield + yield from + +are equivalent to the yield expression statements + + (yield ) + (yield from ) + +Yield expressions and statements are only used when defining a +*generator* function, and are only used in the body of the generator +function. Using "yield" in a function definition is sufficient to +cause that definition to create a generator function instead of a +normal function. + +For full details of "yield" semantics, refer to the Yield expressions +section. +''', +} diff --git a/Lib/socket.py b/Lib/socket.py index be37c24d6174a2..727b0e75f03595 100644 --- a/Lib/socket.py +++ b/Lib/socket.py @@ -937,7 +937,9 @@ def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False, # Fail later on bind(), for platforms which may not # support this option. pass - if reuse_port: + # Since Linux 6.12.9, SO_REUSEPORT is not allowed + # on other address families than AF_INET/AF_INET6. 
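        # Illustrative sketch (assumes a kernel with the restriction noted
        # above): attempting to set the option on a non-INET socket, e.g.
        #     s = socket(AF_UNIX, SOCK_STREAM)
        #     s.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)   # raises OSError
        # now fails, so the option is only applied to AF_INET/AF_INET6
        # sockets below.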
+ if reuse_port and family in (AF_INET, AF_INET6): sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1) if has_ipv6 and family == AF_INET6: if dualstack_ipv6: diff --git a/Lib/socketserver.py b/Lib/socketserver.py index cd028ef1c63b85..35b2723de3babe 100644 --- a/Lib/socketserver.py +++ b/Lib/socketserver.py @@ -468,7 +468,12 @@ def server_bind(self): """ if self.allow_reuse_address and hasattr(socket, "SO_REUSEADDR"): self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - if self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT"): + # Since Linux 6.12.9, SO_REUSEPORT is not allowed + # on other address families than AF_INET/AF_INET6. + if ( + self.allow_reuse_port and hasattr(socket, "SO_REUSEPORT") + and self.address_family in (socket.AF_INET, socket.AF_INET6) + ): self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) self.socket.bind(self.server_address) self.server_address = self.socket.getsockname() diff --git a/Lib/sysconfig/__init__.py b/Lib/sysconfig/__init__.py index 7a4a8f65a5eb3e..3c3c9796ec3307 100644 --- a/Lib/sysconfig/__init__.py +++ b/Lib/sysconfig/__init__.py @@ -220,8 +220,15 @@ def _safe_realpath(path): def is_python_build(check_home=None): if check_home is not None: import warnings - warnings.warn("check_home argument is deprecated and ignored.", - DeprecationWarning, stacklevel=2) + warnings.warn( + ( + 'The check_home argument of sysconfig.is_python_build is ' + 'deprecated and its value is ignored. ' + 'It will be removed in Python 3.15.' + ), + DeprecationWarning, + stacklevel=2, + ) for fn in ("Setup", "Setup.local"): if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): return True @@ -716,8 +723,20 @@ def expand_makefile_vars(s, vars): variable expansions; if 'vars' is the output of 'parse_makefile()', you're fine. Returns a variable-expanded version of 's'. """ + + import warnings + warnings.warn( + 'sysconfig.expand_makefile_vars is deprecated and will be removed in ' + 'Python 3.16. Use sysconfig.get_paths(vars=...) instead.', + DeprecationWarning, + stacklevel=2, + ) + import re + _findvar1_rx = r"\$\(([A-Za-z][A-Za-z0-9_]*)\)" + _findvar2_rx = r"\${([A-Za-z][A-Za-z0-9_]*)}" + # This algorithm does multiple expansion, so if vars['foo'] contains # "${bar}", it will expand ${foo} to ${bar}, and then expand # ${bar}... and so forth. 
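    # (Concretely: with vars = {'foo': '${bar}', 'bar': 'baz'}, the string
    # "${foo}" is rewritten to "${bar}" on one pass and to "baz" on the next.)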
This is fine as long as 'vars' comes from diff --git a/Lib/test/_test_eintr.py b/Lib/test/_test_eintr.py index 493932d6c6d441..ae799808ca067e 100644 --- a/Lib/test/_test_eintr.py +++ b/Lib/test/_test_eintr.py @@ -152,6 +152,37 @@ def test_read(self): self.assertEqual(data, os.read(rd, len(data))) self.assertEqual(proc.wait(), 0) + def test_readinto(self): + rd, wr = os.pipe() + self.addCleanup(os.close, rd) + # wr closed explicitly by parent + + # the payload below are smaller than PIPE_BUF, hence the writes will be + # atomic + datas = [b"hello", b"world", b"spam"] + + code = '\n'.join(( + 'import os, sys, time', + '', + 'wr = int(sys.argv[1])', + 'datas = %r' % datas, + 'sleep_time = %r' % self.sleep_time, + '', + 'for data in datas:', + ' # let the parent block on read()', + ' time.sleep(sleep_time)', + ' os.write(wr, data)', + )) + + proc = self.subprocess(code, str(wr), pass_fds=[wr]) + with kill_on_error(proc): + os.close(wr) + for data in datas: + buffer = bytearray(len(data)) + self.assertEqual(os.readinto(rd, buffer), len(data)) + self.assertEqual(buffer, data) + self.assertEqual(proc.wait(), 0) + def test_write(self): rd, wr = os.pipe() self.addCleanup(os.close, wr) diff --git a/Lib/test/_test_embed_set_config.py b/Lib/test/_test_embed_set_config.py deleted file mode 100644 index 7edb35da463aa0..00000000000000 --- a/Lib/test/_test_embed_set_config.py +++ /dev/null @@ -1,291 +0,0 @@ -# bpo-42260: Test _PyInterpreterState_GetConfigCopy() -# and _PyInterpreterState_SetConfig(). -# -# Test run in a subprocess since set_config(get_config()) -# does reset sys attributes to their state of the Python startup -# (before the site module is run). - -import _testinternalcapi -import sys -import unittest -from test import support -from test.support import MS_WINDOWS - - -MAX_HASH_SEED = 4294967295 - - -BOOL_OPTIONS = [ - 'isolated', - 'use_environment', - 'dev_mode', - 'install_signal_handlers', - 'use_hash_seed', - 'faulthandler', - 'import_time', - 'code_debug_ranges', - 'show_ref_count', - 'dump_refs', - 'malloc_stats', - 'parse_argv', - 'site_import', - 'warn_default_encoding', - 'inspect', - 'interactive', - 'parser_debug', - 'write_bytecode', - 'quiet', - 'user_site_directory', - 'configure_c_stdio', - 'buffered_stdio', - 'use_frozen_modules', - 'safe_path', - 'pathconfig_warnings', - 'module_search_paths_set', - 'skip_source_first_line', - '_install_importlib', - '_init_main', - '_is_python_build', -] -if MS_WINDOWS: - BOOL_OPTIONS.append('legacy_windows_stdio') - - -class SetConfigTests(unittest.TestCase): - def setUp(self): - self.old_config = _testinternalcapi.get_config() - self.sys_copy = dict(sys.__dict__) - - def tearDown(self): - _testinternalcapi.reset_path_config() - _testinternalcapi.set_config(self.old_config) - sys.__dict__.clear() - sys.__dict__.update(self.sys_copy) - - def set_config(self, **kwargs): - _testinternalcapi.set_config(self.old_config | kwargs) - - def check(self, **kwargs): - self.set_config(**kwargs) - for key, value in kwargs.items(): - self.assertEqual(getattr(sys, key), value, - (key, value)) - - def test_set_invalid(self): - invalid_uint = -1 - NULL = None - invalid_wstr = NULL - # PyWideStringList strings must be non-NULL - invalid_wstrlist = ["abc", NULL, "def"] - - type_tests = [] - value_tests = [ - # enum - ('_config_init', 0), - ('_config_init', 4), - # unsigned long - ("hash_seed", -1), - ("hash_seed", MAX_HASH_SEED + 1), - ] - - # int (unsigned) - int_options = [ - '_config_init', - 'bytes_warning', - 'optimization_level', - 'tracemalloc', - 
'verbose', - ] - int_options.extend(BOOL_OPTIONS) - for key in int_options: - value_tests.append((key, invalid_uint)) - type_tests.append((key, "abc")) - type_tests.append((key, 2.0)) - - # wchar_t* - for key in ( - 'filesystem_encoding', - 'filesystem_errors', - 'stdio_encoding', - 'stdio_errors', - 'check_hash_pycs_mode', - 'program_name', - 'platlibdir', - # optional wstr: - # 'pythonpath_env' - # 'home' - # 'pycache_prefix' - # 'run_command' - # 'run_module' - # 'run_filename' - # 'executable' - # 'prefix' - # 'exec_prefix' - # 'base_executable' - # 'base_prefix' - # 'base_exec_prefix' - ): - value_tests.append((key, invalid_wstr)) - type_tests.append((key, b'bytes')) - type_tests.append((key, 123)) - - # PyWideStringList - for key in ( - 'orig_argv', - 'argv', - 'xoptions', - 'warnoptions', - 'module_search_paths', - ): - if key != 'xoptions': - value_tests.append((key, invalid_wstrlist)) - type_tests.append((key, 123)) - type_tests.append((key, "abc")) - type_tests.append((key, [123])) - type_tests.append((key, [b"bytes"])) - - - if MS_WINDOWS: - value_tests.append(('legacy_windows_stdio', invalid_uint)) - - for exc_type, tests in ( - (ValueError, value_tests), - (TypeError, type_tests), - ): - for key, value in tests: - config = self.old_config | {key: value} - with self.subTest(key=key, value=value, exc_type=exc_type): - with self.assertRaises(exc_type): - _testinternalcapi.set_config(config) - - def test_flags(self): - bool_options = set(BOOL_OPTIONS) - for sys_attr, key, value in ( - ("debug", "parser_debug", 2), - ("inspect", "inspect", 3), - ("interactive", "interactive", 4), - ("optimize", "optimization_level", 5), - ("verbose", "verbose", 6), - ("bytes_warning", "bytes_warning", 7), - ("quiet", "quiet", 8), - ("isolated", "isolated", 9), - ): - with self.subTest(sys=sys_attr, key=key, value=value): - self.set_config(**{key: value, 'parse_argv': 0}) - if key in bool_options: - self.assertEqual(getattr(sys.flags, sys_attr), int(bool(value))) - else: - self.assertEqual(getattr(sys.flags, sys_attr), value) - - self.set_config(write_bytecode=0) - self.assertEqual(sys.flags.dont_write_bytecode, True) - self.assertEqual(sys.dont_write_bytecode, True) - - self.set_config(write_bytecode=1) - self.assertEqual(sys.flags.dont_write_bytecode, False) - self.assertEqual(sys.dont_write_bytecode, False) - - self.set_config(user_site_directory=0, isolated=0) - self.assertEqual(sys.flags.no_user_site, 1) - self.set_config(user_site_directory=1, isolated=0) - self.assertEqual(sys.flags.no_user_site, 0) - - self.set_config(site_import=0) - self.assertEqual(sys.flags.no_site, 1) - self.set_config(site_import=1) - self.assertEqual(sys.flags.no_site, 0) - - self.set_config(dev_mode=0) - self.assertEqual(sys.flags.dev_mode, False) - self.set_config(dev_mode=1) - self.assertEqual(sys.flags.dev_mode, True) - - self.set_config(use_environment=0, isolated=0) - self.assertEqual(sys.flags.ignore_environment, 1) - self.set_config(use_environment=1, isolated=0) - self.assertEqual(sys.flags.ignore_environment, 0) - - self.set_config(use_hash_seed=1, hash_seed=0) - self.assertEqual(sys.flags.hash_randomization, 0) - self.set_config(use_hash_seed=0, hash_seed=0) - self.assertEqual(sys.flags.hash_randomization, 1) - self.set_config(use_hash_seed=1, hash_seed=123) - self.assertEqual(sys.flags.hash_randomization, 1) - - if support.Py_GIL_DISABLED: - self.set_config(enable_gil=-1) - self.assertEqual(sys.flags.gil, None) - self.set_config(enable_gil=0) - self.assertEqual(sys.flags.gil, 0) - 
self.set_config(enable_gil=1) - self.assertEqual(sys.flags.gil, 1) - else: - # Builds without Py_GIL_DISABLED don't have - # PyConfig.enable_gil. sys.flags.gil is always defined to 1, for - # consistency. - self.assertEqual(sys.flags.gil, 1) - - def test_options(self): - self.check(warnoptions=[]) - self.check(warnoptions=["default", "ignore"]) - - self.set_config(xoptions={}) - self.assertEqual(sys._xoptions, {}) - self.set_config(xoptions={"dev": True, "tracemalloc": "5"}) - self.assertEqual(sys._xoptions, {"dev": True, "tracemalloc": "5"}) - - def test_pathconfig(self): - self.check( - executable='executable', - prefix="prefix", - base_prefix="base_prefix", - exec_prefix="exec_prefix", - base_exec_prefix="base_exec_prefix", - platlibdir="platlibdir") - - self.set_config(base_executable="base_executable") - self.assertEqual(sys._base_executable, "base_executable") - - # When base_xxx is NULL, value is copied from xxxx - self.set_config( - executable='executable', - prefix="prefix", - exec_prefix="exec_prefix", - base_executable=None, - base_prefix=None, - base_exec_prefix=None) - self.assertEqual(sys._base_executable, "executable") - self.assertEqual(sys.base_prefix, "prefix") - self.assertEqual(sys.base_exec_prefix, "exec_prefix") - - def test_path(self): - self.set_config(module_search_paths_set=1, - module_search_paths=['a', 'b', 'c']) - self.assertEqual(sys.path, ['a', 'b', 'c']) - - # sys.path is reset if module_search_paths_set=0 - self.set_config(module_search_paths_set=0, - module_search_paths=['new_path']) - self.assertNotEqual(sys.path, ['a', 'b', 'c']) - self.assertNotEqual(sys.path, ['new_path']) - - def test_argv(self): - self.set_config(parse_argv=0, - argv=['python_program', 'args'], - orig_argv=['orig', 'orig_args']) - self.assertEqual(sys.argv, ['python_program', 'args']) - self.assertEqual(sys.orig_argv, ['orig', 'orig_args']) - - self.set_config(parse_argv=0, - argv=[], - orig_argv=[]) - self.assertEqual(sys.argv, ['']) - self.assertEqual(sys.orig_argv, []) - - def test_pycache_prefix(self): - self.check(pycache_prefix=None) - self.check(pycache_prefix="pycache_prefix") - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index 6df09d891433ea..6b9b21cf7f6a3c 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -187,7 +187,7 @@ class C(A): def test_open(testfn): - # SSLContext.load_dh_params uses _Py_fopen_obj rather than normal open() + # SSLContext.load_dh_params uses Py_fopen() rather than normal open() try: import ssl diff --git a/Lib/test/clinic.test.c b/Lib/test/clinic.test.c index e4f146c0841188..0dfcc281985100 100644 --- a/Lib/test/clinic.test.c +++ b/Lib/test/clinic.test.c @@ -4758,7 +4758,7 @@ static PyObject * Test_cls_with_param_impl(TestObj *self, PyTypeObject *cls, int a); static PyObject * -Test_cls_with_param(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Test_cls_with_param(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -4798,7 +4798,7 @@ Test_cls_with_param(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ if (a == -1 && PyErr_Occurred()) { goto exit; } - return_value = Test_cls_with_param_impl(self, cls, a); + return_value = Test_cls_with_param_impl((TestObj *)self, cls, a); exit: return return_value; @@ -4806,7 +4806,7 @@ Test_cls_with_param(TestObj *self, PyTypeObject *cls, 
PyObject *const *args, Py_ static PyObject * Test_cls_with_param_impl(TestObj *self, PyTypeObject *cls, int a) -/*[clinic end generated code: output=83a391eea66d08f8 input=af158077bd237ef9]*/ +/*[clinic end generated code: output=7e893134a81fef92 input=af158077bd237ef9]*/ /*[clinic input] @@ -4908,18 +4908,18 @@ static PyObject * Test_cls_no_params_impl(TestObj *self, PyTypeObject *cls); static PyObject * -Test_cls_no_params(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Test_cls_no_params(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "cls_no_params() takes no arguments"); return NULL; } - return Test_cls_no_params_impl(self, cls); + return Test_cls_no_params_impl((TestObj *)self, cls); } static PyObject * Test_cls_no_params_impl(TestObj *self, PyTypeObject *cls) -/*[clinic end generated code: output=4d68b4652c144af3 input=e7e2e4e344e96a11]*/ +/*[clinic end generated code: output=8845de054449f40a input=e7e2e4e344e96a11]*/ /*[clinic input] @@ -4945,7 +4945,7 @@ Test_metho_not_default_return_converter(TestObj *self, PyObject *a) PyObject *return_value = NULL; int _return_value; - _return_value = Test_metho_not_default_return_converter_impl(self, a); + _return_value = Test_metho_not_default_return_converter_impl((TestObj *)self, a); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -4957,7 +4957,7 @@ Test_metho_not_default_return_converter(TestObj *self, PyObject *a) static int Test_metho_not_default_return_converter_impl(TestObj *self, PyObject *a) -/*[clinic end generated code: output=3350de11bd538007 input=428657129b521177]*/ +/*[clinic end generated code: output=b2cce75a7af2e6ce input=428657129b521177]*/ /*[clinic input] @@ -4983,7 +4983,7 @@ static PyObject * Test_an_metho_arg_named_arg_impl(TestObj *self, int arg); static PyObject * -Test_an_metho_arg_named_arg(TestObj *self, PyObject *arg_) +Test_an_metho_arg_named_arg(PyObject *self, PyObject *arg_) { PyObject *return_value = NULL; int arg; @@ -4992,7 +4992,7 @@ Test_an_metho_arg_named_arg(TestObj *self, PyObject *arg_) if (arg == -1 && PyErr_Occurred()) { goto exit; } - return_value = Test_an_metho_arg_named_arg_impl(self, arg); + return_value = Test_an_metho_arg_named_arg_impl((TestObj *)self, arg); exit: return return_value; @@ -5000,7 +5000,7 @@ Test_an_metho_arg_named_arg(TestObj *self, PyObject *arg_) static PyObject * Test_an_metho_arg_named_arg_impl(TestObj *self, int arg) -/*[clinic end generated code: output=9f04de4a62287e28 input=2a53a57cf5624f95]*/ +/*[clinic end generated code: output=38554f09950d07e7 input=2a53a57cf5624f95]*/ /*[clinic input] @@ -5289,14 +5289,14 @@ static PyObject * Test_meth_coexist_impl(TestObj *self); static PyObject * -Test_meth_coexist(TestObj *self, PyObject *Py_UNUSED(ignored)) +Test_meth_coexist(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return Test_meth_coexist_impl(self); + return Test_meth_coexist_impl((TestObj *)self); } static PyObject * Test_meth_coexist_impl(TestObj *self) -/*[clinic end generated code: output=808a293d0cd27439 input=2a1d75b5e6fec6dd]*/ +/*[clinic end generated code: output=7edf4e95b29f06fa input=2a1d75b5e6fec6dd]*/ /*[clinic input] @getter @@ -5317,14 +5317,14 @@ static PyObject * Test_property_get_impl(TestObj *self); static PyObject * -Test_property_get(TestObj *self, void *Py_UNUSED(context)) +Test_property_get(PyObject *self, void *Py_UNUSED(context)) { - return 
Test_property_get_impl(self); + return Test_property_get_impl((TestObj *)self); } static PyObject * Test_property_get_impl(TestObj *self) -/*[clinic end generated code: output=7cadd0f539805266 input=2d92b3449fbc7d2b]*/ +/*[clinic end generated code: output=b38d68abd3466a6e input=2d92b3449fbc7d2b]*/ /*[clinic input] @setter @@ -5345,18 +5345,18 @@ static int Test_property_set_impl(TestObj *self, PyObject *value); static int -Test_property_set(TestObj *self, PyObject *value, void *Py_UNUSED(context)) +Test_property_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; - return_value = Test_property_set_impl(self, value); + return_value = Test_property_set_impl((TestObj *)self, value); return return_value; } static int Test_property_set_impl(TestObj *self, PyObject *value) -/*[clinic end generated code: output=e4342fe9bb1d7817 input=3bc3f46a23c83a88]*/ +/*[clinic end generated code: output=49f925ab2a33b637 input=3bc3f46a23c83a88]*/ /*[clinic input] @setter @@ -5377,18 +5377,18 @@ static int Test_setter_first_with_docstr_set_impl(TestObj *self, PyObject *value); static int -Test_setter_first_with_docstr_set(TestObj *self, PyObject *value, void *Py_UNUSED(context)) +Test_setter_first_with_docstr_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; - return_value = Test_setter_first_with_docstr_set_impl(self, value); + return_value = Test_setter_first_with_docstr_set_impl((TestObj *)self, value); return return_value; } static int Test_setter_first_with_docstr_set_impl(TestObj *self, PyObject *value) -/*[clinic end generated code: output=e4d76b558a4061db input=31a045ce11bbe961]*/ +/*[clinic end generated code: output=5aaf44373c0af545 input=31a045ce11bbe961]*/ /*[clinic input] @getter @@ -5418,14 +5418,14 @@ static PyObject * Test_setter_first_with_docstr_get_impl(TestObj *self); static PyObject * -Test_setter_first_with_docstr_get(TestObj *self, void *Py_UNUSED(context)) +Test_setter_first_with_docstr_get(PyObject *self, void *Py_UNUSED(context)) { - return Test_setter_first_with_docstr_get_impl(self); + return Test_setter_first_with_docstr_get_impl((TestObj *)self); } static PyObject * Test_setter_first_with_docstr_get_impl(TestObj *self) -/*[clinic end generated code: output=749a30266f9fb443 input=10af4e43b3cb34dc]*/ +/*[clinic end generated code: output=fe6e3aa844a24920 input=10af4e43b3cb34dc]*/ /*[clinic input] output push @@ -5708,7 +5708,7 @@ Test__pyarg_parsestackandkeywords_impl(TestObj *self, PyTypeObject *cls, Py_ssize_t key_length); static PyObject * -Test__pyarg_parsestackandkeywords(TestObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Test__pyarg_parsestackandkeywords(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -5731,7 +5731,7 @@ Test__pyarg_parsestackandkeywords(TestObj *self, PyTypeObject *cls, PyObject *co &key, &key_length)) { goto exit; } - return_value = Test__pyarg_parsestackandkeywords_impl(self, cls, key, key_length); + return_value = Test__pyarg_parsestackandkeywords_impl((TestObj *)self, cls, key, key_length); exit: return return_value; @@ -5741,7 +5741,7 @@ static PyObject * Test__pyarg_parsestackandkeywords_impl(TestObj *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length) -/*[clinic end generated code: output=4fda8a7f2547137c input=fc72ef4b4cfafabc]*/ +/*[clinic end generated code: output=7060c213d7b8200e 
input=fc72ef4b4cfafabc]*/ /*[clinic input] diff --git a/Lib/test/libregrtest/single.py b/Lib/test/libregrtest/single.py index 0e174f82abed28..54df688bbc470e 100644 --- a/Lib/test/libregrtest/single.py +++ b/Lib/test/libregrtest/single.py @@ -162,8 +162,8 @@ def test_func(): def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, display_failure: bool = True) -> None: # Handle exceptions, detect environment changes. - ansi = get_colors() - red, reset, yellow = ansi.RED, ansi.RESET, ansi.YELLOW + stdout = get_colors(file=sys.stdout) + stderr = get_colors(file=sys.stderr) # Reset the environment_altered flag to detect if a test altered # the environment @@ -184,18 +184,24 @@ def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, _load_run_test(result, runtests) except support.ResourceDenied as exc: if not quiet and not pgo: - print(f"{yellow}{test_name} skipped -- {exc}{reset}", flush=True) + print( + f"{stdout.YELLOW}{test_name} skipped -- {exc}{stdout.RESET}", + flush=True, + ) result.state = State.RESOURCE_DENIED return except unittest.SkipTest as exc: if not quiet and not pgo: - print(f"{yellow}{test_name} skipped -- {exc}{reset}", flush=True) + print( + f"{stdout.YELLOW}{test_name} skipped -- {exc}{stdout.RESET}", + flush=True, + ) result.state = State.SKIPPED return except support.TestFailedWithDetails as exc: - msg = f"{red}test {test_name} failed{reset}" + msg = f"{stderr.RED}test {test_name} failed{stderr.RESET}" if display_failure: - msg = f"{red}{msg} -- {exc}{reset}" + msg = f"{stderr.RED}{msg} -- {exc}{stderr.RESET}" print(msg, file=sys.stderr, flush=True) result.state = State.FAILED result.errors = exc.errors @@ -203,9 +209,9 @@ def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, result.stats = exc.stats return except support.TestFailed as exc: - msg = f"{red}test {test_name} failed{reset}" + msg = f"{stderr.RED}test {test_name} failed{stderr.RESET}" if display_failure: - msg = f"{red}{msg} -- {exc}{reset}" + msg = f"{stderr.RED}{msg} -- {exc}{stderr.RESET}" print(msg, file=sys.stderr, flush=True) result.state = State.FAILED result.stats = exc.stats @@ -220,8 +226,11 @@ def _runtest_env_changed_exc(result: TestResult, runtests: RunTests, except: if not pgo: msg = traceback.format_exc() - print(f"{red}test {test_name} crashed -- {msg}{reset}", - file=sys.stderr, flush=True) + print( + f"{stderr.RED}test {test_name} crashed -- {msg}{stderr.RESET}", + file=sys.stderr, + flush=True, + ) result.state = State.UNCAUGHT_EXC return @@ -303,7 +312,7 @@ def run_single_test(test_name: TestName, runtests: RunTests) -> TestResult: If runtests.use_junit, xml_data is a list containing each generated testsuite element. 
""" - ansi = get_colors() + ansi = get_colors(file=sys.stderr) red, reset, yellow = ansi.BOLD_RED, ansi.RESET, ansi.YELLOW start_time = time.perf_counter() diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index ee9520a8838625..89f2a6b916bfc2 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -61,6 +61,7 @@ "without_optimizer", "force_not_colorized", "force_not_colorized_test_class", + "make_clean_env", "BrokenIter", "in_systemd_nspawn_sync_suppressed", "run_no_yield_async_fn", "run_yielding_async_fn", "async_yield", @@ -505,10 +506,10 @@ def requires_lzma(reason='requires lzma'): def has_no_debug_ranges(): try: - import _testinternalcapi + import _testcapi except ImportError: raise unittest.SkipTest("_testinternalcapi required") - config = _testinternalcapi.get_config() + return not _testcapi.config_get('code_debug_ranges') return not bool(config['code_debug_ranges']) def requires_debug_ranges(reason='requires co_positions / debug_ranges'): @@ -2839,7 +2840,7 @@ def no_color(): from .os_helper import EnvironmentVarGuard with ( - swap_attr(_colorize, "can_colorize", lambda: False), + swap_attr(_colorize, "can_colorize", lambda file=None: False), EnvironmentVarGuard() as env, ): for var in {"FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS"}: @@ -2871,6 +2872,16 @@ def new_setUpClass(cls): return cls +def make_clean_env() -> dict[str, str]: + clean_env = os.environ.copy() + for k in clean_env.copy(): + if k.startswith("PYTHON"): + clean_env.pop(k) + clean_env.pop("FORCE_COLOR", None) + clean_env.pop("NO_COLOR", None) + return clean_env + + def initialized_with_pyrepl(): """Detect whether PyREPL was used during Python initialization.""" # If the main module has a __file__ attribute it's a Python module, which means PyREPL. diff --git a/Lib/test/support/os_helper.py b/Lib/test/support/os_helper.py index 8071c248b9b67e..15dcdc9b1fddfb 100644 --- a/Lib/test/support/os_helper.py +++ b/Lib/test/support/os_helper.py @@ -294,6 +294,33 @@ def skip_unless_working_chmod(test): return test if ok else unittest.skip(msg)(test) +@contextlib.contextmanager +def save_mode(path, *, quiet=False): + """Context manager that restores the mode (permissions) of *path* on exit. + + Arguments: + + path: Path of the file to restore the mode of. + + quiet: if False (the default), the context manager raises an exception + on error. Otherwise, it issues only a warning and keeps the current + working directory the same. + + """ + saved_mode = os.stat(path) + try: + yield + finally: + try: + os.chmod(path, saved_mode.st_mode) + except OSError as exc: + if not quiet: + raise + warnings.warn(f'tests may fail, unable to restore the mode of ' + f'{path!r} to {saved_mode.st_mode}: {exc}', + RuntimeWarning, stacklevel=3) + + # Check whether the current effective user has the capability to override # DAC (discretionary access control). Typically user root is able to # bypass file read, write, and execute permission checks. 
The capability diff --git a/Lib/test/test__colorize.py b/Lib/test/test__colorize.py index 1871775fa205a2..ff15d83cb078fe 100644 --- a/Lib/test/test__colorize.py +++ b/Lib/test/test__colorize.py @@ -1,68 +1,132 @@ import contextlib +import io import sys import unittest import unittest.mock import _colorize -from test.support import force_not_colorized +from test.support.os_helper import EnvironmentVarGuard -ORIGINAL_CAN_COLORIZE = _colorize.can_colorize +@contextlib.contextmanager +def clear_env(): + with EnvironmentVarGuard() as mock_env: + for var in "FORCE_COLOR", "NO_COLOR", "PYTHON_COLORS": + mock_env.unset(var) + yield mock_env -def setUpModule(): - _colorize.can_colorize = lambda: False - -def tearDownModule(): - _colorize.can_colorize = ORIGINAL_CAN_COLORIZE +def supports_virtual_terminal(): + if sys.platform == "win32": + return unittest.mock.patch("nt._supports_virtual_terminal", return_value=True) + else: + return contextlib.nullcontext() class TestColorizeFunction(unittest.TestCase): - @force_not_colorized def test_colorized_detection_checks_for_environment_variables(self): - flags = unittest.mock.MagicMock(ignore_environment=False) + def check(env, fallback, expected): + with (self.subTest(env=env, fallback=fallback), + clear_env() as mock_env): + mock_env.update(env) + isatty_mock.return_value = fallback + stdout_mock.isatty.return_value = fallback + self.assertEqual(_colorize.can_colorize(), expected) + with (unittest.mock.patch("os.isatty") as isatty_mock, + unittest.mock.patch("sys.stdout") as stdout_mock, + supports_virtual_terminal()): + stdout_mock.fileno.return_value = 1 + + for fallback in False, True: + check({}, fallback, fallback) + check({'TERM': 'dumb'}, fallback, False) + check({'TERM': 'xterm'}, fallback, fallback) + check({'TERM': ''}, fallback, fallback) + check({'FORCE_COLOR': '1'}, fallback, True) + check({'FORCE_COLOR': '0'}, fallback, True) + check({'NO_COLOR': '1'}, fallback, False) + check({'NO_COLOR': '0'}, fallback, False) + + check({'TERM': 'dumb', 'FORCE_COLOR': '1'}, False, True) + check({'FORCE_COLOR': '1', 'NO_COLOR': '1'}, True, False) + + for ignore_environment in False, True: + # Simulate running with or without `-E`. 
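                # sys.flags.ignore_environment is what the -E/-I command-line
                # options set; when it is true, PYTHON* environment variables
                # such as PYTHON_COLORS must be ignored, which the expectations
                # below encode as `not ignore_environment`.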
+ flags = unittest.mock.MagicMock(ignore_environment=ignore_environment) + with unittest.mock.patch("sys.flags", flags): + check({'PYTHON_COLORS': '1'}, True, True) + check({'PYTHON_COLORS': '1'}, False, not ignore_environment) + check({'PYTHON_COLORS': '0'}, True, ignore_environment) + check({'PYTHON_COLORS': '0'}, False, False) + for fallback in False, True: + check({'PYTHON_COLORS': 'x'}, fallback, fallback) + check({'PYTHON_COLORS': ''}, fallback, fallback) + + check({'TERM': 'dumb', 'PYTHON_COLORS': '1'}, False, not ignore_environment) + check({'NO_COLOR': '1', 'PYTHON_COLORS': '1'}, False, not ignore_environment) + check({'FORCE_COLOR': '1', 'PYTHON_COLORS': '0'}, True, ignore_environment) + + @unittest.skipUnless(sys.platform == "win32", "requires Windows") + def test_colorized_detection_checks_on_windows(self): + with (clear_env(), + unittest.mock.patch("os.isatty") as isatty_mock, + unittest.mock.patch("sys.stdout") as stdout_mock, + supports_virtual_terminal() as vt_mock): + stdout_mock.fileno.return_value = 1 + isatty_mock.return_value = True + stdout_mock.isatty.return_value = True + + vt_mock.return_value = True + self.assertEqual(_colorize.can_colorize(), True) + vt_mock.return_value = False + self.assertEqual(_colorize.can_colorize(), False) + import nt + del nt._supports_virtual_terminal + self.assertEqual(_colorize.can_colorize(), False) + + def test_colorized_detection_checks_for_std_streams(self): + with (clear_env(), + unittest.mock.patch("os.isatty") as isatty_mock, + unittest.mock.patch("sys.stdout") as stdout_mock, unittest.mock.patch("sys.stderr") as stderr_mock, - unittest.mock.patch("sys.flags", flags), - unittest.mock.patch("_colorize.can_colorize", ORIGINAL_CAN_COLORIZE), - (unittest.mock.patch("nt._supports_virtual_terminal", return_value=False) - if sys.platform == "win32" else - contextlib.nullcontext()) as vt_mock): + supports_virtual_terminal()): + stdout_mock.fileno.return_value = 1 + stderr_mock.fileno.side_effect = ZeroDivisionError + stderr_mock.isatty.side_effect = ZeroDivisionError isatty_mock.return_value = True - stderr_mock.fileno.return_value = 2 - stderr_mock.isatty.return_value = True - with unittest.mock.patch("os.environ", {'TERM': 'dumb'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", {'PYTHON_COLORS': '1'}): - self.assertEqual(_colorize.can_colorize(), True) - with unittest.mock.patch("os.environ", {'PYTHON_COLORS': '0'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", {'NO_COLOR': '1'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", - {'NO_COLOR': '1', "PYTHON_COLORS": '1'}): - self.assertEqual(_colorize.can_colorize(), True) - with unittest.mock.patch("os.environ", {'FORCE_COLOR': '1'}): - self.assertEqual(_colorize.can_colorize(), True) - with unittest.mock.patch("os.environ", - {'FORCE_COLOR': '1', 'NO_COLOR': '1'}): - self.assertEqual(_colorize.can_colorize(), False) - with unittest.mock.patch("os.environ", - {'FORCE_COLOR': '1', "PYTHON_COLORS": '0'}): - self.assertEqual(_colorize.can_colorize(), False) - - with unittest.mock.patch("os.environ", {}): - if sys.platform == "win32": - self.assertEqual(_colorize.can_colorize(), False) - - vt_mock.return_value = True - self.assertEqual(_colorize.can_colorize(), True) - else: - self.assertEqual(_colorize.can_colorize(), True) + stdout_mock.isatty.return_value = True + self.assertEqual(_colorize.can_colorize(), True) + + isatty_mock.return_value = False 
+ stdout_mock.isatty.return_value = False + self.assertEqual(_colorize.can_colorize(), False) + def test_colorized_detection_checks_for_file(self): + with clear_env(), supports_virtual_terminal(): + + with unittest.mock.patch("os.isatty") as isatty_mock: + file = unittest.mock.MagicMock() + file.fileno.return_value = 1 + isatty_mock.return_value = True + self.assertEqual(_colorize.can_colorize(file=file), True) isatty_mock.return_value = False - stderr_mock.isatty.return_value = False - self.assertEqual(_colorize.can_colorize(), False) + self.assertEqual(_colorize.can_colorize(file=file), False) + + # No file.fileno. + with unittest.mock.patch("os.isatty", side_effect=ZeroDivisionError): + file = unittest.mock.MagicMock(spec=['isatty']) + file.isatty.return_value = True + self.assertEqual(_colorize.can_colorize(file=file), False) + + # file.fileno() raises io.UnsupportedOperation. + with unittest.mock.patch("os.isatty", side_effect=ZeroDivisionError): + file = unittest.mock.MagicMock() + file.fileno.side_effect = io.UnsupportedOperation + file.isatty.return_value = True + self.assertEqual(_colorize.can_colorize(file=file), True) + file.isatty.return_value = False + self.assertEqual(_colorize.can_colorize(file=file), False) if __name__ == "__main__": diff --git a/Lib/test/test__opcode.py b/Lib/test/test__opcode.py index d5cf014d40daf8..95e09500df51d0 100644 --- a/Lib/test/test__opcode.py +++ b/Lib/test/test__opcode.py @@ -38,6 +38,13 @@ def test_is_valid(self): opcodes = [dis.opmap[opname] for opname in names] self.check_bool_function_result(_opcode.is_valid, opcodes, True) + def test_opmaps(self): + def check_roundtrip(name, map): + return self.assertEqual(opcode.opname[map[name]], name) + + check_roundtrip('BINARY_OP', opcode.opmap) + check_roundtrip('BINARY_OP_ADD_INT', opcode._specialized_opmap) + def test_oplists(self): def check_function(self, func, expected): for op in [-10, 520]: diff --git a/Lib/test/test_abc.py b/Lib/test/test_abc.py index 5ce57cc209ea85..e90a8dc617c094 100644 --- a/Lib/test/test_abc.py +++ b/Lib/test/test_abc.py @@ -20,7 +20,7 @@ def test_abstractproperty_basics(self): def foo(self): pass self.assertTrue(foo.__isabstractmethod__) def bar(self): pass - self.assertFalse(hasattr(bar, "__isabstractmethod__")) + self.assertNotHasAttr(bar, "__isabstractmethod__") class C(metaclass=abc_ABCMeta): @abc.abstractproperty @@ -89,7 +89,7 @@ def test_abstractmethod_basics(self): def foo(self): pass self.assertTrue(foo.__isabstractmethod__) def bar(self): pass - self.assertFalse(hasattr(bar, "__isabstractmethod__")) + self.assertNotHasAttr(bar, "__isabstractmethod__") def test_abstractproperty_basics(self): @property @@ -276,21 +276,21 @@ class A(metaclass=abc_ABCMeta): class B(object): pass b = B() - self.assertFalse(issubclass(B, A)) - self.assertFalse(issubclass(B, (A,))) + self.assertNotIsSubclass(B, A) + self.assertNotIsSubclass(B, (A,)) self.assertNotIsInstance(b, A) self.assertNotIsInstance(b, (A,)) B1 = A.register(B) - self.assertTrue(issubclass(B, A)) - self.assertTrue(issubclass(B, (A,))) + self.assertIsSubclass(B, A) + self.assertIsSubclass(B, (A,)) self.assertIsInstance(b, A) self.assertIsInstance(b, (A,)) self.assertIs(B1, B) class C(B): pass c = C() - self.assertTrue(issubclass(C, A)) - self.assertTrue(issubclass(C, (A,))) + self.assertIsSubclass(C, A) + self.assertIsSubclass(C, (A,)) self.assertIsInstance(c, A) self.assertIsInstance(c, (A,)) @@ -301,16 +301,16 @@ class A(metaclass=abc_ABCMeta): class B(object): pass b = B() - self.assertTrue(issubclass(B, A)) - 
self.assertTrue(issubclass(B, (A,))) + self.assertIsSubclass(B, A) + self.assertIsSubclass(B, (A,)) self.assertIsInstance(b, A) self.assertIsInstance(b, (A,)) @A.register class C(B): pass c = C() - self.assertTrue(issubclass(C, A)) - self.assertTrue(issubclass(C, (A,))) + self.assertIsSubclass(C, A) + self.assertIsSubclass(C, (A,)) self.assertIsInstance(c, A) self.assertIsInstance(c, (A,)) self.assertIs(C, A.register(C)) @@ -321,14 +321,14 @@ class A(metaclass=abc_ABCMeta): class B: pass b = B() - self.assertFalse(isinstance(b, A)) - self.assertFalse(isinstance(b, (A,))) + self.assertNotIsInstance(b, A) + self.assertNotIsInstance(b, (A,)) token_old = abc_get_cache_token() A.register(B) token_new = abc_get_cache_token() self.assertGreater(token_new, token_old) - self.assertTrue(isinstance(b, A)) - self.assertTrue(isinstance(b, (A,))) + self.assertIsInstance(b, A) + self.assertIsInstance(b, (A,)) def test_registration_builtins(self): class A(metaclass=abc_ABCMeta): @@ -336,18 +336,18 @@ class A(metaclass=abc_ABCMeta): A.register(int) self.assertIsInstance(42, A) self.assertIsInstance(42, (A,)) - self.assertTrue(issubclass(int, A)) - self.assertTrue(issubclass(int, (A,))) + self.assertIsSubclass(int, A) + self.assertIsSubclass(int, (A,)) class B(A): pass B.register(str) class C(str): pass self.assertIsInstance("", A) self.assertIsInstance("", (A,)) - self.assertTrue(issubclass(str, A)) - self.assertTrue(issubclass(str, (A,))) - self.assertTrue(issubclass(C, A)) - self.assertTrue(issubclass(C, (A,))) + self.assertIsSubclass(str, A) + self.assertIsSubclass(str, (A,)) + self.assertIsSubclass(C, A) + self.assertIsSubclass(C, (A,)) def test_registration_edge_cases(self): class A(metaclass=abc_ABCMeta): @@ -375,39 +375,39 @@ class A(metaclass=abc_ABCMeta): def test_registration_transitiveness(self): class A(metaclass=abc_ABCMeta): pass - self.assertTrue(issubclass(A, A)) - self.assertTrue(issubclass(A, (A,))) + self.assertIsSubclass(A, A) + self.assertIsSubclass(A, (A,)) class B(metaclass=abc_ABCMeta): pass - self.assertFalse(issubclass(A, B)) - self.assertFalse(issubclass(A, (B,))) - self.assertFalse(issubclass(B, A)) - self.assertFalse(issubclass(B, (A,))) + self.assertNotIsSubclass(A, B) + self.assertNotIsSubclass(A, (B,)) + self.assertNotIsSubclass(B, A) + self.assertNotIsSubclass(B, (A,)) class C(metaclass=abc_ABCMeta): pass A.register(B) class B1(B): pass - self.assertTrue(issubclass(B1, A)) - self.assertTrue(issubclass(B1, (A,))) + self.assertIsSubclass(B1, A) + self.assertIsSubclass(B1, (A,)) class C1(C): pass B1.register(C1) - self.assertFalse(issubclass(C, B)) - self.assertFalse(issubclass(C, (B,))) - self.assertFalse(issubclass(C, B1)) - self.assertFalse(issubclass(C, (B1,))) - self.assertTrue(issubclass(C1, A)) - self.assertTrue(issubclass(C1, (A,))) - self.assertTrue(issubclass(C1, B)) - self.assertTrue(issubclass(C1, (B,))) - self.assertTrue(issubclass(C1, B1)) - self.assertTrue(issubclass(C1, (B1,))) + self.assertNotIsSubclass(C, B) + self.assertNotIsSubclass(C, (B,)) + self.assertNotIsSubclass(C, B1) + self.assertNotIsSubclass(C, (B1,)) + self.assertIsSubclass(C1, A) + self.assertIsSubclass(C1, (A,)) + self.assertIsSubclass(C1, B) + self.assertIsSubclass(C1, (B,)) + self.assertIsSubclass(C1, B1) + self.assertIsSubclass(C1, (B1,)) C1.register(int) class MyInt(int): pass - self.assertTrue(issubclass(MyInt, A)) - self.assertTrue(issubclass(MyInt, (A,))) + self.assertIsSubclass(MyInt, A) + self.assertIsSubclass(MyInt, (A,)) self.assertIsInstance(42, A) self.assertIsInstance(42, (A,)) @@ 
-467,16 +467,16 @@ def __subclasshook__(cls, C): if cls is A: return 'foo' in C.__dict__ return NotImplemented - self.assertFalse(issubclass(A, A)) - self.assertFalse(issubclass(A, (A,))) + self.assertNotIsSubclass(A, A) + self.assertNotIsSubclass(A, (A,)) class B: foo = 42 - self.assertTrue(issubclass(B, A)) - self.assertTrue(issubclass(B, (A,))) + self.assertIsSubclass(B, A) + self.assertIsSubclass(B, (A,)) class C: spam = 42 - self.assertFalse(issubclass(C, A)) - self.assertFalse(issubclass(C, (A,))) + self.assertNotIsSubclass(C, A) + self.assertNotIsSubclass(C, (A,)) def test_all_new_methods_are_called(self): class A(metaclass=abc_ABCMeta): @@ -493,7 +493,7 @@ class C(A, B): self.assertEqual(B.counter, 1) def test_ABC_has___slots__(self): - self.assertTrue(hasattr(abc.ABC, '__slots__')) + self.assertHasAttr(abc.ABC, '__slots__') def test_tricky_new_works(self): def with_metaclass(meta, *bases): @@ -515,7 +515,7 @@ def foo(self): del A.foo self.assertEqual(A.__abstractmethods__, {'foo'}) - self.assertFalse(hasattr(A, 'foo')) + self.assertNotHasAttr(A, 'foo') abc.update_abstractmethods(A) @@ -588,7 +588,7 @@ def updated_foo(self): A.foo = updated_foo abc.update_abstractmethods(A) A() - self.assertFalse(hasattr(A, '__abstractmethods__')) + self.assertNotHasAttr(A, '__abstractmethods__') def test_update_del_implementation(self): class A(metaclass=abc_ABCMeta): diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py index f621f343eb062a..58ea89c4fac833 100755 --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -1665,5 +1665,13 @@ def test_tolist(self, size): self.assertEqual(ls[:8], list(example[:8])) self.assertEqual(ls[-8:], list(example[-8:])) + def test_gh_128961(self): + a = array.array('i') + it = iter(a) + list(it) + it.__setstate__(0) + self.assertRaises(StopIteration, next, it) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 1e063c1352ecb9..8cf1f6891faf97 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -833,8 +833,8 @@ async def test(): loop.close() def test_create_named_task_with_custom_factory(self): - def task_factory(loop, coro): - return asyncio.Task(coro, loop=loop) + def task_factory(loop, coro, **kwargs): + return asyncio.Task(coro, loop=loop, **kwargs) async def test(): pass @@ -1345,7 +1345,7 @@ def getaddrinfo_task(*args, **kwds): with self.assertRaises(OSError) as cm: self.loop.run_until_complete(coro) - self.assertTrue(str(cm.exception).startswith('Multiple exceptions: ')) + self.assertStartsWith(str(cm.exception), 'Multiple exceptions: ') self.assertTrue(m_socket.socket.return_value.close.called) coro = self.loop.create_connection( diff --git a/Lib/test/test_asyncio/test_eager_task_factory.py b/Lib/test/test_asyncio/test_eager_task_factory.py index dcf9ff716ad399..10450c11b68279 100644 --- a/Lib/test/test_asyncio/test_eager_task_factory.py +++ b/Lib/test/test_asyncio/test_eager_task_factory.py @@ -302,6 +302,18 @@ async def run(): self.run_coro(run()) + def test_name(self): + name = None + async def coro(): + nonlocal name + name = asyncio.current_task().get_name() + + async def main(): + task = self.loop.create_task(coro(), name="test name") + self.assertEqual(name, "test name") + await task + + self.run_coro(coro()) class AsyncTaskCounter: def __init__(self, loop, *, task_class, eager): diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 
ed75b909317357..ada049e9c7d387 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -2184,7 +2184,7 @@ def test_subprocess_stderr(self): transp.close() self.assertEqual(b'OUT:test', proto.data[1]) - self.assertTrue(proto.data[2].startswith(b'ERR:test'), proto.data[2]) + self.assertStartsWith(proto.data[2], b'ERR:test') self.assertEqual(0, proto.returncode) @support.requires_subprocess() @@ -2206,8 +2206,7 @@ def test_subprocess_stderr_redirect_to_stdout(self): stdin.write(b'test') self.loop.run_until_complete(proto.completed) - self.assertTrue(proto.data[1].startswith(b'OUT:testERR:test'), - proto.data[1]) + self.assertStartsWith(proto.data[1], b'OUT:testERR:test') self.assertEqual(b'', proto.data[2]) transp.close() diff --git a/Lib/test/test_asyncio/test_free_threading.py b/Lib/test/test_asyncio/test_free_threading.py index 8f4bba5f3b97d9..6da398e77e7797 100644 --- a/Lib/test/test_asyncio/test_free_threading.py +++ b/Lib/test/test_asyncio/test_free_threading.py @@ -1,4 +1,5 @@ import asyncio +import threading import unittest from threading import Thread from unittest import TestCase @@ -58,6 +59,42 @@ def runner(): with threading_helper.start_threads(threads): pass + def test_all_tasks_different_thread(self) -> None: + loop = None + started = threading.Event() + done = threading.Event() # used for main task not finishing early + async def coro(): + await asyncio.Future() + + lock = threading.Lock() + tasks = set() + + async def main(): + nonlocal tasks, loop + loop = asyncio.get_running_loop() + started.set() + for i in range(1000): + with lock: + asyncio.create_task(coro()) + tasks = self.all_tasks(loop) + done.wait() + + runner = threading.Thread(target=lambda: asyncio.run(main())) + + def check(): + started.wait() + with lock: + self.assertSetEqual(tasks & self.all_tasks(loop), tasks) + + threads = [threading.Thread(target=check) for _ in range(10)] + runner.start() + + with threading_helper.start_threads(threads): + pass + + done.set() + runner.join() + def test_run_coroutine_threadsafe(self) -> None: results = [] @@ -112,8 +149,8 @@ class TestPyFreeThreading(TestFreeThreading, TestCase): all_tasks = staticmethod(asyncio.tasks._py_all_tasks) current_task = staticmethod(asyncio.tasks._py_current_task) - def factory(self, loop, coro, context=None): - return asyncio.tasks._PyTask(coro, loop=loop, context=context) + def factory(self, loop, coro, **kwargs): + return asyncio.tasks._PyTask(coro, loop=loop, **kwargs) @unittest.skipUnless(hasattr(asyncio.tasks, "_c_all_tasks"), "requires _asyncio") @@ -121,16 +158,16 @@ class TestCFreeThreading(TestFreeThreading, TestCase): all_tasks = staticmethod(getattr(asyncio.tasks, "_c_all_tasks", None)) current_task = staticmethod(getattr(asyncio.tasks, "_c_current_task", None)) - def factory(self, loop, coro, context=None): - return asyncio.tasks._CTask(coro, loop=loop, context=context) + def factory(self, loop, coro, **kwargs): + return asyncio.tasks._CTask(coro, loop=loop, **kwargs) class TestEagerPyFreeThreading(TestPyFreeThreading): - def factory(self, loop, coro, context=None): - return asyncio.tasks._PyTask(coro, loop=loop, context=context, eager_start=True) + def factory(self, loop, coro, eager_start=True, **kwargs): + return asyncio.tasks._PyTask(coro, loop=loop, **kwargs, eager_start=eager_start) @unittest.skipUnless(hasattr(asyncio.tasks, "_c_all_tasks"), "requires _asyncio") class TestEagerCFreeThreading(TestCFreeThreading, TestCase): - def factory(self, loop, coro, context=None): - return 
asyncio.tasks._CTask(coro, loop=loop, context=context, eager_start=True) + def factory(self, loop, coro, eager_start=True, **kwargs): + return asyncio.tasks._CTask(coro, loop=loop, **kwargs, eager_start=eager_start) diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 84b44011b9a844..01d6230e6dd9a3 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -242,7 +242,7 @@ def test_uninitialized(self): def test_future_cancel_message_getter(self): f = self._new_future(loop=self.loop) - self.assertTrue(hasattr(f, '_cancel_message')) + self.assertHasAttr(f, '_cancel_message') self.assertEqual(f._cancel_message, None) f.cancel('my message') diff --git a/Lib/test/test_asyncio/test_graph.py b/Lib/test/test_asyncio/test_graph.py new file mode 100644 index 00000000000000..fd2160d4ca3137 --- /dev/null +++ b/Lib/test/test_asyncio/test_graph.py @@ -0,0 +1,436 @@ +import asyncio +import io +import unittest + + +# To prevent a warning "test altered the execution environment" +def tearDownModule(): + asyncio._set_event_loop_policy(None) + + +def capture_test_stack(*, fut=None, depth=1): + + def walk(s): + ret = [ + (f"T<{n}>" if '-' not in (n := s.future.get_name()) else 'T') + if isinstance(s.future, asyncio.Task) else 'F' + ] + + ret.append( + [ + ( + f"s {entry.frame.f_code.co_name}" + if entry.frame.f_generator is None else + ( + f"a {entry.frame.f_generator.cr_code.co_name}" + if hasattr(entry.frame.f_generator, 'cr_code') else + f"ag {entry.frame.f_generator.ag_code.co_name}" + ) + ) for entry in s.call_stack + ] + ) + + ret.append( + sorted([ + walk(ab) for ab in s.awaited_by + ], key=lambda entry: entry[0]) + ) + + return ret + + buf = io.StringIO() + asyncio.print_call_graph(fut, file=buf, depth=depth+1) + + stack = asyncio.capture_call_graph(fut, depth=depth) + return walk(stack), buf.getvalue() + + +class CallStackTestBase: + + async def test_stack_tgroup(self): + + stack_for_c5 = None + + def c5(): + nonlocal stack_for_c5 + stack_for_c5 = capture_test_stack(depth=2) + + async def c4(): + await asyncio.sleep(0) + c5() + + async def c3(): + await c4() + + async def c2(): + await c3() + + async def c1(task): + await task + + async def main(): + async with asyncio.TaskGroup() as tg: + task = tg.create_task(c2(), name="c2_root") + tg.create_task(c1(task), name="sub_main_1") + tg.create_task(c1(task), name="sub_main_2") + + await main() + + self.assertEqual(stack_for_c5[0], [ + # task name + 'T', + # call stack + ['s c5', 'a c4', 'a c3', 'a c2'], + # awaited by + [ + ['T', + ['a _aexit', 'a __aexit__', 'a main', 'a test_stack_tgroup'], [] + ], + ['T', + ['a c1'], + [ + ['T', + ['a _aexit', 'a __aexit__', 'a main', 'a test_stack_tgroup'], [] + ] + ] + ], + ['T', + ['a c1'], + [ + ['T', + ['a _aexit', 'a __aexit__', 'a main', 'a test_stack_tgroup'], [] + ] + ] + ] + ] + ]) + + self.assertIn( + ' async CallStackTestBase.test_stack_tgroup()', + stack_for_c5[1]) + + + async def test_stack_async_gen(self): + + stack_for_gen_nested_call = None + + async def gen_nested_call(): + nonlocal stack_for_gen_nested_call + stack_for_gen_nested_call = capture_test_stack() + + async def gen(): + for num in range(2): + yield num + if num == 1: + await gen_nested_call() + + async def main(): + async for el in gen(): + pass + + await main() + + self.assertEqual(stack_for_gen_nested_call[0], [ + 'T', + [ + 's capture_test_stack', + 'a gen_nested_call', + 'ag gen', + 'a main', + 'a test_stack_async_gen' + ], + [] + ]) + + self.assertIn( + 
'async generator CallStackTestBase.test_stack_async_gen..gen()', + stack_for_gen_nested_call[1]) + + async def test_stack_gather(self): + + stack_for_deep = None + + async def deep(): + await asyncio.sleep(0) + nonlocal stack_for_deep + stack_for_deep = capture_test_stack() + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def c2(): + await asyncio.sleep(0) + + async def main(): + await asyncio.gather(c1(), c2()) + + await main() + + self.assertEqual(stack_for_deep[0], [ + 'T', + ['s capture_test_stack', 'a deep', 'a c1'], + [ + ['T', ['a main', 'a test_stack_gather'], []] + ] + ]) + + async def test_stack_shield(self): + + stack_for_shield = None + + async def deep(): + await asyncio.sleep(0) + nonlocal stack_for_shield + stack_for_shield = capture_test_stack() + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def main(): + await asyncio.shield(c1()) + + await main() + + self.assertEqual(stack_for_shield[0], [ + 'T', + ['s capture_test_stack', 'a deep', 'a c1'], + [ + ['T', ['a main', 'a test_stack_shield'], []] + ] + ]) + + async def test_stack_timeout(self): + + stack_for_inner = None + + async def inner(): + await asyncio.sleep(0) + nonlocal stack_for_inner + stack_for_inner = capture_test_stack() + + async def c1(): + async with asyncio.timeout(1): + await asyncio.sleep(0) + await inner() + + async def main(): + await asyncio.shield(c1()) + + await main() + + self.assertEqual(stack_for_inner[0], [ + 'T', + ['s capture_test_stack', 'a inner', 'a c1'], + [ + ['T', ['a main', 'a test_stack_timeout'], []] + ] + ]) + + async def test_stack_wait(self): + + stack_for_inner = None + + async def inner(): + await asyncio.sleep(0) + nonlocal stack_for_inner + stack_for_inner = capture_test_stack() + + async def c1(): + async with asyncio.timeout(1): + await asyncio.sleep(0) + await inner() + + async def c2(): + for i in range(3): + await asyncio.sleep(0) + + async def main(t1, t2): + while True: + _, pending = await asyncio.wait([t1, t2]) + if not pending: + break + + t1 = asyncio.create_task(c1()) + t2 = asyncio.create_task(c2()) + try: + await main(t1, t2) + finally: + await t1 + await t2 + + self.assertEqual(stack_for_inner[0], [ + 'T', + ['s capture_test_stack', 'a inner', 'a c1'], + [ + ['T', + ['a _wait', 'a wait', 'a main', 'a test_stack_wait'], + [] + ] + ] + ]) + + async def test_stack_task(self): + + stack_for_inner = None + + async def inner(): + await asyncio.sleep(0) + nonlocal stack_for_inner + stack_for_inner = capture_test_stack() + + async def c1(): + await inner() + + async def c2(): + await asyncio.create_task(c1(), name='there there') + + async def main(): + await c2() + + await main() + + self.assertEqual(stack_for_inner[0], [ + 'T', + ['s capture_test_stack', 'a inner', 'a c1'], + [['T', ['a c2', 'a main', 'a test_stack_task'], []]] + ]) + + async def test_stack_future(self): + + stack_for_fut = None + + async def a2(fut): + await fut + + async def a1(fut): + await a2(fut) + + async def b1(fut): + await fut + + async def main(): + nonlocal stack_for_fut + + fut = asyncio.Future() + async with asyncio.TaskGroup() as g: + g.create_task(a1(fut), name="task A") + g.create_task(b1(fut), name='task B') + + for _ in range(5): + # Do a few iterations to ensure that both a1 and b1 + # await on the future + await asyncio.sleep(0) + + stack_for_fut = capture_test_stack(fut=fut) + fut.set_result(None) + + await main() + + self.assertEqual(stack_for_fut[0], + ['F', + [], + [ + ['T', + ['a a2', 'a a1'], + [['T', ['a test_stack_future'], []]] + ], + 
['T', + ['a b1'], + [['T', ['a test_stack_future'], []]] + ], + ]] + ) + + self.assertTrue(stack_for_fut[1].startswith('* Future(id=')) + + +@unittest.skipIf( + not hasattr(asyncio.futures, "_c_future_add_to_awaited_by"), + "C-accelerated asyncio call graph backend missing", +) +class TestCallStackC(CallStackTestBase, unittest.IsolatedAsyncioTestCase): + def setUp(self): + futures = asyncio.futures + tasks = asyncio.tasks + + self._Future = asyncio.Future + asyncio.Future = futures.Future = futures._CFuture + + self._Task = asyncio.Task + asyncio.Task = tasks.Task = tasks._CTask + + self._future_add_to_awaited_by = asyncio.future_add_to_awaited_by + futures.future_add_to_awaited_by = futures._c_future_add_to_awaited_by + asyncio.future_add_to_awaited_by = futures.future_add_to_awaited_by + + self._future_discard_from_awaited_by = asyncio.future_discard_from_awaited_by + futures.future_discard_from_awaited_by = futures._c_future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = futures.future_discard_from_awaited_by + + + def tearDown(self): + futures = asyncio.futures + tasks = asyncio.tasks + + futures.future_discard_from_awaited_by = self._future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = self._future_discard_from_awaited_by + del self._future_discard_from_awaited_by + + futures.future_add_to_awaited_by = self._future_add_to_awaited_by + asyncio.future_add_to_awaited_by = self._future_add_to_awaited_by + del self._future_add_to_awaited_by + + asyncio.Task = self._Task + tasks.Task = self._Task + del self._Task + + asyncio.Future = self._Future + futures.Future = self._Future + del self._Future + + +@unittest.skipIf( + not hasattr(asyncio.futures, "_py_future_add_to_awaited_by"), + "Pure Python asyncio call graph backend missing", +) +class TestCallStackPy(CallStackTestBase, unittest.IsolatedAsyncioTestCase): + def setUp(self): + futures = asyncio.futures + tasks = asyncio.tasks + + self._Future = asyncio.Future + asyncio.Future = futures.Future = futures._PyFuture + + self._Task = asyncio.Task + asyncio.Task = tasks.Task = tasks._PyTask + + self._future_add_to_awaited_by = asyncio.future_add_to_awaited_by + futures.future_add_to_awaited_by = futures._py_future_add_to_awaited_by + asyncio.future_add_to_awaited_by = futures.future_add_to_awaited_by + + self._future_discard_from_awaited_by = asyncio.future_discard_from_awaited_by + futures.future_discard_from_awaited_by = futures._py_future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = futures.future_discard_from_awaited_by + + + def tearDown(self): + futures = asyncio.futures + tasks = asyncio.tasks + + futures.future_discard_from_awaited_by = self._future_discard_from_awaited_by + asyncio.future_discard_from_awaited_by = self._future_discard_from_awaited_by + del self._future_discard_from_awaited_by + + futures.future_add_to_awaited_by = self._future_add_to_awaited_by + asyncio.future_add_to_awaited_by = self._future_add_to_awaited_by + del self._future_add_to_awaited_by + + asyncio.Task = self._Task + tasks.Task = self._Task + del self._Task + + asyncio.Future = self._Future + futures.Future = self._Future + del self._Future diff --git a/Lib/test/test_asyncio/test_locks.py b/Lib/test/test_asyncio/test_locks.py index aabfcd418829b2..3bb3e5c4ca0658 100644 --- a/Lib/test/test_asyncio/test_locks.py +++ b/Lib/test/test_asyncio/test_locks.py @@ -27,11 +27,11 @@ class LockTests(unittest.IsolatedAsyncioTestCase): async def test_repr(self): lock = asyncio.Lock() - 
self.assertTrue(repr(lock).endswith('[unlocked]>')) + self.assertEndsWith(repr(lock), '[unlocked]>') self.assertTrue(RGX_REPR.match(repr(lock))) await lock.acquire() - self.assertTrue(repr(lock).endswith('[locked]>')) + self.assertEndsWith(repr(lock), '[locked]>') self.assertTrue(RGX_REPR.match(repr(lock))) async def test_lock(self): @@ -286,12 +286,12 @@ class EventTests(unittest.IsolatedAsyncioTestCase): def test_repr(self): ev = asyncio.Event() - self.assertTrue(repr(ev).endswith('[unset]>')) + self.assertEndsWith(repr(ev), '[unset]>') match = RGX_REPR.match(repr(ev)) self.assertEqual(match.group('extras'), 'unset') ev.set() - self.assertTrue(repr(ev).endswith('[set]>')) + self.assertEndsWith(repr(ev), '[set]>') self.assertTrue(RGX_REPR.match(repr(ev))) ev._waiters.append(mock.Mock()) @@ -916,11 +916,11 @@ def test_initial_value_zero(self): async def test_repr(self): sem = asyncio.Semaphore() - self.assertTrue(repr(sem).endswith('[unlocked, value:1]>')) + self.assertEndsWith(repr(sem), '[unlocked, value:1]>') self.assertTrue(RGX_REPR.match(repr(sem))) await sem.acquire() - self.assertTrue(repr(sem).endswith('[locked]>')) + self.assertEndsWith(repr(sem), '[locked]>') self.assertTrue('waiters' not in repr(sem)) self.assertTrue(RGX_REPR.match(repr(sem))) diff --git a/Lib/test/test_asyncio/test_protocols.py b/Lib/test/test_asyncio/test_protocols.py index a8627b5b5b87f2..4484a031988533 100644 --- a/Lib/test/test_asyncio/test_protocols.py +++ b/Lib/test/test_asyncio/test_protocols.py @@ -19,7 +19,7 @@ def test_base_protocol(self): self.assertIsNone(p.connection_lost(f)) self.assertIsNone(p.pause_writing()) self.assertIsNone(p.resume_writing()) - self.assertFalse(hasattr(p, '__dict__')) + self.assertNotHasAttr(p, '__dict__') def test_protocol(self): f = mock.Mock() @@ -30,7 +30,7 @@ def test_protocol(self): self.assertIsNone(p.eof_received()) self.assertIsNone(p.pause_writing()) self.assertIsNone(p.resume_writing()) - self.assertFalse(hasattr(p, '__dict__')) + self.assertNotHasAttr(p, '__dict__') def test_buffered_protocol(self): f = mock.Mock() @@ -41,7 +41,7 @@ def test_buffered_protocol(self): self.assertIsNone(p.buffer_updated(150)) self.assertIsNone(p.pause_writing()) self.assertIsNone(p.resume_writing()) - self.assertFalse(hasattr(p, '__dict__')) + self.assertNotHasAttr(p, '__dict__') def test_datagram_protocol(self): f = mock.Mock() @@ -50,7 +50,7 @@ def test_datagram_protocol(self): self.assertIsNone(dp.connection_lost(f)) self.assertIsNone(dp.error_received(f)) self.assertIsNone(dp.datagram_received(f, f)) - self.assertFalse(hasattr(dp, '__dict__')) + self.assertNotHasAttr(dp, '__dict__') def test_subprocess_protocol(self): f = mock.Mock() @@ -60,7 +60,7 @@ def test_subprocess_protocol(self): self.assertIsNone(sp.pipe_data_received(1, f)) self.assertIsNone(sp.pipe_connection_lost(1, f)) self.assertIsNone(sp.process_exited()) - self.assertFalse(hasattr(sp, '__dict__')) + self.assertNotHasAttr(sp, '__dict__') if __name__ == '__main__': diff --git a/Lib/test/test_asyncio/test_queues.py b/Lib/test/test_asyncio/test_queues.py index 1a8d604faea1fd..090b9774c2289f 100644 --- a/Lib/test/test_asyncio/test_queues.py +++ b/Lib/test/test_asyncio/test_queues.py @@ -18,7 +18,7 @@ async def _test_repr_or_str(self, fn, expect_id): appear in fn(Queue()). """ q = asyncio.Queue() - self.assertTrue(fn(q).startswith('= len(message) - case UnicodeDecodeError(): - # The case "end - start >= len(message)" does not crash. 
- return end < start - case UnicodeTranslateError(): - # Test "end <= start" because PyCodec_ReplaceErrors checks - # the Unicode kind of a 0-length string which by convention - # is PyUnicode_1BYTE_KIND and not PyUnicode_2BYTE_KIND as - # the handler currently expects. - return end <= start or (end - start) >= len(message) - return False - if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_capi/test_import.py b/Lib/test/test_capi/test_import.py new file mode 100644 index 00000000000000..94f96728d9174b --- /dev/null +++ b/Lib/test/test_capi/test_import.py @@ -0,0 +1,327 @@ +import importlib.util +import os.path +import sys +import types +import unittest +from test.support import os_helper +from test.support import import_helper +from test.support.warnings_helper import check_warnings + +_testlimitedcapi = import_helper.import_module('_testlimitedcapi') +NULL = None + + +class ImportTests(unittest.TestCase): + def test_getmagicnumber(self): + # Test PyImport_GetMagicNumber() + magic = _testlimitedcapi.PyImport_GetMagicNumber() + self.assertEqual(magic, + int.from_bytes(importlib.util.MAGIC_NUMBER, 'little')) + + def test_getmagictag(self): + # Test PyImport_GetMagicTag() + tag = _testlimitedcapi.PyImport_GetMagicTag() + self.assertEqual(tag, sys.implementation.cache_tag) + + def test_getmoduledict(self): + # Test PyImport_GetModuleDict() + modules = _testlimitedcapi.PyImport_GetModuleDict() + self.assertIs(modules, sys.modules) + + def check_import_loaded_module(self, import_module): + for name in ('os', 'sys', 'test', 'unittest'): + with self.subTest(name=name): + self.assertIn(name, sys.modules) + old_module = sys.modules[name] + module = import_module(name) + self.assertIsInstance(module, types.ModuleType) + self.assertIs(module, old_module) + + def check_import_fresh_module(self, import_module): + old_modules = dict(sys.modules) + try: + for name in ('colorsys', 'math'): + with self.subTest(name=name): + sys.modules.pop(name, None) + module = import_module(name) + self.assertIsInstance(module, types.ModuleType) + self.assertIs(module, sys.modules[name]) + self.assertEqual(module.__name__, name) + finally: + sys.modules.clear() + sys.modules.update(old_modules) + + def test_getmodule(self): + # Test PyImport_GetModule() + getmodule = _testlimitedcapi.PyImport_GetModule + self.check_import_loaded_module(getmodule) + + nonexistent = 'nonexistent' + self.assertNotIn(nonexistent, sys.modules) + self.assertIs(getmodule(nonexistent), KeyError) + self.assertIs(getmodule(''), KeyError) + self.assertIs(getmodule(object()), KeyError) + + self.assertRaises(TypeError, getmodule, []) # unhashable + # CRASHES getmodule(NULL) + + def check_addmodule(self, add_module, accept_nonstr=False): + # create a new module + names = ['nonexistent'] + if accept_nonstr: + names.append(b'\xff') # non-UTF-8 + # PyImport_AddModuleObject() accepts non-string names + names.append(tuple(['hashable non-string'])) + for name in names: + with self.subTest(name=name): + self.assertNotIn(name, sys.modules) + try: + module = add_module(name) + self.assertIsInstance(module, types.ModuleType) + self.assertEqual(module.__name__, name) + self.assertIs(module, sys.modules[name]) + finally: + sys.modules.pop(name, None) + + # get an existing module + self.check_import_loaded_module(add_module) + + def test_addmoduleobject(self): + # Test PyImport_AddModuleObject() + addmoduleobject = _testlimitedcapi.PyImport_AddModuleObject + self.check_addmodule(addmoduleobject, accept_nonstr=True) + + 
self.assertRaises(TypeError, addmoduleobject, []) # unhashable + # CRASHES addmoduleobject(NULL) + + def test_addmodule(self): + # Test PyImport_AddModule() + addmodule = _testlimitedcapi.PyImport_AddModule + self.check_addmodule(addmodule) + + self.assertRaises(UnicodeDecodeError, addmodule, b'\xff') + # CRASHES addmodule(NULL) + + def test_addmoduleref(self): + # Test PyImport_AddModuleRef() + addmoduleref = _testlimitedcapi.PyImport_AddModuleRef + self.check_addmodule(addmoduleref) + + self.assertRaises(UnicodeDecodeError, addmoduleref, b'\xff') + # CRASHES addmoduleref(NULL) + + def check_import_func(self, import_module): + self.check_import_loaded_module(import_module) + self.check_import_fresh_module(import_module) + self.assertRaises(ModuleNotFoundError, import_module, 'nonexistent') + self.assertRaises(ValueError, import_module, '') + + def test_import(self): + # Test PyImport_Import() + import_ = _testlimitedcapi.PyImport_Import + self.check_import_func(import_) + + self.assertRaises(TypeError, import_, b'os') + self.assertRaises(SystemError, import_, NULL) + + def test_importmodule(self): + # Test PyImport_ImportModule() + importmodule = _testlimitedcapi.PyImport_ImportModule + self.check_import_func(importmodule) + + self.assertRaises(UnicodeDecodeError, importmodule, b'\xff') + # CRASHES importmodule(NULL) + + def test_importmodulenoblock(self): + # Test deprecated PyImport_ImportModuleNoBlock() + importmodulenoblock = _testlimitedcapi.PyImport_ImportModuleNoBlock + with check_warnings(('', DeprecationWarning)): + self.check_import_func(importmodulenoblock) + self.assertRaises(UnicodeDecodeError, importmodulenoblock, b'\xff') + + # CRASHES importmodulenoblock(NULL) + + def check_frozen_import(self, import_frozen_module): + # Importing a frozen module executes its code, so start by unloading + # the module to execute the code in a new (temporary) module. 
+ old_zipimport = sys.modules.pop('zipimport') + try: + self.assertEqual(import_frozen_module('zipimport'), 1) + + # import zipimport again + self.assertEqual(import_frozen_module('zipimport'), 1) + finally: + sys.modules['zipimport'] = old_zipimport + + # not a frozen module + self.assertEqual(import_frozen_module('sys'), 0) + self.assertEqual(import_frozen_module('nonexistent'), 0) + self.assertEqual(import_frozen_module(''), 0) + + def test_importfrozenmodule(self): + # Test PyImport_ImportFrozenModule() + importfrozenmodule = _testlimitedcapi.PyImport_ImportFrozenModule + self.check_frozen_import(importfrozenmodule) + + self.assertRaises(UnicodeDecodeError, importfrozenmodule, b'\xff') + # CRASHES importfrozenmodule(NULL) + + def test_importfrozenmoduleobject(self): + # Test PyImport_ImportFrozenModuleObject() + importfrozenmoduleobject = _testlimitedcapi.PyImport_ImportFrozenModuleObject + self.check_frozen_import(importfrozenmoduleobject) + self.assertEqual(importfrozenmoduleobject(b'zipimport'), 0) + self.assertEqual(importfrozenmoduleobject(NULL), 0) + + def test_importmoduleex(self): + # Test PyImport_ImportModuleEx() + importmoduleex = _testlimitedcapi.PyImport_ImportModuleEx + self.check_import_func(lambda name: importmoduleex(name, NULL, NULL, NULL)) + + self.assertRaises(ModuleNotFoundError, importmoduleex, 'nonexistent', NULL, NULL, NULL) + self.assertRaises(ValueError, importmoduleex, '', NULL, NULL, NULL) + self.assertRaises(UnicodeDecodeError, importmoduleex, b'\xff', NULL, NULL, NULL) + # CRASHES importmoduleex(NULL, NULL, NULL, NULL) + + def check_importmodulelevel(self, importmodulelevel): + self.check_import_func(lambda name: importmodulelevel(name, NULL, NULL, NULL, 0)) + + self.assertRaises(ModuleNotFoundError, importmodulelevel, 'nonexistent', NULL, NULL, NULL, 0) + self.assertRaises(ValueError, importmodulelevel, '', NULL, NULL, NULL, 0) + + if __package__: + self.assertIs(importmodulelevel('test_import', globals(), NULL, NULL, 1), + sys.modules['test.test_capi.test_import']) + self.assertIs(importmodulelevel('test_capi', globals(), NULL, NULL, 2), + sys.modules['test.test_capi']) + self.assertRaises(ValueError, importmodulelevel, 'os', NULL, NULL, NULL, -1) + with self.assertWarns(ImportWarning): + self.assertRaises(KeyError, importmodulelevel, 'test_import', {}, NULL, NULL, 1) + self.assertRaises(TypeError, importmodulelevel, 'test_import', [], NULL, NULL, 1) + + def test_importmodulelevel(self): + # Test PyImport_ImportModuleLevel() + importmodulelevel = _testlimitedcapi.PyImport_ImportModuleLevel + self.check_importmodulelevel(importmodulelevel) + + self.assertRaises(UnicodeDecodeError, importmodulelevel, b'\xff', NULL, NULL, NULL, 0) + # CRASHES importmodulelevel(NULL, NULL, NULL, NULL, 0) + + def test_importmodulelevelobject(self): + # Test PyImport_ImportModuleLevelObject() + importmodulelevel = _testlimitedcapi.PyImport_ImportModuleLevelObject + self.check_importmodulelevel(importmodulelevel) + + self.assertRaises(TypeError, importmodulelevel, b'os', NULL, NULL, NULL, 0) + self.assertRaises(ValueError, importmodulelevel, NULL, NULL, NULL, NULL, 0) + + def check_executecodemodule(self, execute_code, *args): + name = 'test_import_executecode' + try: + # Create a temporary module where the code will be executed + self.assertNotIn(name, sys.modules) + module = _testlimitedcapi.PyImport_AddModuleRef(name) + self.assertNotHasAttr(module, 'attr') + + # Execute the code + code = compile('attr = 1', '', 'exec') + module2 = execute_code(name, code, *args) + 
self.assertIs(module2, module) + + # Check the function side effects + self.assertEqual(module.attr, 1) + finally: + sys.modules.pop(name, None) + return module.__spec__.origin + + def test_executecodemodule(self): + # Test PyImport_ExecCodeModule() + execcodemodule = _testlimitedcapi.PyImport_ExecCodeModule + self.check_executecodemodule(execcodemodule) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(UnicodeDecodeError, execcodemodule, b'\xff', code) + # CRASHES execcodemodule(NULL, code) + # CRASHES execcodemodule(name, NULL) + + def test_executecodemoduleex(self): + # Test PyImport_ExecCodeModuleEx() + execcodemoduleex = _testlimitedcapi.PyImport_ExecCodeModuleEx + + # Test NULL path (it should not crash) + self.check_executecodemodule(execcodemoduleex, NULL) + + # Test non-NULL path + pathname = b'pathname' + origin = self.check_executecodemodule(execcodemoduleex, pathname) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + + pathname = os_helper.TESTFN_UNDECODABLE + if pathname: + origin = self.check_executecodemodule(execcodemoduleex, pathname) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(UnicodeDecodeError, execcodemoduleex, b'\xff', code, NULL) + # CRASHES execcodemoduleex(NULL, code, NULL) + # CRASHES execcodemoduleex(name, NULL, NULL) + + def check_executecode_pathnames(self, execute_code_func, object=False): + # Test non-NULL pathname and NULL cpathname + + # Test NULL paths (it should not crash) + self.check_executecodemodule(execute_code_func, NULL, NULL) + + pathname = 'pathname' + origin = self.check_executecodemodule(execute_code_func, pathname, NULL) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + origin = self.check_executecodemodule(execute_code_func, NULL, pathname) + if not object: + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + + pathname = os_helper.TESTFN_UNDECODABLE + if pathname: + if object: + pathname = os.fsdecode(pathname) + origin = self.check_executecodemodule(execute_code_func, pathname, NULL) + self.assertEqual(origin, os.path.abspath(os.fsdecode(pathname))) + self.check_executecodemodule(execute_code_func, NULL, pathname) + + # Test NULL pathname and non-NULL cpathname + pyc_filename = importlib.util.cache_from_source(__file__) + py_filename = importlib.util.source_from_cache(pyc_filename) + origin = self.check_executecodemodule(execute_code_func, NULL, pyc_filename) + if not object: + self.assertEqual(origin, py_filename) + + def test_executecodemodulewithpathnames(self): + # Test PyImport_ExecCodeModuleWithPathnames() + execute_code_func = _testlimitedcapi.PyImport_ExecCodeModuleWithPathnames + self.check_executecode_pathnames(execute_code_func) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(UnicodeDecodeError, execute_code_func, b'\xff', code, NULL, NULL) + # CRASHES execute_code_func(NULL, code, NULL, NULL) + # CRASHES execute_code_func(name, NULL, NULL, NULL) + + def test_executecodemoduleobject(self): + # Test PyImport_ExecCodeModuleObject() + execute_code_func = _testlimitedcapi.PyImport_ExecCodeModuleObject + self.check_executecode_pathnames(execute_code_func, object=True) + + code = compile('attr = 1', '', 'exec') + self.assertRaises(TypeError, execute_code_func, [], code, NULL, NULL) + nonstring = tuple(['hashable non-string']) + self.assertRaises(AttributeError, execute_code_func, nonstring, code, NULL, NULL) + sys.modules.pop(nonstring, None) + # CRASHES 
execute_code_func(NULL, code, NULL, NULL) + # CRASHES execute_code_func(name, NULL, NULL, NULL) + + # TODO: test PyImport_GetImporter() + # TODO: test PyImport_ReloadModule() + # TODO: test PyImport_ExtendInittab() + # PyImport_AppendInittab() is tested by test_embed + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_capi/test_misc.py b/Lib/test/test_capi/test_misc.py index 99d9a757759dcd..114e7cdfd0cd9c 100644 --- a/Lib/test/test_capi/test_misc.py +++ b/Lib/test/test_capi/test_misc.py @@ -75,8 +75,11 @@ class InstanceMethod: id = _testcapi.instancemethod(id) testfunction = _testcapi.instancemethod(testfunction) + CURRENT_THREAD_REGEX = r'Current thread.*:\n' if not support.Py_GIL_DISABLED else r'Stack .*:\n' + +@support.force_not_colorized_test_class class CAPITest(unittest.TestCase): def test_instancemethod(self): @@ -116,8 +119,7 @@ def test_no_FatalError_infinite_loop(self): "after Python initialization and before Python finalization, " "but it was called without an active thread state. " "Are you trying to call the C API inside of a Py_BEGIN_ALLOW_THREADS block?").encode() - self.assertTrue(err.rstrip().startswith(msg), - err) + self.assertStartsWith(err.rstrip(), msg) def test_memoryview_from_NULL_pointer(self): self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer) @@ -720,7 +722,7 @@ def test_heaptype_with_setattro(self): def test_heaptype_with_custom_metaclass(self): metaclass = _testcapi.HeapCTypeMetaclass - self.assertTrue(issubclass(metaclass, type)) + self.assertIsSubclass(metaclass, type) # Class creation from C t = _testcapi.pytype_fromspec_meta(metaclass) @@ -736,7 +738,7 @@ def test_heaptype_with_custom_metaclass(self): def test_heaptype_with_custom_metaclass_null_new(self): metaclass = _testcapi.HeapCTypeMetaclassNullNew - self.assertTrue(issubclass(metaclass, type)) + self.assertIsSubclass(metaclass, type) # Class creation from C t = _testcapi.pytype_fromspec_meta(metaclass) @@ -751,7 +753,7 @@ def test_heaptype_with_custom_metaclass_null_new(self): def test_heaptype_with_custom_metaclass_custom_new(self): metaclass = _testcapi.HeapCTypeMetaclassCustomNew - self.assertTrue(issubclass(_testcapi.HeapCTypeMetaclassCustomNew, type)) + self.assertIsSubclass(_testcapi.HeapCTypeMetaclassCustomNew, type) msg = "Metaclasses with custom tp_new are not supported." 
with self.assertRaisesRegex(TypeError, msg): @@ -910,8 +912,7 @@ def test_export_symbols(self): names.append('Py_FrozenMain') for name in names: - with self.subTest(name=name): - self.assertTrue(hasattr(ctypes.pythonapi, name)) + self.assertHasAttr(ctypes.pythonapi, name) def test_clear_managed_dict(self): @@ -1503,7 +1504,8 @@ def inner(arg5, arg6): self.assertIsInstance(closure, tuple) self.assertEqual(len(closure), 1) self.assertEqual(len(closure), len(func.__code__.co_freevars)) - self.assertTrue(all(isinstance(cell, CellType) for cell in closure)) + for cell in closure: + self.assertIsInstance(cell, CellType) self.assertTrue(closure[0].cell_contents, 5) func = with_two_levels(1, 2)(3, 4) @@ -1512,7 +1514,8 @@ def inner(arg5, arg6): self.assertIsInstance(closure, tuple) self.assertEqual(len(closure), 4) self.assertEqual(len(closure), len(func.__code__.co_freevars)) - self.assertTrue(all(isinstance(cell, CellType) for cell in closure)) + for cell in closure: + self.assertIsInstance(cell, CellType) self.assertEqual([cell.cell_contents for cell in closure], [1, 2, 3, 4]) @@ -2141,28 +2144,27 @@ async def foo(arg): return await arg # Py 3.5 self.assertEqual(ret, 0) self.assertEqual(pickle.load(f), {'a': '123x', 'b': '123'}) + # _testcapi cannot be imported in a subinterpreter on a Free Threaded build + @support.requires_gil_enabled() def test_py_config_isoloated_per_interpreter(self): # A config change in one interpreter must not leak to out to others. # # This test could verify ANY config value, it just happens to have been # written around the time of int_max_str_digits. Refactoring is okay. code = """if 1: - import sys, _testinternalcapi + import sys, _testcapi # Any config value would do, this happens to be the one being # double checked at the time this test was written. 
- config = _testinternalcapi.get_config() - config['int_max_str_digits'] = 55555 - config['parse_argv'] = 0 - _testinternalcapi.set_config(config) - sub_value = _testinternalcapi.get_config()['int_max_str_digits'] + _testcapi.config_set('int_max_str_digits', 55555) + sub_value = _testcapi.config_get('int_max_str_digits') assert sub_value == 55555, sub_value """ - before_config = _testinternalcapi.get_config() - assert before_config['int_max_str_digits'] != 55555 + before_config = _testcapi.config_get('int_max_str_digits') + assert before_config != 55555 self.assertEqual(support.run_in_subinterp(code), 0, 'subinterp code failure, check stderr.') - after_config = _testinternalcapi.get_config() + after_config = _testcapi.config_get('int_max_str_digits') self.assertIsNot( before_config, after_config, "Expected get_config() to return a new dict on each call") @@ -2365,7 +2367,7 @@ def test_mutate_exception(self): support.run_in_subinterp("import binascii; binascii.Error.foobar = 'foobar'") - self.assertFalse(hasattr(binascii.Error, "foobar")) + self.assertNotHasAttr(binascii.Error, "foobar") @unittest.skipIf(_testmultiphase is None, "test requires _testmultiphase module") # gh-117649: The free-threaded build does not currently support sharing diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index 12542d8b7fa62e..9cfc6c142da6cd 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -35,90 +35,6 @@ def clear_executors(func): func.__code__ = func.__code__.replace() -@requires_specialization -@unittest.skipIf(Py_GIL_DISABLED, "optimizer not yet supported in free-threaded builds") -@unittest.skipUnless(hasattr(_testinternalcapi, "get_optimizer"), - "Requires optimizer infrastructure") -class TestOptimizerAPI(unittest.TestCase): - - def test_new_counter_optimizer_dealloc(self): - # See gh-108727 - def f(): - _testinternalcapi.new_counter_optimizer() - - f() - - def test_get_set_optimizer(self): - old = _testinternalcapi.get_optimizer() - opt = _testinternalcapi.new_counter_optimizer() - try: - _testinternalcapi.set_optimizer(opt) - self.assertEqual(_testinternalcapi.get_optimizer(), opt) - _testinternalcapi.set_optimizer(None) - self.assertEqual(_testinternalcapi.get_optimizer(), None) - finally: - _testinternalcapi.set_optimizer(old) - - - def test_counter_optimizer(self): - # Generate a new function at each call - ns = {} - exec(textwrap.dedent(f""" - def loop(): - for _ in range({TIER2_THRESHOLD + 1000}): - pass - """), ns, ns) - loop = ns['loop'] - - for repeat in range(5): - opt = _testinternalcapi.new_counter_optimizer() - with temporary_optimizer(opt): - self.assertEqual(opt.get_count(), 0) - with clear_executors(loop): - loop() - self.assertEqual(opt.get_count(), 1001) - - def test_long_loop(self): - "Check that we aren't confused by EXTENDED_ARG" - - # Generate a new function at each call - ns = {} - exec(textwrap.dedent(f""" - def nop(): - pass - - def long_loop(): - for _ in range({TIER2_THRESHOLD + 20}): - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - nop(); nop(); nop(); nop(); nop(); nop(); nop(); nop(); - """), ns, ns) - long_loop = ns['long_loop'] - - opt = _testinternalcapi.new_counter_optimizer() - with temporary_optimizer(opt): - 
self.assertEqual(opt.get_count(), 0) - long_loop() - self.assertEqual(opt.get_count(), 21) # Need iterations to warm up - - def test_code_restore_for_ENTER_EXECUTOR(self): - def testfunc(x): - i = 0 - while i < x: - i += 1 - - opt = _testinternalcapi.new_counter_optimizer() - with temporary_optimizer(opt): - testfunc(1000) - code, replace_code = testfunc.__code__, testfunc.__code__.replace() - self.assertEqual(code, replace_code) - self.assertEqual(hash(code), hash(replace_code)) - - def get_first_executor(func): code = func.__code__ co_code = code.co_code @@ -145,14 +61,6 @@ def get_opnames(ex): "Requires optimizer infrastructure") class TestExecutorInvalidation(unittest.TestCase): - def setUp(self): - self.old = _testinternalcapi.get_optimizer() - self.opt = _testinternalcapi.new_counter_optimizer() - _testinternalcapi.set_optimizer(self.opt) - - def tearDown(self): - _testinternalcapi.set_optimizer(self.old) - def test_invalidate_object(self): # Generate a new set of functions at each call ns = {} @@ -167,8 +75,10 @@ def f{n}(): funcs = [ ns[f'f{n}'] for n in range(5)] objects = [object() for _ in range(5)] - for f in funcs: - f() + opt = _testinternalcapi.new_uop_optimizer() + with temporary_optimizer(opt): + for f in funcs: + f() executors = [get_first_executor(f) for f in funcs] # Set things up so each executor depends on the objects # with an equal or lower index. @@ -1555,6 +1465,25 @@ def f(l: complex, r: complex) -> None: with self.subTest(l=l, r=r, x=x, y=y): script_helper.assert_python_ok("-c", s) + def test_symbols_flow_through_tuples(self): + def testfunc(n): + for _ in range(n): + a = 1 + b = 2 + t = a, b + x, y = t + r = x + y + return r + + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, 3) + self.assertIsNotNone(ex) + uops = get_opnames(ex) + self.assertIn("_BINARY_OP_ADD_INT", uops) + self.assertNotIn("_GUARD_BOTH_INT", uops) + self.assertNotIn("_GUARD_NOS_INT", uops) + self.assertNotIn("_GUARD_TOS_INT", uops) + def test_decref_escapes(self): class Convert9999ToNone: def __del__(self): diff --git a/Lib/test/test_capi/test_sys.py b/Lib/test/test_capi/test_sys.py index 54a8e026d883d4..d3a9b378e7769a 100644 --- a/Lib/test/test_capi/test_sys.py +++ b/Lib/test/test_capi/test_sys.py @@ -51,7 +51,7 @@ def test_sys_setobject(self): self.assertEqual(setobject(b'newattr', value2), 0) self.assertIs(sys.newattr, value2) self.assertEqual(setobject(b'newattr', NULL), 0) - self.assertFalse(hasattr(sys, 'newattr')) + self.assertNotHasAttr(sys, 'newattr') self.assertEqual(setobject(b'newattr', NULL), 0) finally: with contextlib.suppress(AttributeError): @@ -60,7 +60,7 @@ def test_sys_setobject(self): self.assertEqual(setobject('\U0001f40d'.encode(), value), 0) self.assertIs(getattr(sys, '\U0001f40d'), value) self.assertEqual(setobject('\U0001f40d'.encode(), NULL), 0) - self.assertFalse(hasattr(sys, '\U0001f40d')) + self.assertNotHasAttr(sys, '\U0001f40d') finally: with contextlib.suppress(AttributeError): delattr(sys, '\U0001f40d') diff --git a/Lib/test/test_capi/test_type.py b/Lib/test/test_capi/test_type.py index 92d056e802eeed..ffcaae73bca236 100644 --- a/Lib/test/test_capi/test_type.py +++ b/Lib/test/test_capi/test_type.py @@ -67,3 +67,10 @@ class FreezeThis(metaclass=Meta): Base.value = 3 type_freeze(FreezeThis) self.assertEqual(FreezeThis.value, 2) + + def test_manual_heap_type(self): + # gh-128923: test that a manually allocated and initialized heap type + # works correctly + ManualHeapType = _testcapi.ManualHeapType + for i in range(100): + 
self.assertIsInstance(ManualHeapType(), ManualHeapType) diff --git a/Lib/test/test_cmd_line_script.py b/Lib/test/test_cmd_line_script.py index f30107225ff612..e7f3e46c1868f7 100644 --- a/Lib/test/test_cmd_line_script.py +++ b/Lib/test/test_cmd_line_script.py @@ -88,6 +88,8 @@ def _make_test_zip_pkg(zip_dir, zip_basename, pkg_name, script_basename, importlib.invalidate_caches() return to_return + +@support.force_not_colorized_test_class class CmdLineTest(unittest.TestCase): def _check_output(self, script_name, exit_code, data, expected_file, expected_argv0, @@ -659,7 +661,8 @@ def test_syntaxerror_invalid_escape_sequence_multi_line(self): stderr.splitlines()[-3:], [ b' foo = """\\q"""', b' ^^^^^^^^', - b'SyntaxError: invalid escape sequence \'\\q\'' + b'SyntaxError: "\\q" is an invalid escape sequence. ' + b'Did you mean "\\\\q"? A raw string is also an option.' ], ) diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py index 7ffa4eb8639add..69c1ee0690d269 100644 --- a/Lib/test/test_code.py +++ b/Lib/test/test_code.py @@ -429,14 +429,14 @@ def test_invalid_bytecode(self): def foo(): pass - # assert that opcode 229 is invalid - self.assertEqual(opname[229], '<229>') + # assert that opcode 135 is invalid + self.assertEqual(opname[135], '<135>') - # change first opcode to 0xeb (=229) + # change first opcode to 0x87 (=135) foo.__code__ = foo.__code__.replace( - co_code=b'\xe5' + foo.__code__.co_code[1:]) + co_code=b'\x87' + foo.__code__.co_code[1:]) - msg = "unknown opcode 229" + msg = "unknown opcode 135" with self.assertRaisesRegex(SystemError, msg): foo() diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 787bd1b6a79e20..0eefc22d11bce0 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -282,7 +282,7 @@ def test_warning(self): # Test that the warning is only returned once. 
with warnings_helper.check_warnings( ('"is" with \'str\' literal', SyntaxWarning), - ("invalid escape sequence", SyntaxWarning), + ('"\\\\e" is an invalid escape sequence', SyntaxWarning), ) as w: compile_command(r"'\e' is 0") self.assertEqual(len(w.warnings), 2) diff --git a/Lib/test/test_collections.py b/Lib/test/test_collections.py index a24d3e3ea142b7..1e93530398be79 100644 --- a/Lib/test/test_collections.py +++ b/Lib/test/test_collections.py @@ -742,11 +742,11 @@ def validate_isinstance(self, abc, name): C = type('C', (object,), {'__hash__': None}) setattr(C, name, stub) self.assertIsInstance(C(), abc) - self.assertTrue(issubclass(C, abc)) + self.assertIsSubclass(C, abc) C = type('C', (object,), {'__hash__': None}) self.assertNotIsInstance(C(), abc) - self.assertFalse(issubclass(C, abc)) + self.assertNotIsSubclass(C, abc) def validate_comparison(self, instance): ops = ['lt', 'gt', 'le', 'ge', 'ne', 'or', 'and', 'xor', 'sub'] @@ -812,12 +812,12 @@ def __await__(self): non_samples = [None, int(), gen(), object()] for x in non_samples: self.assertNotIsInstance(x, Awaitable) - self.assertFalse(issubclass(type(x), Awaitable), repr(type(x))) + self.assertNotIsSubclass(type(x), Awaitable) samples = [Bar(), MinimalCoro()] for x in samples: self.assertIsInstance(x, Awaitable) - self.assertTrue(issubclass(type(x), Awaitable)) + self.assertIsSubclass(type(x), Awaitable) c = coro() # Iterable coroutines (generators with CO_ITERABLE_COROUTINE @@ -831,8 +831,8 @@ def __await__(self): class CoroLike: pass Coroutine.register(CoroLike) - self.assertTrue(isinstance(CoroLike(), Awaitable)) - self.assertTrue(issubclass(CoroLike, Awaitable)) + self.assertIsInstance(CoroLike(), Awaitable) + self.assertIsSubclass(CoroLike, Awaitable) CoroLike = None support.gc_collect() # Kill CoroLike to clean-up ABCMeta cache @@ -864,12 +864,12 @@ def __await__(self): non_samples = [None, int(), gen(), object(), Bar()] for x in non_samples: self.assertNotIsInstance(x, Coroutine) - self.assertFalse(issubclass(type(x), Coroutine), repr(type(x))) + self.assertNotIsSubclass(type(x), Coroutine) samples = [MinimalCoro()] for x in samples: self.assertIsInstance(x, Awaitable) - self.assertTrue(issubclass(type(x), Awaitable)) + self.assertIsSubclass(type(x), Awaitable) c = coro() # Iterable coroutines (generators with CO_ITERABLE_COROUTINE @@ -890,8 +890,8 @@ def close(self): pass def __await__(self): pass - self.assertTrue(isinstance(CoroLike(), Coroutine)) - self.assertTrue(issubclass(CoroLike, Coroutine)) + self.assertIsInstance(CoroLike(), Coroutine) + self.assertIsSubclass(CoroLike, Coroutine) class CoroLike: def send(self, value): @@ -900,15 +900,15 @@ def close(self): pass def __await__(self): pass - self.assertFalse(isinstance(CoroLike(), Coroutine)) - self.assertFalse(issubclass(CoroLike, Coroutine)) + self.assertNotIsInstance(CoroLike(), Coroutine) + self.assertNotIsSubclass(CoroLike, Coroutine) def test_Hashable(self): # Check some non-hashables non_samples = [bytearray(), list(), set(), dict()] for x in non_samples: self.assertNotIsInstance(x, Hashable) - self.assertFalse(issubclass(type(x), Hashable), repr(type(x))) + self.assertNotIsSubclass(type(x), Hashable) # Check some hashables samples = [None, int(), float(), complex(), @@ -918,14 +918,14 @@ def test_Hashable(self): ] for x in samples: self.assertIsInstance(x, Hashable) - self.assertTrue(issubclass(type(x), Hashable), repr(type(x))) + self.assertIsSubclass(type(x), Hashable) self.assertRaises(TypeError, Hashable) # Check direct subclassing class H(Hashable): 
def __hash__(self): return super().__hash__() self.assertEqual(hash(H()), 0) - self.assertFalse(issubclass(int, H)) + self.assertNotIsSubclass(int, H) self.validate_abstract_methods(Hashable, '__hash__') self.validate_isinstance(Hashable, '__hash__') @@ -933,13 +933,13 @@ def test_AsyncIterable(self): class AI: def __aiter__(self): return self - self.assertTrue(isinstance(AI(), AsyncIterable)) - self.assertTrue(issubclass(AI, AsyncIterable)) + self.assertIsInstance(AI(), AsyncIterable) + self.assertIsSubclass(AI, AsyncIterable) # Check some non-iterables non_samples = [None, object, []] for x in non_samples: self.assertNotIsInstance(x, AsyncIterable) - self.assertFalse(issubclass(type(x), AsyncIterable), repr(type(x))) + self.assertNotIsSubclass(type(x), AsyncIterable) self.validate_abstract_methods(AsyncIterable, '__aiter__') self.validate_isinstance(AsyncIterable, '__aiter__') @@ -949,13 +949,13 @@ def __aiter__(self): return self async def __anext__(self): raise StopAsyncIteration - self.assertTrue(isinstance(AI(), AsyncIterator)) - self.assertTrue(issubclass(AI, AsyncIterator)) + self.assertIsInstance(AI(), AsyncIterator) + self.assertIsSubclass(AI, AsyncIterator) non_samples = [None, object, []] # Check some non-iterables for x in non_samples: self.assertNotIsInstance(x, AsyncIterator) - self.assertFalse(issubclass(type(x), AsyncIterator), repr(type(x))) + self.assertNotIsSubclass(type(x), AsyncIterator) # Similarly to regular iterators (see issue 10565) class AnextOnly: async def __anext__(self): @@ -968,7 +968,7 @@ def test_Iterable(self): non_samples = [None, 42, 3.14, 1j] for x in non_samples: self.assertNotIsInstance(x, Iterable) - self.assertFalse(issubclass(type(x), Iterable), repr(type(x))) + self.assertNotIsSubclass(type(x), Iterable) # Check some iterables samples = [bytes(), str(), tuple(), list(), set(), frozenset(), dict(), @@ -978,13 +978,13 @@ def test_Iterable(self): ] for x in samples: self.assertIsInstance(x, Iterable) - self.assertTrue(issubclass(type(x), Iterable), repr(type(x))) + self.assertIsSubclass(type(x), Iterable) # Check direct subclassing class I(Iterable): def __iter__(self): return super().__iter__() self.assertEqual(list(I()), []) - self.assertFalse(issubclass(str, I)) + self.assertNotIsSubclass(str, I) self.validate_abstract_methods(Iterable, '__iter__') self.validate_isinstance(Iterable, '__iter__') # Check None blocking @@ -992,22 +992,22 @@ class It: def __iter__(self): return iter([]) class ItBlocked(It): __iter__ = None - self.assertTrue(issubclass(It, Iterable)) - self.assertTrue(isinstance(It(), Iterable)) - self.assertFalse(issubclass(ItBlocked, Iterable)) - self.assertFalse(isinstance(ItBlocked(), Iterable)) + self.assertIsSubclass(It, Iterable) + self.assertIsInstance(It(), Iterable) + self.assertNotIsSubclass(ItBlocked, Iterable) + self.assertNotIsInstance(ItBlocked(), Iterable) def test_Reversible(self): # Check some non-reversibles non_samples = [None, 42, 3.14, 1j, set(), frozenset()] for x in non_samples: self.assertNotIsInstance(x, Reversible) - self.assertFalse(issubclass(type(x), Reversible), repr(type(x))) + self.assertNotIsSubclass(type(x), Reversible) # Check some non-reversible iterables non_reversibles = [_test_gen(), (x for x in []), iter([]), reversed([])] for x in non_reversibles: self.assertNotIsInstance(x, Reversible) - self.assertFalse(issubclass(type(x), Reversible), repr(type(x))) + self.assertNotIsSubclass(type(x), Reversible) # Check some reversible iterables samples = [bytes(), str(), tuple(), list(), OrderedDict(), 
OrderedDict().keys(), OrderedDict().items(), @@ -1016,11 +1016,11 @@ def test_Reversible(self): dict().keys(), dict().items(), dict().values()] for x in samples: self.assertIsInstance(x, Reversible) - self.assertTrue(issubclass(type(x), Reversible), repr(type(x))) + self.assertIsSubclass(type(x), Reversible) # Check also Mapping, MutableMapping, and Sequence - self.assertTrue(issubclass(Sequence, Reversible), repr(Sequence)) - self.assertFalse(issubclass(Mapping, Reversible), repr(Mapping)) - self.assertFalse(issubclass(MutableMapping, Reversible), repr(MutableMapping)) + self.assertIsSubclass(Sequence, Reversible) + self.assertNotIsSubclass(Mapping, Reversible) + self.assertNotIsSubclass(MutableMapping, Reversible) # Check direct subclassing class R(Reversible): def __iter__(self): @@ -1028,17 +1028,17 @@ def __iter__(self): def __reversed__(self): return iter(list()) self.assertEqual(list(reversed(R())), []) - self.assertFalse(issubclass(float, R)) + self.assertNotIsSubclass(float, R) self.validate_abstract_methods(Reversible, '__reversed__', '__iter__') # Check reversible non-iterable (which is not Reversible) class RevNoIter: def __reversed__(self): return reversed([]) class RevPlusIter(RevNoIter): def __iter__(self): return iter([]) - self.assertFalse(issubclass(RevNoIter, Reversible)) - self.assertFalse(isinstance(RevNoIter(), Reversible)) - self.assertTrue(issubclass(RevPlusIter, Reversible)) - self.assertTrue(isinstance(RevPlusIter(), Reversible)) + self.assertNotIsSubclass(RevNoIter, Reversible) + self.assertNotIsInstance(RevNoIter(), Reversible) + self.assertIsSubclass(RevPlusIter, Reversible) + self.assertIsInstance(RevPlusIter(), Reversible) # Check None blocking class Rev: def __iter__(self): return iter([]) @@ -1047,39 +1047,38 @@ class RevItBlocked(Rev): __iter__ = None class RevRevBlocked(Rev): __reversed__ = None - self.assertTrue(issubclass(Rev, Reversible)) - self.assertTrue(isinstance(Rev(), Reversible)) - self.assertFalse(issubclass(RevItBlocked, Reversible)) - self.assertFalse(isinstance(RevItBlocked(), Reversible)) - self.assertFalse(issubclass(RevRevBlocked, Reversible)) - self.assertFalse(isinstance(RevRevBlocked(), Reversible)) + self.assertIsSubclass(Rev, Reversible) + self.assertIsInstance(Rev(), Reversible) + self.assertNotIsSubclass(RevItBlocked, Reversible) + self.assertNotIsInstance(RevItBlocked(), Reversible) + self.assertNotIsSubclass(RevRevBlocked, Reversible) + self.assertNotIsInstance(RevRevBlocked(), Reversible) def test_Collection(self): # Check some non-collections non_collections = [None, 42, 3.14, 1j, lambda x: 2*x] for x in non_collections: self.assertNotIsInstance(x, Collection) - self.assertFalse(issubclass(type(x), Collection), repr(type(x))) + self.assertNotIsSubclass(type(x), Collection) # Check some non-collection iterables non_col_iterables = [_test_gen(), iter(b''), iter(bytearray()), (x for x in [])] for x in non_col_iterables: self.assertNotIsInstance(x, Collection) - self.assertFalse(issubclass(type(x), Collection), repr(type(x))) + self.assertNotIsSubclass(type(x), Collection) # Check some collections samples = [set(), frozenset(), dict(), bytes(), str(), tuple(), list(), dict().keys(), dict().items(), dict().values()] for x in samples: self.assertIsInstance(x, Collection) - self.assertTrue(issubclass(type(x), Collection), repr(type(x))) + self.assertIsSubclass(type(x), Collection) # Check also Mapping, MutableMapping, etc. 
- self.assertTrue(issubclass(Sequence, Collection), repr(Sequence)) - self.assertTrue(issubclass(Mapping, Collection), repr(Mapping)) - self.assertTrue(issubclass(MutableMapping, Collection), - repr(MutableMapping)) - self.assertTrue(issubclass(Set, Collection), repr(Set)) - self.assertTrue(issubclass(MutableSet, Collection), repr(MutableSet)) - self.assertTrue(issubclass(Sequence, Collection), repr(MutableSet)) + self.assertIsSubclass(Sequence, Collection) + self.assertIsSubclass(Mapping, Collection) + self.assertIsSubclass(MutableMapping, Collection) + self.assertIsSubclass(Set, Collection) + self.assertIsSubclass(MutableSet, Collection) + self.assertIsSubclass(Sequence, Collection) # Check direct subclassing class Col(Collection): def __iter__(self): @@ -1090,13 +1089,13 @@ def __contains__(self, item): return False class DerCol(Col): pass self.assertEqual(list(iter(Col())), []) - self.assertFalse(issubclass(list, Col)) - self.assertFalse(issubclass(set, Col)) - self.assertFalse(issubclass(float, Col)) + self.assertNotIsSubclass(list, Col) + self.assertNotIsSubclass(set, Col) + self.assertNotIsSubclass(float, Col) self.assertEqual(list(iter(DerCol())), []) - self.assertFalse(issubclass(list, DerCol)) - self.assertFalse(issubclass(set, DerCol)) - self.assertFalse(issubclass(float, DerCol)) + self.assertNotIsSubclass(list, DerCol) + self.assertNotIsSubclass(set, DerCol) + self.assertNotIsSubclass(float, DerCol) self.validate_abstract_methods(Collection, '__len__', '__iter__', '__contains__') # Check sized container non-iterable (which is not Collection) etc. @@ -1109,12 +1108,12 @@ def __contains__(self, item): return False class ColNoCont: def __iter__(self): return iter([]) def __len__(self): return 0 - self.assertFalse(issubclass(ColNoIter, Collection)) - self.assertFalse(isinstance(ColNoIter(), Collection)) - self.assertFalse(issubclass(ColNoSize, Collection)) - self.assertFalse(isinstance(ColNoSize(), Collection)) - self.assertFalse(issubclass(ColNoCont, Collection)) - self.assertFalse(isinstance(ColNoCont(), Collection)) + self.assertNotIsSubclass(ColNoIter, Collection) + self.assertNotIsInstance(ColNoIter(), Collection) + self.assertNotIsSubclass(ColNoSize, Collection) + self.assertNotIsInstance(ColNoSize(), Collection) + self.assertNotIsSubclass(ColNoCont, Collection) + self.assertNotIsInstance(ColNoCont(), Collection) # Check None blocking class SizeBlock: def __iter__(self): return iter([]) @@ -1124,10 +1123,10 @@ class IterBlock: def __len__(self): return 0 def __contains__(self): return True __iter__ = None - self.assertFalse(issubclass(SizeBlock, Collection)) - self.assertFalse(isinstance(SizeBlock(), Collection)) - self.assertFalse(issubclass(IterBlock, Collection)) - self.assertFalse(isinstance(IterBlock(), Collection)) + self.assertNotIsSubclass(SizeBlock, Collection) + self.assertNotIsInstance(SizeBlock(), Collection) + self.assertNotIsSubclass(IterBlock, Collection) + self.assertNotIsInstance(IterBlock(), Collection) # Check None blocking in subclass class ColImpl: def __iter__(self): @@ -1138,15 +1137,15 @@ def __contains__(self, item): return False class NonCol(ColImpl): __contains__ = None - self.assertFalse(issubclass(NonCol, Collection)) - self.assertFalse(isinstance(NonCol(), Collection)) + self.assertNotIsSubclass(NonCol, Collection) + self.assertNotIsInstance(NonCol(), Collection) def test_Iterator(self): non_samples = [None, 42, 3.14, 1j, b"", "", (), [], {}, set()] for x in non_samples: self.assertNotIsInstance(x, Iterator) - 
self.assertFalse(issubclass(type(x), Iterator), repr(type(x))) + self.assertNotIsSubclass(type(x), Iterator) samples = [iter(bytes()), iter(str()), iter(tuple()), iter(list()), iter(dict()), iter(set()), iter(frozenset()), @@ -1157,7 +1156,7 @@ def test_Iterator(self): ] for x in samples: self.assertIsInstance(x, Iterator) - self.assertTrue(issubclass(type(x), Iterator), repr(type(x))) + self.assertIsSubclass(type(x), Iterator) self.validate_abstract_methods(Iterator, '__next__', '__iter__') # Issue 10565 @@ -1190,7 +1189,7 @@ def throw(self, typ, val=None, tb=None): pass iter(()), iter([]), NonGen1(), NonGen2(), NonGen3()] for x in non_samples: self.assertNotIsInstance(x, Generator) - self.assertFalse(issubclass(type(x), Generator), repr(type(x))) + self.assertNotIsSubclass(type(x), Generator) class Gen: def __iter__(self): return self @@ -1212,7 +1211,7 @@ def gen(): for x in samples: self.assertIsInstance(x, Iterator) self.assertIsInstance(x, Generator) - self.assertTrue(issubclass(type(x), Generator), repr(type(x))) + self.assertIsSubclass(type(x), Generator) self.validate_abstract_methods(Generator, 'send', 'throw') # mixin tests @@ -1261,7 +1260,7 @@ def athrow(self, typ, val=None, tb=None): pass iter(()), iter([]), NonAGen1(), NonAGen2(), NonAGen3()] for x in non_samples: self.assertNotIsInstance(x, AsyncGenerator) - self.assertFalse(issubclass(type(x), AsyncGenerator), repr(type(x))) + self.assertNotIsSubclass(type(x), AsyncGenerator) class Gen: def __aiter__(self): return self @@ -1283,7 +1282,7 @@ async def gen(): for x in samples: self.assertIsInstance(x, AsyncIterator) self.assertIsInstance(x, AsyncGenerator) - self.assertTrue(issubclass(type(x), AsyncGenerator), repr(type(x))) + self.assertIsSubclass(type(x), AsyncGenerator) self.validate_abstract_methods(AsyncGenerator, 'asend', 'athrow') def run_async(coro): @@ -1326,14 +1325,14 @@ def test_Sized(self): ] for x in non_samples: self.assertNotIsInstance(x, Sized) - self.assertFalse(issubclass(type(x), Sized), repr(type(x))) + self.assertNotIsSubclass(type(x), Sized) samples = [bytes(), str(), tuple(), list(), set(), frozenset(), dict(), dict().keys(), dict().items(), dict().values(), ] for x in samples: self.assertIsInstance(x, Sized) - self.assertTrue(issubclass(type(x), Sized), repr(type(x))) + self.assertIsSubclass(type(x), Sized) self.validate_abstract_methods(Sized, '__len__') self.validate_isinstance(Sized, '__len__') @@ -1344,14 +1343,14 @@ def test_Container(self): ] for x in non_samples: self.assertNotIsInstance(x, Container) - self.assertFalse(issubclass(type(x), Container), repr(type(x))) + self.assertNotIsSubclass(type(x), Container) samples = [bytes(), str(), tuple(), list(), set(), frozenset(), dict(), dict().keys(), dict().items(), ] for x in samples: self.assertIsInstance(x, Container) - self.assertTrue(issubclass(type(x), Container), repr(type(x))) + self.assertIsSubclass(type(x), Container) self.validate_abstract_methods(Container, '__contains__') self.validate_isinstance(Container, '__contains__') @@ -1363,7 +1362,7 @@ def test_Callable(self): ] for x in non_samples: self.assertNotIsInstance(x, Callable) - self.assertFalse(issubclass(type(x), Callable), repr(type(x))) + self.assertNotIsSubclass(type(x), Callable) samples = [lambda: None, type, int, object, len, @@ -1371,7 +1370,7 @@ def test_Callable(self): ] for x in samples: self.assertIsInstance(x, Callable) - self.assertTrue(issubclass(type(x), Callable), repr(type(x))) + self.assertIsSubclass(type(x), Callable) self.validate_abstract_methods(Callable, 
'__call__') self.validate_isinstance(Callable, '__call__') @@ -1379,16 +1378,16 @@ def test_direct_subclassing(self): for B in Hashable, Iterable, Iterator, Reversible, Sized, Container, Callable: class C(B): pass - self.assertTrue(issubclass(C, B)) - self.assertFalse(issubclass(int, C)) + self.assertIsSubclass(C, B) + self.assertNotIsSubclass(int, C) def test_registration(self): for B in Hashable, Iterable, Iterator, Reversible, Sized, Container, Callable: class C: __hash__ = None # Make sure it isn't hashable by default - self.assertFalse(issubclass(C, B), B.__name__) + self.assertNotIsSubclass(C, B) B.register(C) - self.assertTrue(issubclass(C, B)) + self.assertIsSubclass(C, B) class WithSet(MutableSet): @@ -1419,7 +1418,7 @@ class TestCollectionABCs(ABCTestCase): def test_Set(self): for sample in [set, frozenset]: self.assertIsInstance(sample(), Set) - self.assertTrue(issubclass(sample, Set)) + self.assertIsSubclass(sample, Set) self.validate_abstract_methods(Set, '__contains__', '__iter__', '__len__') class MySet(Set): def __contains__(self, x): @@ -1500,9 +1499,9 @@ def __len__(self): def test_MutableSet(self): self.assertIsInstance(set(), MutableSet) - self.assertTrue(issubclass(set, MutableSet)) + self.assertIsSubclass(set, MutableSet) self.assertNotIsInstance(frozenset(), MutableSet) - self.assertFalse(issubclass(frozenset, MutableSet)) + self.assertNotIsSubclass(frozenset, MutableSet) self.validate_abstract_methods(MutableSet, '__contains__', '__iter__', '__len__', 'add', 'discard') @@ -1841,7 +1840,7 @@ def test_Set_hash_matches_frozenset(self): def test_Mapping(self): for sample in [dict]: self.assertIsInstance(sample(), Mapping) - self.assertTrue(issubclass(sample, Mapping)) + self.assertIsSubclass(sample, Mapping) self.validate_abstract_methods(Mapping, '__contains__', '__iter__', '__len__', '__getitem__') class MyMapping(Mapping): @@ -1857,7 +1856,7 @@ def __iter__(self): def test_MutableMapping(self): for sample in [dict]: self.assertIsInstance(sample(), MutableMapping) - self.assertTrue(issubclass(sample, MutableMapping)) + self.assertIsSubclass(sample, MutableMapping) self.validate_abstract_methods(MutableMapping, '__contains__', '__iter__', '__len__', '__getitem__', '__setitem__', '__delitem__') @@ -1891,12 +1890,12 @@ def test_MutableMapping_subclass(self): def test_Sequence(self): for sample in [tuple, list, bytes, str]: self.assertIsInstance(sample(), Sequence) - self.assertTrue(issubclass(sample, Sequence)) + self.assertIsSubclass(sample, Sequence) self.assertIsInstance(range(10), Sequence) - self.assertTrue(issubclass(range, Sequence)) + self.assertIsSubclass(range, Sequence) self.assertIsInstance(memoryview(b""), Sequence) - self.assertTrue(issubclass(memoryview, Sequence)) - self.assertTrue(issubclass(str, Sequence)) + self.assertIsSubclass(memoryview, Sequence) + self.assertIsSubclass(str, Sequence) self.validate_abstract_methods(Sequence, '__contains__', '__iter__', '__len__', '__getitem__') @@ -1938,21 +1937,21 @@ def assert_index_same(seq1, seq2, index_args): def test_Buffer(self): for sample in [bytes, bytearray, memoryview]: self.assertIsInstance(sample(b"x"), Buffer) - self.assertTrue(issubclass(sample, Buffer)) + self.assertIsSubclass(sample, Buffer) for sample in [str, list, tuple]: self.assertNotIsInstance(sample(), Buffer) - self.assertFalse(issubclass(sample, Buffer)) + self.assertNotIsSubclass(sample, Buffer) self.validate_abstract_methods(Buffer, '__buffer__') def test_MutableSequence(self): for sample in [tuple, str, bytes]: 
self.assertNotIsInstance(sample(), MutableSequence) - self.assertFalse(issubclass(sample, MutableSequence)) + self.assertNotIsSubclass(sample, MutableSequence) for sample in [list, bytearray, deque]: self.assertIsInstance(sample(), MutableSequence) - self.assertTrue(issubclass(sample, MutableSequence)) - self.assertTrue(issubclass(array.array, MutableSequence)) - self.assertFalse(issubclass(str, MutableSequence)) + self.assertIsSubclass(sample, MutableSequence) + self.assertIsSubclass(array.array, MutableSequence) + self.assertNotIsSubclass(str, MutableSequence) self.validate_abstract_methods(MutableSequence, '__contains__', '__iter__', '__len__', '__getitem__', '__setitem__', '__delitem__', 'insert') @@ -2043,8 +2042,8 @@ def test_basics(self): self.assertEqual(c, Counter(a=3, b=2, c=1)) self.assertIsInstance(c, dict) self.assertIsInstance(c, Mapping) - self.assertTrue(issubclass(Counter, dict)) - self.assertTrue(issubclass(Counter, Mapping)) + self.assertIsSubclass(Counter, dict) + self.assertIsSubclass(Counter, Mapping) self.assertEqual(len(c), 3) self.assertEqual(sum(c.values()), 6) self.assertEqual(list(c.values()), [3, 2, 1]) diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index 3a34c6822bc079..a580a240d9f474 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -766,6 +766,7 @@ def test_d_compile_error(self): rc, out, err = self.assertRunNotOK('-q', '-d', 'dinsdale', self.pkgdir) self.assertRegex(out, b'File "dinsdale') + @support.force_not_colorized def test_d_runtime_error(self): bazfn = script_helper.make_script(self.pkgdir, 'baz', 'raise Exception') self.assertRunOK('-q', '-d', 'dinsdale', self.pkgdir) diff --git a/Lib/test/test_configparser.py b/Lib/test/test_configparser.py index e3c5d08dd1e7d1..bde805eb741c33 100644 --- a/Lib/test/test_configparser.py +++ b/Lib/test/test_configparser.py @@ -2174,6 +2174,15 @@ def test_disabled_error(self): with self.assertRaises(configparser.UnnamedSectionDisabledError): configparser.ConfigParser().add_section(configparser.UNNAMED_SECTION) + def test_multiple_configs(self): + cfg = configparser.ConfigParser(allow_unnamed_section=True) + cfg.read_string('a = 1') + cfg.read_string('b = 2') + + self.assertEqual([configparser.UNNAMED_SECTION], cfg.sections()) + self.assertEqual('1', cfg[configparser.UNNAMED_SECTION]['a']) + self.assertEqual('2', cfg[configparser.UNNAMED_SECTION]['b']) + class MiscTestCase(unittest.TestCase): def test__all__(self): diff --git a/Lib/test/test_ctypes/test_c_simple_type_meta.py b/Lib/test/test_ctypes/test_c_simple_type_meta.py index eb77d6d7782478..b446fd5c77dde2 100644 --- a/Lib/test/test_ctypes/test_c_simple_type_meta.py +++ b/Lib/test/test_ctypes/test_c_simple_type_meta.py @@ -1,4 +1,5 @@ import unittest +from test.support import MS_WINDOWS import ctypes from ctypes import POINTER, c_void_p @@ -54,9 +55,9 @@ class Sub2(Sub): pass self.assertIsInstance(POINTER(Sub2), p_meta) - self.assertTrue(issubclass(POINTER(Sub2), Sub2)) - self.assertTrue(issubclass(POINTER(Sub2), POINTER(Sub))) - self.assertTrue(issubclass(POINTER(Sub), POINTER(CtBase))) + self.assertIsSubclass(POINTER(Sub2), Sub2) + self.assertIsSubclass(POINTER(Sub2), POINTER(Sub)) + self.assertIsSubclass(POINTER(Sub), POINTER(CtBase)) def test_creating_pointer_in_dunder_new_2(self): # A simpler variant of the above, used in `CoClass` of the `comtypes` @@ -84,7 +85,7 @@ class Sub(CtBase): pass self.assertIsInstance(POINTER(Sub), p_meta) - self.assertTrue(issubclass(POINTER(Sub), Sub)) + 
self.assertIsSubclass(POINTER(Sub), Sub) def test_creating_pointer_in_dunder_init_1(self): class ct_meta(type): @@ -120,9 +121,9 @@ class Sub2(Sub): pass self.assertIsInstance(POINTER(Sub2), p_meta) - self.assertTrue(issubclass(POINTER(Sub2), Sub2)) - self.assertTrue(issubclass(POINTER(Sub2), POINTER(Sub))) - self.assertTrue(issubclass(POINTER(Sub), POINTER(CtBase))) + self.assertIsSubclass(POINTER(Sub2), Sub2) + self.assertIsSubclass(POINTER(Sub2), POINTER(Sub)) + self.assertIsSubclass(POINTER(Sub), POINTER(CtBase)) def test_creating_pointer_in_dunder_init_2(self): class ct_meta(type): @@ -149,4 +150,21 @@ class Sub(CtBase): pass self.assertIsInstance(POINTER(Sub), p_meta) - self.assertTrue(issubclass(POINTER(Sub), Sub)) + self.assertIsSubclass(POINTER(Sub), Sub) + + def test_bad_type_message(self): + """Verify the error message that lists all available type codes""" + # (The string is generated at runtime, so this checks the underlying + # set of types as well as correct construction of the string.) + with self.assertRaises(AttributeError) as cm: + class F(metaclass=PyCSimpleType): + _type_ = "\0" + message = str(cm.exception) + expected_type_chars = list('cbBhHiIlLdCEFfuzZqQPXOv?g') + if not hasattr(ctypes, 'c_float_complex'): + expected_type_chars.remove('C') + expected_type_chars.remove('E') + expected_type_chars.remove('F') + if not MS_WINDOWS: + expected_type_chars.remove('X') + self.assertIn("'" + ''.join(expected_type_chars) + "'", message) diff --git a/Lib/test/test_ctypes/test_generated_structs.py b/Lib/test/test_ctypes/test_generated_structs.py index d61754d6d49e70..1df9f0dc16368f 100644 --- a/Lib/test/test_ctypes/test_generated_structs.py +++ b/Lib/test/test_ctypes/test_generated_structs.py @@ -443,7 +443,7 @@ def test_generated_data(self): - None - reason to skip the test (str) - This does depend on the C compiler keeping padding bits zero. + This does depend on the C compiler keeping padding bits unchanged. Common compilers seem to do so. 
""" for name, cls in TESTCASES.items(): @@ -696,7 +696,8 @@ def output(string): output(' ' + line) typename = f'{struct_or_union(cls)} {name}' output(f""" - {typename} value = {{0}}; + {typename} value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString({c_str_repr(name)})); APPEND(PyLong_FromLong(sizeof({typename}))); APPEND(PyLong_FromLong(_Alignof({typename}))); diff --git a/Lib/test/test_ctypes/test_loading.py b/Lib/test/test_ctypes/test_loading.py index fc1eecb77e17e3..13ed813ad98c31 100644 --- a/Lib/test/test_ctypes/test_loading.py +++ b/Lib/test/test_ctypes/test_loading.py @@ -135,7 +135,7 @@ def test_1703286_B(self): 'test specific to Windows') def test_load_hasattr(self): # bpo-34816: shouldn't raise OSError - self.assertFalse(hasattr(ctypes.windll, 'test')) + self.assertNotHasAttr(ctypes.windll, 'test') @unittest.skipUnless(os.name == "nt", 'test specific to Windows') diff --git a/Lib/test/test_ctypes/test_repr.py b/Lib/test/test_ctypes/test_repr.py index e7587984a92c45..8c85e6cbe70cea 100644 --- a/Lib/test/test_ctypes/test_repr.py +++ b/Lib/test/test_ctypes/test_repr.py @@ -22,12 +22,12 @@ class ReprTest(unittest.TestCase): def test_numbers(self): for typ in subclasses: base = typ.__bases__[0] - self.assertTrue(repr(base(42)).startswith(base.__name__)) - self.assertEqual(" str: LOAD_SMALL_INT 1 BINARY_OP 13 (+=) STORE_NAME 0 (x) - JUMP_BACKWARD 8 (to L1) + JUMP_BACKWARD 12 (to L1) """ dis_traceback = """\ @@ -843,7 +843,7 @@ def foo(x): L1: RESUME 0 LOAD_FAST 0 (.0) GET_ITER - L2: FOR_ITER 10 (to L3) + L2: FOR_ITER 14 (to L3) STORE_FAST 1 (z) LOAD_DEREF 2 (x) LOAD_FAST 1 (z) @@ -851,7 +851,7 @@ def foo(x): YIELD_VALUE 0 RESUME 5 POP_TOP - JUMP_BACKWARD 12 (to L2) + JUMP_BACKWARD 16 (to L2) L3: END_FOR POP_ITER LOAD_CONST 0 (None) @@ -999,12 +999,14 @@ def test_boundaries(self): def test_widths(self): long_opcodes = set(['JUMP_BACKWARD_NO_INTERRUPT', 'INSTRUMENTED_CALL_FUNCTION_EX']) - for opcode, opname in enumerate(dis.opname): + for op, opname in enumerate(dis.opname): if opname in long_opcodes or opname.startswith("INSTRUMENTED"): continue + if opname in opcode._specialized_opmap: + continue with self.subTest(opname=opname): width = dis._OPNAME_WIDTH - if opcode in dis.hasarg: + if op in dis.hasarg: width += 1 + dis._OPARG_WIDTH self.assertLessEqual(len(opname), width) @@ -1805,7 +1807,7 @@ def _prepare_test_cases(): Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=116, start_offset=116, starts_line=False, line_number=10, label=None, positions=None, cache_info=None), Instruction(opname='LOAD_FAST_CHECK', opcode=85, arg=0, argval='i', argrepr='i', offset=118, start_offset=118, starts_line=True, line_number=11, label=5, positions=None, cache_info=None), Instruction(opname='TO_BOOL', opcode=39, arg=None, argval=None, argrepr='', offset=120, start_offset=120, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=36, argval=204, argrepr='to L8', offset=128, start_offset=128, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=40, argval=212, argrepr='to L8', offset=128, start_offset=128, starts_line=False, line_number=11, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=132, 
start_offset=132, starts_line=False, line_number=11, label=None, positions=None, cache_info=None), Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=134, start_offset=134, starts_line=True, line_number=12, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=144, start_offset=144, starts_line=False, line_number=12, label=None, positions=None, cache_info=None), @@ -1813,93 +1815,93 @@ def _prepare_test_cases(): Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=154, start_offset=154, starts_line=False, line_number=12, label=None, positions=None, cache_info=None), Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=156, start_offset=156, starts_line=True, line_number=13, label=None, positions=None, cache_info=None), Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=1, argval=1, argrepr='', offset=158, start_offset=158, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), - Instruction(opname='BINARY_OP', opcode=44, arg=23, argval=23, argrepr='-=', offset=160, start_offset=160, starts_line=False, line_number=13, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='STORE_FAST', opcode=109, arg=0, argval='i', argrepr='i', offset=164, start_offset=164, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=166, start_offset=166, starts_line=True, line_number=14, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=6, argval=6, argrepr='', offset=168, start_offset=168, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), - Instruction(opname='COMPARE_OP', opcode=56, arg=148, argval='>', argrepr='bool(>)', offset=170, start_offset=170, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=3, argval=184, argrepr='to L6', offset=174, start_offset=174, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=178, start_offset=178, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD', opcode=74, arg=33, argval=118, argrepr='to L5', offset=180, start_offset=180, starts_line=True, line_number=15, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=184, start_offset=184, starts_line=True, line_number=16, label=6, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=4, argval=4, argrepr='', offset=186, start_offset=186, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), - Instruction(opname='COMPARE_OP', opcode=56, arg=18, argval='<', argrepr='bool(<)', offset=188, start_offset=188, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=3, argval=202, argrepr='to 
L7', offset=192, start_offset=192, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=196, start_offset=196, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD', opcode=74, arg=42, argval=118, argrepr='to L5', offset=198, start_offset=198, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='JUMP_FORWARD', opcode=76, arg=11, argval=226, argrepr='to L9', offset=202, start_offset=202, starts_line=True, line_number=17, label=7, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=204, start_offset=204, starts_line=True, line_number=19, label=8, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=214, start_offset=214, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=216, start_offset=216, starts_line=False, line_number=19, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=224, start_offset=224, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), - Instruction(opname='NOP', opcode=27, arg=None, argval=None, argrepr='', offset=226, start_offset=226, starts_line=True, line_number=20, label=9, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=1, argval=1, argrepr='', offset=228, start_offset=228, starts_line=True, line_number=21, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=0, argval=0, argrepr='', offset=230, start_offset=230, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), - Instruction(opname='BINARY_OP', opcode=44, arg=11, argval=11, argrepr='/', offset=232, start_offset=232, starts_line=False, line_number=21, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=236, start_offset=236, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=238, start_offset=238, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=59, arg=1, argval=1, argrepr='', offset=240, start_offset=240, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SPECIAL', opcode=92, arg=1, argval=1, argrepr='__exit__', offset=242, start_offset=242, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='SWAP', opcode=114, arg=2, argval=2, argrepr='', offset=244, start_offset=244, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='SWAP', opcode=114, arg=3, argval=3, 
argrepr='', offset=246, start_offset=246, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_SPECIAL', opcode=92, arg=0, argval=0, argrepr='__enter__', offset=248, start_offset=248, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=0, argval=0, argrepr='', offset=250, start_offset=250, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='STORE_FAST', opcode=109, arg=1, argval='dodgy', argrepr='dodgy', offset=258, start_offset=258, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=260, start_offset=260, starts_line=True, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval='Never reach this', argrepr="'Never reach this'", offset=270, start_offset=270, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=272, start_offset=272, starts_line=False, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=280, start_offset=280, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=282, start_offset=282, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=284, start_offset=284, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=286, start_offset=286, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=3, argval=3, argrepr='', offset=288, start_offset=288, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=296, start_offset=296, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=298, start_offset=298, starts_line=True, line_number=28, label=10, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=308, start_offset=308, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=310, start_offset=310, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), 
('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=318, start_offset=318, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=320, start_offset=320, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='RETURN_VALUE', opcode=35, arg=None, argval=None, argrepr='', offset=322, start_offset=322, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='PUSH_EXC_INFO', opcode=32, arg=None, argval=None, argrepr='', offset=324, start_offset=324, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='WITH_EXCEPT_START', opcode=43, arg=None, argval=None, argrepr='', offset=326, start_offset=326, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='TO_BOOL', opcode=39, arg=None, argval=None, argrepr='', offset=328, start_offset=328, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=2, argval=344, argrepr='to L11', offset=336, start_offset=336, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=340, start_offset=340, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=102, arg=2, argval=2, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=25, label=11, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=346, start_offset=346, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=348, start_offset=348, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=350, start_offset=350, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=352, start_offset=352, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=29, argval=298, argrepr='to L10', offset=354, start_offset=354, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=356, start_offset=356, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=None, label=None, 
positions=None, cache_info=None), - Instruction(opname='PUSH_EXC_INFO', opcode=32, arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=89, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=364, start_offset=364, starts_line=True, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='CHECK_EXC_MATCH', opcode=5, arg=None, argval=None, argrepr='', offset=374, start_offset=374, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), - Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=15, argval=410, argrepr='to L12', offset=376, start_offset=376, starts_line=False, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), - Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=380, start_offset=380, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=382, start_offset=382, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=384, start_offset=384, starts_line=True, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=81, arg=4, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=394, start_offset=394, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=396, start_offset=396, starts_line=False, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=404, start_offset=404, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=406, start_offset=406, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=56, argval=298, argrepr='to L10', offset=408, start_offset=408, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=410, start_offset=410, starts_line=True, line_number=22, label=12, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=412, start_offset=412, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=414, start_offset=414, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=416, start_offset=416, starts_line=False, line_number=None, label=None, positions=None, 
cache_info=None), - Instruction(opname='PUSH_EXC_INFO', opcode=32, arg=None, argval=None, argrepr='', offset=418, start_offset=418, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=420, start_offset=420, starts_line=True, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), - Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=430, start_offset=430, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=432, start_offset=432, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), - Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=440, start_offset=440, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=442, start_offset=442, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), - Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=444, start_offset=444, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=446, start_offset=446, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), - Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=448, start_offset=448, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='BINARY_OP', opcode=44, arg=23, argval=23, argrepr='-=', offset=160, start_offset=160, starts_line=False, line_number=13, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]), + Instruction(opname='STORE_FAST', opcode=109, arg=0, argval='i', argrepr='i', offset=172, start_offset=172, starts_line=False, line_number=13, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=174, start_offset=174, starts_line=True, line_number=14, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=6, argval=6, argrepr='', offset=176, start_offset=176, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), + Instruction(opname='COMPARE_OP', opcode=56, arg=148, argval='>', argrepr='bool(>)', offset=178, start_offset=178, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=3, argval=192, argrepr='to L6', offset=182, start_offset=182, starts_line=False, line_number=14, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=186, start_offset=186, starts_line=False, line_number=14, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD', opcode=74, arg=37, argval=118, argrepr='to L5', offset=188, start_offset=188, 
starts_line=True, line_number=15, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=192, start_offset=192, starts_line=True, line_number=16, label=6, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=4, argval=4, argrepr='', offset=194, start_offset=194, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), + Instruction(opname='COMPARE_OP', opcode=56, arg=18, argval='<', argrepr='bool(<)', offset=196, start_offset=196, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=3, argval=210, argrepr='to L7', offset=200, start_offset=200, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=204, start_offset=204, starts_line=False, line_number=16, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD', opcode=74, arg=46, argval=118, argrepr='to L5', offset=206, start_offset=206, starts_line=False, line_number=16, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='JUMP_FORWARD', opcode=76, arg=11, argval=234, argrepr='to L9', offset=210, start_offset=210, starts_line=True, line_number=17, label=7, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=212, start_offset=212, starts_line=True, line_number=19, label=8, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=1, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=222, start_offset=222, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=224, start_offset=224, starts_line=False, line_number=19, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=232, start_offset=232, starts_line=False, line_number=19, label=None, positions=None, cache_info=None), + Instruction(opname='NOP', opcode=27, arg=None, argval=None, argrepr='', offset=234, start_offset=234, starts_line=True, line_number=20, label=9, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=1, argval=1, argrepr='', offset=236, start_offset=236, starts_line=True, line_number=21, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SMALL_INT', opcode=91, arg=0, argval=0, argrepr='', offset=238, start_offset=238, starts_line=False, line_number=21, label=None, positions=None, cache_info=None), + Instruction(opname='BINARY_OP', opcode=44, arg=11, argval=11, argrepr='/', offset=240, start_offset=240, starts_line=False, line_number=21, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=252, start_offset=252, starts_line=False, line_number=21, label=None, 
positions=None, cache_info=None), + Instruction(opname='LOAD_FAST', opcode=83, arg=0, argval='i', argrepr='i', offset=254, start_offset=254, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=1, argval=1, argrepr='', offset=256, start_offset=256, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SPECIAL', opcode=92, arg=1, argval=1, argrepr='__exit__', offset=258, start_offset=258, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='SWAP', opcode=114, arg=2, argval=2, argrepr='', offset=260, start_offset=260, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='SWAP', opcode=114, arg=3, argval=3, argrepr='', offset=262, start_offset=262, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_SPECIAL', opcode=92, arg=0, argval=0, argrepr='__enter__', offset=264, start_offset=264, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=0, argval=0, argrepr='', offset=266, start_offset=266, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='STORE_FAST', opcode=109, arg=1, argval='dodgy', argrepr='dodgy', offset=274, start_offset=274, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=276, start_offset=276, starts_line=True, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=2, argval='Never reach this', argrepr="'Never reach this'", offset=286, start_offset=286, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=288, start_offset=288, starts_line=False, line_number=26, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=296, start_offset=296, starts_line=False, line_number=26, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=298, start_offset=298, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=300, start_offset=300, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=302, start_offset=302, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=3, argval=3, argrepr='', offset=304, start_offset=304, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=312, start_offset=312, starts_line=False, 
line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=314, start_offset=314, starts_line=True, line_number=28, label=10, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=324, start_offset=324, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=326, start_offset=326, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=334, start_offset=334, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_CONST', opcode=81, arg=3, argval=None, argrepr='None', offset=336, start_offset=336, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='RETURN_VALUE', opcode=35, arg=None, argval=None, argrepr='', offset=338, start_offset=338, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='PUSH_EXC_INFO', opcode=32, arg=None, argval=None, argrepr='', offset=340, start_offset=340, starts_line=True, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='WITH_EXCEPT_START', opcode=43, arg=None, argval=None, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='TO_BOOL', opcode=39, arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_JUMP_IF_TRUE', opcode=100, arg=2, argval=360, argrepr='to L11', offset=352, start_offset=352, starts_line=False, line_number=25, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=356, start_offset=356, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=2, argval=2, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=25, label=11, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=364, start_offset=364, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=366, start_offset=366, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=368, 
start_offset=368, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=29, argval=314, argrepr='to L10', offset=370, start_offset=370, starts_line=False, line_number=25, label=None, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=372, start_offset=372, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=374, start_offset=374, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=376, start_offset=376, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='PUSH_EXC_INFO', opcode=32, arg=None, argval=None, argrepr='', offset=378, start_offset=378, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=380, start_offset=380, starts_line=True, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='CHECK_EXC_MATCH', opcode=5, arg=None, argval=None, argrepr='', offset=390, start_offset=390, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), + Instruction(opname='POP_JUMP_IF_FALSE', opcode=97, arg=15, argval=426, argrepr='to L12', offset=392, start_offset=392, starts_line=False, line_number=22, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00')]), + Instruction(opname='NOT_TAKEN', opcode=28, arg=None, argval=None, argrepr='', offset=396, start_offset=396, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=398, start_offset=398, starts_line=False, line_number=22, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=400, start_offset=400, starts_line=True, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=4, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=410, start_offset=410, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=412, start_offset=412, starts_line=False, line_number=23, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=420, start_offset=420, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=422, start_offset=422, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='JUMP_BACKWARD_NO_INTERRUPT', opcode=75, arg=56, argval=314, argrepr='to 
L10', offset=424, start_offset=424, starts_line=False, line_number=23, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=426, start_offset=426, starts_line=True, line_number=22, label=12, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=428, start_offset=428, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=430, start_offset=430, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=432, start_offset=432, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='PUSH_EXC_INFO', opcode=32, arg=None, argval=None, argrepr='', offset=434, start_offset=434, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='LOAD_GLOBAL', opcode=89, arg=3, argval='print', argrepr='print + NULL', offset=436, start_offset=436, starts_line=True, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]), + Instruction(opname='LOAD_CONST', opcode=81, arg=5, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=446, start_offset=446, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='CALL', opcode=51, arg=1, argval=1, argrepr='', offset=448, start_offset=448, starts_line=False, line_number=28, label=None, positions=None, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]), + Instruction(opname='POP_TOP', opcode=31, arg=None, argval=None, argrepr='', offset=456, start_offset=456, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=0, argval=0, argrepr='', offset=458, start_offset=458, starts_line=False, line_number=28, label=None, positions=None, cache_info=None), + Instruction(opname='COPY', opcode=59, arg=3, argval=3, argrepr='', offset=460, start_offset=460, starts_line=True, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='POP_EXCEPT', opcode=29, arg=None, argval=None, argrepr='', offset=462, start_offset=462, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), + Instruction(opname='RERAISE', opcode=102, arg=1, argval=1, argrepr='', offset=464, start_offset=464, starts_line=False, line_number=None, label=None, positions=None, cache_info=None), ] # One last piece of inspect fodder to check the default line number handling diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py index b1e165fe16b54f..a4a49298bab3be 100644 --- a/Lib/test/test_doctest/test_doctest.py +++ b/Lib/test/test_doctest/test_doctest.py @@ -2860,7 +2860,7 @@ def test_testfile(): r""" >>> _colorize.COLORIZE = save_colorize """ -class TestImporter(importlib.abc.MetaPathFinder, importlib.abc.ResourceLoader): +class TestImporter(importlib.abc.MetaPathFinder): def find_spec(self, fullname, path, target=None): return importlib.util.spec_from_file_location(fullname, path, loader=self) @@ -2869,6 +2869,12 @@ def get_data(self, path): with open(path, mode='rb') as f: return f.read() + 
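+    # Together with get_data() above, the two methods below give this finder a
+    # minimal loader interface without the deprecated importlib.abc.ResourceLoader
+    # base: create_module() returning None requests default module creation, and
+    # doctest's file loading only needs get_data(), so exec_module() just raises.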
def exec_module(self, module): + raise ImportError + + def create_module(self, spec): + return None + class TestHook: def __init__(self, pathdir): diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py index 95224e19f67ce5..d60a7039f9d4c6 100644 --- a/Lib/test/test_email/test__header_value_parser.py +++ b/Lib/test/test_email/test__header_value_parser.py @@ -3082,13 +3082,40 @@ def test_address_list_with_list_separator_after_fold(self): self._test(parser.get_address_list(to)[0], f'{a},\n =?utf-8?q?H=C3=BCbsch?= Kaktus \n') - a = '.' * 79 + a = '.' * 79 # ('.' is a special, so must be in quoted-string.) to = f'"{a}" , "Hübsch Kaktus" ' self._test(parser.get_address_list(to)[0], - f'{a}\n' + f'"{a}"\n' ' , =?utf-8?q?H=C3=BCbsch?= Kaktus ' '\n') + def test_address_list_with_specials_in_long_quoted_string(self): + # Regression for gh-80222. + policy = self.policy.clone(max_line_length=40) + cases = [ + # (to, folded) + ('"Exfiltrator (unclosed comment?" ', + '"Exfiltrator (unclosed\n' + ' comment?" \n'), + ('"Escaped \\" chars \\\\ in quoted-string stay escaped" ', + '"Escaped \\" chars \\\\ in quoted-string\n' + ' stay escaped" \n'), + ('This long display name does not need quotes ', + 'This long display name does not need\n' + ' quotes \n'), + ('"Quotes are not required but are retained here" ', + '"Quotes are not required but are\n' + ' retained here" \n'), + ('"A quoted-string, it can be a valid local-part"@example.com', + '"A quoted-string, it can be a valid\n' + ' local-part"@example.com\n'), + ('"local-part-with-specials@but-no-fws.cannot-fold"@example.com', + '"local-part-with-specials@but-no-fws.cannot-fold"@example.com\n'), + ] + for (to, folded) in cases: + with self.subTest(to=to): + self._test(parser.get_address_list(to)[0], folded, policy=policy) + # XXX Need tests with comments on various sides of a unicode token, # and with unicode tokens in the comments. Spaces inside the quotes # currently don't do the right thing. diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 1b55cd156d759d..ffb69c87a065aa 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -51,6 +51,14 @@ MAX_HASH_SEED = 4294967295 ABI_THREAD = 't' if sysconfig.get_config_var('Py_GIL_DISABLED') else '' +# PLATSTDLIB_LANDMARK copied from Modules/getpath.py +if os.name == 'nt': + PLATSTDLIB_LANDMARK = f'{sys.platlibdir}' +else: + VERSION_MAJOR = sys.version_info.major + VERSION_MINOR = sys.version_info.minor + PLATSTDLIB_LANDMARK = (f'{sys.platlibdir}/python{VERSION_MAJOR}.' 
+ f'{VERSION_MINOR}{ABI_THREAD}/lib-dynload') # If we are running from a build dir, but the stdlib has been installed, @@ -1049,7 +1057,6 @@ def test_init_compat_env(self): 'use_hash_seed': True, 'hash_seed': 42, 'tracemalloc': 2, - 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'malloc_stats': True, @@ -1086,7 +1093,6 @@ def test_init_python_env(self): 'use_hash_seed': True, 'hash_seed': 42, 'tracemalloc': 2, - 'perf_profiling': 0, 'import_time': True, 'code_debug_ranges': False, 'malloc_stats': True, @@ -1274,24 +1280,6 @@ def test_init_run_main(self): } self.check_all_configs("test_init_run_main", config, api=API_PYTHON) - def test_init_main(self): - code = ('import _testinternalcapi, json; ' - 'print(json.dumps(_testinternalcapi.get_configs()))') - config = { - 'argv': ['-c', 'arg2'], - 'orig_argv': ['python3', - '-c', code, - 'arg2'], - 'program_name': './python3', - 'run_command': code + '\n', - 'parse_argv': True, - '_init_main': False, - 'sys_path_0': '', - } - self.check_all_configs("test_init_main", config, - api=API_PYTHON, - stderr="Run Python code before _Py_InitializeMain") - def test_init_parse_argv(self): config = { 'parse_argv': True, @@ -1616,7 +1604,13 @@ def test_init_pyvenv_cfg(self): with self.tmpdir_with_python() as tmpdir, \ tempfile.TemporaryDirectory() as pyvenv_home: + ver = sys.version_info + base_prefix = sysconfig.get_config_var("prefix") + + # gh-128690: base_exec_prefix depends if PLATSTDLIB_LANDMARK exists + platstdlib = os.path.join(base_prefix, PLATSTDLIB_LANDMARK) + change_exec_prefix = not os.path.isdir(platstdlib) if not MS_WINDOWS: lib_dynload = os.path.join(pyvenv_home, @@ -1640,7 +1634,8 @@ def test_init_pyvenv_cfg(self): paths = self.module_search_paths() if not MS_WINDOWS: - paths[-1] = lib_dynload + if change_exec_prefix: + paths[-1] = lib_dynload else: paths = [ os.path.join(tmpdir, os.path.basename(paths[0])), @@ -1650,16 +1645,16 @@ def test_init_pyvenv_cfg(self): executable = self.test_exe base_executable = os.path.join(pyvenv_home, os.path.basename(executable)) - exec_prefix = pyvenv_home config = { - 'base_prefix': sysconfig.get_config_var("prefix"), - 'base_exec_prefix': exec_prefix, + 'base_prefix': base_prefix, 'exec_prefix': tmpdir, 'prefix': tmpdir, 'base_executable': base_executable, 'executable': executable, 'module_search_paths': paths, } + if change_exec_prefix: + config['base_exec_prefix'] = pyvenv_home if MS_WINDOWS: config['base_prefix'] = pyvenv_home config['stdlib_dir'] = os.path.join(pyvenv_home, 'Lib') @@ -1766,15 +1761,6 @@ def test_init_warnoptions(self): self.check_all_configs("test_init_warnoptions", config, preconfig, api=API_PYTHON) - def test_init_set_config(self): - config = { - '_init_main': 0, - 'bytes_warning': 2, - 'warnoptions': ['error::BytesWarning'], - } - self.check_all_configs("test_init_set_config", config, - api=API_ISOLATED) - @unittest.skipIf(support.check_bolt_optimized, "segfaults on BOLT instrumented binaries") def test_initconfig_api(self): preconfig = { @@ -1866,22 +1852,6 @@ def test_init_in_background_thread(self): self.assertEqual(err, "") -class SetConfigTests(unittest.TestCase): - def test_set_config(self): - # bpo-42260: Test _PyInterpreterState_SetConfig() - import_helper.import_module('_testcapi') - cmd = [sys.executable, '-X', 'utf8', '-I', '-m', 'test._test_embed_set_config'] - proc = subprocess.run(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - encoding='utf-8', errors='backslashreplace') - if proc.returncode and support.verbose: - print(proc.stdout) - 
print(proc.stderr) - self.assertEqual(proc.returncode, 0, - (proc.returncode, proc.stdout, proc.stderr)) - - class AuditingTests(EmbeddingTestsMixin, unittest.TestCase): def test_open_code_hook(self): self.run_embedded_interpreter("test_open_code_hook") diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index e377383450e19d..582e5b6de6e687 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py @@ -2,7 +2,7 @@ import sys from codecs import BOM_UTF8 -from test import support +from test.support import force_not_colorized from test.support import os_helper from test.support import script_helper from test.support import warnings_helper @@ -44,6 +44,7 @@ def test_EOFS(self): self.assertEqual(cm.exception.text, "ä = '''thîs is ") self.assertEqual(cm.exception.offset, 5) + @force_not_colorized def test_EOFS_with_file(self): expect = ("(, line 1)") with os_helper.temp_dir() as temp_dir: @@ -123,6 +124,7 @@ def test_line_continuation_EOF(self): self.assertEqual(str(cm.exception), expect) @unittest.skipIf(not sys.executable, "sys.executable required") + @force_not_colorized def test_line_continuation_EOF_from_file_bpo2180(self): """Ensure tok_nextc() does not add too many ending newlines.""" with os_helper.temp_dir() as temp_dir: diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 206e22e791e02a..2d324827451b54 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -1465,6 +1465,7 @@ def gen(): @cpython_only @unittest.skipIf(_testcapi is None, "requires _testcapi") + @force_not_colorized def test_recursion_normalizing_infinite_exception(self): # Issue #30697. Test that a RecursionError is raised when # maximum recursion depth has been exceeded when creating @@ -2180,6 +2181,7 @@ def test_multiline_not_highlighted(self): self.assertEqual(result[-len(expected):], expected) +@support.force_not_colorized_test_class class SyntaxErrorTests(unittest.TestCase): maxDiff = None diff --git a/Lib/test/test_external_inspection.py b/Lib/test/test_external_inspection.py index d896fec73d1971..2ab48a4778be4d 100644 --- a/Lib/test/test_external_inspection.py +++ b/Lib/test/test_external_inspection.py @@ -13,8 +13,10 @@ try: from _testexternalinspection import PROCESS_VM_READV_SUPPORTED from _testexternalinspection import get_stack_trace + from _testexternalinspection import get_async_stack_trace except ImportError: - raise unittest.SkipTest("Test only runs when _testexternalinspection is available") + raise unittest.SkipTest( + "Test only runs when _testexternalinspection is available") def _make_test_script(script_dir, script_basename, source): to_return = make_script(script_dir, script_basename, source) @@ -23,12 +25,14 @@ def _make_test_script(script_dir, script_basename, source): class TestGetStackTrace(unittest.TestCase): - @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", "Test only runs on Linux and MacOS") - @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, "Test only runs on Linux with process_vm_readv support") + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") def test_remote_stack_trace(self): # Spawn a process with some realistic Python code script = textwrap.dedent("""\ - import time, sys, os + import time, sys def bar(): for x in range(100): if x == 50: @@ -37,8 +41,8 @@ def baz(): foo() def foo(): - 
fifo = sys.argv[1] - with open(sys.argv[1], "w") as fifo: + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: fifo.write("ready") time.sleep(1000) @@ -74,8 +78,281 @@ def foo(): ] self.assertEqual(stack_trace, expected_stack_trace) - @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", "Test only runs on Linux and MacOS") - @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, "Test only runs on Linux with process_vm_readv support") + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_async_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio + import time + import sys + + def c5(): + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def c4(): + await asyncio.sleep(0) + c5() + + async def c3(): + await c4() + + async def c2(): + await c3() + + async def c1(task): + await task + + async def main(): + async with asyncio.TaskGroup() as tg: + task = tg.create_task(c2(), name="c2_root") + tg.create_task(c1(task), name="sub_main_1") + tg.create_task(c1(task), name="sub_main_2") + + def new_eager_loop(): + loop = asyncio.new_event_loop() + eager_task_factory = asyncio.create_eager_task_factory( + asyncio.Task) + loop.set_task_factory(eager_task_factory) + return loop + + asyncio.run(main(), loop_factory={TASK_FACTORY}) + """) + stack_trace = None + for task_factory_variant in "asyncio.new_event_loop", "new_eager_loop": + with ( + self.subTest(task_factory_variant=task_factory_variant), + os_helper.temp_dir() as work_dir, + ): + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script( + script_dir, 'script', + script.format(TASK_FACTORY=task_factory_variant)) + try: + p = subprocess.Popen( + [sys.executable, script_name, str(fifo)] + ) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest( + "Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + root_task = "Task-1" + expected_stack_trace = [ + ["c5", "c4", "c3", "c2"], + "c2_root", + [ + [["main"], root_task, []], + [["c1"], "sub_main_1", [[["main"], root_task, []]]], + [["c1"], "sub_main_2", [[["main"], root_task, []]]], + ], + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_asyncgen_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio + import time + import sys + + async def gen_nested_call(): + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def gen(): + for num in range(2): + yield num + if num == 1: + await gen_nested_call() + + 
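+            # The generator blocks inside gen_nested_call() after signalling
+            # readiness over the fifo, so the stack sampled by
+            # get_async_stack_trace() is expected to be
+            # ['gen_nested_call', 'gen', 'main'] for Task-1.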
async def main(): + async for el in gen(): + pass + + asyncio.run(main()) + """) + stack_trace = None + with os_helper.temp_dir() as work_dir: + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script(script_dir, 'script', script) + try: + p = subprocess.Popen([sys.executable, script_name, str(fifo)]) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest("Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + expected_stack_trace = [ + ['gen_nested_call', 'gen', 'main'], 'Task-1', [] + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_async_gather_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio + import time + import sys + + async def deep(): + await asyncio.sleep(0) + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def c2(): + await asyncio.sleep(0) + + async def main(): + await asyncio.gather(c1(), c2()) + + asyncio.run(main()) + """) + stack_trace = None + with os_helper.temp_dir() as work_dir: + script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script(script_dir, 'script', script) + try: + p = subprocess.Popen([sys.executable, script_name, str(fifo)]) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest( + "Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + expected_stack_trace = [ + ['deep', 'c1'], 'Task-2', [[['main'], 'Task-1', []]] + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") + def test_async_staggered_race_remote_stack_trace(self): + # Spawn a process with some realistic Python code + script = textwrap.dedent("""\ + import asyncio.staggered + import time + import sys + + async def deep(): + await asyncio.sleep(0) + fifo_path = sys.argv[1] + with open(fifo_path, "w") as fifo: + fifo.write("ready") + time.sleep(10000) + + async def c1(): + await asyncio.sleep(0) + await deep() + + async def c2(): + await asyncio.sleep(10000) + + async def main(): + await asyncio.staggered.staggered_race( + [c1, c2], + delay=None, + ) + + asyncio.run(main()) + """) + stack_trace = None + with os_helper.temp_dir() as work_dir: + 
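+            # Handshake: the spawned script writes "ready" to the fifo before
+            # blocking, so the stack is sampled only once c1() sits inside deep()
+            # and is awaited by main() through asyncio.gather().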
script_dir = os.path.join(work_dir, "script_pkg") + os.mkdir(script_dir) + fifo = f"{work_dir}/the_fifo" + os.mkfifo(fifo) + script_name = _make_test_script(script_dir, 'script', script) + try: + p = subprocess.Popen([sys.executable, script_name, str(fifo)]) + with open(fifo, "r") as fifo_file: + response = fifo_file.read() + self.assertEqual(response, "ready") + stack_trace = get_async_stack_trace(p.pid) + except PermissionError: + self.skipTest( + "Insufficient permissions to read the stack trace") + finally: + os.remove(fifo) + p.kill() + p.terminate() + p.wait(timeout=SHORT_TIMEOUT) + + # sets are unordered, so we want to sort "awaited_by"s + stack_trace[2].sort(key=lambda x: x[1]) + + expected_stack_trace = [ + ['deep', 'c1', 'run_one_coro'], 'Task-2', [[['main'], 'Task-1', []]] + ] + self.assertEqual(stack_trace, expected_stack_trace) + + @unittest.skipIf(sys.platform != "darwin" and sys.platform != "linux", + "Test only runs on Linux and MacOS") + @unittest.skipIf(sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED, + "Test only runs on Linux with process_vm_readv support") def test_self_trace(self): stack_trace = get_stack_trace(os.getpid()) self.assertEqual(stack_trace[0], "test_self_trace") diff --git a/Lib/test/test_frame.py b/Lib/test/test_frame.py index 11f191700ccef0..7bd13eada8fedf 100644 --- a/Lib/test/test_frame.py +++ b/Lib/test/test_frame.py @@ -222,6 +222,56 @@ def test_f_lineno_del_segfault(self): with self.assertRaises(AttributeError): del f.f_lineno + def test_f_generator(self): + # Test f_generator in different contexts. + + def t0(): + def nested(): + frame = sys._getframe() + return frame.f_generator + + def gen(): + yield nested() + + g = gen() + try: + return next(g) + finally: + g.close() + + def t1(): + frame = sys._getframe() + return frame.f_generator + + def t2(): + frame = sys._getframe() + yield frame.f_generator + + async def t3(): + frame = sys._getframe() + return frame.f_generator + + # For regular functions f_generator is None + self.assertIsNone(t0()) + self.assertIsNone(t1()) + + # For generators f_generator is equal to self + g = t2() + try: + frame_g = next(g) + self.assertIs(g, frame_g) + finally: + g.close() + + # Ditto for coroutines + c = t3() + try: + c.send(None) + except StopIteration as ex: + self.assertIs(ex.value, c) + else: + raise AssertionError('coroutine did not exit') + class ReprTest(unittest.TestCase): """ diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index c359f2ecce01f1..1d96b7a2c2459b 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -1649,6 +1649,14 @@ def __repr__(self): #self.assertEqual(f'X{x =}Y', 'Xx\t='+repr(x)+'Y') #self.assertEqual(f'X{x = }Y', 'Xx\t=\t'+repr(x)+'Y') + def test_debug_expressions_are_raw_strings(self): + + self.assertEqual(f'{b"\N{OX}"=}', 'b"\\N{OX}"=b\'\\\\N{OX}\'') + self.assertEqual(f'{r"\xff"=}', 'r"\\xff"=\'\\\\xff\'') + self.assertEqual(f'{r"\n"=}', 'r"\\n"=\'\\\\n\'') + self.assertEqual(f"{'\''=}", "'\\''=\"'\"") + self.assertEqual(f'{'\xc5'=}', r"'\xc5'='Å'") + def test_walrus(self): x = 20 # This isn't an assignment expression, it's 'x', with a format @@ -1757,5 +1765,23 @@ def get_code(s): for s in ["", "some string"]: self.assertEqual(get_code(f"'{s}'"), get_code(f"f'{s}'")) + def test_gh129093(self): + self.assertEqual(f'{1==2=}', '1==2=False') + self.assertEqual(f'{1 == 2=}', '1 == 2=False') + self.assertEqual(f'{1!=2=}', '1!=2=True') + self.assertEqual(f'{1 != 2=}', '1 != 2=True') + + self.assertEqual(f'{(1) != 2=}', '(1) != 2=True') + 
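+        # With the '=' debug specifier, the expression text -- including any
+        # '==' or '!=' operators inside it -- is echoed verbatim before the
+        # trailing '=', followed by the evaluated result (gh-129093).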
self.assertEqual(f'{(1*2) != (3)=}', '(1*2) != (3)=True') + + self.assertEqual(f'{1 != 2 == 3 != 4=}', '1 != 2 == 3 != 4=False') + self.assertEqual(f'{1 == 2 != 3 == 4=}', '1 == 2 != 3 == 4=False') + + self.assertEqual(f'{f'{1==2=}'=}', "f'{1==2=}'='1==2=False'") + self.assertEqual(f'{f'{1 == 2=}'=}', "f'{1 == 2=}'='1 == 2=False'") + self.assertEqual(f'{f'{1!=2=}'=}', "f'{1!=2=}'='1!=2=True'") + self.assertEqual(f'{f'{1 != 2=}'=}', "f'{1 != 2=}'='1 != 2=True'") + + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index 32224866082824..4beb4380c3ad6b 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -645,11 +645,11 @@ def test_bound_method_introspection(self): def test_unbound_method_retrieval(self): obj = self.A - self.assertFalse(hasattr(obj.both, "__self__")) - self.assertFalse(hasattr(obj.nested, "__self__")) - self.assertFalse(hasattr(obj.over_partial, "__self__")) - self.assertFalse(hasattr(obj.static, "__self__")) - self.assertFalse(hasattr(self.a.static, "__self__")) + self.assertNotHasAttr(obj.both, "__self__") + self.assertNotHasAttr(obj.nested, "__self__") + self.assertNotHasAttr(obj.over_partial, "__self__") + self.assertNotHasAttr(obj.static, "__self__") + self.assertNotHasAttr(self.a.static, "__self__") def test_descriptors(self): for obj in [self.A, self.a]: @@ -791,7 +791,7 @@ def wrapper(): self.assertNotEqual(wrapper.__qualname__, f.__qualname__) self.assertEqual(wrapper.__doc__, None) self.assertEqual(wrapper.__annotations__, {}) - self.assertFalse(hasattr(wrapper, 'attr')) + self.assertNotHasAttr(wrapper, 'attr') def test_selective_update(self): def f(): @@ -840,7 +840,7 @@ def wrapper(): pass functools.update_wrapper(wrapper, max) self.assertEqual(wrapper.__name__, 'max') - self.assertTrue(wrapper.__doc__.startswith('max(')) + self.assertStartsWith(wrapper.__doc__, 'max(') self.assertEqual(wrapper.__annotations__, {}) def test_update_type_wrapper(self): @@ -910,7 +910,7 @@ def wrapper(): self.assertEqual(wrapper.__name__, 'wrapper') self.assertNotEqual(wrapper.__qualname__, f.__qualname__) self.assertEqual(wrapper.__doc__, None) - self.assertFalse(hasattr(wrapper, 'attr')) + self.assertNotHasAttr(wrapper, 'attr') def test_selective_update(self): def f(): @@ -2666,15 +2666,15 @@ def _(self, arg): a.t(0) self.assertEqual(a.arg, "int") aa = A() - self.assertFalse(hasattr(aa, 'arg')) + self.assertNotHasAttr(aa, 'arg') a.t('') self.assertEqual(a.arg, "str") aa = A() - self.assertFalse(hasattr(aa, 'arg')) + self.assertNotHasAttr(aa, 'arg') a.t(0.0) self.assertEqual(a.arg, "base") aa = A() - self.assertFalse(hasattr(aa, 'arg')) + self.assertNotHasAttr(aa, 'arg') def test_staticmethod_register(self): class A: @@ -3036,16 +3036,16 @@ def i(arg): @i.register(42) def _(arg): return "I annotated with a non-type" - self.assertTrue(str(exc.exception).startswith(msg_prefix + "42")) - self.assertTrue(str(exc.exception).endswith(msg_suffix)) + self.assertStartsWith(str(exc.exception), msg_prefix + "42") + self.assertEndsWith(str(exc.exception), msg_suffix) with self.assertRaises(TypeError) as exc: @i.register def _(arg): return "I forgot to annotate" - self.assertTrue(str(exc.exception).startswith(msg_prefix + + self.assertStartsWith(str(exc.exception), msg_prefix + "._" - )) - self.assertTrue(str(exc.exception).endswith(msg_suffix)) + ) + self.assertEndsWith(str(exc.exception), msg_suffix) with self.assertRaises(TypeError) as exc: @i.register @@ -3055,23 +3055,23 @@ def _(arg: 
typing.Iterable[str]): # types from `typing`. Instead, annotate with regular types # or ABCs. return "I annotated with a generic collection" - self.assertTrue(str(exc.exception).startswith( + self.assertStartsWith(str(exc.exception), "Invalid annotation for 'arg'." - )) - self.assertTrue(str(exc.exception).endswith( + ) + self.assertEndsWith(str(exc.exception), 'typing.Iterable[str] is not a class.' - )) + ) with self.assertRaises(TypeError) as exc: @i.register def _(arg: typing.Union[int, typing.Iterable[str]]): return "Invalid Union" - self.assertTrue(str(exc.exception).startswith( + self.assertStartsWith(str(exc.exception), "Invalid annotation for 'arg'." - )) - self.assertTrue(str(exc.exception).endswith( + ) + self.assertEndsWith(str(exc.exception), 'typing.Union[int, typing.Iterable[str]] not all arguments are classes.' - )) + ) def test_invalid_positional_argument(self): @functools.singledispatch diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py index 7a50a29bb0126c..bb78dd9af83b62 100644 --- a/Lib/test/test_generated_cases.py +++ b/Lib/test/test_generated_cases.py @@ -281,12 +281,12 @@ def run_cases_test(self, input: str, expected: str): ) with open(self.temp_output_filename) as temp_output: - lines = temp_output.readlines() - while lines and lines[0].startswith(("// ", "#", " #", "\n")): - lines.pop(0) - while lines and lines[-1].startswith(("#", "\n")): - lines.pop(-1) - actual = "".join(lines) + lines = temp_output.read() + _, rest = lines.split(tier1_generator.INSTRUCTION_START_MARKER) + instructions, labels_with_prelude_and_postlude = rest.split(tier1_generator.INSTRUCTION_END_MARKER) + _, labels_with_postlude = labels_with_prelude_and_postlude.split(tier1_generator.LABEL_START_MARKER) + labels, _ = labels_with_postlude.split(tier1_generator.LABEL_END_MARKER) + actual = instructions + labels # if actual.strip() != expected.strip(): # print("Actual:") # print(actual) @@ -445,7 +445,7 @@ def test_predictions(self): frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(OP1); - PREDICTED(OP1); + PREDICTED_OP1:; _PyStackRef res; res = Py_None; stack_pointer[-1] = res; @@ -679,7 +679,7 @@ def test_macro_instruction(self): frame->instr_ptr = next_instr; next_instr += 6; INSTRUCTION_STATS(OP); - PREDICTED(OP); + PREDICTED_OP:; _Py_CODEUNIT* const this_instr = next_instr - 6; (void)this_instr; _PyStackRef left; @@ -1756,6 +1756,61 @@ def test_kill_in_wrong_order(self): with self.assertRaises(SyntaxError): self.run_cases_test(input, "") + def test_complex_label(self): + input = """ + label(my_label) { + // Comment + do_thing() + if (complex) { + goto other_label; + } + goto other_label2; + } + """ + + output = """ + my_label: + { + // Comment + do_thing() + if (complex) { + goto other_label; + } + goto other_label2; + } + """ + self.run_cases_test(input, output) + + def test_multiple_labels(self): + input = """ + label(my_label_1) { + // Comment + do_thing1(); + goto my_label_2; + } + + label(my_label_2) { + // Comment + do_thing2(); + goto my_label_3; + } + """ + + output = """ + my_label_1: + { + // Comment + do_thing1(); + goto my_label_2; + } + + my_label_2: + { + // Comment + do_thing2(); + goto my_label_3; + } + """ class TestGeneratedAbstractCases(unittest.TestCase): def setUp(self) -> None: @@ -1842,8 +1897,8 @@ def test_overridden_abstract_args(self): """ output = """ case OP: { - _Py_UopsSymbol *arg1; - _Py_UopsSymbol *out; + JitOptSymbol *arg1; + JitOptSymbol *out; arg1 = stack_pointer[-1]; out = EGGS(arg1); stack_pointer[-1] = out; 
@@ -1851,7 +1906,7 @@ def test_overridden_abstract_args(self): } case OP2: { - _Py_UopsSymbol *out; + JitOptSymbol *out; out = sym_new_not_null(ctx); stack_pointer[-1] = out; break; @@ -1876,14 +1931,14 @@ def test_no_overridden_case(self): """ output = """ case OP: { - _Py_UopsSymbol *out; + JitOptSymbol *out; out = sym_new_not_null(ctx); stack_pointer[-1] = out; break; } case OP2: { - _Py_UopsSymbol *out; + JitOptSymbol *out; out = NULL; stack_pointer[-1] = out; break; diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py index 2ea6dba12effc1..b6985054c33d10 100644 --- a/Lib/test/test_generators.py +++ b/Lib/test/test_generators.py @@ -652,6 +652,89 @@ def genfn(): self.assertIsNone(f_wr()) +# See https://github.com/python/cpython/issues/125723 +class GeneratorDeallocTest(unittest.TestCase): + def test_frame_outlives_generator(self): + def g1(): + a = 42 + yield sys._getframe() + + def g2(): + a = 42 + yield + + def g3(obj): + a = 42 + obj.frame = sys._getframe() + yield + + class ObjectWithFrame(): + def __init__(self): + self.frame = None + + def get_frame(index): + if index == 1: + return next(g1()) + elif index == 2: + gen = g2() + next(gen) + return gen.gi_frame + elif index == 3: + obj = ObjectWithFrame() + next(g3(obj)) + return obj.frame + else: + return None + + for index in (1, 2, 3): + with self.subTest(index=index): + frame = get_frame(index) + frame_locals = frame.f_locals + self.assertIn('a', frame_locals) + self.assertEqual(frame_locals['a'], 42) + + def test_frame_locals_outlive_generator(self): + frame_locals1 = None + + def g1(): + nonlocal frame_locals1 + frame_locals1 = sys._getframe().f_locals + a = 42 + yield + + def g2(): + a = 42 + yield sys._getframe().f_locals + + def get_frame_locals(index): + if index == 1: + nonlocal frame_locals1 + next(g1()) + return frame_locals1 + if index == 2: + return next(g2()) + else: + return None + + for index in (1, 2): + with self.subTest(index=index): + frame_locals = get_frame_locals(index) + self.assertIn('a', frame_locals) + self.assertEqual(frame_locals['a'], 42) + + def test_frame_locals_outlive_generator_with_exec(self): + def g(): + a = 42 + yield locals(), sys._getframe().f_locals + + locals_ = {'g': g} + for i in range(10): + exec("snapshot, live_locals = next(g())", locals=locals_) + for l in (locals_['snapshot'], locals_['live_locals']): + self.assertIn('a', l) + self.assertEqual(l['a'], 42) + + class GeneratorThrowTest(unittest.TestCase): def test_exception_context_with_yield(self): diff --git a/Lib/test/test_http_cookies.py b/Lib/test/test_http_cookies.py index 7b3dc0fdaedc3b..d945de23493f20 100644 --- a/Lib/test/test_http_cookies.py +++ b/Lib/test/test_http_cookies.py @@ -205,6 +205,14 @@ def test_set_secure_httponly_attrs(self): self.assertEqual(C.output(), 'Set-Cookie: Customer="WILE_E_COYOTE"; HttpOnly; Secure') + def test_set_secure_httponly_partitioned_attrs(self): + C = cookies.SimpleCookie('Customer="WILE_E_COYOTE"') + C['Customer']['secure'] = True + C['Customer']['httponly'] = True + C['Customer']['partitioned'] = True + self.assertEqual(C.output(), + 'Set-Cookie: Customer="WILE_E_COYOTE"; HttpOnly; Partitioned; Secure') + def test_samesite_attrs(self): samesite_values = ['Strict', 'Lax', 'strict', 'lax'] for val in samesite_values: diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index c2cec6444cb43a..207b7ae7517450 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -29,9 +29,21 @@ from test.support import os_helper from 
test.support import ( - STDLIB_DIR, swap_attr, swap_item, cpython_only, is_apple_mobile, is_emscripten, - is_wasi, run_in_subinterp, run_in_subinterp_with_config, Py_TRACE_REFS, - requires_gil_enabled, Py_GIL_DISABLED, no_rerun) + STDLIB_DIR, + swap_attr, + swap_item, + cpython_only, + is_apple_mobile, + is_emscripten, + is_wasi, + run_in_subinterp, + run_in_subinterp_with_config, + Py_TRACE_REFS, + requires_gil_enabled, + Py_GIL_DISABLED, + no_rerun, + force_not_colorized_test_class, +) from test.support.import_helper import ( forget, make_legacy_pyc, unlink, unload, ready_to_import, DirsOnSysPath, CleanImport, import_module) @@ -333,6 +345,7 @@ def _from_subinterp(cls, name, interpid, pipe, script_kwargs): return cls.parse(text.decode()) +@force_not_colorized_test_class class ImportTests(unittest.TestCase): def setUp(self): @@ -539,7 +552,7 @@ def test_import_name_binding(self): import test as x import test.support self.assertIs(x, test, x.__name__) - self.assertTrue(hasattr(test.support, "__file__")) + self.assertHasAttr(test.support, "__file__") # import x.y.z as w binds z as w import test.support as y @@ -610,7 +623,7 @@ def test_file_to_source(self): sys.path.insert(0, os.curdir) try: mod = __import__(TESTFN) - self.assertTrue(mod.__file__.endswith('.py')) + self.assertEndsWith(mod.__file__, '.py') os.remove(source) del sys.modules[TESTFN] make_legacy_pyc(source) @@ -1443,7 +1456,7 @@ def test_UNC_path(self): self.fail("could not import 'test_unc_path' from %r: %r" % (unc, e)) self.assertEqual(mod.testdata, 'test_unc_path') - self.assertTrue(mod.__file__.startswith(unc), mod.__file__) + self.assertStartsWith(mod.__file__, unc) unload("test_unc_path") @@ -1456,7 +1469,7 @@ def tearDown(self): def test_relimport_star(self): # This will import * from .test_import. from .. 
import relimport - self.assertTrue(hasattr(relimport, "RelativeImportTests")) + self.assertHasAttr(relimport, "RelativeImportTests") def test_issue3221(self): # Note for mergers: the 'absolute' tests from the 2.x branch @@ -1786,7 +1799,7 @@ def test_frozen_importlib_is_bootstrap(self): self.assertIs(mod, _bootstrap) self.assertEqual(mod.__name__, 'importlib._bootstrap') self.assertEqual(mod.__package__, 'importlib') - self.assertTrue(mod.__file__.endswith('_bootstrap.py'), mod.__file__) + self.assertEndsWith(mod.__file__, '_bootstrap.py') def test_frozen_importlib_external_is_bootstrap_external(self): from importlib import _bootstrap_external @@ -1794,7 +1807,7 @@ def test_frozen_importlib_external_is_bootstrap_external(self): self.assertIs(mod, _bootstrap_external) self.assertEqual(mod.__name__, 'importlib._bootstrap_external') self.assertEqual(mod.__package__, 'importlib') - self.assertTrue(mod.__file__.endswith('_bootstrap_external.py'), mod.__file__) + self.assertEndsWith(mod.__file__, '_bootstrap_external.py') def test_there_can_be_only_one(self): # Issue #15386 revealed a tricky loophole in the bootstrapping @@ -2800,7 +2813,7 @@ def check_common(self, loaded): self.assertEqual(mod.__file__, self.FILE) self.assertEqual(mod.__spec__.origin, self.ORIGIN) if not isolated: - self.assertTrue(issubclass(mod.error, Exception)) + self.assertIsSubclass(mod.error, Exception) self.assertEqual(mod.int_const, 1969) self.assertEqual(mod.str_const, 'something different') self.assertIsInstance(mod._module_initialized, float) @@ -3311,30 +3324,6 @@ def test_basic_multiple_interpreters_reset_each(self): # * module's global state was initialized, not reset -@cpython_only -class CAPITests(unittest.TestCase): - def test_pyimport_addmodule(self): - # gh-105922: Test PyImport_AddModuleRef(), PyImport_AddModule() - # and PyImport_AddModuleObject() - _testcapi = import_module("_testcapi") - for name in ( - 'sys', # frozen module - 'test', # package - __name__, # package.module - ): - _testcapi.check_pyimport_addmodule(name) - - def test_pyimport_addmodule_create(self): - # gh-105922: Test PyImport_AddModuleRef(), create a new module - _testcapi = import_module("_testcapi") - name = 'dontexist' - self.assertNotIn(name, sys.modules) - self.addCleanup(unload, name) - - mod = _testcapi.check_pyimport_addmodule(name) - self.assertIs(mod, sys.modules[name]) - - @cpython_only class TestMagicNumber(unittest.TestCase): def test_magic_number_endianness(self): diff --git a/Lib/test/test_importlib/extension/test_path_hook.py b/Lib/test/test_importlib/extension/test_path_hook.py index 314a635c77e082..941dcd5432ce46 100644 --- a/Lib/test/test_importlib/extension/test_path_hook.py +++ b/Lib/test/test_importlib/extension/test_path_hook.py @@ -21,7 +21,7 @@ def hook(self, entry): def test_success(self): # Path hook should handle a directory where a known extension module # exists. 
- self.assertTrue(hasattr(self.hook(util.EXTENSIONS.path), 'find_spec')) + self.assertHasAttr(self.hook(util.EXTENSIONS.path), 'find_spec') (Frozen_PathHooksTests, diff --git a/Lib/test/test_importlib/frozen/test_loader.py b/Lib/test/test_importlib/frozen/test_loader.py index 1112c0664ad477..c808bb73291a7c 100644 --- a/Lib/test/test_importlib/frozen/test_loader.py +++ b/Lib/test/test_importlib/frozen/test_loader.py @@ -61,7 +61,7 @@ def exec_module(self, name, origname=None): module.main() self.assertTrue(module.initialized) - self.assertTrue(hasattr(module, '__spec__')) + self.assertHasAttr(module, '__spec__') self.assertEqual(module.__spec__.origin, 'frozen') return module, stdout.getvalue() @@ -72,7 +72,7 @@ def test_module(self): for attr, value in check.items(): self.assertEqual(getattr(module, attr), value) self.assertEqual(output, 'Hello world!\n') - self.assertTrue(hasattr(module, '__spec__')) + self.assertHasAttr(module, '__spec__') self.assertEqual(module.__spec__.loader_state.origname, name) def test_package(self): @@ -136,7 +136,7 @@ def test_get_code(self): exec(code, mod.__dict__) with captured_stdout() as stdout: mod.main() - self.assertTrue(hasattr(mod, 'initialized')) + self.assertHasAttr(mod, 'initialized') self.assertEqual(stdout.getvalue(), 'Hello world!\n') def test_get_source(self): diff --git a/Lib/test/test_importlib/import_/test_caching.py b/Lib/test/test_importlib/import_/test_caching.py index aedf0fd4f9db02..718e7d041b0860 100644 --- a/Lib/test/test_importlib/import_/test_caching.py +++ b/Lib/test/test_importlib/import_/test_caching.py @@ -78,7 +78,7 @@ def test_using_cache_for_assigning_to_attribute(self): with self.create_mock('pkg.__init__', 'pkg.module') as importer: with util.import_state(meta_path=[importer]): module = self.__import__('pkg.module') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(id(module.module), id(sys.modules['pkg.module'])) @@ -88,7 +88,7 @@ def test_using_cache_for_fromlist(self): with self.create_mock('pkg.__init__', 'pkg.module') as importer: with util.import_state(meta_path=[importer]): module = self.__import__('pkg', fromlist=['module']) - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(id(module.module), id(sys.modules['pkg.module'])) diff --git a/Lib/test/test_importlib/import_/test_fromlist.py b/Lib/test/test_importlib/import_/test_fromlist.py index 4b4b9bc3f5e04a..feccc7be09a98c 100644 --- a/Lib/test/test_importlib/import_/test_fromlist.py +++ b/Lib/test/test_importlib/import_/test_fromlist.py @@ -63,7 +63,7 @@ def test_nonexistent_object(self): with util.import_state(meta_path=[importer]): module = self.__import__('module', fromlist=['non_existent']) self.assertEqual(module.__name__, 'module') - self.assertFalse(hasattr(module, 'non_existent')) + self.assertNotHasAttr(module, 'non_existent') def test_module_from_package(self): # [module] @@ -71,7 +71,7 @@ def test_module_from_package(self): with util.import_state(meta_path=[importer]): module = self.__import__('pkg', fromlist=['module']) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(module.module.__name__, 'pkg.module') def test_nonexistent_from_package(self): @@ -79,7 +79,7 @@ def test_nonexistent_from_package(self): with util.import_state(meta_path=[importer]): module = self.__import__('pkg', fromlist=['non_existent']) self.assertEqual(module.__name__, 'pkg') - 
self.assertFalse(hasattr(module, 'non_existent')) + self.assertNotHasAttr(module, 'non_existent') def test_module_from_package_triggers_ModuleNotFoundError(self): # If a submodule causes an ModuleNotFoundError because it tries @@ -107,7 +107,7 @@ def basic_star_test(self, fromlist=['*']): mock['pkg'].__all__ = ['module'] module = self.__import__('pkg', fromlist=fromlist) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(module.module.__name__, 'pkg.module') def test_using_star(self): @@ -125,8 +125,8 @@ def test_star_with_others(self): mock['pkg'].__all__ = ['module1'] module = self.__import__('pkg', fromlist=['module2', '*']) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module1')) - self.assertTrue(hasattr(module, 'module2')) + self.assertHasAttr(module, 'module1') + self.assertHasAttr(module, 'module2') self.assertEqual(module.module1.__name__, 'pkg.module1') self.assertEqual(module.module2.__name__, 'pkg.module2') @@ -136,7 +136,7 @@ def test_nonexistent_in_all(self): importer['pkg'].__all__ = ['non_existent'] module = self.__import__('pkg', fromlist=['*']) self.assertEqual(module.__name__, 'pkg') - self.assertFalse(hasattr(module, 'non_existent')) + self.assertNotHasAttr(module, 'non_existent') def test_star_in_all(self): with util.mock_spec('pkg.__init__') as importer: @@ -144,7 +144,7 @@ def test_star_in_all(self): importer['pkg'].__all__ = ['*'] module = self.__import__('pkg', fromlist=['*']) self.assertEqual(module.__name__, 'pkg') - self.assertFalse(hasattr(module, '*')) + self.assertNotHasAttr(module, '*') def test_invalid_type(self): with util.mock_spec('pkg.__init__') as importer: diff --git a/Lib/test/test_importlib/import_/test_meta_path.py b/Lib/test/test_importlib/import_/test_meta_path.py index 8689017ba43112..4c00f60681acf1 100644 --- a/Lib/test/test_importlib/import_/test_meta_path.py +++ b/Lib/test/test_importlib/import_/test_meta_path.py @@ -43,7 +43,7 @@ def test_empty(self): self.assertIsNone(importlib._bootstrap._find_spec('nothing', None)) self.assertEqual(len(w), 1) - self.assertTrue(issubclass(w[-1].category, ImportWarning)) + self.assertIsSubclass(w[-1].category, ImportWarning) (Frozen_CallingOrder, diff --git a/Lib/test/test_importlib/import_/test_path.py b/Lib/test/test_importlib/import_/test_path.py index 89b52fbd1e1aff..51ff6115e1281e 100644 --- a/Lib/test/test_importlib/import_/test_path.py +++ b/Lib/test/test_importlib/import_/test_path.py @@ -1,3 +1,4 @@ +from test.support import os_helper from test.test_importlib import util importlib = util.import_importlib('importlib') @@ -80,7 +81,7 @@ def test_empty_path_hooks(self): self.assertIsNone(self.find('os')) self.assertIsNone(sys.path_importer_cache[path_entry]) self.assertEqual(len(w), 1) - self.assertTrue(issubclass(w[-1].category, ImportWarning)) + self.assertIsSubclass(w[-1].category, ImportWarning) def test_path_importer_cache_empty_string(self): # The empty string should create a finder using the cwd. @@ -153,6 +154,28 @@ def test_deleted_cwd(self): # Do not want FileNotFoundError raised. self.assertIsNone(self.machinery.PathFinder.find_spec('whatever')) + @os_helper.skip_unless_working_chmod + def test_permission_error_cwd(self): + # gh-115911: Test that an unreadable CWD does not break imports, in + # particular during early stages of interpreter startup. 
+ with ( + os_helper.temp_dir() as new_dir, + os_helper.save_mode(new_dir), + os_helper.change_cwd(new_dir), + util.import_state(path=['']), + ): + # chmod() is done here (inside the 'with' block) because the order + # of teardown operations cannot be the reverse of setup order. See + # https://github.com/python/cpython/pull/116131#discussion_r1739649390 + try: + os.chmod(new_dir, 0o000) + except OSError: + self.skipTest("platform does not allow " + "changing mode of the cwd") + + # Do not want PermissionError raised. + self.assertIsNone(self.machinery.PathFinder.find_spec('whatever')) + def test_invalidate_caches_finders(self): # Finders with an invalidate_caches() method have it called. class FakeFinder: diff --git a/Lib/test/test_importlib/import_/test_relative_imports.py b/Lib/test/test_importlib/import_/test_relative_imports.py index 99c24f1fd9487c..e535d119763148 100644 --- a/Lib/test/test_importlib/import_/test_relative_imports.py +++ b/Lib/test/test_importlib/import_/test_relative_imports.py @@ -81,7 +81,7 @@ def callback(global_): self.__import__('pkg') # For __import__(). module = self.__import__('', global_, fromlist=['mod2'], level=1) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'mod2')) + self.assertHasAttr(module, 'mod2') self.assertEqual(module.mod2.attr, 'pkg.mod2') self.relative_import_test(create, globals_, callback) @@ -107,7 +107,7 @@ def callback(global_): module = self.__import__('', global_, fromlist=['module'], level=1) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'module')) + self.assertHasAttr(module, 'module') self.assertEqual(module.module.attr, 'pkg.module') self.relative_import_test(create, globals_, callback) @@ -131,7 +131,7 @@ def callback(global_): module = self.__import__('', global_, fromlist=['subpkg2'], level=2) self.assertEqual(module.__name__, 'pkg') - self.assertTrue(hasattr(module, 'subpkg2')) + self.assertHasAttr(module, 'subpkg2') self.assertEqual(module.subpkg2.attr, 'pkg.subpkg2.__init__') self.relative_import_test(create, globals_, callback) diff --git a/Lib/test/test_importlib/resources/_path.py b/Lib/test/test_importlib/resources/_path.py index 1f97c96146960d..b144628cb73c77 100644 --- a/Lib/test/test_importlib/resources/_path.py +++ b/Lib/test/test_importlib/resources/_path.py @@ -2,15 +2,44 @@ import functools from typing import Dict, Union +from typing import runtime_checkable +from typing import Protocol #### -# from jaraco.path 3.4.1 +# from jaraco.path 3.7.1 -FilesSpec = Dict[str, Union[str, bytes, 'FilesSpec']] # type: ignore +class Symlink(str): + """ + A string indicating the target of a symlink. + """ + + +FilesSpec = Dict[str, Union[str, bytes, Symlink, 'FilesSpec']] + + +@runtime_checkable +class TreeMaker(Protocol): + def __truediv__(self, *args, **kwargs): ... # pragma: no cover + + def mkdir(self, **kwargs): ... # pragma: no cover + + def write_text(self, content, **kwargs): ... # pragma: no cover + + def write_bytes(self, content): ... # pragma: no cover -def build(spec: FilesSpec, prefix=pathlib.Path()): + def symlink_to(self, target): ... # pragma: no cover + + +def _ensure_tree_maker(obj: Union[str, TreeMaker]) -> TreeMaker: + return obj if isinstance(obj, TreeMaker) else pathlib.Path(obj) # type: ignore[return-value] + + +def build( + spec: FilesSpec, + prefix: Union[str, TreeMaker] = pathlib.Path(), # type: ignore[assignment] +): """ Build a set of files/directories, as described by the spec. 
@@ -25,21 +54,25 @@ def build(spec: FilesSpec, prefix=pathlib.Path()): ... "__init__.py": "", ... }, ... "baz.py": "# Some code", - ... } + ... "bar.py": Symlink("baz.py"), + ... }, + ... "bing": Symlink("foo"), ... } >>> target = getfixture('tmp_path') >>> build(spec, target) >>> target.joinpath('foo/baz.py').read_text(encoding='utf-8') '# Some code' + >>> target.joinpath('bing/bar.py').read_text(encoding='utf-8') + '# Some code' """ for name, contents in spec.items(): - create(contents, pathlib.Path(prefix) / name) + create(contents, _ensure_tree_maker(prefix) / name) @functools.singledispatch def create(content: Union[str, bytes, FilesSpec], path): path.mkdir(exist_ok=True) - build(content, prefix=path) # type: ignore + build(content, prefix=path) # type: ignore[arg-type] @create.register @@ -52,5 +85,10 @@ def _(content: str, path): path.write_text(content, encoding='utf-8') +@create.register +def _(content: Symlink, path): + path.symlink_to(content) + + # end from jaraco.path #### diff --git a/Lib/test/test_importlib/resources/test_files.py b/Lib/test/test_importlib/resources/test_files.py index 933894dce2c045..db8a4e62a32dc6 100644 --- a/Lib/test/test_importlib/resources/test_files.py +++ b/Lib/test/test_importlib/resources/test_files.py @@ -60,6 +60,26 @@ class OpenZipTests(FilesTests, util.ZipSetup, unittest.TestCase): class OpenNamespaceTests(FilesTests, util.DiskSetup, unittest.TestCase): MODULE = 'namespacedata01' + def test_non_paths_in_dunder_path(self): + """ + Non-path items in a namespace package's ``__path__`` are ignored. + + As reported in python/importlib_resources#311, some tools + like Setuptools, when creating editable packages, will inject + non-paths into a namespace package's ``__path__``, a + sentinel like + ``__editable__.sample_namespace-1.0.finder.__path_hook__`` + to cause the ``PathEntryFinder`` to be called when searching + for packages. In that case, resources should still be loadable. + """ + import namespacedata01 + + namespacedata01.__path__.append( + '__editable__.sample_namespace-1.0.finder.__path_hook__' + ) + + resources.files(namespacedata01) + class OpenNamespaceZipTests(FilesTests, util.ZipSetup, unittest.TestCase): ZIP_MODULE = 'namespacedata01' @@ -86,7 +106,7 @@ def test_module_resources(self): """ A module can have resources found adjacent to the module. 
""" - import mod + import mod # type: ignore[import-not-found] actual = resources.files(mod).joinpath('res.txt').read_text(encoding='utf-8') assert actual == self.spec['res.txt'] diff --git a/Lib/test/test_importlib/resources/test_path.py b/Lib/test/test_importlib/resources/test_path.py index 378dc7a2baeb23..903911f57b3306 100644 --- a/Lib/test/test_importlib/resources/test_path.py +++ b/Lib/test/test_importlib/resources/test_path.py @@ -20,7 +20,7 @@ def test_reading(self): target = resources.files(self.data) / 'utf-8.file' with resources.as_file(target) as path: self.assertIsInstance(path, pathlib.Path) - self.assertTrue(path.name.endswith("utf-8.file"), repr(path)) + self.assertEndsWith(path.name, "utf-8.file") self.assertEqual('Hello, UTF-8 world!\n', path.read_text(encoding='utf-8')) diff --git a/Lib/test/test_importlib/source/test_finder.py b/Lib/test/test_importlib/source/test_finder.py index 8c06c4da1f5cba..4de736a6bf3b2d 100644 --- a/Lib/test/test_importlib/source/test_finder.py +++ b/Lib/test/test_importlib/source/test_finder.py @@ -73,7 +73,7 @@ def run_test(self, test, create=None, *, compile_=None, unlink=None): if error.errno != errno.ENOENT: raise loader = self.import_(mapping['.root'], test) - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') return loader def test_module(self): @@ -100,7 +100,7 @@ def test_module_in_package(self): with util.create_modules('pkg.__init__', 'pkg.sub') as mapping: pkg_dir = os.path.dirname(mapping['pkg.__init__']) loader = self.import_(pkg_dir, 'pkg.sub') - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') # [sub package] def test_package_in_package(self): @@ -108,7 +108,7 @@ def test_package_in_package(self): with context as mapping: pkg_dir = os.path.dirname(mapping['pkg.__init__']) loader = self.import_(pkg_dir, 'pkg.sub') - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') # [package over modules] def test_package_over_module(self): @@ -129,7 +129,7 @@ def test_empty_string_for_dir(self): file.write("# test file for importlib") try: loader = self._find(finder, 'mod', loader_only=True) - self.assertTrue(hasattr(loader, 'load_module')) + self.assertHasAttr(loader, 'load_module') finally: os.unlink('mod.py') diff --git a/Lib/test/test_importlib/source/test_path_hook.py b/Lib/test/test_importlib/source/test_path_hook.py index f274330e0b333b..6e1c23e6a9842b 100644 --- a/Lib/test/test_importlib/source/test_path_hook.py +++ b/Lib/test/test_importlib/source/test_path_hook.py @@ -15,12 +15,12 @@ def path_hook(self): def test_success(self): with util.create_modules('dummy') as mapping: - self.assertTrue(hasattr(self.path_hook()(mapping['.root']), - 'find_spec')) + self.assertHasAttr(self.path_hook()(mapping['.root']), + 'find_spec') def test_empty_string(self): # The empty string represents the cwd. - self.assertTrue(hasattr(self.path_hook()(''), 'find_spec')) + self.assertHasAttr(self.path_hook()(''), 'find_spec') (Frozen_PathHookTest, diff --git a/Lib/test/test_importlib/test_abc.py b/Lib/test/test_importlib/test_abc.py index 00af2dd712425a..b1ab52f966ffdb 100644 --- a/Lib/test/test_importlib/test_abc.py +++ b/Lib/test/test_importlib/test_abc.py @@ -43,14 +43,12 @@ def setUp(self): def test_subclasses(self): # Test that the expected subclasses inherit. 
for subclass in self.subclasses: - self.assertTrue(issubclass(subclass, self.__test), - "{0} is not a subclass of {1}".format(subclass, self.__test)) + self.assertIsSubclass(subclass, self.__test) def test_superclasses(self): # Test that the class inherits from the expected superclasses. for superclass in self.superclasses: - self.assertTrue(issubclass(self.__test, superclass), - "{0} is not a superclass of {1}".format(superclass, self.__test)) + self.assertIsSubclass(self.__test, superclass) class MetaPathFinder(InheritanceTests): @@ -226,7 +224,15 @@ class ResourceLoaderDefaultsTests(ABCTestHarness): SPLIT = make_abc_subclasses(ResourceLoader) def test_get_data(self): - with self.assertRaises(IOError): + with ( + self.assertRaises(IOError), + self.assertWarnsRegex( + DeprecationWarning, + r"importlib\.abc\.ResourceLoader is deprecated in favour of " + r"supporting resource loading through importlib\.resources" + r"\.abc\.TraversableResources.", + ), + ): self.ins.get_data('/some/path') @@ -416,14 +422,14 @@ def test_source_to_code_source(self): # Since compile() can handle strings, so should source_to_code(). source = 'attr = 42' module = self.source_to_module(source) - self.assertTrue(hasattr(module, 'attr')) + self.assertHasAttr(module, 'attr') self.assertEqual(module.attr, 42) def test_source_to_code_bytes(self): # Since compile() can handle bytes, so should source_to_code(). source = b'attr = 42' module = self.source_to_module(source) - self.assertTrue(hasattr(module, 'attr')) + self.assertHasAttr(module, 'attr') self.assertEqual(module.attr, 42) def test_source_to_code_path(self): @@ -757,7 +763,7 @@ def test_package_settings(self): warnings.simplefilter('ignore', DeprecationWarning) module = self.loader.load_module(self.name) self.verify_module(module) - self.assertFalse(hasattr(module, '__path__')) + self.assertNotHasAttr(module, '__path__') def test_get_source_encoding(self): # Source is considered encoded in UTF-8 by default unless otherwise @@ -927,9 +933,19 @@ def get_filename(self, fullname): def path_stats(self, path): return {'mtime': 1} - - loader = DummySourceLoader() - with self.assertWarns(DeprecationWarning): + with self.assertWarnsRegex( + DeprecationWarning, + r"importlib\.abc\.ResourceLoader is deprecated in favour of " + r"supporting resource loading through importlib\.resources" + r"\.abc\.TraversableResources.", + ): + loader = DummySourceLoader() + + with self.assertWarnsRegex( + DeprecationWarning, + r"SourceLoader\.path_mtime is deprecated in favour of " + r"SourceLoader\.path_stats\(\)\." 
+ ): loader.path_mtime('foo.py') diff --git a/Lib/test/test_importlib/test_api.py b/Lib/test/test_importlib/test_api.py index 6035b2ca72efb9..1bc531a2fe34e7 100644 --- a/Lib/test/test_importlib/test_api.py +++ b/Lib/test/test_importlib/test_api.py @@ -430,8 +430,7 @@ def test_everyone_has___loader__(self): for name, module in sys.modules.items(): if isinstance(module, types.ModuleType): with self.subTest(name=name): - self.assertTrue(hasattr(module, '__loader__'), - '{!r} lacks a __loader__ attribute'.format(name)) + self.assertHasAttr(module, '__loader__') if self.machinery.BuiltinImporter.find_spec(name): self.assertIsNot(module.__loader__, None) elif self.machinery.FrozenImporter.find_spec(name): @@ -441,7 +440,7 @@ def test_everyone_has___spec__(self): for name, module in sys.modules.items(): if isinstance(module, types.ModuleType): with self.subTest(name=name): - self.assertTrue(hasattr(module, '__spec__')) + self.assertHasAttr(module, '__spec__') if self.machinery.BuiltinImporter.find_spec(name): self.assertIsNot(module.__spec__, None) elif self.machinery.FrozenImporter.find_spec(name): diff --git a/Lib/test/test_importlib/test_lazy.py b/Lib/test/test_importlib/test_lazy.py index 5c6e0303528906..e48fad8898f0ef 100644 --- a/Lib/test/test_importlib/test_lazy.py +++ b/Lib/test/test_importlib/test_lazy.py @@ -125,12 +125,12 @@ def test_delete_eventual_attr(self): # Deleting an attribute should stay deleted. module = self.new_module() del module.attr - self.assertFalse(hasattr(module, 'attr')) + self.assertNotHasAttr(module, 'attr') def test_delete_preexisting_attr(self): module = self.new_module() del module.__name__ - self.assertFalse(hasattr(module, '__name__')) + self.assertNotHasAttr(module, '__name__') def test_module_substitution_error(self): with test_util.uncache(TestingImporter.module_name): diff --git a/Lib/test/test_importlib/test_namespace_pkgs.py b/Lib/test/test_importlib/test_namespace_pkgs.py index cbbdada3b010a7..6ca0978f9bca69 100644 --- a/Lib/test/test_importlib/test_namespace_pkgs.py +++ b/Lib/test/test_importlib/test_namespace_pkgs.py @@ -80,7 +80,7 @@ def test_cant_import_other(self): def test_simple_repr(self): import foo.one - self.assertTrue(repr(foo).startswith(">> def frame_leaker(container): + ... import sys + ... container.append(sys._getframe()) + ... import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + ... pass + + >>> def test_function(): + ... import gc + ... container = [] + ... frame_leaker(container) # c + ... print(len(gc.get_referrers(container[0]))) + ... container = [] + ... frame_leaker(container) # n c + ... print(len(gc.get_referrers(container[0]))) + ... container = [] + ... frame_leaker(container) # r c + ... print(len(gc.get_referrers(container[0]))) + + >>> with PdbTestInput([ # doctest: +NORMALIZE_WHITESPACE + ... 'continue', + ... 'next', + ... 'continue', + ... 'return', + ... 'continue', + ... ]): + ... 
test_function() + > (4)frame_leaker() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) continue + 1 + > (4)frame_leaker() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) next + > (5)frame_leaker() + -> pass + (Pdb) continue + 1 + > (4)frame_leaker() + -> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace() + (Pdb) return + --Return-- + > (5)frame_leaker()->None + -> pass + (Pdb) continue + 1 + """ + def test_pdb_function_break(): """Testing the line number of break on function @@ -4186,6 +4237,62 @@ def test_checkline_is_not_executable(self): self.assertFalse(db.checkline(os_helper.TESTFN, lineno)) +@support.requires_subprocess() +class PdbTestInline(unittest.TestCase): + @unittest.skipIf(sys.flags.safe_path, + 'PYTHONSAFEPATH changes default sys.path') + def _run_script(self, script, commands, + expected_returncode=0, + extra_env=None): + self.addCleanup(os_helper.rmtree, '__pycache__') + filename = 'main.py' + with open(filename, 'w') as f: + f.write(textwrap.dedent(script)) + self.addCleanup(os_helper.unlink, filename) + + commands = textwrap.dedent(commands) + + cmd = [sys.executable, 'main.py'] + if extra_env is not None: + env = os.environ | extra_env + else: + env = os.environ + with subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stdin=subprocess.PIPE, + stderr=subprocess.PIPE, + env = {**env, 'PYTHONIOENCODING': 'utf-8'} + ) as proc: + stdout, stderr = proc.communicate(str.encode(commands)) + stdout = bytes.decode(stdout) if isinstance(stdout, bytes) else stdout + stderr = bytes.decode(stderr) if isinstance(stderr, bytes) else stderr + self.assertEqual( + proc.returncode, + expected_returncode, + f"Unexpected return code\nstdout: {stdout}\nstderr: {stderr}" + ) + return stdout, stderr + + def test_quit(self): + script = """ + x = 1 + breakpoint() + """ + + commands = """ + quit + n + p x + 1 + quit + y + """ + + stdout, stderr = self._run_script(script, commands) + self.assertIn("2", stdout) + self.assertIn("Quit anyway", stdout) + + @support.requires_subprocess() class PdbTestReadline(unittest.TestCase): def setUpClass(): diff --git a/Lib/test/test_perf_profiler.py b/Lib/test/test_perf_profiler.py index 1e74990878007a..6f1fd8d38e4ea0 100644 --- a/Lib/test/test_perf_profiler.py +++ b/Lib/test/test_perf_profiler.py @@ -47,6 +47,7 @@ def tearDown(self) -> None: for file in files_to_delete: file.unlink() + @unittest.skipIf(support.check_bolt_optimized, "fails on BOLT instrumented binaries") def test_trampoline_works(self): code = """if 1: def foo(): @@ -100,6 +101,7 @@ def baz(): "Address should contain only hex characters", ) + @unittest.skipIf(support.check_bolt_optimized, "fails on BOLT instrumented binaries") def test_trampoline_works_with_forks(self): code = """if 1: import os, sys @@ -160,6 +162,7 @@ def baz(): self.assertIn(f"py::bar_fork:{script}", child_perf_file_contents) self.assertIn(f"py::baz_fork:{script}", child_perf_file_contents) + @unittest.skipIf(support.check_bolt_optimized, "fails on BOLT instrumented binaries") def test_sys_api(self): code = """if 1: import sys diff --git a/Lib/test/test_pydoc/test_pydoc.py b/Lib/test/test_pydoc/test_pydoc.py index cec18aa9440c9e..b02ba3aafd4d20 100644 --- a/Lib/test/test_pydoc/test_pydoc.py +++ b/Lib/test/test_pydoc/test_pydoc.py @@ -556,6 +556,14 @@ class object | ... and 82 other subclasses """ doc = pydoc.TextDoc() + try: + # Make sure HeapType, which has no __module__ attribute, is one + # of the known subclasses of object. 
(doc.docclass() used to + # fail if HeapType was imported before running this test, like + # when running tests sequentially.) + from _testcapi import HeapType + except ImportError: + pass text = doc.docclass(object) snip = (" | Built-in subclasses:\n" " | async_generator\n" diff --git a/Lib/test/test_pyrepl/support.py b/Lib/test/test_pyrepl/support.py index 672d4896c92283..45e3bf758f17de 100644 --- a/Lib/test/test_pyrepl/support.py +++ b/Lib/test/test_pyrepl/support.py @@ -101,16 +101,6 @@ def handle_all_events( ) -def make_clean_env() -> dict[str, str]: - clean_env = os.environ.copy() - for k in clean_env.copy(): - if k.startswith("PYTHON"): - clean_env.pop(k) - clean_env.pop("FORCE_COLOR", None) - clean_env.pop("NO_COLOR", None) - return clean_env - - class FakeConsole(Console): def __init__(self, events, encoding="utf-8") -> None: self.events = iter(events) diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py index f29a7ffbd7cafd..3540d2a5a41662 100644 --- a/Lib/test/test_pyrepl/test_pyrepl.py +++ b/Lib/test/test_pyrepl/test_pyrepl.py @@ -10,7 +10,7 @@ import tempfile from unittest import TestCase, skipUnless, skipIf from unittest.mock import patch -from test.support import force_not_colorized +from test.support import force_not_colorized, make_clean_env from test.support import SHORT_TIMEOUT from test.support.import_helper import import_module from test.support.os_helper import unlink @@ -23,7 +23,6 @@ multiline_input, code_to_events, clean_screen, - make_clean_env, ) from _pyrepl.console import Event from _pyrepl.readline import (ReadlineAlikeReader, ReadlineConfig, @@ -851,7 +850,7 @@ def test_global_namespace_completion(self): output = multiline_input(reader, namespace) self.assertEqual(output, "python") - def test_updown_arrow_with_completion_menu(self): + def test_up_down_arrow_with_completion_menu(self): """Up arrow in the middle of unfinished tab completion when the menu is displayed should work and trigger going back in history. 
Down arrow should subsequently get us back to the incomplete command.""" @@ -861,6 +860,7 @@ def test_updown_arrow_with_completion_menu(self): events = itertools.chain( code_to_events(code), [ + Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), Event(evt="key", data="up", raw=bytearray(b"\x1bOA")), Event(evt="key", data="down", raw=bytearray(b"\x1bOB")), ], @@ -1324,23 +1324,35 @@ def test_readline_history_file(self): if readline.backend != "editline": self.skipTest("GNU readline is not affected by this issue") - hfile = tempfile.NamedTemporaryFile() - self.addCleanup(unlink, hfile.name) - env = os.environ.copy() - env["PYTHON_HISTORY"] = hfile.name + with tempfile.NamedTemporaryFile() as hfile: + env = os.environ.copy() + env["PYTHON_HISTORY"] = hfile.name - env["PYTHON_BASIC_REPL"] = "1" - output, exit_code = self.run_repl("spam \nexit()\n", env=env) - self.assertEqual(exit_code, 0) - self.assertIn("spam ", output) - self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) - self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text()) + env["PYTHON_BASIC_REPL"] = "1" + output, exit_code = self.run_repl("spam \nexit()\n", env=env) + self.assertEqual(exit_code, 0) + self.assertIn("spam ", output) + self.assertNotEqual(pathlib.Path(hfile.name).stat().st_size, 0) + self.assertIn("spam\\040", pathlib.Path(hfile.name).read_text()) - env.pop("PYTHON_BASIC_REPL", None) - output, exit_code = self.run_repl("exit\n", env=env) - self.assertEqual(exit_code, 0) - self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text()) + env.pop("PYTHON_BASIC_REPL", None) + output, exit_code = self.run_repl("exit\n", env=env) + self.assertEqual(exit_code, 0) + self.assertNotIn("\\040", pathlib.Path(hfile.name).read_text()) def test_keyboard_interrupt_after_isearch(self): output, exit_code = self.run_repl(["\x12", "\x03", "exit"]) self.assertEqual(exit_code, 0) + + def test_prompt_after_help(self): + output, exit_code = self.run_repl(["help", "q", "exit"]) + + # Regex pattern to remove ANSI escape sequences + ansi_escape = re.compile(r"(\x1B(=|>|(\[)[0-?]*[ -\/]*[@-~]))") + cleaned_output = ansi_escape.sub("", output) + self.assertEqual(exit_code, 0) + + # Ensure that we don't see multiple prompts after exiting `help` + # Extra stuff (newline and `exit` rewrites) are necessary + # because of how run_repl works. 
+ self.assertNotIn(">>> \n>>> >>>", cleaned_output) diff --git a/Lib/test/test_pyrepl/test_reader.py b/Lib/test/test_pyrepl/test_reader.py index 6c72a1d39c55df..863ecc61ddd432 100644 --- a/Lib/test/test_pyrepl/test_reader.py +++ b/Lib/test/test_pyrepl/test_reader.py @@ -295,8 +295,8 @@ def test_completions_updated_on_key_press(self): actual = reader.screen self.assertEqual(len(actual), 2) - self.assertEqual(actual[0].rstrip(), "itertools.accumulate(") - self.assertEqual(actual[1], f"{code}a") + self.assertEqual(actual[0], f"{code}a") + self.assertEqual(actual[1].rstrip(), "itertools.accumulate(") def test_key_press_on_tab_press_once(self): namespace = {"itertools": itertools} diff --git a/Lib/test/test_pyrepl/test_unix_console.py b/Lib/test/test_pyrepl/test_unix_console.py index e3bbabcb0089fb..15dbf48bcf0f1c 100644 --- a/Lib/test/test_pyrepl/test_unix_console.py +++ b/Lib/test/test_pyrepl/test_unix_console.py @@ -1,7 +1,9 @@ import itertools +import os import sys import unittest from functools import partial +from test.support import os_helper from unittest import TestCase from unittest.mock import MagicMock, call, patch, ANY @@ -312,3 +314,14 @@ def same_console(events): ) console.restore() con.restore() + + def test_getheightwidth_with_invalid_environ(self, _os_write): + # gh-128636 + console = UnixConsole() + with os_helper.EnvironmentVarGuard() as env: + env["LINES"] = "" + self.assertIsInstance(console.getheightwidth(), tuple) + env["COLUMNS"] = "" + self.assertIsInstance(console.getheightwidth(), tuple) + os.environ = [] + self.assertIsInstance(console.getheightwidth(), tuple) diff --git a/Lib/test/test_pyrepl/test_windows_console.py b/Lib/test/test_pyrepl/test_windows_console.py index 4a3b2baf64a944..07eaccd1124cd6 100644 --- a/Lib/test/test_pyrepl/test_windows_console.py +++ b/Lib/test/test_pyrepl/test_windows_console.py @@ -329,6 +329,20 @@ def move_right(self, cols=1): def erase_in_line(self): return ERASE_IN_LINE.encode("utf8") + def test_multiline_ctrl_z(self): + # see gh-126332 + code = "abcdefghi" + + events = itertools.chain( + code_to_events(code), + [ + Event(evt="key", data='\x1a', raw=bytearray(b'\x1a')), + Event(evt="key", data='\x1a', raw=bytearray(b'\x1a')), + ], + ) + reader, _ = self.handle_events_narrow(events) + self.assertEqual(reader.cxy, (2, 3)) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index ab46ccbf004a3a..e9ef830c848aad 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -792,6 +792,7 @@ def test_finds_expected_number_of_tests(self): f'{", ".join(output.splitlines())}') +@support.force_not_colorized_test_class class ProgramsTestCase(BaseTestCase): """ Test various ways to run the Python test suite. Use options close @@ -905,6 +906,7 @@ def test_pcbuild_rt(self): self.run_batch(script, *rt_args, *self.regrtest_args, *self.tests) +@support.force_not_colorized_test_class class ArgsTestCase(BaseTestCase): """ Test arguments of the Python test suite. 
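Aside (illustration, not part of the patch): the pyrepl prompt test above strips terminal escape codes from the captured REPL output before asserting on it. A minimal, self-contained sketch of that technique — the helper name strip_ansi is illustrative and does not appear in the test suite:

    import re

    # CSI sequences (ESC [ ... final byte) plus the bare "ESC =" / "ESC >"
    # keypad-mode toggles that interactive consoles commonly emit.
    ANSI_ESCAPE = re.compile(r"\x1B(?:=|>|\[[0-?]*[ -/]*[@-~])")

    def strip_ansi(text: str) -> str:
        """Return *text* with terminal escape sequences removed."""
        return ANSI_ESCAPE.sub("", text)

    # Example: colored prompt output reduces to plain text for assertions.
    assert strip_ansi("\x1b[1;31m>>> \x1b[0mexit") == ">>> exit"
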
@@ -2145,25 +2147,25 @@ def check_add_python_opts(self, option): import unittest from test import support try: - from _testinternalcapi import get_config + from _testcapi import config_get except ImportError: - get_config = None + config_get = None # WASI/WASM buildbots don't use -E option use_environment = (support.is_emscripten or support.is_wasi) class WorkerTests(unittest.TestCase): - @unittest.skipUnless(get_config is None, 'need get_config()') + @unittest.skipUnless(config_get is None, 'need config_get()') def test_config(self): - config = get_config()['config'] + config = config_get() # -u option - self.assertEqual(config['buffered_stdio'], 0) + self.assertEqual(config_get('buffered_stdio'), 0) # -W default option - self.assertTrue(config['warnoptions'], ['default']) + self.assertTrue(config_get('warnoptions'), ['default']) # -bb option - self.assertTrue(config['bytes_warning'], 2) + self.assertTrue(config_get('bytes_warning'), 2) # -E option - self.assertTrue(config['use_environment'], use_environment) + self.assertTrue(config_get('use_environment'), use_environment) def test_python_opts(self): # -u option diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index e764e60560db23..356ff5b198d637 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -70,6 +70,7 @@ def run_on_interactive_mode(source): return output +@support.force_not_colorized_test_class class TestInteractiveInterpreter(unittest.TestCase): @cpython_only @@ -273,6 +274,8 @@ def test_asyncio_repl_is_ok(self): self.assertEqual(exit_code, 0, "".join(output)) + +@support.force_not_colorized_test_class class TestInteractiveModeSyntaxErrors(unittest.TestCase): def test_interactive_syntax_error_correct_line(self): diff --git a/Lib/test/test_runpy.py b/Lib/test/test_runpy.py index b64383f6546f31..ada78ec8e6b0c7 100644 --- a/Lib/test/test_runpy.py +++ b/Lib/test/test_runpy.py @@ -12,8 +12,14 @@ import textwrap import unittest import warnings -from test.support import (infinite_recursion, no_tracing, verbose, - requires_subprocess, requires_resource) +from test.support import ( + force_not_colorized_test_class, + infinite_recursion, + no_tracing, + requires_resource, + requires_subprocess, + verbose, +) from test.support.import_helper import forget, make_legacy_pyc, unload from test.support.os_helper import create_empty_file, temp_dir, FakePath from test.support.script_helper import make_script, make_zip_script @@ -758,6 +764,7 @@ def test_encoding(self): self.assertEqual(result['s'], "non-ASCII: h\xe9") +@force_not_colorized_test_class class TestExit(unittest.TestCase): STATUS_CONTROL_C_EXIT = 0xC000013A EXPECTED_CODE = ( diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index c7c6f684cd33f0..f56195ca27672c 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -116,7 +116,9 @@ def test_eval_str_invalid_escape(self): warnings.simplefilter('always', category=SyntaxWarning) eval("'''\n\\z'''") self.assertEqual(len(w), 1) - self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") + self.assertEqual(str(w[0].message), r'"\z" is an invalid escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\z"? 
A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -126,7 +128,8 @@ def test_eval_str_invalid_escape(self): eval("'''\n\\z'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid escape sequence '\z'") + self.assertEqual(exc.msg, r'"\z" is an invalid escape sequence. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) self.assertEqual(exc.offset, 1) @@ -153,7 +156,9 @@ def test_eval_str_invalid_octal_escape(self): eval("'''\n\\407'''") self.assertEqual(len(w), 1) self.assertEqual(str(w[0].message), - r"invalid octal escape sequence '\407'") + r'"\407" is an invalid octal escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\407"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -163,7 +168,8 @@ def test_eval_str_invalid_octal_escape(self): eval("'''\n\\407'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid octal escape sequence '\407'") + self.assertEqual(exc.msg, r'"\407" is an invalid octal escape sequence. ' + r'Did you mean "\\407"? A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) self.assertEqual(exc.offset, 1) @@ -205,7 +211,9 @@ def test_eval_bytes_invalid_escape(self): warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\z'''") self.assertEqual(len(w), 1) - self.assertEqual(str(w[0].message), r"invalid escape sequence '\z'") + self.assertEqual(str(w[0].message), r'"\z" is an invalid escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -215,7 +223,8 @@ def test_eval_bytes_invalid_escape(self): eval("b'''\n\\z'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid escape sequence '\z'") + self.assertEqual(exc.msg, r'"\z" is an invalid escape sequence. ' + r'Did you mean "\\z"? A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) @@ -228,8 +237,9 @@ def test_eval_bytes_invalid_octal_escape(self): warnings.simplefilter('always', category=SyntaxWarning) eval("b'''\n\\407'''") self.assertEqual(len(w), 1) - self.assertEqual(str(w[0].message), - r"invalid octal escape sequence '\407'") + self.assertEqual(str(w[0].message), r'"\407" is an invalid octal escape sequence. ' + r'Such sequences will not work in the future. ' + r'Did you mean "\\407"? A raw string is also an option.') self.assertEqual(w[0].filename, '') self.assertEqual(w[0].lineno, 1) @@ -239,7 +249,8 @@ def test_eval_bytes_invalid_octal_escape(self): eval("b'''\n\\407'''") exc = cm.exception self.assertEqual(w, []) - self.assertEqual(exc.msg, r"invalid octal escape sequence '\407'") + self.assertEqual(exc.msg, r'"\407" is an invalid octal escape sequence. ' + r'Did you mean "\\407"? 
A raw string is also an option.') self.assertEqual(exc.filename, '') self.assertEqual(exc.lineno, 1) diff --git a/Lib/test/test_sysconfig.py b/Lib/test/test_sysconfig.py index ce504dc21af85f..1002d90074599a 100644 --- a/Lib/test/test_sysconfig.py +++ b/Lib/test/test_sysconfig.py @@ -711,5 +711,38 @@ def test_parse_makefile(self): }) +class DeprecationTests(unittest.TestCase): + def deprecated(self, removal_version, deprecation_msg=None, error=Exception, error_msg=None): + if sys.version_info >= removal_version: + return self.assertRaises(error, msg=error_msg) + else: + return self.assertWarns(DeprecationWarning, msg=deprecation_msg) + + def test_expand_makefile_vars(self): + with self.deprecated( + removal_version=(3, 16), + deprecation_msg=( + 'sysconfig.expand_makefile_vars is deprecated and will be removed in ' + 'Python 3.16. Use sysconfig.get_paths(vars=...) instead.', + ), + error=AttributeError, + error_msg="module 'sysconfig' has no attribute 'expand_makefile_vars'", + ): + sysconfig.expand_makefile_vars('', {}) + + def test_is_python_build_check_home(self): + with self.deprecated( + removal_version=(3, 15), + deprecation_msg=( + 'The check_home argument of sysconfig.is_python_build is ' + 'deprecated and its value is ignored. ' + 'It will be removed in Python 3.15.' + ), + error=TypeError, + error_msg="is_python_build() takes 0 positional arguments but 1 were given", + ): + sysconfig.is_python_build('foo') + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py index 3e164a12581dd1..214e1ba0b53dd2 100644 --- a/Lib/test/test_threading.py +++ b/Lib/test/test_threading.py @@ -2130,6 +2130,15 @@ def test_set_name(self): # Test long non-ASCII name (truncated) "x" * (limit - 1) + "é€", + + # Test long non-BMP names (truncated) creating surrogate pairs + # on Windows + "x" * (limit - 1) + "\U0010FFFF", + "x" * (limit - 2) + "\U0010FFFF" * 2, + "x" + "\U0001f40d" * limit, + "xx" + "\U0001f40d" * limit, + "xxx" + "\U0001f40d" * limit, + "xxxx" + "\U0001f40d" * limit, ] if os_helper.FS_NONASCII: tests.append(f"nonascii:{os_helper.FS_NONASCII}") @@ -2146,15 +2155,31 @@ def work(): work_name = _thread._get_name() for name in tests: - encoded = name.encode(encoding, "replace") - if b'\0' in encoded: - encoded = encoded.split(b'\0', 1)[0] - if truncate is not None: - encoded = encoded[:truncate] - if sys.platform.startswith("solaris"): - expected = encoded.decode("utf-8", "surrogateescape") + if not support.MS_WINDOWS: + encoded = name.encode(encoding, "replace") + if b'\0' in encoded: + encoded = encoded.split(b'\0', 1)[0] + if truncate is not None: + encoded = encoded[:truncate] + if sys.platform.startswith("solaris"): + expected = encoded.decode("utf-8", "surrogateescape") + else: + expected = os.fsdecode(encoded) else: - expected = os.fsdecode(encoded) + size = 0 + chars = [] + for ch in name: + if ord(ch) > 0xFFFF: + size += 2 + else: + size += 1 + if size > truncate: + break + chars.append(ch) + expected = ''.join(chars) + + if '\0' in expected: + expected = expected.split('\0', 1)[0] with self.subTest(name=name, expected=expected): work_name = None diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 75710db7d05375..480bff743a9f8a 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -1,4 +1,5 @@ import os +import re import token import tokenize import unittest @@ -1819,6 +1820,22 @@ def test_iter_compat(self): self.assertEqual(tokenize.untokenize(iter(tokens)), b'Hello ') +def 
contains_ambiguous_backslash(source): + """Return `True` if the source contains a backslash on a + line by itself. For example: + + a = (1 + \\ + ) + + Code like this cannot be untokenized exactly. This is because + the tokenizer does not produce any tokens for the line containing + the backslash and so there is no way to know its indent. + """ + pattern = re.compile(br'\n\s*\\\r?\n') + return pattern.search(source) is not None + + class TestRoundtrip(TestCase): def check_roundtrip(self, f): @@ -1829,6 +1846,9 @@ def check_roundtrip(self, f): tokenize.untokenize(), and the latter tokenized again to 2-tuples. The test fails if the 3 pair tokenizations do not match. + If the source code can be untokenized unambiguously, the + untokenized code must match the original code exactly. + When untokenize bugs are fixed, untokenize with 5-tuples should reproduce code that does not contain a backslash continuation following spaces. A proper test should test this. @@ -1852,6 +1872,13 @@ def check_roundtrip(self, f): tokens2_from5 = [tok[:2] for tok in tokenize.tokenize(readline5)] self.assertEqual(tokens2_from5, tokens2) + if not contains_ambiguous_backslash(code): + # The BOM does not produce a token so there is no way to preserve it. + code_without_bom = code.removeprefix(b'\xef\xbb\xbf') + readline = iter(code_without_bom.splitlines(keepends=True)).__next__ + untokenized_code = tokenize.untokenize(tokenize.tokenize(readline)) + self.assertEqual(code_without_bom, untokenized_code) + def check_line_extraction(self, f): if isinstance(f, str): code = f.encode('utf-8') diff --git a/Lib/test/test_tomllib/test_misc.py b/Lib/test/test_tomllib/test_misc.py index 9e677a337a2835..59116afa1f36ad 100644 --- a/Lib/test/test_tomllib/test_misc.py +++ b/Lib/test/test_tomllib/test_misc.py @@ -5,6 +5,7 @@ import copy import datetime from decimal import Decimal as D +import importlib from pathlib import Path import sys import tempfile @@ -113,3 +114,11 @@ def test_inline_table_recursion_limit(self): nest_count=nest_count): recursive_table_toml = nest_count * "key = {" + nest_count * "}" tomllib.loads(recursive_table_toml) + + def test_types_import(self): + """Test that `_types` module runs. + + The module is for type annotations only, so it is otherwise + never imported by tests. 
+ """ + importlib.import_module(f"{tomllib.__name__}._types") diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index abdfc4638f2e9c..89980ae6f8573a 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -86,7 +86,7 @@ def test_caret(self): err = self.get_exception_format(self.syntax_error_with_caret, SyntaxError) self.assertEqual(len(err), 4) - self.assertTrue(err[1].strip() == "return x!") + self.assertEqual(err[1].strip(), "return x!") self.assertIn("^", err[2]) # third line has caret self.assertEqual(err[1].find("!"), err[2].find("^")) # in the right place self.assertEqual(err[2].count("^"), 1) @@ -376,6 +376,30 @@ def f(): ' ValueError: 0\n', ]) + def test_format_exception_group_syntax_error_with_custom_values(self): + # See https://github.com/python/cpython/issues/128894 + for exc in [ + SyntaxError('error', 'abcd'), + SyntaxError('error', [None] * 4), + SyntaxError('error', (1, 2, 3, 4)), + SyntaxError('error', (1, 2, 3, 4)), + SyntaxError('error', (1, 'a', 'b', 2)), + # with end_lineno and end_offset: + SyntaxError('error', 'abcdef'), + SyntaxError('error', [None] * 6), + SyntaxError('error', (1, 2, 3, 4, 5, 6)), + SyntaxError('error', (1, 'a', 'b', 2, 'c', 'd')), + ]: + with self.subTest(exc=exc): + err = traceback.format_exception_only(exc, show_group=True) + # Should not raise an exception: + if exc.lineno is not None: + self.assertEqual(len(err), 2) + self.assertTrue(err[0].startswith(' File')) + else: + self.assertEqual(len(err), 1) + self.assertEqual(err[-1], 'SyntaxError: error\n') + @requires_subprocess() @force_not_colorized def test_encoded_file(self): @@ -419,16 +443,10 @@ def do_test(firstlines, message, charset, lineno): err_line = "raise RuntimeError('{0}')".format(message_ascii) err_msg = "RuntimeError: {0}".format(message_ascii) - self.assertIn(("line %s" % lineno), stdout[1], - "Invalid line number: {0!r} instead of {1}".format( - stdout[1], lineno)) - self.assertTrue(stdout[2].endswith(err_line), - "Invalid traceback line: {0!r} instead of {1!r}".format( - stdout[2], err_line)) + self.assertIn("line %s" % lineno, stdout[1]) + self.assertEndsWith(stdout[2], err_line) actual_err_msg = stdout[3] - self.assertTrue(actual_err_msg == err_msg, - "Invalid error message: {0!r} instead of {1!r}".format( - actual_err_msg, err_msg)) + self.assertEqual(actual_err_msg, err_msg) do_test("", "foo", "ascii", 3) for charset in ("ascii", "iso-8859-1", "utf-8", "GBK"): @@ -1809,9 +1827,9 @@ def check_traceback_format(self, cleanup_func=None): banner = tb_lines[0] self.assertEqual(len(tb_lines), 5) location, source_line = tb_lines[-2], tb_lines[-1] - self.assertTrue(banner.startswith('Traceback')) - self.assertTrue(location.startswith(' File')) - self.assertTrue(source_line.startswith(' raise')) + self.assertStartsWith(banner, 'Traceback') + self.assertStartsWith(location, ' File') + self.assertStartsWith(source_line, ' raise') def test_traceback_format(self): self.check_traceback_format() @@ -2190,12 +2208,12 @@ def zero_div(self): def check_zero_div(self, msg): lines = msg.splitlines() if has_no_debug_ranges(): - self.assertTrue(lines[-3].startswith(' File')) + self.assertStartsWith(lines[-3], ' File') self.assertIn('1/0 # In zero_div', lines[-2]) else: - self.assertTrue(lines[-4].startswith(' File')) + self.assertStartsWith(lines[-4], ' File') self.assertIn('1/0 # In zero_div', lines[-3]) - self.assertTrue(lines[-1].startswith('ZeroDivisionError'), lines[-1]) + self.assertStartsWith(lines[-1], 'ZeroDivisionError') def test_simple(self): try: 
@@ -2205,12 +2223,12 @@ def test_simple(self): lines = self.get_report(e).splitlines() if has_no_debug_ranges(): self.assertEqual(len(lines), 4) - self.assertTrue(lines[3].startswith('ZeroDivisionError')) + self.assertStartsWith(lines[3], 'ZeroDivisionError') else: self.assertEqual(len(lines), 5) - self.assertTrue(lines[4].startswith('ZeroDivisionError')) - self.assertTrue(lines[0].startswith('Traceback')) - self.assertTrue(lines[1].startswith(' File')) + self.assertStartsWith(lines[4], 'ZeroDivisionError') + self.assertStartsWith(lines[0], 'Traceback') + self.assertStartsWith(lines[1], ' File') self.assertIn('1/0 # Marker', lines[2]) def test_cause(self): @@ -2251,9 +2269,9 @@ def test_context_suppression(self): e = _ lines = self.get_report(e).splitlines() self.assertEqual(len(lines), 4) - self.assertTrue(lines[3].startswith('ZeroDivisionError')) - self.assertTrue(lines[0].startswith('Traceback')) - self.assertTrue(lines[1].startswith(' File')) + self.assertStartsWith(lines[3], 'ZeroDivisionError') + self.assertStartsWith(lines[0], 'Traceback') + self.assertStartsWith(lines[1], ' File') self.assertIn('ZeroDivisionError from None', lines[2]) def test_cause_and_context(self): @@ -2919,6 +2937,33 @@ def exc(): report = self.get_report(exc) self.assertEqual(report, expected) + def test_exception_group_wrapped_naked(self): + # See gh-128799 + + def exc(): + try: + raise Exception(42) + except* Exception as e: + raise + + expected = (f' + Exception Group Traceback (most recent call last):\n' + f' | File "{__file__}", line {self.callable_line}, in get_exception\n' + f' | exception_or_callable()\n' + f' | ~~~~~~~~~~~~~~~~~~~~~^^\n' + f' | File "{__file__}", line {exc.__code__.co_firstlineno + 3}, in exc\n' + f' | except* Exception as e:\n' + f' | raise\n' + f' | ExceptionGroup: (1 sub-exception)\n' + f' +-+---------------- 1 ----------------\n' + f' | Traceback (most recent call last):\n' + f' | File "{__file__}", line {exc.__code__.co_firstlineno + 2}, in exc\n' + f' | raise Exception(42)\n' + f' | Exception: 42\n' + f' +------------------------------------\n') + + report = self.get_report(exc) + self.assertEqual(report, expected) + def test_KeyboardInterrupt_at_first_line_of_frame(self): # see GH-93249 def f(): diff --git a/Lib/test/test_tracemalloc.py b/Lib/test/test_tracemalloc.py index 5755f7697de91a..0220a83d24b428 100644 --- a/Lib/test/test_tracemalloc.py +++ b/Lib/test/test_tracemalloc.py @@ -1,14 +1,16 @@ import contextlib import os import sys +import textwrap import tracemalloc import unittest from unittest.mock import patch from test.support.script_helper import (assert_python_ok, assert_python_failure, interpreter_requires_environment) from test import support -from test.support import os_helper from test.support import force_not_colorized +from test.support import os_helper +from test.support import threading_helper try: import _testcapi @@ -18,6 +20,7 @@ _testinternalcapi = None +DEFAULT_DOMAIN = 0 EMPTY_STRING_SIZE = sys.getsizeof(b'') INVALID_NFRAME = (-1, 2**30) @@ -952,7 +955,6 @@ def check_env_var_invalid(self, nframe): return self.fail(f"unexpected output: {stderr!a}") - def test_env_var_invalid(self): for nframe in INVALID_NFRAME: with self.subTest(nframe=nframe): @@ -981,6 +983,7 @@ def check_sys_xoptions_invalid(self, nframe): return self.fail(f"unexpected output: {stderr!a}") + @force_not_colorized def test_sys_xoptions_invalid(self): for nframe in INVALID_NFRAME: with self.subTest(nframe=nframe): @@ -1026,8 +1029,8 @@ def track(self, release_gil=False, nframe=1): 
release_gil) return frames - def untrack(self): - _testcapi.tracemalloc_untrack(self.domain, self.ptr) + def untrack(self, release_gil=False): + _testcapi.tracemalloc_untrack(self.domain, self.ptr, release_gil) def get_traced_memory(self): # Get the traced size in the domain @@ -1069,7 +1072,7 @@ def test_track_already_tracked(self): self.assertEqual(self.get_traceback(), tracemalloc.Traceback(frames)) - def test_untrack(self): + def check_untrack(self, release_gil): tracemalloc.start() self.track() @@ -1077,13 +1080,19 @@ def test_untrack(self): self.assertEqual(self.get_traced_memory(), self.size) # untrack must remove the trace - self.untrack() + self.untrack(release_gil) self.assertIsNone(self.get_traceback()) self.assertEqual(self.get_traced_memory(), 0) # calling _PyTraceMalloc_Untrack() multiple times must not crash - self.untrack() - self.untrack() + self.untrack(release_gil) + self.untrack(release_gil) + + def test_untrack(self): + self.check_untrack(False) + + def test_untrack_without_gil(self): + self.check_untrack(True) def test_stop_track(self): tracemalloc.start() @@ -1101,6 +1110,37 @@ def test_stop_untrack(self): with self.assertRaises(RuntimeError): self.untrack() + @unittest.skipIf(_testcapi is None, 'need _testcapi') + @threading_helper.requires_working_threading() + # gh-128679: Test crash on a debug build (especially on FreeBSD). + @unittest.skipIf(support.Py_DEBUG, 'need release build') + def test_tracemalloc_track_race(self): + # gh-128679: Test fix for tracemalloc.stop() race condition + _testcapi.tracemalloc_track_race() + + def test_late_untrack(self): + code = textwrap.dedent(f""" + from test import support + import tracemalloc + import _testcapi + + class Tracked: + def __init__(self, domain, size): + self.domain = domain + self.ptr = id(self) + self.size = size + _testcapi.tracemalloc_track(self.domain, self.ptr, self.size) + + def __del__(self, untrack=_testcapi.tracemalloc_untrack): + untrack(self.domain, self.ptr, 1) + + domain = {DEFAULT_DOMAIN} + tracemalloc.start() + obj = Tracked(domain, 1024 * 1024) + support.late_deletion(obj) + """) + assert_python_ok("-c", code) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_turtle.py b/Lib/test/test_turtle.py index c75a002a89b4c4..d02cac284a909a 100644 --- a/Lib/test/test_turtle.py +++ b/Lib/test/test_turtle.py @@ -1,9 +1,9 @@ import os import pickle import re +import tempfile import unittest import unittest.mock -import tempfile from test import support from test.support import import_helper from test.support import os_helper @@ -54,6 +54,21 @@ """ +def patch_screen(): + """Patch turtle._Screen for testing without a display. + + We must patch the _Screen class itself instead of the _Screen + instance because instantiating it requires a display. 
+ """ + return unittest.mock.patch( + "turtle._Screen.__new__", + **{ + "return_value.__class__": turtle._Screen, + "return_value.mode.return_value": "standard", + }, + ) + + class TurtleConfigTest(unittest.TestCase): def get_cfg_file(self, cfg_str): @@ -513,7 +528,7 @@ def test_save_overwrites_if_specified(self) -> None: turtle.TurtleScreen.save(screen, file_path, overwrite=True) with open(file_path) as f: - assert f.read() == "postscript" + self.assertEqual(f.read(), "postscript") def test_save(self) -> None: screen = unittest.mock.Mock() @@ -524,7 +539,101 @@ def test_save(self) -> None: turtle.TurtleScreen.save(screen, file_path) with open(file_path) as f: - assert f.read() == "postscript" + self.assertEqual(f.read(), "postscript") + + def test_no_animation_sets_tracer_0(self): + s = turtle.TurtleScreen(cv=unittest.mock.MagicMock()) + + with s.no_animation(): + self.assertEqual(s.tracer(), 0) + + def test_no_animation_resets_tracer_to_old_value(self): + s = turtle.TurtleScreen(cv=unittest.mock.MagicMock()) + + for tracer in [0, 1, 5]: + s.tracer(tracer) + with s.no_animation(): + pass + self.assertEqual(s.tracer(), tracer) + + def test_no_animation_calls_update_at_exit(self): + s = turtle.TurtleScreen(cv=unittest.mock.MagicMock()) + s.update = unittest.mock.MagicMock() + + with s.no_animation(): + s.update.assert_not_called() + s.update.assert_called_once() + + +class TestTurtle(unittest.TestCase): + def setUp(self): + with patch_screen(): + self.turtle = turtle.Turtle() + + # Reset the Screen singleton to avoid reference leaks + self.addCleanup(setattr, turtle.Turtle, '_screen', None) + + def test_begin_end_fill(self): + self.assertFalse(self.turtle.filling()) + self.turtle.begin_fill() + self.assertTrue(self.turtle.filling()) + self.turtle.end_fill() + self.assertFalse(self.turtle.filling()) + + def test_fill(self): + # The context manager behaves like begin_fill and end_fill. + self.assertFalse(self.turtle.filling()) + with self.turtle.fill(): + self.assertTrue(self.turtle.filling()) + self.assertFalse(self.turtle.filling()) + + def test_fill_resets_after_exception(self): + # The context manager cleans up correctly after exceptions. + try: + with self.turtle.fill(): + self.assertTrue(self.turtle.filling()) + raise ValueError + except ValueError: + self.assertFalse(self.turtle.filling()) + + def test_fill_context_when_filling(self): + # The context manager works even when the turtle is already filling. + self.turtle.begin_fill() + self.assertTrue(self.turtle.filling()) + with self.turtle.fill(): + self.assertTrue(self.turtle.filling()) + self.assertFalse(self.turtle.filling()) + + def test_begin_end_poly(self): + self.assertFalse(self.turtle._creatingPoly) + self.turtle.begin_poly() + self.assertTrue(self.turtle._creatingPoly) + self.turtle.end_poly() + self.assertFalse(self.turtle._creatingPoly) + + def test_poly(self): + # The context manager behaves like begin_poly and end_poly. + self.assertFalse(self.turtle._creatingPoly) + with self.turtle.poly(): + self.assertTrue(self.turtle._creatingPoly) + self.assertFalse(self.turtle._creatingPoly) + + def test_poly_resets_after_exception(self): + # The context manager cleans up correctly after exceptions. + try: + with self.turtle.poly(): + self.assertTrue(self.turtle._creatingPoly) + raise ValueError + except ValueError: + self.assertFalse(self.turtle._creatingPoly) + + def test_poly_context_when_creating_poly(self): + # The context manager works when the turtle is already creating poly. 
+ self.turtle.begin_poly() + self.assertTrue(self.turtle._creatingPoly) + with self.turtle.poly(): + self.assertTrue(self.turtle._creatingPoly) + self.assertFalse(self.turtle._creatingPoly) class TestModuleLevel(unittest.TestCase): diff --git a/Lib/test/test_unicodedata.py b/Lib/test/test_unicodedata.py index c7d09a6b460c19..0285f0d51f2365 100644 --- a/Lib/test/test_unicodedata.py +++ b/Lib/test/test_unicodedata.py @@ -11,8 +11,14 @@ import sys import unicodedata import unittest -from test.support import (open_urlresource, requires_resource, script_helper, - cpython_only, check_disallow_instantiation) +from test.support import ( + open_urlresource, + requires_resource, + script_helper, + cpython_only, + check_disallow_instantiation, + force_not_colorized, +) class UnicodeMethodsTest(unittest.TestCase): @@ -277,6 +283,7 @@ def test_disallow_instantiation(self): # Ensure that the type disallows instantiation (bpo-43916) check_disallow_instantiation(self, unicodedata.UCD) + @force_not_colorized def test_failed_import_during_compiling(self): # Issue 4367 # Decoding \N escapes requires the unicodedata module. If it can't be diff --git a/Lib/test/test_unittest/test_case.py b/Lib/test/test_unittest/test_case.py index cd366496eedca3..a04af55f3fc0ae 100644 --- a/Lib/test/test_unittest/test_case.py +++ b/Lib/test/test_unittest/test_case.py @@ -795,7 +795,15 @@ def testAssertHasAttr(self): with self.assertRaises(self.failureException) as cm: self.assertHasAttr(a, 'y') self.assertEqual(str(cm.exception), - "List instance has no attribute 'y'") + "'List' object has no attribute 'y'") + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(List, 'spam') + self.assertEqual(str(cm.exception), + "type object 'List' has no attribute 'spam'") + with self.assertRaises(self.failureException) as cm: + self.assertHasAttr(sys, 'nonexistent') + self.assertEqual(str(cm.exception), + "module 'sys' has no attribute 'nonexistent'") with self.assertRaises(self.failureException) as cm: self.assertHasAttr(a, 'y', 'ababahalamaha') @@ -811,7 +819,15 @@ def testAssertNotHasAttr(self): with self.assertRaises(self.failureException) as cm: self.assertNotHasAttr(a, 'x') self.assertEqual(str(cm.exception), - "List instance has unexpected attribute 'x'") + "'List' object has unexpected attribute 'x'") + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(List, 'append') + self.assertEqual(str(cm.exception), + "type object 'List' has unexpected attribute 'append'") + with self.assertRaises(self.failureException) as cm: + self.assertNotHasAttr(sys, 'modules') + self.assertEqual(str(cm.exception), + "module 'sys' has unexpected attribute 'modules'") with self.assertRaises(self.failureException) as cm: self.assertNotHasAttr(a, 'x', 'ababahalamaha') diff --git a/Lib/test/test_unittest/test_program.py b/Lib/test/test_unittest/test_program.py index 58d0cef9708c95..6092ed292d8f60 100644 --- a/Lib/test/test_unittest/test_program.py +++ b/Lib/test/test_unittest/test_program.py @@ -4,10 +4,10 @@ from test import support import unittest import test.test_unittest -from test.support import force_not_colorized from test.test_unittest.test_result import BufferedWriter +@support.force_not_colorized_test_class class Test_TestProgram(unittest.TestCase): def test_discovery_from_dotted_path(self): @@ -121,7 +121,6 @@ def run(self, test): self.assertEqual(['test.test_unittest', 'test.test_unittest2'], program.testNames) - @force_not_colorized def test_NonExit(self): stream = BufferedWriter() program = 
unittest.main(exit=False, @@ -137,7 +136,6 @@ def test_NonExit(self): 'expected failures=1, unexpected successes=1)\n') self.assertEndsWith(out, expected) - @force_not_colorized def test_Exit(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: @@ -155,7 +153,6 @@ def test_Exit(self): 'expected failures=1, unexpected successes=1)\n') self.assertEndsWith(out, expected) - @force_not_colorized def test_ExitAsDefault(self): stream = BufferedWriter() with self.assertRaises(SystemExit): @@ -171,7 +168,6 @@ def test_ExitAsDefault(self): 'expected failures=1, unexpected successes=1)\n') self.assertEndsWith(out, expected) - @force_not_colorized def test_ExitSkippedSuite(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: @@ -184,7 +180,6 @@ def test_ExitSkippedSuite(self): expected = '\n\nOK (skipped=1)\n' self.assertEndsWith(out, expected) - @force_not_colorized def test_ExitEmptySuite(self): stream = BufferedWriter() with self.assertRaises(SystemExit) as cm: diff --git a/Lib/test/test_unittest/test_result.py b/Lib/test/test_unittest/test_result.py index 327b246452bedf..9ac4c52449c2ff 100644 --- a/Lib/test/test_unittest/test_result.py +++ b/Lib/test/test_unittest/test_result.py @@ -5,11 +5,7 @@ import unittest from unittest.util import strclass from test.support import warnings_helper -from test.support import ( - captured_stdout, - force_not_colorized, - force_not_colorized_test_class, -) +from test.support import captured_stdout, force_not_colorized_test_class from test.test_unittest.support import BufferedWriter @@ -37,6 +33,7 @@ def bad_cleanup2(): raise ValueError('bad cleanup2') +@force_not_colorized_test_class class Test_TestResult(unittest.TestCase): # Note: there are not separate tests for TestResult.wasSuccessful(), # TestResult.errors, TestResult.failures, TestResult.testsRun or @@ -208,7 +205,6 @@ def test_1(self): self.assertIs(test_case, test) self.assertIsInstance(formatted_exc, str) - @force_not_colorized def test_addFailure_filter_traceback_frames(self): class Foo(unittest.TestCase): def test_1(self): @@ -235,7 +231,6 @@ def get_exc_info(): self.assertEqual(len(dropped), 1) self.assertIn("raise self.failureException(msg)", dropped[0]) - @force_not_colorized def test_addFailure_filter_traceback_frames_context(self): class Foo(unittest.TestCase): def test_1(self): @@ -265,7 +260,6 @@ def get_exc_info(): self.assertEqual(len(dropped), 1) self.assertIn("raise self.failureException(msg)", dropped[0]) - @force_not_colorized def test_addFailure_filter_traceback_frames_chained_exception_self_loop(self): class Foo(unittest.TestCase): def test_1(self): @@ -291,7 +285,6 @@ def get_exc_info(): formatted_exc = result.failures[0][1] self.assertEqual(formatted_exc.count("Exception: Loop\n"), 1) - @force_not_colorized def test_addFailure_filter_traceback_frames_chained_exception_cycle(self): class Foo(unittest.TestCase): def test_1(self): @@ -453,7 +446,6 @@ def testFailFast(self): result.addUnexpectedSuccess(None) self.assertTrue(result.shouldStop) - @force_not_colorized def testFailFastSetByRunner(self): stream = BufferedWriter() runner = unittest.TextTestRunner(stream=stream, failfast=True) @@ -465,6 +457,7 @@ def test(result): self.assertEndsWith(stream.getvalue(), '\n\nOK\n') +@force_not_colorized_test_class class Test_TextTestResult(unittest.TestCase): maxDiff = None @@ -627,7 +620,6 @@ def _run_test(self, test_name, verbosity, tearDownError=None): test.run(result) return stream.getvalue() - @force_not_colorized def testDotsOutput(self): 
self.assertEqual(self._run_test('testSuccess', 1), '.') self.assertEqual(self._run_test('testSkip', 1), 's') @@ -636,7 +628,6 @@ def testDotsOutput(self): self.assertEqual(self._run_test('testExpectedFailure', 1), 'x') self.assertEqual(self._run_test('testUnexpectedSuccess', 1), 'u') - @force_not_colorized def testLongOutput(self): classname = f'{__name__}.{self.Test.__qualname__}' self.assertEqual(self._run_test('testSuccess', 2), @@ -652,21 +643,17 @@ def testLongOutput(self): self.assertEqual(self._run_test('testUnexpectedSuccess', 2), f'testUnexpectedSuccess ({classname}.testUnexpectedSuccess) ... unexpected success\n') - @force_not_colorized def testDotsOutputSubTestSuccess(self): self.assertEqual(self._run_test('testSubTestSuccess', 1), '.') - @force_not_colorized def testLongOutputSubTestSuccess(self): classname = f'{__name__}.{self.Test.__qualname__}' self.assertEqual(self._run_test('testSubTestSuccess', 2), f'testSubTestSuccess ({classname}.testSubTestSuccess) ... ok\n') - @force_not_colorized def testDotsOutputSubTestMixed(self): self.assertEqual(self._run_test('testSubTestMixed', 1), 'sFE') - @force_not_colorized def testLongOutputSubTestMixed(self): classname = f'{__name__}.{self.Test.__qualname__}' self.assertEqual(self._run_test('testSubTestMixed', 2), @@ -675,7 +662,6 @@ def testLongOutputSubTestMixed(self): f' testSubTestMixed ({classname}.testSubTestMixed) [fail] (c=3) ... FAIL\n' f' testSubTestMixed ({classname}.testSubTestMixed) [error] (d=4) ... ERROR\n') - @force_not_colorized def testDotsOutputTearDownFail(self): out = self._run_test('testSuccess', 1, AssertionError('fail')) self.assertEqual(out, 'F') @@ -686,7 +672,6 @@ def testDotsOutputTearDownFail(self): out = self._run_test('testSkip', 1, AssertionError('fail')) self.assertEqual(out, 'sF') - @force_not_colorized def testLongOutputTearDownFail(self): classname = f'{__name__}.{self.Test.__qualname__}' out = self._run_test('testSuccess', 2, AssertionError('fail')) diff --git a/Lib/test/test_unittest/test_runner.py b/Lib/test/test_unittest/test_runner.py index 1131cd73128866..4d3cfd60b8d9c3 100644 --- a/Lib/test/test_unittest/test_runner.py +++ b/Lib/test/test_unittest/test_runner.py @@ -4,7 +4,6 @@ import pickle import subprocess from test import support -from test.support import force_not_colorized import unittest from unittest.case import _Outcome @@ -107,7 +106,7 @@ def cleanup2(*args, **kwargs): self.assertTrue(test.doCleanups()) self.assertEqual(cleanups, [(2, (), {}), (1, (1, 2, 3), dict(four='hello', five='goodbye'))]) - @force_not_colorized + @support.force_not_colorized def testCleanUpWithErrors(self): class TestableTest(unittest.TestCase): def testNothing(self): @@ -251,6 +250,7 @@ def testNothing(self): self.assertEqual(test._cleanups, []) +@support.force_not_colorized_test_class class TestClassCleanup(unittest.TestCase): def test_addClassCleanUp(self): class TestableTest(unittest.TestCase): @@ -418,7 +418,6 @@ def cleanup2(): self.assertIsInstance(e2[1], CustomError) self.assertEqual(str(e2[1]), 'cleanup1') - @force_not_colorized def test_with_errors_addCleanUp(self): ordering = [] class TestableTest(unittest.TestCase): @@ -442,7 +441,6 @@ def tearDownClass(cls): ['setUpClass', 'setUp', 'cleanup_exc', 'tearDownClass', 'cleanup_good']) - @force_not_colorized def test_run_with_errors_addClassCleanUp(self): ordering = [] class TestableTest(unittest.TestCase): @@ -466,7 +464,6 @@ def tearDownClass(cls): ['setUpClass', 'setUp', 'test', 'cleanup_good', 'tearDownClass', 'cleanup_exc']) - @force_not_colorized 
def test_with_errors_in_addClassCleanup_and_setUps(self): ordering = [] class_blow_up = False @@ -519,7 +516,6 @@ def tearDownClass(cls): ['setUpClass', 'setUp', 'tearDownClass', 'cleanup_exc']) - @force_not_colorized def test_with_errors_in_tearDownClass(self): ordering = [] class TestableTest(unittest.TestCase): @@ -596,7 +592,6 @@ def test(self): 'inner setup', 'inner test', 'inner cleanup', 'end outer test', 'outer cleanup']) - @force_not_colorized def test_run_empty_suite_error_message(self): class EmptyTest(unittest.TestCase): pass @@ -608,6 +603,7 @@ class EmptyTest(unittest.TestCase): self.assertIn("\nNO TESTS RAN\n", runner.stream.getvalue()) +@support.force_not_colorized_test_class class TestModuleCleanUp(unittest.TestCase): def test_add_and_do_ModuleCleanup(self): module_cleanups = [] @@ -670,7 +666,6 @@ class Module(object): self.assertEqual(cleanups, [((1, 2), {'function': 'hello'})]) - @force_not_colorized def test_run_module_cleanUp(self): blowUp = True ordering = [] @@ -810,7 +805,6 @@ def tearDownClass(cls): 'tearDownClass', 'cleanup_good']) self.assertEqual(unittest.case._module_cleanups, []) - @force_not_colorized def test_run_module_cleanUp_when_teardown_exception(self): ordering = [] class Module(object): @@ -972,7 +966,6 @@ def testNothing(self): self.assertEqual(cleanups, [((1, 2), {'function': 3, 'self': 4})]) - @force_not_colorized def test_with_errors_in_addClassCleanup(self): ordering = [] @@ -1006,7 +999,6 @@ def tearDownClass(cls): ['setUpModule', 'setUpClass', 'test', 'tearDownClass', 'cleanup_exc', 'tearDownModule', 'cleanup_good']) - @force_not_colorized def test_with_errors_in_addCleanup(self): ordering = [] class Module(object): @@ -1037,7 +1029,6 @@ def tearDown(self): ['setUpModule', 'setUp', 'test', 'tearDown', 'cleanup_exc', 'tearDownModule', 'cleanup_good']) - @force_not_colorized def test_with_errors_in_addModuleCleanup_and_setUps(self): ordering = [] module_blow_up = False @@ -1330,7 +1321,7 @@ def MockResultClass(*args): expectedresult = (runner.stream, DESCRIPTIONS, VERBOSITY) self.assertEqual(runner._makeResult(), expectedresult) - @force_not_colorized + @support.force_not_colorized @support.requires_subprocess() def test_warnings(self): """ diff --git a/Lib/test/test_unparse.py b/Lib/test/test_unparse.py index 35394f29fbe49d..332919540da4d6 100644 --- a/Lib/test/test_unparse.py +++ b/Lib/test/test_unparse.py @@ -651,7 +651,9 @@ def test_multiquote_joined_string(self): def test_backslash_in_format_spec(self): import re - msg = re.escape("invalid escape sequence '\\ '") + msg = re.escape('"\\ " is an invalid escape sequence. ' + 'Such sequences will not work in the future. ' + 'Did you mean "\\\\ "? 
A raw string is also an option.') with self.assertWarnsRegex(SyntaxWarning, msg): self.check_ast_roundtrip("""f"{x:\\ }" """) self.check_ast_roundtrip("""f"{x:\\n}" """) diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py index 79e7337606b4bc..76b1de0e3519f9 100644 --- a/Lib/test/test_zipfile/test_core.py +++ b/Lib/test/test_zipfile/test_core.py @@ -1,3 +1,4 @@ +import _pyio import array import contextlib import importlib.util @@ -19,7 +20,7 @@ from random import randint, random, randbytes from test import archiver_tests -from test.support import script_helper +from test.support import script_helper, os_helper from test.support import ( findfile, requires_zlib, requires_bz2, requires_lzma, captured_stdout, captured_stderr, requires_subprocess, @@ -1783,6 +1784,35 @@ def test_writestr_extended_local_header_issue1202(self): zinfo.flag_bits |= zipfile._MASK_USE_DATA_DESCRIPTOR # Include an extended local header. orig_zip.writestr(zinfo, data) + def test_write_with_source_date_epoch(self): + with os_helper.EnvironmentVarGuard() as env: + # Set the SOURCE_DATE_EPOCH environment variable to a specific timestamp + env['SOURCE_DATE_EPOCH'] = "1735715999" + + with zipfile.ZipFile(TESTFN, "w") as zf: + zf.writestr("test_source_date_epoch.txt", "Testing SOURCE_DATE_EPOCH") + + with zipfile.ZipFile(TESTFN, "r") as zf: + zip_info = zf.getinfo("test_source_date_epoch.txt") + get_time = time.localtime(int(os.environ['SOURCE_DATE_EPOCH']))[:6] + # Compare each element of the date_time tuple + # Allow for a 1-second difference + for z_time, g_time in zip(zip_info.date_time, get_time): + self.assertAlmostEqual(z_time, g_time, delta=1) + + def test_write_without_source_date_epoch(self): + with os_helper.EnvironmentVarGuard() as env: + del env['SOURCE_DATE_EPOCH'] + + with zipfile.ZipFile(TESTFN, "w") as zf: + zf.writestr("test_no_source_date_epoch.txt", "Testing without SOURCE_DATE_EPOCH") + + with zipfile.ZipFile(TESTFN, "r") as zf: + zip_info = zf.getinfo("test_no_source_date_epoch.txt") + current_time = time.localtime()[:6] + for z_time, c_time in zip(zip_info.date_time, current_time): + self.assertAlmostEqual(z_time, c_time, delta=1) + def test_close(self): """Check that the zipfile is closed after the 'with' block.""" with zipfile.ZipFile(TESTFN2, "w") as zipfp: @@ -3491,5 +3521,87 @@ def test_too_short(self): b"zzz", zipfile._Extra.strip(b"zzz", (self.ZIP64_EXTRA,))) +class StatIO(_pyio.BytesIO): + """Buffer which remembers the number of bytes that were read.""" + + def __init__(self): + super().__init__() + self.bytes_read = 0 + + def read(self, size=-1): + bs = super().read(size) + self.bytes_read += len(bs) + return bs + + +class StoredZipExtFileRandomReadTest(unittest.TestCase): + """Tests whether an uncompressed, unencrypted zip entry can be randomly + seek and read without reading redundant bytes.""" + def test_stored_seek_and_read(self): + + sio = StatIO() + # 20000 bytes + txt = b'0123456789' * 2000 + + # The seek length must be greater than ZipExtFile.MIN_READ_SIZE + # as `ZipExtFile._read2()` reads in blocks of this size and we + # need to seek out of the buffered data + read_buffer_size = zipfile.ZipExtFile.MIN_READ_SIZE + self.assertGreaterEqual(10002, read_buffer_size) # for forward seek test + self.assertGreaterEqual(5003, read_buffer_size) # for backward seek test + # The read length must be less than MIN_READ_SIZE, since we assume that + # only 1 block is read in the test. 
+ read_length = 100 + self.assertGreaterEqual(read_buffer_size, read_length) # for read() calls + + with zipfile.ZipFile(sio, "w", compression=zipfile.ZIP_STORED) as zipf: + zipf.writestr("foo.txt", txt) + + # check random seek and read on a file + with zipfile.ZipFile(sio, "r") as zipf: + with zipf.open("foo.txt", "r") as fp: + # Test this optimized read hasn't rewound and read from the + # start of the file (as in the case of the unoptimized path) + + # forward seek + old_count = sio.bytes_read + forward_seek_len = 10002 + current_pos = 0 + fp.seek(forward_seek_len, os.SEEK_CUR) + current_pos += forward_seek_len + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(fp._left, fp._compress_left) + arr = fp.read(read_length) + current_pos += read_length + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(arr, txt[current_pos - read_length:current_pos]) + self.assertEqual(fp._left, fp._compress_left) + read_count = sio.bytes_read - old_count + self.assertLessEqual(read_count, read_buffer_size) + + # backward seek + old_count = sio.bytes_read + backward_seek_len = 5003 + fp.seek(-backward_seek_len, os.SEEK_CUR) + current_pos -= backward_seek_len + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(fp._left, fp._compress_left) + arr = fp.read(read_length) + current_pos += read_length + self.assertEqual(fp.tell(), current_pos) + self.assertEqual(arr, txt[current_pos - read_length:current_pos]) + self.assertEqual(fp._left, fp._compress_left) + read_count = sio.bytes_read - old_count + self.assertLessEqual(read_count, read_buffer_size) + + # eof flags test + fp.seek(0, os.SEEK_END) + fp.seek(12345, os.SEEK_SET) + current_pos = 12345 + arr = fp.read(read_length) + current_pos += read_length + self.assertEqual(arr, txt[current_pos - read_length:current_pos]) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/tokenize.py b/Lib/tokenize.py index 1a60fd32a77ea4..9ce95a62d961ba 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -169,6 +169,7 @@ def __init__(self): self.prev_row = 1 self.prev_col = 0 self.prev_type = None + self.prev_line = "" self.encoding = None def add_whitespace(self, start): @@ -176,14 +177,28 @@ def add_whitespace(self, start): if row < self.prev_row or row == self.prev_row and col < self.prev_col: raise ValueError("start ({},{}) precedes previous end ({},{})" .format(row, col, self.prev_row, self.prev_col)) - row_offset = row - self.prev_row - if row_offset: - self.tokens.append("\\\n" * row_offset) - self.prev_col = 0 + self.add_backslash_continuation(start) col_offset = col - self.prev_col if col_offset: self.tokens.append(" " * col_offset) + def add_backslash_continuation(self, start): + """Add backslash continuation characters if the row has increased + without encountering a newline token. + + This also inserts the correct amount of whitespace before the backslash. 
+ """ + row = start[0] + row_offset = row - self.prev_row + if row_offset == 0: + return + + newline = '\r\n' if self.prev_line.endswith('\r\n') else '\n' + line = self.prev_line.rstrip('\\\r\n') + ws = ''.join(_itertools.takewhile(str.isspace, reversed(line))) + self.tokens.append(ws + f"\\{newline}" * row_offset) + self.prev_col = 0 + def escape_brackets(self, token): characters = [] consume_until_next_bracket = False @@ -243,8 +258,6 @@ def untokenize(self, iterable): end_line, end_col = end extra_chars = last_line.count("{{") + last_line.count("}}") end = (end_line, end_col + extra_chars) - elif tok_type in (STRING, FSTRING_START) and self.prev_type in (STRING, FSTRING_END): - self.tokens.append(" ") self.add_whitespace(start) self.tokens.append(token) @@ -253,6 +266,7 @@ def untokenize(self, iterable): self.prev_row += 1 self.prev_col = 0 self.prev_type = tok_type + self.prev_line = line return "".join(self.tokens) def compat(self, token, iterable): diff --git a/Lib/tomllib/_parser.py b/Lib/tomllib/_parser.py index 4d208bcfb4a9a6..0e522c3a69e6fe 100644 --- a/Lib/tomllib/_parser.py +++ b/Lib/tomllib/_parser.py @@ -4,11 +4,7 @@ from __future__ import annotations -from collections.abc import Iterable -import string from types import MappingProxyType -from typing import Any, BinaryIO, NamedTuple -import warnings from ._re import ( RE_DATETIME, @@ -18,7 +14,13 @@ match_to_localtime, match_to_number, ) -from ._types import Key, ParseFloat, Pos + +TYPE_CHECKING = False +if TYPE_CHECKING: + from collections.abc import Iterable + from typing import IO, Any + + from ._types import Key, ParseFloat, Pos ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) @@ -34,9 +36,11 @@ TOML_WS = frozenset(" \t") TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") -BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") +BARE_KEY_CHARS = frozenset( + "abcdefghijklmnopqrstuvwxyz" "ABCDEFGHIJKLMNOPQRSTUVWXYZ" "0123456789" "-_" +) KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") -HEXDIGIT_CHARS = frozenset(string.hexdigits) +HEXDIGIT_CHARS = frozenset("abcdef" "ABCDEF" "0123456789") BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( { @@ -80,6 +84,8 @@ def __init__( or not isinstance(doc, str) or not isinstance(pos, int) ): + import warnings + warnings.warn( "Free-form arguments for TOMLDecodeError are deprecated. 
" "Please set 'msg' (str), 'doc' (str) and 'pos' (int) arguments only.", @@ -115,7 +121,7 @@ def __init__( self.colno = colno -def load(fp: BinaryIO, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: +def load(fp: IO[bytes], /, *, parse_float: ParseFloat = float) -> dict[str, Any]: """Parse TOML from a binary file object.""" b = fp.read() try: @@ -139,7 +145,7 @@ def loads(s: str, /, *, parse_float: ParseFloat = float) -> dict[str, Any]: # n f"Expected str object, not '{type(s).__qualname__}'" ) from None pos = 0 - out = Output(NestedDict(), Flags()) + out = Output() header: Key = () parse_float = make_safe_parse_float(parse_float) @@ -290,9 +296,10 @@ def append_nest_to_list(self, key: Key) -> None: cont[last_key] = [{}] -class Output(NamedTuple): - data: NestedDict - flags: Flags +class Output: + def __init__(self) -> None: + self.data = NestedDict() + self.flags = Flags() def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: diff --git a/Lib/tomllib/_re.py b/Lib/tomllib/_re.py index 9eacefc729544e..1ca6bef77a0b03 100644 --- a/Lib/tomllib/_re.py +++ b/Lib/tomllib/_re.py @@ -7,9 +7,12 @@ from datetime import date, datetime, time, timedelta, timezone, tzinfo from functools import lru_cache import re -from typing import Any -from ._types import ParseFloat +TYPE_CHECKING = False +if TYPE_CHECKING: + from typing import Any + + from ._types import ParseFloat # E.g. # - 00:32:00.999999 diff --git a/Lib/traceback.py b/Lib/traceback.py index 6367c00e4d4b86..31c73efcef5a52 100644 --- a/Lib/traceback.py +++ b/Lib/traceback.py @@ -135,7 +135,7 @@ def print_exception(exc, /, value=_sentinel, tb=_sentinel, limit=None, \ def _print_exception_bltin(exc, /): file = sys.stderr if sys.stderr is not None else sys.__stderr__ - colorize = _colorize.can_colorize() + colorize = _colorize.can_colorize(file=file) return print_exception(exc, limit=BUILTIN_EXCEPTION_LIMIT, file=file, colorize=colorize) @@ -1283,7 +1283,7 @@ def _format_syntax_error(self, stype, **kwargs): filename_suffix = ' ({})'.format(self.filename) text = self.text - if text is not None: + if isinstance(text, str): # text = " foo\n" # rtext = " foo" # ltext = "foo" @@ -1292,10 +1292,17 @@ def _format_syntax_error(self, stype, **kwargs): spaces = len(rtext) - len(ltext) if self.offset is None: yield ' {}\n'.format(ltext) - else: + elif isinstance(self.offset, int): offset = self.offset if self.lineno == self.end_lineno: - end_offset = self.end_offset if self.end_offset not in {None, 0} else offset + end_offset = ( + self.end_offset + if ( + isinstance(self.end_offset, int) + and self.end_offset != 0 + ) + else offset + ) else: end_offset = len(rtext) + 1 diff --git a/Lib/turtle.py b/Lib/turtle.py index 1320cfd93fd6db..e88981d298ad52 100644 --- a/Lib/turtle.py +++ b/Lib/turtle.py @@ -107,6 +107,7 @@ from os.path import isfile, split, join from pathlib import Path +from contextlib import contextmanager from copy import deepcopy from tkinter import simpledialog @@ -114,23 +115,24 @@ 'RawTurtle', 'Turtle', 'RawPen', 'Pen', 'Shape', 'Vec2D'] _tg_screen_functions = ['addshape', 'bgcolor', 'bgpic', 'bye', 'clearscreen', 'colormode', 'delay', 'exitonclick', 'getcanvas', - 'getshapes', 'listen', 'mainloop', 'mode', 'numinput', + 'getshapes', 'listen', 'mainloop', 'mode', 'no_animation', 'numinput', 'onkey', 'onkeypress', 'onkeyrelease', 'onscreenclick', 'ontimer', 'register_shape', 'resetscreen', 'screensize', 'save', 'setup', - 'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', 'update', - 'window_height', 
'window_width'] + 'setworldcoordinates', 'textinput', 'title', 'tracer', 'turtles', + 'update', 'window_height', 'window_width'] _tg_turtle_functions = ['back', 'backward', 'begin_fill', 'begin_poly', 'bk', 'circle', 'clear', 'clearstamp', 'clearstamps', 'clone', 'color', 'degrees', 'distance', 'dot', 'down', 'end_fill', 'end_poly', 'fd', - 'fillcolor', 'filling', 'forward', 'get_poly', 'getpen', 'getscreen', 'get_shapepoly', - 'getturtle', 'goto', 'heading', 'hideturtle', 'home', 'ht', 'isdown', - 'isvisible', 'left', 'lt', 'onclick', 'ondrag', 'onrelease', 'pd', - 'pen', 'pencolor', 'pendown', 'pensize', 'penup', 'pos', 'position', - 'pu', 'radians', 'right', 'reset', 'resizemode', 'rt', - 'seth', 'setheading', 'setpos', 'setposition', - 'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', 'shapetransform', 'shearfactor', 'showturtle', - 'speed', 'st', 'stamp', 'teleport', 'tilt', 'tiltangle', 'towards', - 'turtlesize', 'undo', 'undobufferentries', 'up', 'width', + 'fillcolor', 'fill', 'filling', 'forward', 'get_poly', 'getpen', + 'getscreen', 'get_shapepoly', 'getturtle', 'goto', 'heading', + 'hideturtle', 'home', 'ht', 'isdown', 'isvisible', 'left', 'lt', + 'onclick', 'ondrag', 'onrelease', 'pd', 'pen', 'pencolor', 'pendown', + 'pensize', 'penup', 'poly', 'pos', 'position', 'pu', 'radians', 'right', + 'reset', 'resizemode', 'rt', 'seth', 'setheading', 'setpos', + 'setposition', 'setundobuffer', 'setx', 'sety', 'shape', 'shapesize', + 'shapetransform', 'shearfactor', 'showturtle', 'speed', 'st', 'stamp', + 'teleport', 'tilt', 'tiltangle', 'towards', 'turtlesize', 'undo', + 'undobufferentries', 'up', 'width', 'write', 'xcor', 'ycor'] _tg_utilities = ['write_docstringdict', 'done'] @@ -1275,6 +1277,26 @@ def delay(self, delay=None): return self._delayvalue self._delayvalue = int(delay) + @contextmanager + def no_animation(self): + """Temporarily turn off auto-updating the screen. + + This is useful for drawing complex shapes where even the fastest setting + is too slow. Once this context manager is exited, the drawing will + be displayed. + + Example (for a TurtleScreen instance named screen + and a Turtle instance named turtle): + >>> with screen.no_animation(): + ... turtle.circle(50) + """ + tracer = self.tracer() + try: + self.tracer(0) + yield + finally: + self.tracer(tracer) + def _incrementudc(self): """Increment update counter.""" if not TurtleScreen._RUNNING: @@ -3380,6 +3402,24 @@ def filling(self): """ return isinstance(self._fillpath, list) + @contextmanager + def fill(self): + """A context manager for filling a shape. + + Implicitly ensures the code block is wrapped with + begin_fill() and end_fill(). + + Example (for a Turtle instance named turtle): + >>> turtle.color("black", "red") + >>> with turtle.fill(): + ... turtle.circle(60) + """ + self.begin_fill() + try: + yield + finally: + self.end_fill() + def begin_fill(self): """Called just before drawing a shape to be filled. @@ -3400,7 +3440,6 @@ def begin_fill(self): self.undobuffer.push(("beginfill", self._fillitem)) self._update() - def end_fill(self): """Fill the shape drawn after the call begin_fill(). @@ -3504,6 +3543,27 @@ def write(self, arg, move=False, align="left", font=("Arial", 8, "normal")): if self.undobuffer: self.undobuffer.cumulate = False + @contextmanager + def poly(self): + """A context manager for recording the vertices of a polygon. 
+ + Implicitly ensures that the code block is wrapped with + begin_poly() and end_poly() + + Example (for a Turtle instance named turtle) where we create a + triangle as the polygon and move the turtle 100 steps forward: + >>> with turtle.poly(): + ... for side in range(3) + ... turtle.forward(50) + ... turtle.right(60) + >>> turtle.forward(100) + """ + self.begin_poly() + try: + yield + finally: + self.end_poly() + def begin_poly(self): """Start recording the vertices of a polygon. diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index e9ef551d0b3ded..10c3b7e122371e 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -1372,16 +1372,20 @@ def assertHasAttr(self, obj, name, msg=None): if not hasattr(obj, name): if isinstance(obj, types.ModuleType): standardMsg = f'module {obj.__name__!r} has no attribute {name!r}' + elif isinstance(obj, type): + standardMsg = f'type object {obj.__name__!r} has no attribute {name!r}' else: - standardMsg = f'{type(obj).__name__} instance has no attribute {name!r}' + standardMsg = f'{type(obj).__name__!r} object has no attribute {name!r}' self.fail(self._formatMessage(msg, standardMsg)) def assertNotHasAttr(self, obj, name, msg=None): if hasattr(obj, name): if isinstance(obj, types.ModuleType): standardMsg = f'module {obj.__name__!r} has unexpected attribute {name!r}' + elif isinstance(obj, type): + standardMsg = f'type object {obj.__name__!r} has unexpected attribute {name!r}' else: - standardMsg = f'{type(obj).__name__} instance has unexpected attribute {name!r}' + standardMsg = f'{type(obj).__name__!r} object has unexpected attribute {name!r}' self.fail(self._formatMessage(msg, standardMsg)) def assertRaisesRegex(self, expected_exception, expected_regex, diff --git a/Lib/unittest/result.py b/Lib/unittest/result.py index 97262735aa8311..b8ea396db6772e 100644 --- a/Lib/unittest/result.py +++ b/Lib/unittest/result.py @@ -191,7 +191,8 @@ def _exc_info_to_string(self, err, test): capture_locals=self.tb_locals, compact=True) from _colorize import can_colorize - msgLines = list(tb_e.format(colorize=can_colorize())) + colorize = hasattr(self, "stream") and can_colorize(file=self.stream) + msgLines = list(tb_e.format(colorize=colorize)) if self.buffer: output = sys.stdout.getvalue() diff --git a/Lib/unittest/runner.py b/Lib/unittest/runner.py index d60c295a1eddf7..eb0234a2617680 100644 --- a/Lib/unittest/runner.py +++ b/Lib/unittest/runner.py @@ -45,7 +45,7 @@ def __init__(self, stream, descriptions, verbosity, *, durations=None): self.showAll = verbosity > 1 self.dots = verbosity == 1 self.descriptions = descriptions - self._ansi = get_colors() + self._ansi = get_colors(file=stream) self._newline = True self.durations = durations @@ -286,7 +286,7 @@ def run(self, test): expected_fails, unexpected_successes, skipped = results infos = [] - ansi = get_colors() + ansi = get_colors(file=self.stream) bold_red = ansi.BOLD_RED green = ansi.GREEN red = ansi.RED diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py index 052ef47b8f6598..b8b496ad9471f4 100644 --- a/Lib/zipfile/__init__.py +++ b/Lib/zipfile/__init__.py @@ -13,7 +13,6 @@ import sys import threading import time -from typing import Self try: import zlib # We may need its compression method @@ -606,7 +605,7 @@ def from_file(cls, filename, arcname=None, *, strict_timestamps=True): return zinfo - def _for_archive(self, archive: ZipFile) -> Self: + def _for_archive(self, archive): """Resolve suitable defaults from the archive. 
Resolve the date_time, compression attributes, and external attributes @@ -614,7 +613,11 @@ def _for_archive(self, archive: ZipFile) -> Self: Return self. """ - self.date_time = time.localtime(time.time())[:6] + # gh-91279: Set the SOURCE_DATE_EPOCH to a specific timestamp + epoch = os.environ.get('SOURCE_DATE_EPOCH') + get_time = int(epoch) if epoch else time.time() + self.date_time = time.localtime(get_time)[:6] + self.compress_type = archive.compression self.compress_level = archive.compresslevel if self.filename.endswith('/'): # pragma: no cover @@ -1184,13 +1187,15 @@ def seek(self, offset, whence=os.SEEK_SET): self._offset = buff_offset read_offset = 0 # Fast seek uncompressed unencrypted file - elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset > 0: + elif self._compress_type == ZIP_STORED and self._decrypter is None and read_offset != 0: # disable CRC checking after first seeking - it would be invalid self._expected_crc = None # seek actual file taking already buffered data into account read_offset -= len(self._readbuffer) - self._offset self._fileobj.seek(read_offset, os.SEEK_CUR) self._left -= read_offset + self._compress_left -= read_offset + self._eof = self._left <= 0 read_offset = 0 # flush read buffer self._readbuffer = b'' diff --git a/Misc/ACKS b/Misc/ACKS index deda334bee7417..7759bd0b95ed8b 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1922,6 +1922,7 @@ Bill Tutt Fraser Tweedale Doobee R. Tzeck Eren Türkay +Stan Ulbrych Lionel Ulmer Adnan Umer Utkarsh Upadhyay diff --git a/Misc/NEWS.d/3.10.0b1.rst b/Misc/NEWS.d/3.10.0b1.rst index 25c6b827146e82..406a5d7853edc0 100644 --- a/Misc/NEWS.d/3.10.0b1.rst +++ b/Misc/NEWS.d/3.10.0b1.rst @@ -941,7 +941,7 @@ result from ``entry_points()`` as deprecated. .. -.. gh: 47383 +.. gh-issue: 47383 .. date: 2021-04-08-19-32-26 .. nonce: YI1hdL .. section: Library diff --git a/Misc/NEWS.d/3.11.0b1.rst b/Misc/NEWS.d/3.11.0b1.rst index 85cb0f1b5cffbd..87442dbbbd17f5 100644 --- a/Misc/NEWS.d/3.11.0b1.rst +++ b/Misc/NEWS.d/3.11.0b1.rst @@ -570,7 +570,7 @@ planned). Patch by Alex Waygood. .. -.. gh: 78157 +.. gh-issue: 78157 .. date: 2022-05-05-20-40-45 .. nonce: IA_9na .. section: Library @@ -1289,7 +1289,7 @@ Deprecate the chunk module. .. -.. gh: 91498 +.. gh-issue: 91498 .. date: 2022-04-10-08-39-44 .. nonce: 8oII92 .. section: Library diff --git a/Misc/NEWS.d/next/Build/2025-01-16-03-35-37.gh-issue-128902.Dt7xtV.rst b/Misc/NEWS.d/next/Build/2025-01-16-03-35-37.gh-issue-128902.Dt7xtV.rst new file mode 100644 index 00000000000000..42ac492498e029 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2025-01-16-03-35-37.gh-issue-128902.Dt7xtV.rst @@ -0,0 +1 @@ +Fix compile errors with Clang 9 and older due to lack of ``__attribute__((fallthrough))`` support. diff --git a/Misc/NEWS.d/next/C_API/2024-12-14-03-40-15.gh-issue-127925.FF7aov.rst b/Misc/NEWS.d/next/C_API/2024-12-14-03-40-15.gh-issue-127925.FF7aov.rst new file mode 100644 index 00000000000000..6cf5fd2872cd43 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2024-12-14-03-40-15.gh-issue-127925.FF7aov.rst @@ -0,0 +1,3 @@ +Convert the :mod:`decimal` module to use :pep:`757` C API (export-import +integers), offering some speed-up if the integer part of the +:class:`~decimal.Decimal` instance is small. Patch by Sergey B Kirpichev. 
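Editorial note: the SOURCE_DATE_EPOCH handling added to ZipInfo._for_archive earlier in this patch (and exercised by the new test_zipfile cases) can be tried directly. A minimal sketch, assuming the patched zipfile module; the archive and member names ("reproducible.zip", "hello.txt") are illustrative only:

    import os
    import time
    import zipfile

    # When SOURCE_DATE_EPOCH is set, writestr() stamps new entries with that
    # timestamp instead of the current time (gh-91279).
    os.environ["SOURCE_DATE_EPOCH"] = "1735715999"

    with zipfile.ZipFile("reproducible.zip", "w") as zf:
        zf.writestr("hello.txt", "hello")

    with zipfile.ZipFile("reproducible.zip") as zf:
        # date_time now follows SOURCE_DATE_EPOCH rather than the wall clock;
        # seconds may differ by one because ZIP timestamps have 2-second resolution.
        print(zf.getinfo("hello.txt").date_time)
        print(time.localtime(1735715999)[:6])
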
diff --git a/Misc/NEWS.d/next/C_API/2025-01-01-03-25-38.gh-issue-126599.MRCYlH.rst b/Misc/NEWS.d/next/C_API/2025-01-01-03-25-38.gh-issue-126599.MRCYlH.rst new file mode 100644 index 00000000000000..8362ee3a2b1760 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-01-03-25-38.gh-issue-126599.MRCYlH.rst @@ -0,0 +1 @@ +Remove some internal test APIs for the experimental JIT compiler. diff --git a/Misc/NEWS.d/next/C_API/2025-01-15-11-42-07.gh-issue-128863.C9MkB_.rst b/Misc/NEWS.d/next/C_API/2025-01-15-11-42-07.gh-issue-128863.C9MkB_.rst new file mode 100644 index 00000000000000..b2e5664138fba2 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-15-11-42-07.gh-issue-128863.C9MkB_.rst @@ -0,0 +1,19 @@ +The following private functions are deprecated and planned for removal in +Python 3.18: + +* :c:func:`!_PyBytes_Join`: use :c:func:`PyBytes_Join`. +* :c:func:`!_PyDict_GetItemStringWithError`: use :c:func:`PyDict_GetItemStringRef`. +* :c:func:`!_PyDict_Pop()`: use :c:func:`PyDict_Pop`. +* :c:func:`!_PyLong_Sign()`: use :c:func:`PyLong_GetSign`. +* :c:func:`!_PyLong_FromDigits` and :c:func:`!_PyLong_New`: + use :c:func:`PyLongWriter_Create`. +* :c:func:`!_PyThreadState_UncheckedGet`: use :c:func:`PyThreadState_GetUnchecked`. +* :c:func:`!_PyUnicode_AsString`: use :c:func:`PyUnicode_AsUTF8`. +* :c:func:`!_Py_HashPointer`: use :c:func:`Py_HashPointer`. +* :c:func:`!_Py_fopen_obj`: use :c:func:`Py_fopen`. + +The `pythoncapi-compat project +`__ can be used to get these new +public functions on Python 3.13 and older. + +Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-19-23-17-58.gh-issue-129033.cpRivP.rst b/Misc/NEWS.d/next/C_API/2025-01-19-23-17-58.gh-issue-129033.cpRivP.rst new file mode 100644 index 00000000000000..3cd19cc48e3416 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-19-23-17-58.gh-issue-129033.cpRivP.rst @@ -0,0 +1,3 @@ +Remove the private ``_Py_InitializeMain()`` function. It was a +:term:`provisional API` added to Python 3.8 by :pep:`587`. Patch by Victor +Stinner. diff --git a/Misc/NEWS.d/next/C_API/2025-01-20-10-40-11.gh-issue-129033.d1jltB.rst b/Misc/NEWS.d/next/C_API/2025-01-20-10-40-11.gh-issue-129033.d1jltB.rst new file mode 100644 index 00000000000000..c0c109d5ce1ca2 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-01-20-10-40-11.gh-issue-129033.d1jltB.rst @@ -0,0 +1,4 @@ +Remove ``_PyInterpreterState_GetConfigCopy()`` and +``_PyInterpreterState_SetConfig()`` private functions. Use instead +:c:func:`PyConfig_Get` and :c:func:`PyConfig_Set`, public C API added by +:pep:`741` "Python Configuration C API". Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2023-12-04-15-53-25.gh-issue-112713.Zrhv77.rst b/Misc/NEWS.d/next/Core_and_Builtins/2023-12-04-15-53-25.gh-issue-112713.Zrhv77.rst new file mode 100644 index 00000000000000..ee1f33f95647bd --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2023-12-04-15-53-25.gh-issue-112713.Zrhv77.rst @@ -0,0 +1 @@ +Added support for the ``Partitioned`` cookie flag in :mod:`http.cookies`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-02-29-16-55-52.gh-issue-115911.Vnkue_.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-02-29-16-55-52.gh-issue-115911.Vnkue_.rst new file mode 100644 index 00000000000000..717804be95b18b --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-02-29-16-55-52.gh-issue-115911.Vnkue_.rst @@ -0,0 +1,3 @@ +If the current working directory cannot be determined due to permissions, +then import will no longer raise :exc:`PermissionError`. Patch by Alex +Willmer. 
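Editorial note: the ``Partitioned`` cookie flag mentioned in the http.cookies entry above is set like the existing morsel flags. A minimal sketch, assuming the patched module and that the flag key follows the ``secure``/``httponly`` naming convention (the exact key name is an assumption here):

    from http.cookies import SimpleCookie

    c = SimpleCookie()
    c["session"] = "abc123"
    c["session"]["secure"] = True
    # Assumed flag name, following the existing convention (gh-112713).
    c["session"]["partitioned"] = True
    # Expected to emit something like:
    #   Set-Cookie: session=abc123; Partitioned; Secure
    print(c.output())
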
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-06-05-16.gh-issue-126349.7YwWsI.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-06-05-16.gh-issue-126349.7YwWsI.rst new file mode 100644 index 00000000000000..aecc8c9abf3ce9 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-03-06-05-16.gh-issue-126349.7YwWsI.rst @@ -0,0 +1,2 @@ +Add :func:`turtle.fill`, :func:`turtle.poly` and :func:`turtle.no_animation` context managers. +Patch by Marie Roald and Yngve Mardal Moe. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-12-17-45.gh-issue-125723.tW_hFG.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-12-17-45.gh-issue-125723.tW_hFG.rst new file mode 100644 index 00000000000000..62ca6f62f521a8 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-11-18-12-17-45.gh-issue-125723.tW_hFG.rst @@ -0,0 +1,2 @@ +Fix crash with ``gi_frame.f_locals`` when generator frames outlive their +generator. Patch by Mikhail Efimov. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-17-46.gh-issue-126004.-p8MAS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-17-46.gh-issue-126004.-p8MAS.rst new file mode 100644 index 00000000000000..60b1c5d8b80793 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-17-46.gh-issue-126004.-p8MAS.rst @@ -0,0 +1,3 @@ +Fix handling of :attr:`UnicodeError.start` and :attr:`UnicodeError.end` +values in the :func:`codecs.xmlcharrefreplace_errors` error handler. +Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-30-58.gh-issue-126004.-p8MAS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-30-58.gh-issue-126004.-p8MAS.rst new file mode 100644 index 00000000000000..619d73042a9bb8 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-30-58.gh-issue-126004.-p8MAS.rst @@ -0,0 +1,3 @@ +Fix handling of :attr:`UnicodeError.start` and :attr:`UnicodeError.end` +values in the :func:`codecs.backslashreplace_errors` error handler. Patch by +Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-32-58.gh-issue-126004.CYAwTB.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-32-58.gh-issue-126004.CYAwTB.rst new file mode 100644 index 00000000000000..de70c59ee48eec --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-06-11-32-58.gh-issue-126004.CYAwTB.rst @@ -0,0 +1,3 @@ +Fix handling of :attr:`UnicodeError.start` and :attr:`UnicodeError.end` +values in the :func:`codecs.replace_errors` error handler. Patch by Bénédikt +Tran. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-09-28-17.gh-issue-128016.DPqhah.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-09-28-17.gh-issue-128016.DPqhah.rst new file mode 100644 index 00000000000000..0832d777bc3251 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-17-09-28-17.gh-issue-128016.DPqhah.rst @@ -0,0 +1 @@ +Improved the ``SyntaxWarning`` message for invalid escape sequences to clarify that such sequences will raise a ``SyntaxError`` in future Python releases. The new message also suggests a potential fix, i.e., ``Did you mean "\\e"?``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-29-15-09-21.gh-issue-128330.IaYL7G.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-29-15-09-21.gh-issue-128330.IaYL7G.rst new file mode 100644 index 00000000000000..8fe628d18f4bd4 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-29-15-09-21.gh-issue-128330.IaYL7G.rst @@ -0,0 +1 @@ +Restore terminal control characters on REPL exit. 
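Editorial note: the reworded SyntaxWarning from gh-128016 (also asserted by the test_string_literals changes at the top of this section) can be observed with a small sketch using only the standard warnings machinery:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always", SyntaxWarning)
        compile("'\\z'", "<test>", "eval")

    print(caught[0].message)
    # "\z" is an invalid escape sequence. Such sequences will not work in
    # the future. Did you mean "\\z"? A raw string is also an option.
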
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2024-12-30-15-49-31.gh-issue-127953.B4_6L9.rst b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-30-15-49-31.gh-issue-127953.B4_6L9.rst new file mode 100644 index 00000000000000..f19afcd90b16ea --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2024-12-30-15-49-31.gh-issue-127953.B4_6L9.rst @@ -0,0 +1,2 @@ +The time to handle a ``LINE`` event in sys.monitoring (and sys.settrace) is +now independent of the number of lines in the code object. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-10-23-54-16.gh-issue-100239.ijOOUs.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-10-23-54-16.gh-issue-100239.ijOOUs.rst new file mode 100644 index 00000000000000..f58c1fc767515e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-10-23-54-16.gh-issue-100239.ijOOUs.rst @@ -0,0 +1,2 @@ +Add opcode ``BINARY_OP_EXTEND`` which executes a pair of functions (guard and +specialization functions) accessed from the inline cache. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-13-17-03-49.gh-issue-128807.BGGBxD.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-13-17-03-49.gh-issue-128807.BGGBxD.rst new file mode 100644 index 00000000000000..34952e9abb66e5 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-13-17-03-49.gh-issue-128807.BGGBxD.rst @@ -0,0 +1,6 @@ +Add a marking phase to the free-threaded GC. This is similar to what was +done in GH-126491. Since the free-threaded GC does not have generations and +is not incremental, the marking phase looks for all objects reachable from +known roots. The roots are objects known to not be garbage, like the module +dictionary for :mod:`sys`. For most programs, this marking phase should +make the GC a bit faster since typically less work is done per object. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-18-16-18.gh-issue-128910.9pqfab.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-18-16-18.gh-issue-128910.9pqfab.rst new file mode 100644 index 00000000000000..e095ba9ebf6be4 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-16-18-16-18.gh-issue-128910.9pqfab.rst @@ -0,0 +1,2 @@ +Undocumented and unused private C-API functions ``_PyTrash_begin`` and +``_PyTrash_end`` are removed. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-18-01-06-58.gh-issue-128799.vSNagk.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-18-01-06-58.gh-issue-128799.vSNagk.rst new file mode 100644 index 00000000000000..eb2361bb5d4525 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-18-01-06-58.gh-issue-128799.vSNagk.rst @@ -0,0 +1 @@ +Add frame of ``except*`` to traceback when it wraps a naked exception. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-19-48-30.gh-issue-124363.vOFhHW.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-19-48-30.gh-issue-124363.vOFhHW.rst new file mode 100644 index 00000000000000..553aa5a4dd573e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-19-48-30.gh-issue-124363.vOFhHW.rst @@ -0,0 +1 @@ +Treat debug expressions in f-string as raw strings. 
Patch by Pablo Galindo diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-23-35-41.gh-issue-129093.0rvETC.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-23-35-41.gh-issue-129093.0rvETC.rst new file mode 100644 index 00000000000000..067d52eff2da1e --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-21-23-35-41.gh-issue-129093.0rvETC.rst @@ -0,0 +1,2 @@ +Fix f-strings such as ``f'{expr=}'`` sometimes not displaying the full +expression when the expression contains ``!=``. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-24-44.gh-issue-129149.wAYu43.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-24-44.gh-issue-129149.wAYu43.rst new file mode 100644 index 00000000000000..d946f6a2fea185 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-01-22-14-24-44.gh-issue-129149.wAYu43.rst @@ -0,0 +1,2 @@ +Add fast path for medium-size integers in :c:func:`PyLong_FromUnsignedLong`, +:c:func:`PyLong_FromUnsignedLongLong` and :c:func:`PyLong_FromSize_t`. diff --git a/Misc/NEWS.d/next/Documentation/2025-01-16-18-59-11.gh-issue-125722.eHHRga.rst b/Misc/NEWS.d/next/Documentation/2025-01-16-18-59-11.gh-issue-125722.eHHRga.rst new file mode 100644 index 00000000000000..bf6253eed2eb90 --- /dev/null +++ b/Misc/NEWS.d/next/Documentation/2025-01-16-18-59-11.gh-issue-125722.eHHRga.rst @@ -0,0 +1,2 @@ +Require Sphinx 8.1.3 or later to build the Python documentation. Patch by +Adam Turner. diff --git a/Misc/NEWS.d/next/Library/2023-02-01-16-41-31.gh-issue-101410.Dt2aQE.rst b/Misc/NEWS.d/next/Library/2023-02-01-16-41-31.gh-issue-101410.Dt2aQE.rst new file mode 100644 index 00000000000000..31493686daec97 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2023-02-01-16-41-31.gh-issue-101410.Dt2aQE.rst @@ -0,0 +1,3 @@ +Support custom messages for domain errors in the :mod:`math` module +(:func:`math.sqrt`, :func:`math.log` and :func:`math.atanh` were modified as +examples). Patch by Charlie Zhao and Sergey B Kirpichev. diff --git a/Misc/NEWS.d/next/Library/2024-07-14-23-19-20.gh-issue-119257.9OEzcN.rst b/Misc/NEWS.d/next/Library/2024-07-14-23-19-20.gh-issue-119257.9OEzcN.rst new file mode 100644 index 00000000000000..8f3f863d93e021 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-07-14-23-19-20.gh-issue-119257.9OEzcN.rst @@ -0,0 +1,2 @@ +Show tab completions menu below the current line, which results in less +janky behaviour, and fixes a cursor movement bug. Patch by Daniel Hollas diff --git a/Misc/NEWS.d/next/Library/2024-09-12-14-24-25.gh-issue-123987.7_OD1p.rst b/Misc/NEWS.d/next/Library/2024-09-12-14-24-25.gh-issue-123987.7_OD1p.rst new file mode 100644 index 00000000000000..b110900e7efd33 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-12-14-24-25.gh-issue-123987.7_OD1p.rst @@ -0,0 +1,3 @@ +Fixed issue in NamespaceReader where a non-path item in a namespace path, +such as a sentinel added by an editable installer, would break resource +loading. diff --git a/Misc/NEWS.d/next/Library/2024-09-27-19-21-53.gh-issue-124703.lYTLEv.rst b/Misc/NEWS.d/next/Library/2024-09-27-19-21-53.gh-issue-124703.lYTLEv.rst new file mode 100644 index 00000000000000..e55d3539355d73 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-09-27-19-21-53.gh-issue-124703.lYTLEv.rst @@ -0,0 +1 @@ +Quitting :mod:`pdb` in ``inline`` mode will emit a confirmation prompt and exit gracefully now, instead of printing an exception traceback. 
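For the two f-string entries above (debug expressions treated as raw text, and ``!=`` inside ``{expr=}``), a small illustrative sketch; the exact rendered output is not asserted here:

    a, b = 1, 2
    print(f"{a != b = }")   # the full expression text should be shown, e.g. "a != b = True"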
diff --git a/Misc/NEWS.d/next/Library/2024-10-02-11-17-23.gh-issue-91048.QWY-b1.rst b/Misc/NEWS.d/next/Library/2024-10-02-11-17-23.gh-issue-91048.QWY-b1.rst new file mode 100644 index 00000000000000..c2faf470ffc9cf --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-02-11-17-23.gh-issue-91048.QWY-b1.rst @@ -0,0 +1,2 @@ +Add :func:`asyncio.capture_call_graph` and +:func:`asyncio.print_call_graph` functions. diff --git a/Misc/NEWS.d/next/Library/2024-10-26-16-59-02.gh-issue-125553.4pDLzt.rst b/Misc/NEWS.d/next/Library/2024-10-26-16-59-02.gh-issue-125553.4pDLzt.rst new file mode 100644 index 00000000000000..291c5e6f6f2181 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-10-26-16-59-02.gh-issue-125553.4pDLzt.rst @@ -0,0 +1,2 @@ +Fix round-trip invariance for backslash continuations in +:func:`tokenize.untokenize`. diff --git a/Misc/NEWS.d/next/Library/2024-11-10-19-45-01.gh-issue-126332.WCCKoH.rst b/Misc/NEWS.d/next/Library/2024-11-10-19-45-01.gh-issue-126332.WCCKoH.rst new file mode 100644 index 00000000000000..9277797ddc745e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-10-19-45-01.gh-issue-126332.WCCKoH.rst @@ -0,0 +1 @@ +Fix _pyrepl crash when entering a double CTRL-Z on an overflowing line. diff --git a/Misc/NEWS.d/next/Library/2024-11-24-22-06-42.gh-issue-127096.R7LLpQ.rst b/Misc/NEWS.d/next/Library/2024-11-24-22-06-42.gh-issue-127096.R7LLpQ.rst new file mode 100644 index 00000000000000..8619296143b7d7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-11-24-22-06-42.gh-issue-127096.R7LLpQ.rst @@ -0,0 +1,2 @@ +Do not recreate the unnamed section on every read in +:class:`configparser.ConfigParser`. Patch by Andrey Efremov. diff --git a/Misc/NEWS.d/next/Library/2024-12-12-18-25-50.gh-issue-127873.WJRwfz.rst b/Misc/NEWS.d/next/Library/2024-12-12-18-25-50.gh-issue-127873.WJRwfz.rst new file mode 100644 index 00000000000000..d7575c7efb6e88 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-12-18-25-50.gh-issue-127873.WJRwfz.rst @@ -0,0 +1,3 @@ +When ``-E`` is set, only ignore ``PYTHON_COLORS`` and not +``FORCE_COLOR``/``NO_COLOR``/``TERM`` when colourising output. +Patch by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2024-12-21-03-20-12.gh-issue-128131.QpPmNt.rst b/Misc/NEWS.d/next/Library/2024-12-21-03-20-12.gh-issue-128131.QpPmNt.rst new file mode 100644 index 00000000000000..f4c4ebce10729c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-21-03-20-12.gh-issue-128131.QpPmNt.rst @@ -0,0 +1,2 @@ +Fully support random access of uncompressed, unencrypted read-only +zip files obtained by :meth:`ZipFile.open <zipfile.ZipFile.open>`. diff --git a/Misc/NEWS.d/next/Library/2024-12-23-02-09-44.gh-issue-58956.4OdMdT.rst b/Misc/NEWS.d/next/Library/2024-12-23-02-09-44.gh-issue-58956.4OdMdT.rst new file mode 100644 index 00000000000000..b78bc5aaf44217 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-23-02-09-44.gh-issue-58956.4OdMdT.rst @@ -0,0 +1 @@ +Fixed a frame reference leak in :mod:`bdb`. diff --git a/Misc/NEWS.d/next/Library/2024-12-30-19-53-14.gh-issue-91279.EeOJk1.rst b/Misc/NEWS.d/next/Library/2024-12-30-19-53-14.gh-issue-91279.EeOJk1.rst new file mode 100644 index 00000000000000..30ee2ea5efd069 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2024-12-30-19-53-14.gh-issue-91279.EeOJk1.rst @@ -0,0 +1,3 @@ +:meth:`zipfile.ZipFile.writestr` now respects the ``SOURCE_DATE_EPOCH`` +environment variable, which distributions can set centrally so that build +tools consume it and produce reproducible output.
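For the ``SOURCE_DATE_EPOCH`` entry just above, a minimal sketch, assuming ``zipfile.ZipFile.writestr`` consults the environment variable when the archive member is written; the file names and the epoch value are arbitrary:

    import os
    import zipfile

    os.environ["SOURCE_DATE_EPOCH"] = "315532800"    # 1980-01-01 00:00:00 UTC
    with zipfile.ZipFile("reproducible.zip", "w") as zf:
        zf.writestr("hello.txt", "hello world\n")    # member timestamp taken from SOURCE_DATE_EPOCH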
diff --git a/Misc/NEWS.d/next/Library/2025-01-04-11-10-04.gh-issue-128479.jvOrF-.rst b/Misc/NEWS.d/next/Library/2025-01-04-11-10-04.gh-issue-128479.jvOrF-.rst new file mode 100644 index 00000000000000..fc3b4d5a5273a6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-04-11-10-04.gh-issue-128479.jvOrF-.rst @@ -0,0 +1 @@ +Fix :func:`!asyncio.staggered.staggered_race` leaking tasks and issuing an unhandled exception. diff --git a/Misc/NEWS.d/next/Library/2025-01-07-21-48-32.gh-issue-128498.n6jtlW.rst b/Misc/NEWS.d/next/Library/2025-01-07-21-48-32.gh-issue-128498.n6jtlW.rst new file mode 100644 index 00000000000000..9a241e37c20a44 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-07-21-48-32.gh-issue-128498.n6jtlW.rst @@ -0,0 +1,2 @@ +Default to checking ``isatty()`` on ``stdout`` instead of ``stderr`` for +color detection. Patch by Hugo van Kemenade. diff --git a/Misc/NEWS.d/next/Library/2025-01-08-22-30-38.gh-issue-128636.jQfWXj.rst b/Misc/NEWS.d/next/Library/2025-01-08-22-30-38.gh-issue-128636.jQfWXj.rst new file mode 100644 index 00000000000000..80c9840b585530 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-08-22-30-38.gh-issue-128636.jQfWXj.rst @@ -0,0 +1,2 @@ +Fix PyREPL failure when :data:`os.environ` is overwritten with an invalid +value. diff --git a/Misc/NEWS.d/next/Library/2025-01-09-16-20-34.gh-issue-128156.GfObBq.rst b/Misc/NEWS.d/next/Library/2025-01-09-16-20-34.gh-issue-128156.GfObBq.rst new file mode 100644 index 00000000000000..ec6a55040ae6cb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-09-16-20-34.gh-issue-128156.GfObBq.rst @@ -0,0 +1,3 @@ +When using macOS system ``libffi``, support for complex types in +:mod:`ctypes` is now checked at runtime (macOS 10.15 or newer). The types +must also be available at build time. diff --git a/Misc/NEWS.d/next/Library/2025-01-13-07-54-32.gh-issue-128308.kYSDRF.rst b/Misc/NEWS.d/next/Library/2025-01-13-07-54-32.gh-issue-128308.kYSDRF.rst new file mode 100644 index 00000000000000..efa613876a35fd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-13-07-54-32.gh-issue-128308.kYSDRF.rst @@ -0,0 +1 @@ +Support the *name* keyword argument for eager tasks in :func:`asyncio.loop.create_task`, :func:`asyncio.create_task` and :func:`asyncio.TaskGroup.create_task`, by passing on all *kwargs* to the task factory set by :func:`asyncio.loop.set_task_factory`. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-09-45-43.gh-issue-118761.TvAC8E.rst b/Misc/NEWS.d/next/Library/2025-01-15-09-45-43.gh-issue-118761.TvAC8E.rst new file mode 100644 index 00000000000000..38d18b7f4ca05e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-09-45-43.gh-issue-118761.TvAC8E.rst @@ -0,0 +1,3 @@ +Reduce the import time of :mod:`csv` by up to five times, by importing +:mod:`re` on demand. In particular, ``re`` is no longer implicitly exposed +as ``csv.re``. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-18-54-48.gh-issue-118761.G1dv6E.rst b/Misc/NEWS.d/next/Library/2025-01-15-18-54-48.gh-issue-118761.G1dv6E.rst new file mode 100644 index 00000000000000..4144ef8f40e6dd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-18-54-48.gh-issue-118761.G1dv6E.rst @@ -0,0 +1,2 @@ +Reduce the import time of :mod:`optparse` when no help text is printed. +Patch by Eli Schwartz.
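For the ``csv`` import-time entry above, a tiny check; on older releases the attribute may still be present, which is exactly what the note about ``csv.re`` describes:

    import csv
    import re   # import re directly; it is no longer reachable as csv.re

    print(hasattr(csv, "re"))   # expected to be False after this change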
diff --git a/Misc/NEWS.d/next/Library/2025-01-15-19-16-50.gh-issue-118761.cbW2ZL.rst b/Misc/NEWS.d/next/Library/2025-01-15-19-16-50.gh-issue-118761.cbW2ZL.rst new file mode 100644 index 00000000000000..0eef8777512dd8 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-19-16-50.gh-issue-118761.cbW2ZL.rst @@ -0,0 +1,3 @@ +Reduce import time of :mod:`gettext` by up to ten times, by importing +:mod:`re` on demand. In particular, ``re`` is no longer implicitly +exposed as ``gettext.re``. Patch by Eli Schwartz. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-19-32-23.gh-issue-128891.ojUxKo.rst b/Misc/NEWS.d/next/Library/2025-01-15-19-32-23.gh-issue-128891.ojUxKo.rst new file mode 100644 index 00000000000000..79d845bbab7cfc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-19-32-23.gh-issue-128891.ojUxKo.rst @@ -0,0 +1 @@ +Add specialized opcodes to ``opcode.opname``. diff --git a/Misc/NEWS.d/next/Library/2025-01-15-21-41-51.gh-issue-128679.tq10F2.rst b/Misc/NEWS.d/next/Library/2025-01-15-21-41-51.gh-issue-128679.tq10F2.rst new file mode 100644 index 00000000000000..5c108da5703c00 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-15-21-41-51.gh-issue-128679.tq10F2.rst @@ -0,0 +1,3 @@ +:mod:`tracemalloc`: Fix race conditions when :func:`tracemalloc.stop` is +called by a thread, while other threads are tracing memory allocations. +Patch by Victor Stinner. diff --git a/Misc/NEWS.d/next/Library/2025-01-16-10-06-40.gh-issue-118761.z100LC.rst b/Misc/NEWS.d/next/Library/2025-01-16-10-06-40.gh-issue-118761.z100LC.rst new file mode 100644 index 00000000000000..ea71ecaaeb2936 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-16-10-06-40.gh-issue-118761.z100LC.rst @@ -0,0 +1,2 @@ +Improve import time of :mod:`tomllib` by removing ``typing``, ``string``, +and ``tomllib._types`` imports. Patch by Taneli Hukkinen. diff --git a/Misc/NEWS.d/next/Library/2025-01-17-11-46-16.gh-issue-128916.GEePbO.rst b/Misc/NEWS.d/next/Library/2025-01-17-11-46-16.gh-issue-128916.GEePbO.rst new file mode 100644 index 00000000000000..f2db341ef81621 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-17-11-46-16.gh-issue-128916.GEePbO.rst @@ -0,0 +1,3 @@ +Do not attempt to set ``SO_REUSEPORT`` on sockets of address families +other than ``AF_INET`` and ``AF_INET6``, as it is meaningless for those +address families, and the call will fail with Linux kernel 6.12.9 and newer. diff --git a/Misc/NEWS.d/next/Library/2025-01-17-17-20-51.gh-issue-128894.gX1-8J.rst b/Misc/NEWS.d/next/Library/2025-01-17-17-20-51.gh-issue-128894.gX1-8J.rst new file mode 100644 index 00000000000000..7e015103a95713 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-17-17-20-51.gh-issue-128894.gX1-8J.rst @@ -0,0 +1,2 @@ +Fix ``traceback.TracebackException._format_syntax_error`` so that it does +not fail on exceptions with custom metadata. diff --git a/Misc/NEWS.d/next/Library/2025-01-17-21-33-11.gh-issue-128961.XwvyIZ.rst b/Misc/NEWS.d/next/Library/2025-01-17-21-33-11.gh-issue-128961.XwvyIZ.rst new file mode 100644 index 00000000000000..9c985df77743da --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-17-21-33-11.gh-issue-128961.XwvyIZ.rst @@ -0,0 +1 @@ +Fix a crash when setting state on an exhausted :class:`array.array` iterator.
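For the ``array.array`` iterator fix directly above, a reproducer-style sketch; only the absence of a crash is asserted, not the resulting iterator state:

    import array

    it = iter(array.array("i", [1, 2, 3]))
    list(it)             # exhaust the iterator
    it.__setstate__(1)   # setting state on an exhausted iterator previously crashed
    print("no crash")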
diff --git a/Misc/NEWS.d/next/Library/2025-01-18-11-04-44.gh-issue-128978.hwg7-w.rst b/Misc/NEWS.d/next/Library/2025-01-18-11-04-44.gh-issue-128978.hwg7-w.rst new file mode 100644 index 00000000000000..521496d6a2f8c2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-18-11-04-44.gh-issue-128978.hwg7-w.rst @@ -0,0 +1,2 @@ +Fix a :exc:`NameError` in :func:`!sysconfig.expand_makefile_vars`. Patch by +Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-01-18-11-24-02.gh-issue-118761.G8MmxY.rst b/Misc/NEWS.d/next/Library/2025-01-18-11-24-02.gh-issue-118761.G8MmxY.rst new file mode 100644 index 00000000000000..3b3f3f7d98c5d6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-18-11-24-02.gh-issue-118761.G8MmxY.rst @@ -0,0 +1,2 @@ +Reduce import time of :mod:`pstats` and :mod:`zipfile` by up to 20%, by +removing unnecessary imports to :mod:`typing`. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-01-18-16-58-10.gh-issue-128991.EzJit9.rst b/Misc/NEWS.d/next/Library/2025-01-18-16-58-10.gh-issue-128991.EzJit9.rst new file mode 100644 index 00000000000000..64fa04fb53e89c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-18-16-58-10.gh-issue-128991.EzJit9.rst @@ -0,0 +1 @@ +Release the enter frame reference within :mod:`bdb` callback diff --git a/Misc/NEWS.d/next/Library/2025-01-20-13-12-39.gh-issue-128550.AJ5TOL.rst b/Misc/NEWS.d/next/Library/2025-01-20-13-12-39.gh-issue-128550.AJ5TOL.rst new file mode 100644 index 00000000000000..f59feac795ea18 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-20-13-12-39.gh-issue-128550.AJ5TOL.rst @@ -0,0 +1 @@ +Removed an incorrect optimization relating to eager tasks in :class:`asyncio.TaskGroup` that resulted in cancellations being missed. diff --git a/Misc/NEWS.d/next/Library/2025-01-20-16-02-38.gh-issue-129064.JXasgJ.rst b/Misc/NEWS.d/next/Library/2025-01-20-16-02-38.gh-issue-129064.JXasgJ.rst new file mode 100644 index 00000000000000..93a1eda350d1fc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-20-16-02-38.gh-issue-129064.JXasgJ.rst @@ -0,0 +1,2 @@ +Deprecate :func:`!sysconfig.expand_makefile_vars`, in favor of using +:func:`sysconfig.get_paths` with the ``vars`` argument. diff --git a/Misc/NEWS.d/next/Library/2025-01-20-20-59-26.gh-issue-92897.G0xH8o.rst b/Misc/NEWS.d/next/Library/2025-01-20-20-59-26.gh-issue-92897.G0xH8o.rst new file mode 100644 index 00000000000000..632ca03bbf8dd2 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-20-20-59-26.gh-issue-92897.G0xH8o.rst @@ -0,0 +1,2 @@ +Scheduled the deprecation of the ``check_home`` argument of +:func:`sysconfig.is_python_build` to Python 3.15. diff --git a/Misc/NEWS.d/next/Library/2025-01-22-16-54-25.gh-issue-129205.FMqrUt.rst b/Misc/NEWS.d/next/Library/2025-01-22-16-54-25.gh-issue-129205.FMqrUt.rst new file mode 100644 index 00000000000000..c4ed76408f32f6 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-22-16-54-25.gh-issue-129205.FMqrUt.rst @@ -0,0 +1 @@ +Add :func:`os.readinto` to read into a :ref:`buffer object ` from a file descriptor. diff --git a/Misc/NEWS.d/next/Library/2025-01-24-10-48-32.gh-issue-129195.89d5NU.rst b/Misc/NEWS.d/next/Library/2025-01-24-10-48-32.gh-issue-129195.89d5NU.rst new file mode 100644 index 00000000000000..daf7297387dabd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-01-24-10-48-32.gh-issue-129195.89d5NU.rst @@ -0,0 +1 @@ +Support reporting call graph information from :func:`!asyncio.staggered.staggered_race`. 
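For the new ``os.readinto`` entry above, a short sketch, assuming the function takes a file descriptor and a writable buffer and returns the number of bytes read; the path is only an example:

    import os

    buf = bytearray(16)
    fd = os.open("/etc/hostname", os.O_RDONLY)   # example path
    try:
        n = os.readinto(fd, buf)                 # fills buf in place
        print(n, bytes(buf[:n]))
    finally:
        os.close(fd)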
diff --git a/Misc/NEWS.d/next/Security/2024-08-06-11-43-08.gh-issue-80222.wfR4BU.rst b/Misc/NEWS.d/next/Security/2024-08-06-11-43-08.gh-issue-80222.wfR4BU.rst new file mode 100644 index 00000000000000..0f0661d0b1cf4a --- /dev/null +++ b/Misc/NEWS.d/next/Security/2024-08-06-11-43-08.gh-issue-80222.wfR4BU.rst @@ -0,0 +1,6 @@ +Fix bug in the folding of quoted strings when flattening an email message using +a modern email policy. Previously when a quoted string was folded so that +it spanned more than one line, the surrounding quotes and internal escapes +would be omitted. This could theoretically be used to spoof header lines +using a carefully constructed quoted string if the resulting rendered email +was transmitted or re-parsed. diff --git a/Misc/NEWS.d/next/Tools-Demos/2025-01-24-14-49-40.gh-issue-129248.JAapG2.rst b/Misc/NEWS.d/next/Tools-Demos/2025-01-24-14-49-40.gh-issue-129248.JAapG2.rst new file mode 100644 index 00000000000000..e3c781cf10cf50 --- /dev/null +++ b/Misc/NEWS.d/next/Tools-Demos/2025-01-24-14-49-40.gh-issue-129248.JAapG2.rst @@ -0,0 +1,2 @@ +The iOS test runner now strips the log prefix from each line output by the +test suite. diff --git a/Modules/Setup.stdlib.in b/Modules/Setup.stdlib.in index b7357f41768a2f..6b6a8ae57a5119 100644 --- a/Modules/Setup.stdlib.in +++ b/Modules/Setup.stdlib.in @@ -163,7 +163,7 @@ @MODULE__TESTBUFFER_TRUE@_testbuffer _testbuffer.c @MODULE__TESTINTERNALCAPI_TRUE@_testinternalcapi _testinternalcapi.c _testinternalcapi/test_lock.c _testinternalcapi/pytime.c _testinternalcapi/set.c _testinternalcapi/test_critical_sections.c @MODULE__TESTCAPI_TRUE@_testcapi _testcapimodule.c _testcapi/vectorcall.c _testcapi/heaptype.c _testcapi/abstract.c _testcapi/unicode.c _testcapi/dict.c _testcapi/set.c _testcapi/list.c _testcapi/tuple.c _testcapi/getargs.c _testcapi/datetime.c _testcapi/docstring.c _testcapi/mem.c _testcapi/watchers.c _testcapi/long.c _testcapi/float.c _testcapi/complex.c _testcapi/numbers.c _testcapi/structmember.c _testcapi/exceptions.c _testcapi/code.c _testcapi/buffer.c _testcapi/pyatomic.c _testcapi/run.c _testcapi/file.c _testcapi/codec.c _testcapi/immortal.c _testcapi/gc.c _testcapi/hash.c _testcapi/time.c _testcapi/bytes.c _testcapi/object.c _testcapi/monitoring.c _testcapi/config.c -@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c _testlimitedcapi/version.c +@MODULE__TESTLIMITEDCAPI_TRUE@_testlimitedcapi _testlimitedcapi.c _testlimitedcapi/abstract.c _testlimitedcapi/bytearray.c _testlimitedcapi/bytes.c _testlimitedcapi/codec.c _testlimitedcapi/complex.c _testlimitedcapi/dict.c _testlimitedcapi/eval.c _testlimitedcapi/float.c _testlimitedcapi/heaptype_relative.c _testlimitedcapi/import.c _testlimitedcapi/list.c _testlimitedcapi/long.c _testlimitedcapi/object.c _testlimitedcapi/pyos.c _testlimitedcapi/set.c _testlimitedcapi/sys.c _testlimitedcapi/tuple.c _testlimitedcapi/unicode.c _testlimitedcapi/vectorcall_limited.c _testlimitedcapi/version.c @MODULE__TESTCLINIC_TRUE@_testclinic _testclinic.c @MODULE__TESTCLINIC_LIMITED_TRUE@_testclinic_limited 
_testclinic_limited.c diff --git a/Modules/_asynciomodule.c b/Modules/_asynciomodule.c index 48f0ef95934fa4..d5d49658555f1a 100644 --- a/Modules/_asynciomodule.c +++ b/Modules/_asynciomodule.c @@ -9,6 +9,7 @@ #include "pycore_llist.h" // struct llist_node #include "pycore_modsupport.h" // _PyArg_CheckPositional() #include "pycore_moduleobject.h" // _PyModule_GetState() +#include "pycore_object.h" // _PyObject_SetMaybeWeakref #include "pycore_pyerrors.h" // _PyErr_ClearExcState() #include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing() #include "pycore_pystate.h" // _PyThreadState_GET() @@ -40,12 +41,17 @@ typedef enum { PyObject *prefix##_source_tb; \ PyObject *prefix##_cancel_msg; \ PyObject *prefix##_cancelled_exc; \ + PyObject *prefix##_awaited_by; \ fut_state prefix##_state; \ - /* These bitfields need to be at the end of the struct - so that these and bitfields from TaskObj are contiguous. + /* Used by profilers to make traversing the stack from an external \ + process faster. */ \ + char prefix##_is_task; \ + char prefix##_awaited_by_is_set; \ + /* These bitfields need to be at the end of the struct \ + so that these and bitfields from TaskObj are contiguous. \ */ \ unsigned prefix##_log_tb: 1; \ - unsigned prefix##_blocking: 1; + unsigned prefix##_blocking: 1; \ typedef struct { FutureObj_HEAD(fut) @@ -69,12 +75,24 @@ typedef struct { PyObject *sw_arg; } TaskStepMethWrapper; - #define Future_CheckExact(state, obj) Py_IS_TYPE(obj, state->FutureType) #define Task_CheckExact(state, obj) Py_IS_TYPE(obj, state->TaskType) -#define Future_Check(state, obj) PyObject_TypeCheck(obj, state->FutureType) -#define Task_Check(state, obj) PyObject_TypeCheck(obj, state->TaskType) +#define Future_Check(state, obj) \ + (Future_CheckExact(state, obj) \ + || PyObject_TypeCheck(obj, state->FutureType)) + +#define Task_Check(state, obj) \ + (Task_CheckExact(state, obj) \ + || PyObject_TypeCheck(obj, state->TaskType)) + +// This macro is optimized to quickly return for native Future *or* Task +// objects by inlining fast "exact" checks to be called first. 
+#define TaskOrFuture_Check(state, obj) \ + (Task_CheckExact(state, obj) \ + || Future_CheckExact(state, obj) \ + || PyObject_TypeCheck(obj, state->FutureType) \ + || PyObject_TypeCheck(obj, state->TaskType)) #ifdef Py_GIL_DISABLED # define ASYNCIO_STATE_LOCK(state) Py_BEGIN_CRITICAL_SECTION_MUT(&state->mutex) @@ -84,6 +102,37 @@ typedef struct { # define ASYNCIO_STATE_UNLOCK(state) ((void)state) #endif +typedef struct _Py_AsyncioModuleDebugOffsets { + struct _asyncio_task_object { + uint64_t size; + uint64_t task_name; + uint64_t task_awaited_by; + uint64_t task_is_task; + uint64_t task_awaited_by_is_set; + uint64_t task_coro; + } asyncio_task_object; + struct _asyncio_thread_state { + uint64_t size; + uint64_t asyncio_running_loop; + uint64_t asyncio_running_task; + } asyncio_thread_state; +} Py_AsyncioModuleDebugOffsets; + +GENERATE_DEBUG_SECTION(AsyncioDebug, Py_AsyncioModuleDebugOffsets AsyncioDebug) + = {.asyncio_task_object = { + .size = sizeof(TaskObj), + .task_name = offsetof(TaskObj, task_name), + .task_awaited_by = offsetof(TaskObj, task_awaited_by), + .task_is_task = offsetof(TaskObj, task_is_task), + .task_awaited_by_is_set = offsetof(TaskObj, task_awaited_by_is_set), + .task_coro = offsetof(TaskObj, task_coro), + }, + .asyncio_thread_state = { + .size = sizeof(_PyThreadStateImpl), + .asyncio_running_loop = offsetof(_PyThreadStateImpl, asyncio_running_loop), + .asyncio_running_task = offsetof(_PyThreadStateImpl, asyncio_running_task), + }}; + /* State of the _asyncio module */ typedef struct { #ifdef Py_GIL_DISABLED @@ -185,6 +234,22 @@ static PyObject * task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *result); +static void +clear_task_coro(TaskObj *task) +{ + Py_CLEAR(task->task_coro); +} + + +static void +set_task_coro(TaskObj *task, PyObject *coro) +{ + assert(coro != NULL); + Py_INCREF(coro); + Py_XSETREF(task->task_coro, coro); +} + + static int _is_coroutine(asyncio_state *state, PyObject *coro) { @@ -437,10 +502,13 @@ future_init(FutureObj *fut, PyObject *loop) Py_CLEAR(fut->fut_source_tb); Py_CLEAR(fut->fut_cancel_msg); Py_CLEAR(fut->fut_cancelled_exc); + Py_CLEAR(fut->fut_awaited_by); fut->fut_state = STATE_PENDING; fut->fut_log_tb = 0; fut->fut_blocking = 0; + fut->fut_awaited_by_is_set = 0; + fut->fut_is_task = 0; if (loop == Py_None) { asyncio_state *state = get_asyncio_state_by_def((PyObject *)fut); @@ -480,6 +548,115 @@ future_init(FutureObj *fut, PyObject *loop) return 0; } +static int +future_awaited_by_add(asyncio_state *state, PyObject *fut, PyObject *thing) +{ + if (!TaskOrFuture_Check(state, fut) || !TaskOrFuture_Check(state, thing)) { + // We only want to support native asyncio Futures. + // For further insight see the comment in the Python + // implementation of "future_add_to_awaited_by()". + return 0; + } + + FutureObj *_fut = (FutureObj *)fut; + + /* Most futures/task are only awaited by one entity, so we want + to avoid always creating a set for `fut_awaited_by`. 
+ */ + if (_fut->fut_awaited_by == NULL) { + assert(!_fut->fut_awaited_by_is_set); + Py_INCREF(thing); + _fut->fut_awaited_by = thing; + return 0; + } + + if (_fut->fut_awaited_by_is_set) { + assert(PySet_CheckExact(_fut->fut_awaited_by)); + return PySet_Add(_fut->fut_awaited_by, thing); + } + + PyObject *set = PySet_New(NULL); + if (set == NULL) { + return -1; + } + if (PySet_Add(set, thing)) { + Py_DECREF(set); + return -1; + } + if (PySet_Add(set, _fut->fut_awaited_by)) { + Py_DECREF(set); + return -1; + } + Py_SETREF(_fut->fut_awaited_by, set); + _fut->fut_awaited_by_is_set = 1; + return 0; +} + +static int +future_awaited_by_discard(asyncio_state *state, PyObject *fut, PyObject *thing) +{ + if (!TaskOrFuture_Check(state, fut) || !TaskOrFuture_Check(state, thing)) { + // We only want to support native asyncio Futures. + // For further insight see the comment in the Python + // implementation of "future_add_to_awaited_by()". + return 0; + } + + FutureObj *_fut = (FutureObj *)fut; + + /* Following the semantics of 'set.discard()' here in not + raising an error if `thing` isn't in the `awaited_by` "set". + */ + if (_fut->fut_awaited_by == NULL) { + return 0; + } + if (_fut->fut_awaited_by == thing) { + Py_CLEAR(_fut->fut_awaited_by); + return 0; + } + if (_fut->fut_awaited_by_is_set) { + assert(PySet_CheckExact(_fut->fut_awaited_by)); + int err = PySet_Discard(_fut->fut_awaited_by, thing); + if (err < 0) { + return -1; + } else { + return 0; + } + } + return 0; +} + +/*[clinic input] +@critical_section +@getter +_asyncio.Future._asyncio_awaited_by +[clinic start generated code]*/ + +static PyObject * +_asyncio_Future__asyncio_awaited_by_get_impl(FutureObj *self) +/*[clinic end generated code: output=932af76d385d2e2a input=64c1783df2d44d2b]*/ +{ + /* Implementation of a Python getter. */ + if (self->fut_awaited_by == NULL) { + Py_RETURN_NONE; + } + if (self->fut_awaited_by_is_set) { + /* Already a set, just wrap it into a frozen set and return. 
*/ + assert(PySet_CheckExact(self->fut_awaited_by)); + return PyFrozenSet_New(self->fut_awaited_by); + } + + PyObject *set = PyFrozenSet_New(NULL); + if (set == NULL) { + return NULL; + } + if (PySet_Add(set, self->fut_awaited_by)) { + Py_DECREF(set); + return NULL; + } + return set; +} + static PyObject * future_set_result(asyncio_state *state, FutureObj *fut, PyObject *res) { @@ -780,6 +957,8 @@ FutureObj_clear(FutureObj *fut) Py_CLEAR(fut->fut_source_tb); Py_CLEAR(fut->fut_cancel_msg); Py_CLEAR(fut->fut_cancelled_exc); + Py_CLEAR(fut->fut_awaited_by); + fut->fut_awaited_by_is_set = 0; PyObject_ClearManagedDict((PyObject *)fut); return 0; } @@ -798,6 +977,7 @@ FutureObj_traverse(FutureObj *fut, visitproc visit, void *arg) Py_VISIT(fut->fut_source_tb); Py_VISIT(fut->fut_cancel_msg); Py_VISIT(fut->fut_cancelled_exc); + Py_VISIT(fut->fut_awaited_by); PyObject_VisitManagedDict((PyObject *)fut, visit, arg); return 0; } @@ -1577,6 +1757,7 @@ static PyGetSetDef FutureType_getsetlist[] = { _ASYNCIO_FUTURE__LOG_TRACEBACK_GETSETDEF _ASYNCIO_FUTURE__SOURCE_TRACEBACK_GETSETDEF _ASYNCIO_FUTURE__CANCEL_MESSAGE_GETSETDEF + _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF {NULL} /* Sentinel */ }; @@ -1883,6 +2064,8 @@ static int task_call_step_soon(asyncio_state *state, TaskObj *, PyObject *); static PyObject * task_wakeup(TaskObj *, PyObject *); static PyObject * task_step(asyncio_state *, TaskObj *, PyObject *); static int task_eager_start(asyncio_state *state, TaskObj *task); +static inline void clear_ts_asyncio_running_task(PyObject *loop); +static inline void set_ts_asyncio_running_task(PyObject *loop, PyObject *task); /* ----- Task._step wrapper */ @@ -2053,7 +2236,10 @@ enter_task(asyncio_state *state, PyObject *loop, PyObject *task) Py_DECREF(item); return -1; } - Py_DECREF(item); + + assert(task == item); + Py_CLEAR(item); + set_ts_asyncio_running_task(loop, task); return 0; } @@ -2078,7 +2264,6 @@ leave_task_predicate(PyObject *item, void *task) static int leave_task(asyncio_state *state, PyObject *loop, PyObject *task) -/*[clinic end generated code: output=0ebf6db4b858fb41 input=51296a46313d1ad8]*/ { int res = _PyDict_DelItemIf(state->current_tasks, loop, leave_task_predicate, task); @@ -2086,6 +2271,7 @@ leave_task(asyncio_state *state, PyObject *loop, PyObject *task) // task was not found return err_leave_task(Py_None, task); } + clear_ts_asyncio_running_task(loop); return res; } @@ -2112,6 +2298,7 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) { PyObject *prev_task; + clear_ts_asyncio_running_task(loop); if (task == Py_None) { if (PyDict_Pop(state->current_tasks, loop, &prev_task) < 0) { return NULL; @@ -2131,9 +2318,63 @@ swap_current_task(asyncio_state *state, PyObject *loop, PyObject *task) Py_BEGIN_CRITICAL_SECTION(current_tasks); prev_task = swap_current_task_lock_held(current_tasks, loop, hash, task); Py_END_CRITICAL_SECTION(); + set_ts_asyncio_running_task(loop, task); return prev_task; } +static inline void +set_ts_asyncio_running_task(PyObject *loop, PyObject *task) +{ + // We want to enable debuggers and profilers to be able to quickly + // introspect the asyncio running state from another process. 
+ // When we do that, we need to essentially traverse the address space + // of a Python process and understand what every Python thread in it is + // currently doing, mainly: + // + // * current frame + // * current asyncio task + // + // A naive solution would be to require profilers and debuggers to + // find the current task in the "_asynciomodule" module state, but + // unfortunately that would require a lot of complicated remote + // memory reads and logic, as Python's dict is a notoriously complex + // and ever-changing data structure. + // + // So the easier solution is to put a strong reference to the currently + // running `asyncio.Task` on the current thread state (the current loop + // is also stored there.) + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + if (ts->asyncio_running_loop == loop) { + // Protect from a situation when someone calls this method + // from another thread. This shouldn't ever happen though, + // as `enter_task` and `leave_task` can either be called by: + // + // - `asyncio.Task` itself, in `Task.__step()`. That method + // can only be called by the event loop itself. + // + // - third-party Task "from scratch" implementations, that + // our `capture_call_graph` API doesn't support anyway. + // + // That said, we still want to make sure we don't end up in + // a broken state, so we check that we're in the correct thread + // by comparing the *loop* argument to the event loop running + // in the current thread. If they match we know we're in the + // right thread, as asyncio event loops don't change threads. + assert(ts->asyncio_running_task == NULL); + ts->asyncio_running_task = Py_NewRef(task); + } +} + +static inline void +clear_ts_asyncio_running_task(PyObject *loop) +{ + // See comment in set_ts_asyncio_running_task() for details. + _PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); + if (ts->asyncio_running_loop == NULL || ts->asyncio_running_loop == loop) { + Py_CLEAR(ts->asyncio_running_task); + } +} + /* ----- Task */ /*[clinic input] @@ -2158,6 +2399,7 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, if (future_init((FutureObj*)self, loop)) { return -1; } + self->task_is_task = 1; asyncio_state *state = get_asyncio_state_by_def((PyObject *)self); int is_coro = is_coroutine(state, coro); @@ -2185,8 +2427,7 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, self->task_must_cancel = 0; self->task_log_destroy_pending = 1; self->task_num_cancels_requested = 0; - Py_INCREF(coro); - Py_XSETREF(self->task_coro, coro); + set_task_coro(self, coro); if (name == Py_None) { // optimization: defer task name formatting @@ -2226,6 +2467,11 @@ _asyncio_Task___init___impl(TaskObj *self, PyObject *coro, PyObject *loop, if (task_call_step_soon(state, self, NULL)) { return -1; } +#ifdef Py_GIL_DISABLED + // This is required so that _Py_TryIncref(self) + // works correctly in non-owning threads. 
+ _PyObject_SetMaybeWeakref((PyObject *)self); +#endif register_task(state, self); return 0; } @@ -2234,8 +2480,8 @@ static int TaskObj_clear(TaskObj *task) { (void)FutureObj_clear((FutureObj*) task); + clear_task_coro(task); Py_CLEAR(task->task_context); - Py_CLEAR(task->task_coro); Py_CLEAR(task->task_name); Py_CLEAR(task->task_fut_waiter); return 0; @@ -2260,6 +2506,7 @@ TaskObj_traverse(TaskObj *task, visitproc visit, void *arg) Py_VISIT(fut->fut_source_tb); Py_VISIT(fut->fut_cancel_msg); Py_VISIT(fut->fut_cancelled_exc); + Py_VISIT(fut->fut_awaited_by); PyObject_VisitManagedDict((PyObject *)fut, visit, arg); return 0; } @@ -3050,6 +3297,10 @@ task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *resu goto yield_insteadof_yf; } + if (future_awaited_by_add(state, result, (PyObject *)task)) { + goto fail; + } + fut->fut_blocking = 0; /* result.add_done_callback(task._wakeup) */ @@ -3139,6 +3390,10 @@ task_step_handle_result_impl(asyncio_state *state, TaskObj *task, PyObject *resu goto yield_insteadof_yf; } + if (future_awaited_by_add(state, result, (PyObject *)task)) { + goto fail; + } + /* result._asyncio_future_blocking = False */ if (PyObject_SetAttr( result, &_Py_ID(_asyncio_future_blocking), Py_False) == -1) { @@ -3335,7 +3590,7 @@ task_eager_start(asyncio_state *state, TaskObj *task) register_task(state, task); } else { // This seems to really help performance on pyperformance benchmarks - Py_CLEAR(task->task_coro); + clear_task_coro(task); } return retval; @@ -3350,6 +3605,11 @@ task_wakeup_lock_held(TaskObj *task, PyObject *o) assert(o); asyncio_state *state = get_asyncio_state_by_def((PyObject *)task); + + if (future_awaited_by_discard(state, o, (PyObject *)task)) { + return NULL; + } + if (Future_CheckExact(state, o) || Task_CheckExact(state, o)) { PyObject *fut_result = NULL; int res; @@ -3833,6 +4093,50 @@ _asyncio_all_tasks_impl(PyObject *module, PyObject *loop) return res; } +/*[clinic input] +_asyncio.future_add_to_awaited_by + + fut: object + waiter: object + / + +Record that `fut` is awaited on by `waiter`. + +[clinic start generated code]*/ + +static PyObject * +_asyncio_future_add_to_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter) +/*[clinic end generated code: output=0ab9a1a63389e4df input=06e6eaac51f532b9]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (future_awaited_by_add(state, fut, waiter)) { + return NULL; + } + Py_RETURN_NONE; +} + +/*[clinic input] +_asyncio.future_discard_from_awaited_by + + fut: object + waiter: object + / + +[clinic start generated code]*/ + +static PyObject * +_asyncio_future_discard_from_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter) +/*[clinic end generated code: output=a03b0b4323b779de input=3833f7639e88e483]*/ +{ + asyncio_state *state = get_asyncio_state(module); + if (future_awaited_by_discard(state, fut, waiter)) { + return NULL; + } + Py_RETURN_NONE; +} + static int module_traverse(PyObject *mod, visitproc visit, void *arg) { @@ -3896,6 +4200,7 @@ module_clear(PyObject *mod) // those get cleared in PyThreadState_Clear. 
_PyThreadStateImpl *ts = (_PyThreadStateImpl *)_PyThreadState_GET(); Py_CLEAR(ts->asyncio_running_loop); + Py_CLEAR(ts->asyncio_running_task); return 0; } @@ -3926,7 +4231,6 @@ module_init(asyncio_state *state) goto fail; } - state->context_kwname = Py_BuildValue("(s)", "context"); if (state->context_kwname == NULL) { goto fail; @@ -4007,6 +4311,8 @@ static PyMethodDef asyncio_methods[] = { _ASYNCIO__LEAVE_TASK_METHODDEF _ASYNCIO__SWAP_CURRENT_TASK_METHODDEF _ASYNCIO_ALL_TASKS_METHODDEF + _ASYNCIO_FUTURE_ADD_TO_AWAITED_BY_METHODDEF + _ASYNCIO_FUTURE_DISCARD_FROM_AWAITED_BY_METHODDEF {NULL, NULL} }; diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c index c4d130a5ec1d52..d86adec0aeca58 100644 --- a/Modules/_ctypes/_ctypes.c +++ b/Modules/_ctypes/_ctypes.c @@ -146,9 +146,12 @@ typedef struct { PyObject *dict; } DictRemoverObject; +#define _DictRemoverObject_CAST(op) ((DictRemoverObject *)(op)) + static int -_DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg) +_DictRemover_traverse(PyObject *myself, visitproc visit, void *arg) { + DictRemoverObject *self = _DictRemoverObject_CAST(myself); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->key); Py_VISIT(self->dict); @@ -156,8 +159,9 @@ _DictRemover_traverse(DictRemoverObject *self, visitproc visit, void *arg) } static int -_DictRemover_clear(DictRemoverObject *self) +_DictRemover_clear(PyObject *myself) { + DictRemoverObject *self = _DictRemoverObject_CAST(myself); Py_CLEAR(self->key); Py_CLEAR(self->dict); return 0; @@ -167,9 +171,8 @@ static void _DictRemover_dealloc(PyObject *myself) { PyTypeObject *tp = Py_TYPE(myself); - DictRemoverObject *self = (DictRemoverObject *)myself; PyObject_GC_UnTrack(myself); - (void)_DictRemover_clear(self); + (void)_DictRemover_clear(myself); tp->tp_free(myself); Py_DECREF(tp); } @@ -177,7 +180,7 @@ _DictRemover_dealloc(PyObject *myself) static PyObject * _DictRemover_call(PyObject *myself, PyObject *args, PyObject *kw) { - DictRemoverObject *self = (DictRemoverObject *)myself; + DictRemoverObject *self = _DictRemoverObject_CAST(myself); if (self->key && self->dict) { if (-1 == PyDict_DelItem(self->dict, self->key)) { PyErr_FormatUnraisable("Exception ignored on calling _ctypes.DictRemover"); @@ -402,16 +405,19 @@ typedef struct { PyObject *keep; // If set, a reference to the original CDataObject. 
} StructParamObject; +#define _StructParamObject_CAST(op) ((StructParamObject *)(op)) + static int -StructParam_traverse(StructParamObject *self, visitproc visit, void *arg) +StructParam_traverse(PyObject *self, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(self)); return 0; } static int -StructParam_clear(StructParamObject *self) +StructParam_clear(PyObject *myself) { + StructParamObject *self = _StructParamObject_CAST(myself); Py_CLEAR(self->keep); return 0; } @@ -419,10 +425,10 @@ StructParam_clear(StructParamObject *self) static void StructParam_dealloc(PyObject *myself) { - StructParamObject *self = (StructParamObject *)myself; + StructParamObject *self = _StructParamObject_CAST(myself); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(myself); - (void)StructParam_clear(self); + (void)StructParam_clear(myself); PyMem_Free(self->ptr); tp->tp_free(myself); Py_DECREF(tp); @@ -675,7 +681,7 @@ StructUnionType_init(PyObject *self, PyObject *args, PyObject *kwds, int isStruc info->paramfunc = StructUnionType_paramfunc; - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_fields_), &fields) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_fields_), &fields) < 0) { Py_DECREF(attrdict); return -1; } @@ -1411,11 +1417,12 @@ static PyType_Spec pycpointer_type_spec = { */ static int -CharArray_set_raw(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +CharArray_set_raw(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { char *ptr; Py_ssize_t size; Py_buffer view; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); @@ -1441,9 +1448,10 @@ CharArray_set_raw(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) } static PyObject * -CharArray_get_raw(CDataObject *self, void *Py_UNUSED(ignored)) +CharArray_get_raw(PyObject *op, void *Py_UNUSED(ignored)) { PyObject *res; + CDataObject *self = _CDataObject_CAST(op); LOCK_PTR(self); res = PyBytes_FromStringAndSize(self->b_ptr, self->b_size); UNLOCK_PTR(self); @@ -1451,10 +1459,11 @@ CharArray_get_raw(CDataObject *self, void *Py_UNUSED(ignored)) } static PyObject * -CharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored)) +CharArray_get_value(PyObject *op, void *Py_UNUSED(ignored)) { Py_ssize_t i; PyObject *res; + CDataObject *self = _CDataObject_CAST(op); LOCK_PTR(self); char *ptr = self->b_ptr; for (i = 0; i < self->b_size; ++i) @@ -1466,10 +1475,11 @@ CharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored)) } static int -CharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +CharArray_set_value(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { const char *ptr; Py_ssize_t size; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -1504,18 +1514,17 @@ CharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored) } static PyGetSetDef CharArray_getsets[] = { - { "raw", (getter)CharArray_get_raw, (setter)CharArray_set_raw, - "value", NULL }, - { "value", (getter)CharArray_get_value, (setter)CharArray_set_value, - "string value"}, + { "raw", CharArray_get_raw, CharArray_set_raw, "value", NULL }, + { "value", CharArray_get_value, CharArray_set_value, "string value" }, { NULL, NULL } }; static PyObject * -WCharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored)) +WCharArray_get_value(PyObject *op, void *Py_UNUSED(ignored)) { Py_ssize_t i; PyObject *res; + CDataObject *self = _CDataObject_CAST(op); wchar_t *ptr = (wchar_t 
*)self->b_ptr; LOCK_PTR(self); for (i = 0; i < self->b_size/(Py_ssize_t)sizeof(wchar_t); ++i) @@ -1527,8 +1536,10 @@ WCharArray_get_value(CDataObject *self, void *Py_UNUSED(ignored)) } static int -WCharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +WCharArray_set_value(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { + CDataObject *self = _CDataObject_CAST(op); + if (value == NULL) { PyErr_SetString(PyExc_TypeError, "can't delete attribute"); @@ -1561,8 +1572,7 @@ WCharArray_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored } static PyGetSetDef WCharArray_getsets[] = { - { "value", (getter)WCharArray_get_value, (setter)WCharArray_set_value, - "string value"}, + { "value", WCharArray_get_value, WCharArray_set_value, "string value" }, { NULL, NULL } }; @@ -1776,11 +1786,6 @@ class _ctypes.c_void_p "PyObject *" "clinic_state_sub()->PyCSimpleType_Type" [clinic start generated code]*/ /*[clinic end generated code: output=da39a3ee5e6b4b0d input=dd4d9646c56f43a9]*/ -#if defined(Py_HAVE_C_COMPLEX) && defined(Py_FFI_SUPPORT_C_COMPLEX) -static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdCEFfuzZqQPXOv?g"; -#else -static const char SIMPLE_TYPE_CHARS[] = "cbBhHiIlLdfuzZqQPXOv?g"; -#endif /*[clinic input] _ctypes.c_wchar_p.from_param as c_wchar_p_from_param @@ -2252,17 +2257,13 @@ PyCSimpleType_init(PyObject *self, PyObject *args, PyObject *kwds) "which must be a string of length 1"); goto error; } - if (!strchr(SIMPLE_TYPE_CHARS, *proto_str)) { + fmt = _ctypes_get_fielddesc(proto_str); + if (!fmt) { PyErr_Format(PyExc_AttributeError, "class must define a '_type_' attribute which must be\n" - "a single character string containing one of '%s'.", - SIMPLE_TYPE_CHARS); - goto error; - } - fmt = _ctypes_get_fielddesc(proto_str); - if (fmt == NULL) { - PyErr_Format(PyExc_ValueError, - "_type_ '%s' not supported", proto_str); + "a single character string containing one of the\n" + "supported types: '%s'.", + _ctypes_get_simple_type_chars()); goto error; } @@ -2636,7 +2637,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) stginfo->getfunc = NULL; stginfo->ffi_type_pointer = ffi_type_pointer; - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_flags_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_flags_), &ob) < 0) { return -1; } if (!ob || !PyLong_Check(ob)) { @@ -2649,7 +2650,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) Py_DECREF(ob); /* _argtypes_ is optional... */ - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_argtypes_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_argtypes_), &ob) < 0) { return -1; } if (ob) { @@ -2662,7 +2663,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) stginfo->converters = converters; } - if (PyDict_GetItemRef((PyObject *)attrdict, &_Py_ID(_restype_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py_ID(_restype_), &ob) < 0) { return -1; } if (ob) { @@ -2684,7 +2685,7 @@ make_funcptrtype_dict(ctypes_state *st, PyObject *attrdict, StgInfo *stginfo) } } /* XXX later, maybe. 
- if (PyDict_GetItemRef((PyObject *)attrdict, &_Py _ID(_errcheck_), &ob) < 0) { + if (PyDict_GetItemRef(attrdict, &_Py _ID(_errcheck_), &ob) < 0) { return -1; } if (ob) { @@ -2886,8 +2887,9 @@ class _ctypes.PyCData "PyObject *" "clinic_state()->PyCData_Type" static int -PyCData_traverse(CDataObject *self, visitproc visit, void *arg) +PyCData_traverse(PyObject *op, visitproc visit, void *arg) { + CDataObject *self = _CDataObject_CAST(op); Py_VISIT(self->b_objects); Py_VISIT((PyObject *)self->b_base); PyTypeObject *type = Py_TYPE(self); @@ -2896,8 +2898,9 @@ PyCData_traverse(CDataObject *self, visitproc visit, void *arg) } static int -PyCData_clear(CDataObject *self) +PyCData_clear(PyObject *op) { + CDataObject *self = _CDataObject_CAST(op); Py_CLEAR(self->b_objects); if ((self->b_needsfree) && _CDataObject_HasExternalBuffer(self)) @@ -2912,7 +2915,7 @@ PyCData_dealloc(PyObject *self) { PyTypeObject *type = Py_TYPE(self); PyObject_GC_UnTrack(self); - PyCData_clear((CDataObject *)self); + (void)PyCData_clear(self); type->tp_free(self); Py_DECREF(type); } @@ -2955,7 +2958,7 @@ PyCData_item_type(ctypes_state *st, PyObject *type) static int PyCData_NewGetBuffer(PyObject *myself, Py_buffer *view, int flags) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *info; @@ -3014,7 +3017,7 @@ static PyObject * PyCData_reduce_impl(PyObject *myself, PyTypeObject *cls) /*[clinic end generated code: output=1a025ccfdd8c935d input=34097a5226ea63c1]*/ { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); ctypes_state *st = get_module_state_by_class(cls); StgInfo *info; @@ -3047,7 +3050,7 @@ PyCData_setstate(PyObject *myself, PyObject *args) Py_ssize_t len; int res; PyObject *dict, *mydict; - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (!PyArg_ParseTuple(args, "O!s#", &PyDict_Type, &dict, &data, &len)) { @@ -3247,10 +3250,7 @@ PyObject * PyCData_get(ctypes_state *st, PyObject *type, GETFUNC getfunc, PyObject *src, Py_ssize_t index, Py_ssize_t size, char *adr) { -#ifdef Py_GIL_DISABLED - // This isn't used if the GIL is enabled, so it causes a compiler warning. 
- CDataObject *cdata = (CDataObject *)src; -#endif + CDataObject *cdata = _CDataObject_CAST(src); if (getfunc) { PyObject *res; LOCK_PTR(cdata); @@ -4403,7 +4403,7 @@ _build_result(PyObject *result, PyObject *callargs, } static PyObject * -PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) +PyCFuncPtr_call(PyObject *op, PyObject *inargs, PyObject *kwds) { PyObject *restype; PyObject *converters; @@ -4416,6 +4416,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) IUnknown *piunk = NULL; #endif void *pProc = NULL; + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); int inoutmask; int outmask; @@ -4423,7 +4424,7 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *info; - if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + if (PyStgInfo_FromObject(st, op, &info) < 0) { return NULL; } assert(info); /* Cannot be NULL for PyCFuncPtrObject instances */ @@ -4541,8 +4542,9 @@ PyCFuncPtr_call(PyCFuncPtrObject *self, PyObject *inargs, PyObject *kwds) } static int -PyCFuncPtr_traverse(PyCFuncPtrObject *self, visitproc visit, void *arg) +PyCFuncPtr_traverse(PyObject *op, visitproc visit, void *arg) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); Py_VISIT(self->callable); Py_VISIT(self->restype); Py_VISIT(self->checker); @@ -4551,12 +4553,13 @@ PyCFuncPtr_traverse(PyCFuncPtrObject *self, visitproc visit, void *arg) Py_VISIT(self->converters); Py_VISIT(self->paramflags); Py_VISIT(self->thunk); - return PyCData_traverse((CDataObject *)self, visit, arg); + return PyCData_traverse(op, visit, arg); } static int -PyCFuncPtr_clear(PyCFuncPtrObject *self) +PyCFuncPtr_clear(PyObject *op) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); Py_CLEAR(self->callable); Py_CLEAR(self->restype); Py_CLEAR(self->checker); @@ -4565,22 +4568,23 @@ PyCFuncPtr_clear(PyCFuncPtrObject *self) Py_CLEAR(self->converters); Py_CLEAR(self->paramflags); Py_CLEAR(self->thunk); - return PyCData_clear((CDataObject *)self); + return PyCData_clear(op); } static void -PyCFuncPtr_dealloc(PyCFuncPtrObject *self) +PyCFuncPtr_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); - PyCFuncPtr_clear(self); + (void)PyCFuncPtr_clear(self); PyTypeObject *type = Py_TYPE(self); - type->tp_free((PyObject *)self); + type->tp_free(self); Py_DECREF(type); } static PyObject * -PyCFuncPtr_repr(PyCFuncPtrObject *self) +PyCFuncPtr_repr(PyObject *op) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); #ifdef MS_WIN32 if (self->index) return PyUnicode_FromFormat("", @@ -4594,8 +4598,9 @@ PyCFuncPtr_repr(PyCFuncPtrObject *self) } static int -PyCFuncPtr_bool(PyCFuncPtrObject *self) +PyCFuncPtr_bool(PyObject *op) { + PyCFuncPtrObject *self = _PyCFuncPtrObject_CAST(op); return ((*(void **)self->b_ptr != NULL) #ifdef MS_WIN32 || (self->index != 0) @@ -4783,7 +4788,7 @@ static PyType_Spec pycunion_spec = { PyCArray_Type */ static int -Array_init(CDataObject *self, PyObject *args, PyObject *kw) +Array_init(PyObject *self, PyObject *args, PyObject *kw) { Py_ssize_t i; Py_ssize_t n; @@ -4797,7 +4802,7 @@ Array_init(CDataObject *self, PyObject *args, PyObject *kw) for (i = 0; i < n; ++i) { PyObject *v; v = PyTuple_GET_ITEM(args, i); - if (-1 == PySequence_SetItem((PyObject *)self, i, v)) + if (-1 == PySequence_SetItem(self, i, v)) return -1; } return 0; @@ -4806,7 +4811,7 @@ Array_init(CDataObject *self, PyObject *args, PyObject *kw) static PyObject * Array_item(PyObject *myself, 
Py_ssize_t index) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t offset, size; if (index < 0 || index >= self->b_length) { @@ -4817,7 +4822,7 @@ Array_item(PyObject *myself, Py_ssize_t index) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } @@ -4827,14 +4832,14 @@ Array_item(PyObject *myself, Py_ssize_t index) size = stginfo->size / stginfo->length; offset = index * size; - return PyCData_get(st, stginfo->proto, stginfo->getfunc, (PyObject *)self, - index, size, self->b_ptr + offset); + return PyCData_get(st, stginfo->proto, stginfo->getfunc, myself, + index, size, self->b_ptr + offset); } static PyObject * Array_subscript(PyObject *myself, PyObject *item) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (PyIndex_Check(item)) { Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError); @@ -4858,7 +4863,7 @@ Array_subscript(PyObject *myself, PyObject *item) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for array object instances */ @@ -4959,7 +4964,7 @@ Array_subscript(PyObject *myself, PyObject *item) static int Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t size, offset; char *ptr; @@ -4971,7 +4976,7 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return -1; } assert(stginfo); /* Cannot be NULL for array object instances */ @@ -4985,14 +4990,14 @@ Array_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) offset = index * size; ptr = self->b_ptr + offset; - return PyCData_set(st, (PyObject *)self, stginfo->proto, stginfo->setfunc, value, - index, size, ptr); + return PyCData_set(st, myself, stginfo->proto, stginfo->setfunc, value, + index, size, ptr); } static int Array_ass_subscript(PyObject *myself, PyObject *item, PyObject *value) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -5049,7 +5054,7 @@ Array_ass_subscript(PyObject *myself, PyObject *item, PyObject *value) static Py_ssize_t Array_length(PyObject *myself) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); return self->b_length; } @@ -5166,9 +5171,10 @@ class _ctypes.Simple "PyObject *" "clinic_state()->Simple_Type" /*[clinic end generated code: output=da39a3ee5e6b4b0d input=016c476c7aa8b8a8]*/ static int -Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) +Simple_set_value(PyObject *op, PyObject *value, void *Py_UNUSED(ignored)) { PyObject *result; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -5178,7 +5184,7 @@ Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) ctypes_state *st = 
get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *info; - if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + if (PyStgInfo_FromObject(st, op, &info) < 0) { return -1; } assert(info); /* Cannot be NULL for CDataObject instances */ @@ -5195,7 +5201,7 @@ Simple_set_value(CDataObject *self, PyObject *value, void *Py_UNUSED(ignored)) } static int -Simple_init(CDataObject *self, PyObject *args, PyObject *kw) +Simple_init(PyObject *self, PyObject *args, PyObject *kw) { PyObject *value = NULL; if (!PyArg_UnpackTuple(args, "__init__", 0, 1, &value)) @@ -5206,11 +5212,12 @@ Simple_init(CDataObject *self, PyObject *args, PyObject *kw) } static PyObject * -Simple_get_value(CDataObject *self, void *Py_UNUSED(ignored)) +Simple_get_value(PyObject *op, void *Py_UNUSED(ignored)) { + CDataObject *self = _CDataObject_CAST(op); ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *info; - if (PyStgInfo_FromObject(st, (PyObject *)self, &info) < 0) { + if (PyStgInfo_FromObject(st, op, &info) < 0) { return NULL; } assert(info); /* Cannot be NULL for CDataObject instances */ @@ -5223,7 +5230,7 @@ Simple_get_value(CDataObject *self, void *Py_UNUSED(ignored)) } static PyGetSetDef Simple_getsets[] = { - { "value", (getter)Simple_get_value, (setter)Simple_set_value, + { "value", Simple_get_value, Simple_set_value, "current value", NULL }, { NULL, NULL } }; @@ -5245,7 +5252,7 @@ Simple_from_outparm_impl(PyObject *self, PyTypeObject *cls) return Py_NewRef(self); } /* call stginfo->getfunc */ - return Simple_get_value((CDataObject *)self, NULL); + return Simple_get_value(self, NULL); } static PyMethodDef Simple_methods[] = { @@ -5253,9 +5260,11 @@ static PyMethodDef Simple_methods[] = { { NULL, NULL }, }; -static int Simple_bool(CDataObject *self) +static int +Simple_bool(PyObject *op) { int cmp; + CDataObject *self = _CDataObject_CAST(op); LOCK_PTR(self); cmp = memcmp(self->b_ptr, "\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", self->b_size); UNLOCK_PTR(self); @@ -5264,7 +5273,7 @@ static int Simple_bool(CDataObject *self) /* "%s(%s)" % (self.__class__.__name__, self.value) */ static PyObject * -Simple_repr(CDataObject *self) +Simple_repr(PyObject *self) { PyObject *val, *result; ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); @@ -5311,7 +5320,7 @@ static PyType_Spec pycsimple_spec = { static PyObject * Pointer_item(PyObject *myself, Py_ssize_t index) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t size; Py_ssize_t offset; PyObject *proto; @@ -5325,7 +5334,7 @@ Pointer_item(PyObject *myself, Py_ssize_t index) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for pointer object instances */ @@ -5343,14 +5352,14 @@ Pointer_item(PyObject *myself, Py_ssize_t index) size = iteminfo->size; offset = index * iteminfo->size; - return PyCData_get(st, proto, stginfo->getfunc, (PyObject *)self, + return PyCData_get(st, proto, stginfo->getfunc, myself, index, size, (char *)((char *)deref + offset)); } static int Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); Py_ssize_t size; Py_ssize_t offset; PyObject *proto; @@ -5370,7 +5379,7 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) 
ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return -1; } assert(stginfo); /* Cannot be NULL for pointer instances */ @@ -5388,14 +5397,14 @@ Pointer_ass_item(PyObject *myself, Py_ssize_t index, PyObject *value) size = iteminfo->size; offset = index * iteminfo->size; - return PyCData_set(st, (PyObject *)self, proto, stginfo->setfunc, value, + return PyCData_set(st, myself, proto, stginfo->setfunc, value, index, size, ((char *)deref + offset)); } static PyObject * -Pointer_get_contents(CDataObject *self, void *closure) +Pointer_get_contents(PyObject *self, void *closure) { - void *deref = locked_deref(self); + void *deref = locked_deref(_CDataObject_CAST(self)); if (deref == NULL) { PyErr_SetString(PyExc_ValueError, "NULL pointer access"); @@ -5404,21 +5413,20 @@ Pointer_get_contents(CDataObject *self, void *closure) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, self, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for pointer instances */ - return PyCData_FromBaseObj(st, stginfo->proto, - (PyObject *)self, 0, - deref); + return PyCData_FromBaseObj(st, stginfo->proto, self, 0, deref); } static int -Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) +Pointer_set_contents(PyObject *op, PyObject *value, void *closure) { CDataObject *dst; PyObject *keep; + CDataObject *self = _CDataObject_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -5427,7 +5435,7 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) } ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, op, &stginfo) < 0) { return -1; } assert(stginfo); /* Cannot be NULL for pointer instances */ @@ -5466,17 +5474,15 @@ Pointer_set_contents(CDataObject *self, PyObject *value, void *closure) } static PyGetSetDef Pointer_getsets[] = { - { "contents", (getter)Pointer_get_contents, - (setter)Pointer_set_contents, + { "contents", Pointer_get_contents, Pointer_set_contents, "the object this pointer points to (read-write)", NULL }, { NULL, NULL } }; static int -Pointer_init(CDataObject *self, PyObject *args, PyObject *kw) +Pointer_init(PyObject *self, PyObject *args, PyObject *kw) { PyObject *value = NULL; - if (!PyArg_UnpackTuple(args, "POINTER", 0, 1, &value)) return -1; if (value == NULL) @@ -5503,7 +5509,7 @@ Pointer_new(PyTypeObject *type, PyObject *args, PyObject *kw) static PyObject * Pointer_subscript(PyObject *myself, PyObject *item) { - CDataObject *self = (CDataObject *)myself; + CDataObject *self = _CDataObject_CAST(myself); if (PyIndex_Check(item)) { Py_ssize_t i = PyNumber_AsSsize_t(item, PyExc_IndexError); if (i == -1 && PyErr_Occurred()) @@ -5569,7 +5575,7 @@ Pointer_subscript(PyObject *myself, PyObject *item) ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(myself))); StgInfo *stginfo; - if (PyStgInfo_FromObject(st, (PyObject *)self, &stginfo) < 0) { + if (PyStgInfo_FromObject(st, myself, &stginfo) < 0) { return NULL; } assert(stginfo); /* Cannot be NULL for pointer instances */ @@ -5651,13 +5657,13 @@ Pointer_subscript(PyObject *myself, PyObject *item) } static int -Pointer_bool(CDataObject *self) 
+Pointer_bool(PyObject *self) { - return locked_deref(self) != NULL; + return locked_deref(_CDataObject_CAST(self)) != NULL; } static PyType_Slot pycpointer_slots[] = { - {Py_tp_doc, PyDoc_STR("XXX to be provided")}, + {Py_tp_doc, (void *)PyDoc_STR("XXX to be provided")}, {Py_tp_getset, Pointer_getsets}, {Py_tp_init, Pointer_init}, {Py_tp_new, Pointer_new}, diff --git a/Modules/_ctypes/_ctypes_test_generated.c.h b/Modules/_ctypes/_ctypes_test_generated.c.h index 46a3e4b01e2259..d70b33eaa8b515 100644 --- a/Modules/_ctypes/_ctypes_test_generated.c.h +++ b/Modules/_ctypes/_ctypes_test_generated.c.h @@ -56,7 +56,8 @@ struct SingleInt { int a; }; - struct SingleInt value = {0}; + struct SingleInt value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SingleInt")); APPEND(PyLong_FromLong(sizeof(struct SingleInt))); APPEND(PyLong_FromLong(_Alignof(struct SingleInt))); @@ -69,7 +70,8 @@ union SingleInt_Union { int a; }; - union SingleInt_Union value = {0}; + union SingleInt_Union value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SingleInt_Union")); APPEND(PyLong_FromLong(sizeof(union SingleInt_Union))); APPEND(PyLong_FromLong(_Alignof(union SingleInt_Union))); @@ -82,7 +84,8 @@ struct SingleU32 { uint32_t a; }; - struct SingleU32 value = {0}; + struct SingleU32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SingleU32")); APPEND(PyLong_FromLong(sizeof(struct SingleU32))); APPEND(PyLong_FromLong(_Alignof(struct SingleU32))); @@ -97,7 +100,8 @@ int8_t y; uint16_t z; }; - struct SimpleStruct value = {0}; + struct SimpleStruct value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SimpleStruct")); APPEND(PyLong_FromLong(sizeof(struct SimpleStruct))); APPEND(PyLong_FromLong(_Alignof(struct SimpleStruct))); @@ -114,7 +118,8 @@ int8_t y; uint16_t z; }; - union SimpleUnion value = {0}; + union SimpleUnion value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("SimpleUnion")); APPEND(PyLong_FromLong(sizeof(union SimpleUnion))); APPEND(PyLong_FromLong(_Alignof(union SimpleUnion))); @@ -136,7 +141,8 @@ int64_t i64; uint64_t u64; }; - struct ManyTypes value = {0}; + struct ManyTypes value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("ManyTypes")); APPEND(PyLong_FromLong(sizeof(struct ManyTypes))); APPEND(PyLong_FromLong(_Alignof(struct ManyTypes))); @@ -163,7 +169,8 @@ int64_t i64; uint64_t u64; }; - union ManyTypesU value = {0}; + union ManyTypesU value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("ManyTypesU")); APPEND(PyLong_FromLong(sizeof(union ManyTypesU))); APPEND(PyLong_FromLong(_Alignof(union ManyTypesU))); @@ -197,7 +204,8 @@ uint16_t z; }; }; - struct Nested value = {0}; + struct Nested value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Nested")); APPEND(PyLong_FromLong(sizeof(struct Nested))); APPEND(PyLong_FromLong(_Alignof(struct Nested))); @@ -223,7 +231,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed1 value = {0}; + struct Packed1 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed1")); APPEND(PyLong_FromLong(sizeof(struct Packed1))); APPEND(PyLong_FromLong(_Alignof(struct Packed1))); @@ -247,7 +256,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed2 value = {0}; + struct Packed2 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed2")); APPEND(PyLong_FromLong(sizeof(struct Packed2))); APPEND(PyLong_FromLong(_Alignof(struct Packed2))); @@ -271,7 +281,8 @@ int64_t b; }; #pragma 
pack(pop) - struct Packed3 value = {0}; + struct Packed3 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed3")); APPEND(PyLong_FromLong(sizeof(struct Packed3))); APPEND(PyLong_FromLong(_Alignof(struct Packed3))); @@ -295,7 +306,8 @@ int64_t b; }; #pragma pack(pop) - struct Packed4 value = {0}; + struct Packed4 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Packed4")); APPEND(PyLong_FromLong(sizeof(struct Packed4))); APPEND(PyLong_FromLong(_Alignof(struct Packed4))); @@ -316,7 +328,8 @@ int64_t b; int32_t c; }; - struct X86_32EdgeCase value = {0}; + struct X86_32EdgeCase value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("X86_32EdgeCase")); APPEND(PyLong_FromLong(sizeof(struct X86_32EdgeCase))); APPEND(PyLong_FromLong(_Alignof(struct X86_32EdgeCase))); @@ -333,7 +346,8 @@ unsigned int b :5; unsigned int c :7; }; - struct MSBitFieldExample value = {0}; + struct MSBitFieldExample value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("MSBitFieldExample")); APPEND(PyLong_FromLong(sizeof(struct MSBitFieldExample))); APPEND(PyLong_FromLong(_Alignof(struct MSBitFieldExample))); @@ -351,7 +365,8 @@ unsigned int may_straddle :30; unsigned int last :18; }; - struct MSStraddlingExample value = {0}; + struct MSStraddlingExample value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("MSStraddlingExample")); APPEND(PyLong_FromLong(sizeof(struct MSStraddlingExample))); APPEND(PyLong_FromLong(_Alignof(struct MSStraddlingExample))); @@ -375,7 +390,8 @@ int H :8; int I :9; }; - struct IntBits value = {0}; + struct IntBits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("IntBits")); APPEND(PyLong_FromLong(sizeof(struct IntBits))); APPEND(PyLong_FromLong(_Alignof(struct IntBits))); @@ -413,7 +429,8 @@ short R :6; short S :7; }; - struct Bits value = {0}; + struct Bits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Bits")); APPEND(PyLong_FromLong(sizeof(struct Bits))); APPEND(PyLong_FromLong(_Alignof(struct Bits))); @@ -456,7 +473,8 @@ int H :8; int I :9; }; - struct IntBits_MSVC value = {0}; + struct IntBits_MSVC value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("IntBits_MSVC")); APPEND(PyLong_FromLong(sizeof(struct IntBits_MSVC))); APPEND(PyLong_FromLong(_Alignof(struct IntBits_MSVC))); @@ -499,7 +517,8 @@ short R :6; short S :7; }; - struct Bits_MSVC value = {0}; + struct Bits_MSVC value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Bits_MSVC")); APPEND(PyLong_FromLong(sizeof(struct Bits_MSVC))); APPEND(PyLong_FromLong(_Alignof(struct Bits_MSVC))); @@ -536,7 +555,8 @@ int64_t b :62; int64_t c :1; }; - struct I64Bits value = {0}; + struct I64Bits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("I64Bits")); APPEND(PyLong_FromLong(sizeof(struct I64Bits))); APPEND(PyLong_FromLong(_Alignof(struct I64Bits))); @@ -560,7 +580,8 @@ uint64_t b :62; uint64_t c :1; }; - struct U64Bits value = {0}; + struct U64Bits value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("U64Bits")); APPEND(PyLong_FromLong(sizeof(struct U64Bits))); APPEND(PyLong_FromLong(_Alignof(struct U64Bits))); @@ -584,7 +605,8 @@ int8_t b :3; int8_t c :1; }; - struct Struct331_8 value = {0}; + struct Struct331_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_8")); APPEND(PyLong_FromLong(sizeof(struct Struct331_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_8))); @@ -608,7 +630,8 
@@ int8_t b :6; int8_t c :1; }; - struct Struct1x1_8 value = {0}; + struct Struct1x1_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_8")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_8))); @@ -633,7 +656,8 @@ int8_t b :6; int8_t c :1; }; - struct Struct1nx1_8 value = {0}; + struct Struct1nx1_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_8")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_8))); @@ -658,7 +682,8 @@ int8_t b :6; int8_t c :6; }; - struct Struct3xx_8 value = {0}; + struct Struct3xx_8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_8")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_8))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_8))); @@ -682,7 +707,8 @@ uint8_t b :3; uint8_t c :1; }; - struct Struct331_u8 value = {0}; + struct Struct331_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u8))); @@ -706,7 +732,8 @@ uint8_t b :6; uint8_t c :1; }; - struct Struct1x1_u8 value = {0}; + struct Struct1x1_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u8))); @@ -731,7 +758,8 @@ uint8_t b :6; uint8_t c :1; }; - struct Struct1nx1_u8 value = {0}; + struct Struct1nx1_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u8))); @@ -756,7 +784,8 @@ uint8_t b :6; uint8_t c :6; }; - struct Struct3xx_u8 value = {0}; + struct Struct3xx_u8 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u8")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u8))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u8))); @@ -780,7 +809,8 @@ int16_t b :3; int16_t c :1; }; - struct Struct331_16 value = {0}; + struct Struct331_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_16")); APPEND(PyLong_FromLong(sizeof(struct Struct331_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_16))); @@ -804,7 +834,8 @@ int16_t b :14; int16_t c :1; }; - struct Struct1x1_16 value = {0}; + struct Struct1x1_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_16")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_16))); @@ -829,7 +860,8 @@ int16_t b :14; int16_t c :1; }; - struct Struct1nx1_16 value = {0}; + struct Struct1nx1_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_16")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_16))); @@ -854,7 +886,8 @@ int16_t b :14; int16_t c :14; }; - struct Struct3xx_16 value = {0}; + struct Struct3xx_16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_16")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_16))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_16))); @@ -878,7 +911,8 @@ uint16_t b :3; uint16_t c :1; }; - struct Struct331_u16 value = {0}; + struct Struct331_u16 value; + memset(&value, 0, sizeof(value)); 
APPEND(PyUnicode_FromString("Struct331_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u16))); @@ -902,7 +936,8 @@ uint16_t b :14; uint16_t c :1; }; - struct Struct1x1_u16 value = {0}; + struct Struct1x1_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u16))); @@ -927,7 +962,8 @@ uint16_t b :14; uint16_t c :1; }; - struct Struct1nx1_u16 value = {0}; + struct Struct1nx1_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u16))); @@ -952,7 +988,8 @@ uint16_t b :14; uint16_t c :14; }; - struct Struct3xx_u16 value = {0}; + struct Struct3xx_u16 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u16")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u16))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u16))); @@ -976,7 +1013,8 @@ int32_t b :3; int32_t c :1; }; - struct Struct331_32 value = {0}; + struct Struct331_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_32")); APPEND(PyLong_FromLong(sizeof(struct Struct331_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_32))); @@ -1000,7 +1038,8 @@ int32_t b :30; int32_t c :1; }; - struct Struct1x1_32 value = {0}; + struct Struct1x1_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_32")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_32))); @@ -1025,7 +1064,8 @@ int32_t b :30; int32_t c :1; }; - struct Struct1nx1_32 value = {0}; + struct Struct1nx1_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_32")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_32))); @@ -1050,7 +1090,8 @@ int32_t b :30; int32_t c :30; }; - struct Struct3xx_32 value = {0}; + struct Struct3xx_32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_32")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_32))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_32))); @@ -1074,7 +1115,8 @@ uint32_t b :3; uint32_t c :1; }; - struct Struct331_u32 value = {0}; + struct Struct331_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u32))); @@ -1098,7 +1140,8 @@ uint32_t b :30; uint32_t c :1; }; - struct Struct1x1_u32 value = {0}; + struct Struct1x1_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u32))); @@ -1123,7 +1166,8 @@ uint32_t b :30; uint32_t c :1; }; - struct Struct1nx1_u32 value = {0}; + struct Struct1nx1_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u32")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u32))); @@ -1148,7 +1192,8 @@ uint32_t b :30; uint32_t c :30; }; - struct Struct3xx_u32 value = {0}; + struct Struct3xx_u32 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u32")); APPEND(PyLong_FromLong(sizeof(struct 
Struct3xx_u32))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u32))); @@ -1172,7 +1217,8 @@ int64_t b :3; int64_t c :1; }; - struct Struct331_64 value = {0}; + struct Struct331_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_64")); APPEND(PyLong_FromLong(sizeof(struct Struct331_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_64))); @@ -1196,7 +1242,8 @@ int64_t b :62; int64_t c :1; }; - struct Struct1x1_64 value = {0}; + struct Struct1x1_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_64")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_64))); @@ -1221,7 +1268,8 @@ int64_t b :62; int64_t c :1; }; - struct Struct1nx1_64 value = {0}; + struct Struct1nx1_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_64")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_64))); @@ -1246,7 +1294,8 @@ int64_t b :62; int64_t c :62; }; - struct Struct3xx_64 value = {0}; + struct Struct3xx_64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_64")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_64))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_64))); @@ -1270,7 +1319,8 @@ uint64_t b :3; uint64_t c :1; }; - struct Struct331_u64 value = {0}; + struct Struct331_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct331_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct331_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct331_u64))); @@ -1294,7 +1344,8 @@ uint64_t b :62; uint64_t c :1; }; - struct Struct1x1_u64 value = {0}; + struct Struct1x1_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1x1_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct1x1_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1x1_u64))); @@ -1319,7 +1370,8 @@ uint64_t b :62; uint64_t c :1; }; - struct Struct1nx1_u64 value = {0}; + struct Struct1nx1_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct1nx1_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct1nx1_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct1nx1_u64))); @@ -1344,7 +1396,8 @@ uint64_t b :62; uint64_t c :62; }; - struct Struct3xx_u64 value = {0}; + struct Struct3xx_u64 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Struct3xx_u64")); APPEND(PyLong_FromLong(sizeof(struct Struct3xx_u64))); APPEND(PyLong_FromLong(_Alignof(struct Struct3xx_u64))); @@ -1367,7 +1420,8 @@ signed char a :4; int b :4; }; - struct Mixed1 value = {0}; + struct Mixed1 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed1")); APPEND(PyLong_FromLong(sizeof(struct Mixed1))); APPEND(PyLong_FromLong(_Alignof(struct Mixed1))); @@ -1389,7 +1443,8 @@ signed char a :4; int32_t b :32; }; - struct Mixed2 value = {0}; + struct Mixed2 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed2")); APPEND(PyLong_FromLong(sizeof(struct Mixed2))); APPEND(PyLong_FromLong(_Alignof(struct Mixed2))); @@ -1411,7 +1466,8 @@ signed char a :4; unsigned char b :4; }; - struct Mixed3 value = {0}; + struct Mixed3 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed3")); APPEND(PyLong_FromLong(sizeof(struct Mixed3))); APPEND(PyLong_FromLong(_Alignof(struct Mixed3))); @@ -1437,7 +1493,8 @@ short e :4; int f :24; }; - struct Mixed4 value = {0}; + struct Mixed4 value; + 
memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed4")); APPEND(PyLong_FromLong(sizeof(struct Mixed4))); APPEND(PyLong_FromLong(_Alignof(struct Mixed4))); @@ -1463,7 +1520,8 @@ unsigned int A :1; unsigned short B :16; }; - struct Mixed5 value = {0}; + struct Mixed5 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed5")); APPEND(PyLong_FromLong(sizeof(struct Mixed5))); APPEND(PyLong_FromLong(_Alignof(struct Mixed5))); @@ -1485,7 +1543,8 @@ unsigned long long A :1; unsigned int B :32; }; - struct Mixed6 value = {0}; + struct Mixed6 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed6")); APPEND(PyLong_FromLong(sizeof(struct Mixed6))); APPEND(PyLong_FromLong(_Alignof(struct Mixed6))); @@ -1508,7 +1567,8 @@ uint32_t B :20; uint64_t C :24; }; - struct Mixed7 value = {0}; + struct Mixed7 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed7")); APPEND(PyLong_FromLong(sizeof(struct Mixed7))); APPEND(PyLong_FromLong(_Alignof(struct Mixed7))); @@ -1532,7 +1592,8 @@ uint32_t B :32; unsigned long long C :1; }; - struct Mixed8_a value = {0}; + struct Mixed8_a value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed8_a")); APPEND(PyLong_FromLong(sizeof(struct Mixed8_a))); APPEND(PyLong_FromLong(_Alignof(struct Mixed8_a))); @@ -1556,7 +1617,8 @@ uint32_t B; unsigned long long C :1; }; - struct Mixed8_b value = {0}; + struct Mixed8_b value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed8_b")); APPEND(PyLong_FromLong(sizeof(struct Mixed8_b))); APPEND(PyLong_FromLong(_Alignof(struct Mixed8_b))); @@ -1579,7 +1641,8 @@ uint8_t A; uint32_t B :1; }; - struct Mixed9 value = {0}; + struct Mixed9 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed9")); APPEND(PyLong_FromLong(sizeof(struct Mixed9))); APPEND(PyLong_FromLong(_Alignof(struct Mixed9))); @@ -1601,7 +1664,8 @@ uint32_t A :1; uint64_t B :1; }; - struct Mixed10 value = {0}; + struct Mixed10 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Mixed10")); APPEND(PyLong_FromLong(sizeof(struct Mixed10))); APPEND(PyLong_FromLong(_Alignof(struct Mixed10))); @@ -1623,7 +1687,8 @@ uint32_t A :1; uint64_t B :1; }; - struct Example_gh_95496 value = {0}; + struct Example_gh_95496 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_95496")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_95496))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_95496))); @@ -1655,7 +1720,8 @@ uint16_t b1 :12; }; #pragma pack(pop) - struct Example_gh_84039_bad value = {0}; + struct Example_gh_84039_bad value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_84039_bad")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_84039_bad))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_84039_bad))); @@ -1693,7 +1759,8 @@ uint8_t a7 :1; }; #pragma pack(pop) - struct Example_gh_84039_good_a value = {0}; + struct Example_gh_84039_good_a value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_84039_good_a")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_84039_good_a))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_84039_good_a))); @@ -1735,7 +1802,8 @@ uint16_t b1 :12; }; #pragma pack(pop) - struct Example_gh_84039_good value = {0}; + struct Example_gh_84039_good value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_84039_good")); APPEND(PyLong_FromLong(sizeof(struct 
Example_gh_84039_good))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_84039_good))); @@ -1775,7 +1843,8 @@ uint32_t R2 :2; }; #pragma pack(pop) - struct Example_gh_73939 value = {0}; + struct Example_gh_73939 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_73939")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_73939))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_73939))); @@ -1806,7 +1875,8 @@ uint8_t b :8; uint32_t c :16; }; - struct Example_gh_86098 value = {0}; + struct Example_gh_86098 value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_86098")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_86098))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_86098))); @@ -1832,7 +1902,8 @@ uint32_t c :16; }; #pragma pack(pop) - struct Example_gh_86098_pack value = {0}; + struct Example_gh_86098_pack value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("Example_gh_86098_pack")); APPEND(PyLong_FromLong(sizeof(struct Example_gh_86098_pack))); APPEND(PyLong_FromLong(_Alignof(struct Example_gh_86098_pack))); @@ -1858,7 +1929,8 @@ }; signed char y; }; - struct AnonBitfields value = {0}; + struct AnonBitfields value; + memset(&value, 0, sizeof(value)); APPEND(PyUnicode_FromString("AnonBitfields")); APPEND(PyLong_FromLong(sizeof(struct AnonBitfields))); APPEND(PyLong_FromLong(_Alignof(struct AnonBitfields))); diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 92eedff5ec94f1..c6b6460126ca90 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -493,27 +493,29 @@ PyCArgObject_new(ctypes_state *st) } static int -PyCArg_traverse(PyCArgObject *self, visitproc visit, void *arg) +PyCArg_traverse(PyObject *op, visitproc visit, void *arg) { + PyCArgObject *self = _PyCArgObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->obj); return 0; } static int -PyCArg_clear(PyCArgObject *self) +PyCArg_clear(PyObject *op) { + PyCArgObject *self = _PyCArgObject_CAST(op); Py_CLEAR(self->obj); return 0; } static void -PyCArg_dealloc(PyCArgObject *self) +PyCArg_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); (void)PyCArg_clear(self); - tp->tp_free((PyObject *)self); + tp->tp_free(self); Py_DECREF(tp); } @@ -524,8 +526,9 @@ is_literal_char(unsigned char c) } static PyObject * -PyCArg_repr(PyCArgObject *self) +PyCArg_repr(PyObject *op) { + PyCArgObject *self = _PyCArgObject_CAST(op); switch(self->tag) { case 'b': case 'B': diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index dcac9da75360a4..9924d62c0881d1 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -194,17 +194,18 @@ PyCField_new_impl(PyTypeObject *type, PyObject *name, PyObject *proto, static int -PyCField_set(CFieldObject *self, PyObject *inst, PyObject *value) +PyCField_set(PyObject *op, PyObject *inst, PyObject *value) { CDataObject *dst; char *ptr; + CFieldObject *self = _CFieldObject_CAST(op); ctypes_state *st = get_module_state_by_class(Py_TYPE(self)); if (!CDataObject_Check(st, inst)) { PyErr_SetString(PyExc_TypeError, "not a ctype instance"); return -1; } - dst = (CDataObject *)inst; + dst = _CDataObject_CAST(inst); ptr = dst->b_ptr + self->offset; if (value == NULL) { PyErr_SetString(PyExc_TypeError, @@ -212,13 +213,14 @@ PyCField_set(CFieldObject *self, PyObject *inst, PyObject *value) return -1; } return PyCData_set(st, inst, self->proto, self->setfunc, value, - self->index, self->size, ptr); + self->index, self->size, ptr); } 
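[Editor's note] The cfield.c and _ctypes.c hunks above repeatedly apply the same refactoring: slot and getset functions now take a plain PyObject * and recover the concrete type internally through a _*_CAST macro, so the slot tables no longer need function-pointer casts. The following is a minimal, self-contained sketch of that pattern only; FooObject, _FooObject_CAST, Foo_repr and "example.Foo" are illustrative names invented for this note, not part of the patch.

#include <Python.h>

/* Hypothetical object used only to illustrate the cast-macro pattern. */
typedef struct {
    PyObject_HEAD
    int payload;
} FooObject;

#define _FooObject_CAST(op) ((FooObject *)(op))

/* The slot function matches the generic reprfunc signature exactly... */
static PyObject *
Foo_repr(PyObject *op)
{
    /* ...and the concrete type is recovered once, inside the function. */
    FooObject *self = _FooObject_CAST(op);
    return PyUnicode_FromFormat("<Foo payload=%d>", self->payload);
}

static PyType_Slot foo_slots[] = {
    {Py_tp_repr, Foo_repr},   /* no (reprfunc) cast needed in the table */
    {0, NULL},
};

static PyType_Spec foo_spec = {
    .name = "example.Foo",
    .basicsize = sizeof(FooObject),
    .flags = Py_TPFLAGS_DEFAULT,
    .slots = foo_slots,
};

Compared with casting at the registration site (e.g. (reprfunc)Foo_repr), this keeps the cast in one place and avoids calling a function through an incompatible function-pointer type.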
static PyObject * -PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type) +PyCField_get(PyObject *op, PyObject *inst, PyTypeObject *type) { CDataObject *src; + CFieldObject *self = _CFieldObject_CAST(op); if (inst == NULL) { return Py_NewRef(self); } @@ -228,21 +230,21 @@ PyCField_get(CFieldObject *self, PyObject *inst, PyTypeObject *type) "not a ctype instance"); return NULL; } - src = (CDataObject *)inst; + src = _CDataObject_CAST(inst); return PyCData_get(st, self->proto, self->getfunc, inst, - self->index, self->size, src->b_ptr + self->offset); + self->index, self->size, src->b_ptr + self->offset); } static PyObject * PyCField_get_offset(PyObject *self, void *data) { - return PyLong_FromSsize_t(((CFieldObject *)self)->offset); + return PyLong_FromSsize_t(_CFieldObject_CAST(self)->offset); } static PyObject * PyCField_get_size(PyObject *self, void *data) { - return PyLong_FromSsize_t(((CFieldObject *)self)->size); + return PyLong_FromSsize_t(_CFieldObject_CAST(self)->size); } static PyGetSetDef PyCField_getset[] = { @@ -252,17 +254,20 @@ static PyGetSetDef PyCField_getset[] = { }; static int -PyCField_traverse(CFieldObject *self, visitproc visit, void *arg) +PyCField_traverse(PyObject *op, visitproc visit, void *arg) { + CFieldObject *self = _CFieldObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->proto); return 0; } static int -PyCField_clear(CFieldObject *self) +PyCField_clear(PyObject *op) { + CFieldObject *self = _CFieldObject_CAST(op); Py_CLEAR(self->proto); + Py_CLEAR(self->name); return 0; } @@ -271,17 +276,16 @@ PyCField_dealloc(PyObject *self) { PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); - CFieldObject *self_cf = (CFieldObject *)self; - (void)PyCField_clear(self_cf); - Py_CLEAR(self_cf->name); + (void)PyCField_clear(self); Py_TYPE(self)->tp_free(self); Py_DECREF(tp); } static PyObject * -PyCField_repr(CFieldObject *self) +PyCField_repr(PyObject *op) { PyObject *result; + CFieldObject *self = _CFieldObject_CAST(op); Py_ssize_t bits = NUM_BITS(self->size); Py_ssize_t size = LOW_BIT(self->size); const char *name; @@ -1255,6 +1259,10 @@ for code in 'sbBcdCEFgfhHiIlLqQPzuUZXvO': // always contains NULLs: struct fielddesc fmt_nil; + + // Result of _ctypes_get_simple_type_chars. Initialized just after + // the rest of formattable, so we stash it here. + char simple_type_chars[26]; }; static struct formattable formattable; @@ -1315,8 +1323,8 @@ _Py_COMP_DIAG_PUSH /* Delayed initialization. Windows cannot statically reference dynamically loaded addresses from DLLs. 
*/ -void -_ctypes_init_fielddesc(void) +static void +_ctypes_init_fielddesc_locked(void) { /* Fixed-width integers */ @@ -1432,9 +1440,11 @@ for base_code, base_c_type in [ TABLE_ENTRY_SW(d, &ffi_type_double); #if defined(Py_HAVE_C_COMPLEX) && defined(Py_FFI_SUPPORT_C_COMPLEX) - TABLE_ENTRY(C, &ffi_type_complex_double); - TABLE_ENTRY(E, &ffi_type_complex_float); - TABLE_ENTRY(F, &ffi_type_complex_longdouble); + if (Py_FFI_COMPLEX_AVAILABLE) { + TABLE_ENTRY(C, &ffi_type_complex_double); + TABLE_ENTRY(E, &ffi_type_complex_float); + TABLE_ENTRY(F, &ffi_type_complex_longdouble); + } #endif TABLE_ENTRY(g, &ffi_type_longdouble); TABLE_ENTRY_SW(f, &ffi_type_float); @@ -1466,21 +1476,75 @@ for base_code, base_c_type in [ formattable.fmt_bool.code = '?'; formattable.fmt_bool.setfunc = bool_set; formattable.fmt_bool.getfunc = bool_get; + +/*[python input] +all_chars = "cbBhHiIlLdCEFfuzZqQPXOv?g" +print(f' assert(sizeof(formattable.simple_type_chars) == {len(all_chars)+1});') +print(f' int i = 0;') +for char in all_chars: + ident_char = {'?': 'bool'}.get(char, char) + print(f" if (formattable.fmt_{ident_char}.code) " + + f"formattable.simple_type_chars[i++] = '{char}';") +print(f" formattable.simple_type_chars[i] = 0;") +[python start generated code]*/ + assert(sizeof(formattable.simple_type_chars) == 26); + int i = 0; + if (formattable.fmt_c.code) formattable.simple_type_chars[i++] = 'c'; + if (formattable.fmt_b.code) formattable.simple_type_chars[i++] = 'b'; + if (formattable.fmt_B.code) formattable.simple_type_chars[i++] = 'B'; + if (formattable.fmt_h.code) formattable.simple_type_chars[i++] = 'h'; + if (formattable.fmt_H.code) formattable.simple_type_chars[i++] = 'H'; + if (formattable.fmt_i.code) formattable.simple_type_chars[i++] = 'i'; + if (formattable.fmt_I.code) formattable.simple_type_chars[i++] = 'I'; + if (formattable.fmt_l.code) formattable.simple_type_chars[i++] = 'l'; + if (formattable.fmt_L.code) formattable.simple_type_chars[i++] = 'L'; + if (formattable.fmt_d.code) formattable.simple_type_chars[i++] = 'd'; + if (formattable.fmt_C.code) formattable.simple_type_chars[i++] = 'C'; + if (formattable.fmt_E.code) formattable.simple_type_chars[i++] = 'E'; + if (formattable.fmt_F.code) formattable.simple_type_chars[i++] = 'F'; + if (formattable.fmt_f.code) formattable.simple_type_chars[i++] = 'f'; + if (formattable.fmt_u.code) formattable.simple_type_chars[i++] = 'u'; + if (formattable.fmt_z.code) formattable.simple_type_chars[i++] = 'z'; + if (formattable.fmt_Z.code) formattable.simple_type_chars[i++] = 'Z'; + if (formattable.fmt_q.code) formattable.simple_type_chars[i++] = 'q'; + if (formattable.fmt_Q.code) formattable.simple_type_chars[i++] = 'Q'; + if (formattable.fmt_P.code) formattable.simple_type_chars[i++] = 'P'; + if (formattable.fmt_X.code) formattable.simple_type_chars[i++] = 'X'; + if (formattable.fmt_O.code) formattable.simple_type_chars[i++] = 'O'; + if (formattable.fmt_v.code) formattable.simple_type_chars[i++] = 'v'; + if (formattable.fmt_bool.code) formattable.simple_type_chars[i++] = '?'; + if (formattable.fmt_g.code) formattable.simple_type_chars[i++] = 'g'; + formattable.simple_type_chars[i] = 0; +/*[python end generated code: output=e6e5098a02f4b606 input=72031a625eac00c1]*/ + } #undef FIXINT_FIELDDESC_FOR _Py_COMP_DIAG_POP -struct fielddesc * -_ctypes_get_fielddesc(const char *fmt) +static void +_ctypes_init_fielddesc(void) { static bool initialized = false; static PyMutex mutex = {0}; PyMutex_Lock(&mutex); if (!initialized) { - _ctypes_init_fielddesc(); + 
_ctypes_init_fielddesc_locked(); initialized = true; } PyMutex_Unlock(&mutex); +} + +char * +_ctypes_get_simple_type_chars(void) { + _ctypes_init_fielddesc(); + return formattable.simple_type_chars; +} + +struct fielddesc * +_ctypes_get_fielddesc(const char *fmt) +{ + _ctypes_init_fielddesc(); + struct fielddesc *result = NULL; switch(fmt[0]) { /*[python input] diff --git a/Modules/_ctypes/clinic/_ctypes.c.h b/Modules/_ctypes/clinic/_ctypes.c.h index 405a3c9238d77d..1f2e871137ed79 100644 --- a/Modules/_ctypes/clinic/_ctypes.c.h +++ b/Modules/_ctypes/clinic/_ctypes.c.h @@ -331,7 +331,7 @@ PyCPointerType_set_type_impl(PyTypeObject *self, PyTypeObject *cls, PyObject *type); static PyObject * -PyCPointerType_set_type(PyTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +PyCPointerType_set_type(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -356,7 +356,7 @@ PyCPointerType_set_type(PyTypeObject *self, PyTypeObject *cls, PyObject *const * goto exit; } type = args[0]; - return_value = PyCPointerType_set_type_impl(self, cls, type); + return_value = PyCPointerType_set_type_impl((PyTypeObject *)self, cls, type); exit: return return_value; @@ -616,12 +616,12 @@ static int _ctypes_CFuncPtr_errcheck_set_impl(PyCFuncPtrObject *self, PyObject *value); static int -_ctypes_CFuncPtr_errcheck_set(PyCFuncPtrObject *self, PyObject *value, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_errcheck_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_errcheck_set_impl(self, value); + return_value = _ctypes_CFuncPtr_errcheck_set_impl((PyCFuncPtrObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -648,12 +648,12 @@ static PyObject * _ctypes_CFuncPtr_errcheck_get_impl(PyCFuncPtrObject *self); static PyObject * -_ctypes_CFuncPtr_errcheck_get(PyCFuncPtrObject *self, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_errcheck_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_errcheck_get_impl(self); + return_value = _ctypes_CFuncPtr_errcheck_get_impl((PyCFuncPtrObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -673,12 +673,12 @@ static int _ctypes_CFuncPtr_restype_set_impl(PyCFuncPtrObject *self, PyObject *value); static int -_ctypes_CFuncPtr_restype_set(PyCFuncPtrObject *self, PyObject *value, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_restype_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_restype_set_impl(self, value); + return_value = _ctypes_CFuncPtr_restype_set_impl((PyCFuncPtrObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -705,12 +705,12 @@ static PyObject * _ctypes_CFuncPtr_restype_get_impl(PyCFuncPtrObject *self); static PyObject * -_ctypes_CFuncPtr_restype_get(PyCFuncPtrObject *self, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_restype_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_restype_get_impl(self); + return_value = _ctypes_CFuncPtr_restype_get_impl((PyCFuncPtrObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -730,12 +730,12 @@ static int 
_ctypes_CFuncPtr_argtypes_set_impl(PyCFuncPtrObject *self, PyObject *value); static int -_ctypes_CFuncPtr_argtypes_set(PyCFuncPtrObject *self, PyObject *value, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_argtypes_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_argtypes_set_impl(self, value); + return_value = _ctypes_CFuncPtr_argtypes_set_impl((PyCFuncPtrObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -762,12 +762,12 @@ static PyObject * _ctypes_CFuncPtr_argtypes_get_impl(PyCFuncPtrObject *self); static PyObject * -_ctypes_CFuncPtr_argtypes_get(PyCFuncPtrObject *self, void *Py_UNUSED(context)) +_ctypes_CFuncPtr_argtypes_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ctypes_CFuncPtr_argtypes_get_impl(self); + return_value = _ctypes_CFuncPtr_argtypes_get_impl((PyCFuncPtrObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -793,4 +793,4 @@ Simple_from_outparm(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py } return Simple_from_outparm_impl(self, cls); } -/*[clinic end generated code: output=cb3583522a2c5ce5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a18d87239b6fb8ca input=a9049054013a1b77]*/ diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h index cc09639e21f7c2..07049d0968c790 100644 --- a/Modules/_ctypes/ctypes.h +++ b/Modules/_ctypes/ctypes.h @@ -5,8 +5,21 @@ #include "pycore_moduleobject.h" // _PyModule_GetState() #include "pycore_typeobject.h" // _PyType_GetModuleState() +// Do we support C99 complex types in ffi? +// For Apple's libffi, this must be determined at runtime (see gh-128156). 
#if defined(Py_HAVE_C_COMPLEX) && defined(Py_FFI_SUPPORT_C_COMPLEX) # include "../_complex.h" // complex +# if USING_APPLE_OS_LIBFFI && defined(__has_builtin) +# if __has_builtin(__builtin_available) +# define Py_FFI_COMPLEX_AVAILABLE __builtin_available(macOS 10.15, *) +# else +# define Py_FFI_COMPLEX_AVAILABLE 1 +# endif +# else +# define Py_FFI_COMPLEX_AVAILABLE 1 +# endif +#else +# define Py_FFI_COMPLEX_AVAILABLE 0 #endif #ifndef MS_WIN32 @@ -112,7 +125,10 @@ extern PyType_Spec cfield_spec; extern PyType_Spec cthunk_spec; typedef struct tagPyCArgObject PyCArgObject; +#define _PyCArgObject_CAST(op) ((PyCArgObject *)(op)) + typedef struct tagCDataObject CDataObject; +#define _CDataObject_CAST(op) ((CDataObject *)(op)) // GETFUNC: convert the C value at *ptr* to Python object, return the object // SETFUNC: write content of the PyObject *value* to the location at *ptr*; @@ -176,6 +192,8 @@ typedef struct { ffi_type *ffi_restype; ffi_type *atypes[1]; } CThunkObject; + +#define _CThunkObject_CAST(op) ((CThunkObject *)(op)) #define CThunk_CheckExact(st, v) Py_IS_TYPE(v, st->PyCThunk_Type) typedef struct { @@ -209,6 +227,8 @@ typedef struct { PyObject *paramflags; } PyCFuncPtrObject; +#define _PyCFuncPtrObject_CAST(op) ((PyCFuncPtrObject *)(op)) + extern int PyCStructUnionType_update_stginfo(PyObject *fields, PyObject *type, int isStruct); extern int PyType_stginfo(PyTypeObject *self, Py_ssize_t *psize, Py_ssize_t *palign, Py_ssize_t *plength); extern int PyObject_stginfo(PyObject *self, Py_ssize_t *psize, Py_ssize_t *palign, Py_ssize_t *plength); @@ -255,6 +275,9 @@ struct fielddesc { GETFUNC getfunc_swapped; }; +// Get all single-character type codes (for use in error messages) +extern char *_ctypes_get_simple_type_chars(void); + typedef struct CFieldObject { PyObject_HEAD Py_ssize_t offset; @@ -269,6 +292,8 @@ typedef struct CFieldObject { PyObject *name; /* exact PyUnicode */ } CFieldObject; +#define _CFieldObject_CAST(op) ((CFieldObject *)(op)) + /**************************************************************** StgInfo @@ -405,6 +430,8 @@ struct tagPyCArgObject { Py_ssize_t size; /* for the 'V' tag */ }; +#define _PyCArgObject_CAST(op) ((PyCArgObject *)(op)) + #define PyCArg_CheckExact(st, v) Py_IS_TYPE(v, st->PyCArg_Type) extern PyCArgObject *PyCArgObject_new(ctypes_state *st); @@ -550,8 +577,12 @@ PyStgInfo_Init(ctypes_state *state, PyTypeObject *type) # define LOCK_PTR(self) Py_BEGIN_CRITICAL_SECTION(self) # define UNLOCK_PTR(self) Py_END_CRITICAL_SECTION() #else -# define LOCK_PTR(self) -# define UNLOCK_PTR(self) +/* + * Dummy functions instead of macros so that 'self' can be + * unused in the caller without triggering a compiler warning. + */ +static inline void LOCK_PTR(CDataObject *Py_UNUSED(self)) {} +static inline void UNLOCK_PTR(CDataObject *Py_UNUSED(self)) {} #endif static inline void diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index 368d10411366c4..ff2e6d6a098ad9 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -1849,9 +1849,10 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, * is expensive, don't unless they're actually used. 
*/ - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto Error; + } Py_ssize_t flen = PyUnicode_GET_LENGTH(format); Py_ssize_t i = 0; @@ -1955,11 +1956,11 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, if (ch == 'C') { n -= 2; } - if (_PyUnicodeWriter_WriteSubstring(&writer, format, start, end) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format, start, end) < 0) { goto Error; } start = i; - if (_PyUnicodeWriter_WriteASCIIString(&writer, buf, n) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, buf, n) < 0) { goto Error; } continue; @@ -1971,25 +1972,25 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, } assert(replacement != NULL); assert(PyUnicode_Check(replacement)); - if (_PyUnicodeWriter_WriteSubstring(&writer, format, start, end) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format, start, end) < 0) { goto Error; } start = i; - if (_PyUnicodeWriter_WriteStr(&writer, replacement) < 0) { + if (PyUnicodeWriter_WriteStr(writer, replacement) < 0) { goto Error; } } /* end while() */ PyObject *newformat; if (start == 0) { - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); newformat = Py_NewRef(format); } else { - if (_PyUnicodeWriter_WriteSubstring(&writer, format, start, flen) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format, start, flen) < 0) { goto Error; } - newformat = _PyUnicodeWriter_Finish(&writer); + newformat = PyUnicodeWriter_Finish(writer); if (newformat == NULL) { goto Done; } @@ -2007,7 +2008,7 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple, return result; Error: - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); goto Done; } @@ -4947,7 +4948,7 @@ datetime.time.replace minute: int(c_default="TIME_GET_MINUTE(self)") = unchanged second: int(c_default="TIME_GET_SECOND(self)") = unchanged microsecond: int(c_default="TIME_GET_MICROSECOND(self)") = unchanged - tzinfo: object(c_default="HASTZINFO(self) ? self->tzinfo : Py_None") = unchanged + tzinfo: object(c_default="HASTZINFO(self) ? ((PyDateTime_Time *)self)->tzinfo : Py_None") = unchanged * fold: int(c_default="TIME_GET_FOLD(self)") = unchanged @@ -4958,7 +4959,7 @@ static PyObject * datetime_time_replace_impl(PyDateTime_Time *self, int hour, int minute, int second, int microsecond, PyObject *tzinfo, int fold) -/*[clinic end generated code: output=0b89a44c299e4f80 input=9b6a35b1e704b0ca]*/ +/*[clinic end generated code: output=0b89a44c299e4f80 input=abf23656e8df4e97]*/ { return new_time_subclass_fold_ex(hour, minute, second, microsecond, tzinfo, fold, (PyObject *)Py_TYPE(self)); @@ -6449,7 +6450,7 @@ datetime.datetime.replace minute: int(c_default="DATE_GET_MINUTE(self)") = unchanged second: int(c_default="DATE_GET_SECOND(self)") = unchanged microsecond: int(c_default="DATE_GET_MICROSECOND(self)") = unchanged - tzinfo: object(c_default="HASTZINFO(self) ? self->tzinfo : Py_None") = unchanged + tzinfo: object(c_default="HASTZINFO(self) ? 
((PyDateTime_DateTime *)self)->tzinfo : Py_None") = unchanged * fold: int(c_default="DATE_GET_FOLD(self)") = unchanged @@ -6461,7 +6462,7 @@ datetime_datetime_replace_impl(PyDateTime_DateTime *self, int year, int month, int day, int hour, int minute, int second, int microsecond, PyObject *tzinfo, int fold) -/*[clinic end generated code: output=00bc96536833fddb input=9b38253d56d9bcad]*/ +/*[clinic end generated code: output=00bc96536833fddb input=fd972762d604d3e7]*/ { return new_datetime_subclass_fold_ex(year, month, day, hour, minute, second, microsecond, tzinfo, fold, diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 0def463c7d8b9e..b9abd8bd2e7a53 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -2336,15 +2336,16 @@ dec_from_long(decimal_state *state, PyTypeObject *type, PyObject *v, } if (export_long.digits) { const PyLongLayout *layout = PyLong_GetNativeLayout(); - uint32_t base = (uint32_t)1 << layout->bits_per_digit; - uint8_t sign = export_long.negative ? MPD_NEG : MPD_POS; - Py_ssize_t len = export_long.ndigits; - assert(layout->bits_per_digit <= 32); + assert(layout->bits_per_digit < 32); assert(layout->digits_order == -1); assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); assert(layout->digit_size == 2 || layout->digit_size == 4); + uint32_t base = (uint32_t)1 << layout->bits_per_digit; + uint8_t sign = export_long.negative ? MPD_NEG : MPD_POS; + Py_ssize_t len = export_long.ndigits; + if (layout->digit_size == 4) { mpd_qimport_u32(MPD(dec), export_long.digits, len, sign, base, ctx, status); @@ -3642,13 +3643,6 @@ dec_format(PyObject *dec, PyObject *args) static PyObject * dec_as_long(PyObject *dec, PyObject *context, int round) { - PyLongObject *pylong; - digit *ob_digit; - size_t n; - mpd_t *x; - mpd_context_t workctx; - uint32_t status = 0; - if (mpd_isspecial(MPD(dec))) { if (mpd_isnan(MPD(dec))) { PyErr_SetString(PyExc_ValueError, @@ -3661,12 +3655,16 @@ dec_as_long(PyObject *dec, PyObject *context, int round) return NULL; } - x = mpd_qnew(); + mpd_t *x = mpd_qnew(); + if (x == NULL) { PyErr_NoMemory(); return NULL; } - workctx = *CTX(context); + + mpd_context_t workctx = *CTX(context); + uint32_t status = 0; + workctx.round = round; mpd_qround_to_int(x, MPD(dec), &workctx, &status); if (dec_addstatus(context, status)) { @@ -3675,34 +3673,56 @@ dec_as_long(PyObject *dec, PyObject *context, int round) } status = 0; - ob_digit = NULL; -#if PYLONG_BITS_IN_DIGIT == 30 - n = mpd_qexport_u32(&ob_digit, 0, PyLong_BASE, x, &status); -#elif PYLONG_BITS_IN_DIGIT == 15 - n = mpd_qexport_u16(&ob_digit, 0, PyLong_BASE, x, &status); -#else - #error "PYLONG_BITS_IN_DIGIT should be 15 or 30" -#endif + int64_t val = mpd_qget_i64(x, &status); + + if (!status) { + mpd_del(x); + return PyLong_FromInt64(val); + } + assert(!mpd_iszero(x)); + + const PyLongLayout *layout = PyLong_GetNativeLayout(); + + assert(layout->bits_per_digit < 32); + assert(layout->digits_order == -1); + assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); + assert(layout->digit_size == 2 || layout->digit_size == 4); + + uint32_t base = (uint32_t)1 << layout->bits_per_digit; + /* We use a temporary buffer for digits for now, as for nonzero rdata + mpd_qexport_u32/u16() require either space "allocated by one of + libmpdec’s allocation functions" or "rlen MUST be correct" (to avoid + reallocation). This can be further optimized by using rlen from + mpd_sizeinbase(). See gh-127925. 
*/ + void *tmp_digits = NULL; + size_t n; + + status = 0; + if (layout->digit_size == 4) { + n = mpd_qexport_u32((uint32_t **)&tmp_digits, 0, base, x, &status); + } + else { + n = mpd_qexport_u16((uint16_t **)&tmp_digits, 0, base, x, &status); + } if (n == SIZE_MAX) { PyErr_NoMemory(); mpd_del(x); + mpd_free(tmp_digits); return NULL; } - if (n == 1) { - sdigit val = mpd_arith_sign(x) * ob_digit[0]; - mpd_free(ob_digit); - mpd_del(x); - return PyLong_FromLong(val); - } + void *digits; + PyLongWriter *writer = PyLongWriter_Create(mpd_isnegative(x), n, &digits); - assert(n > 0); - assert(!mpd_iszero(x)); - pylong = _PyLong_FromDigits(mpd_isnegative(x), n, ob_digit); - mpd_free(ob_digit); mpd_del(x); - return (PyObject *) pylong; + if (writer == NULL) { + mpd_free(tmp_digits); + return NULL; + } + memcpy(digits, tmp_digits, layout->digit_size*n); + mpd_free(tmp_digits); + return PyLongWriter_Finish(writer); } /* Convert a Decimal to its exact integer ratio representation. */ diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c index fb66d3db0f7a1f..16095333db6638 100644 --- a/Modules/_io/bytesio.c +++ b/Modules/_io/bytesio.c @@ -588,7 +588,7 @@ _io_BytesIO_readinto_impl(bytesio *self, Py_buffer *buffer) /*[clinic input] _io.BytesIO.truncate - size: Py_ssize_t(accept={int, NoneType}, c_default="self->pos") = None + size: Py_ssize_t(accept={int, NoneType}, c_default="((bytesio *)self)->pos") = None / Truncate the file to at most size bytes. @@ -599,7 +599,7 @@ The current file position is unchanged. Returns the new size. static PyObject * _io_BytesIO_truncate_impl(bytesio *self, Py_ssize_t size) -/*[clinic end generated code: output=9ad17650c15fa09b input=423759dd42d2f7c1]*/ +/*[clinic end generated code: output=9ad17650c15fa09b input=dae4295e11c1bbb4]*/ { CHECK_CLOSED(self); CHECK_EXPORTS(self); diff --git a/Modules/_io/clinic/bufferedio.c.h b/Modules/_io/clinic/bufferedio.c.h index e035bd99baca5f..8ab8000fafee02 100644 --- a/Modules/_io/clinic/bufferedio.c.h +++ b/Modules/_io/clinic/bufferedio.c.h @@ -288,12 +288,12 @@ static PyObject * _io__Buffered___sizeof___impl(buffered *self); static PyObject * -_io__Buffered___sizeof__(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered___sizeof___impl(self); + return_value = _io__Buffered___sizeof___impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -319,12 +319,12 @@ static PyObject * _io__Buffered_simple_flush_impl(buffered *self); static PyObject * -_io__Buffered_simple_flush(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_simple_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_simple_flush_impl(self); + return_value = _io__Buffered_simple_flush_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -344,12 +344,12 @@ static PyObject * _io__Buffered_closed_get_impl(buffered *self); static PyObject * -_io__Buffered_closed_get(buffered *self, void *Py_UNUSED(context)) +_io__Buffered_closed_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_closed_get_impl(self); + return_value = _io__Buffered_closed_get_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -367,12 +367,12 @@ static PyObject * 
_io__Buffered_close_impl(buffered *self); static PyObject * -_io__Buffered_close(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_close_impl(self); + return_value = _io__Buffered_close_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -390,12 +390,12 @@ static PyObject * _io__Buffered_detach_impl(buffered *self); static PyObject * -_io__Buffered_detach(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_detach_impl(self); + return_value = _io__Buffered_detach_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -413,12 +413,12 @@ static PyObject * _io__Buffered_seekable_impl(buffered *self); static PyObject * -_io__Buffered_seekable(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_seekable_impl(self); + return_value = _io__Buffered_seekable_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -436,12 +436,12 @@ static PyObject * _io__Buffered_readable_impl(buffered *self); static PyObject * -_io__Buffered_readable(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readable_impl(self); + return_value = _io__Buffered_readable_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -459,12 +459,12 @@ static PyObject * _io__Buffered_writable_impl(buffered *self); static PyObject * -_io__Buffered_writable(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_writable_impl(self); + return_value = _io__Buffered_writable_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -484,12 +484,12 @@ static PyObject * _io__Buffered_name_get_impl(buffered *self); static PyObject * -_io__Buffered_name_get(buffered *self, void *Py_UNUSED(context)) +_io__Buffered_name_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_name_get_impl(self); + return_value = _io__Buffered_name_get_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -509,12 +509,12 @@ static PyObject * _io__Buffered_mode_get_impl(buffered *self); static PyObject * -_io__Buffered_mode_get(buffered *self, void *Py_UNUSED(context)) +_io__Buffered_mode_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_mode_get_impl(self); + return_value = _io__Buffered_mode_get_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -532,12 +532,12 @@ static PyObject * _io__Buffered_fileno_impl(buffered *self); static PyObject * -_io__Buffered_fileno(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = 
_io__Buffered_fileno_impl(self); + return_value = _io__Buffered_fileno_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -555,12 +555,12 @@ static PyObject * _io__Buffered_isatty_impl(buffered *self); static PyObject * -_io__Buffered_isatty(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_isatty_impl(self); + return_value = _io__Buffered_isatty_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -578,12 +578,12 @@ static PyObject * _io__Buffered_flush_impl(buffered *self); static PyObject * -_io__Buffered_flush(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_flush_impl(self); + return_value = _io__Buffered_flush_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -601,7 +601,7 @@ static PyObject * _io__Buffered_peek_impl(buffered *self, Py_ssize_t size); static PyObject * -_io__Buffered_peek(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_peek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = 0; @@ -626,7 +626,7 @@ _io__Buffered_peek(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_peek_impl(self, size); + return_value = _io__Buffered_peek_impl((buffered *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -645,7 +645,7 @@ static PyObject * _io__Buffered_read_impl(buffered *self, Py_ssize_t n); static PyObject * -_io__Buffered_read(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = -1; @@ -661,7 +661,7 @@ _io__Buffered_read(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_read_impl(self, n); + return_value = _io__Buffered_read_impl((buffered *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -680,7 +680,7 @@ static PyObject * _io__Buffered_read1_impl(buffered *self, Py_ssize_t n); static PyObject * -_io__Buffered_read1(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_read1(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = -1; @@ -705,7 +705,7 @@ _io__Buffered_read1(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_read1_impl(self, n); + return_value = _io__Buffered_read1_impl((buffered *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -724,7 +724,7 @@ static PyObject * _io__Buffered_readinto_impl(buffered *self, Py_buffer *buffer); static PyObject * -_io__Buffered_readinto(buffered *self, PyObject *arg) +_io__Buffered_readinto(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -734,7 +734,7 @@ _io__Buffered_readinto(buffered *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readinto_impl(self, &buffer); + return_value = _io__Buffered_readinto_impl((buffered *)self, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -758,7 +758,7 @@ static PyObject * 
_io__Buffered_readinto1_impl(buffered *self, Py_buffer *buffer); static PyObject * -_io__Buffered_readinto1(buffered *self, PyObject *arg) +_io__Buffered_readinto1(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -768,7 +768,7 @@ _io__Buffered_readinto1(buffered *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readinto1_impl(self, &buffer); + return_value = _io__Buffered_readinto1_impl((buffered *)self, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -792,7 +792,7 @@ static PyObject * _io__Buffered_readline_impl(buffered *self, Py_ssize_t size); static PyObject * -_io__Buffered_readline(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -808,7 +808,7 @@ _io__Buffered_readline(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_readline_impl(self, size); + return_value = _io__Buffered_readline_impl((buffered *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -827,12 +827,12 @@ static PyObject * _io__Buffered_tell_impl(buffered *self); static PyObject * -_io__Buffered_tell(buffered *self, PyObject *Py_UNUSED(ignored)) +_io__Buffered_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_tell_impl(self); + return_value = _io__Buffered_tell_impl((buffered *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -850,7 +850,7 @@ static PyObject * _io__Buffered_seek_impl(buffered *self, PyObject *targetobj, int whence); static PyObject * -_io__Buffered_seek(buffered *self, PyObject *const *args, Py_ssize_t nargs) +_io__Buffered_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *targetobj; @@ -869,7 +869,7 @@ _io__Buffered_seek(buffered *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_seek_impl(self, targetobj, whence); + return_value = _io__Buffered_seek_impl((buffered *)self, targetobj, whence); Py_END_CRITICAL_SECTION(); exit: @@ -888,7 +888,7 @@ static PyObject * _io__Buffered_truncate_impl(buffered *self, PyTypeObject *cls, PyObject *pos); static PyObject * -_io__Buffered_truncate(buffered *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__Buffered_truncate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -918,7 +918,7 @@ _io__Buffered_truncate(buffered *self, PyTypeObject *cls, PyObject *const *args, pos = args[0]; skip_optional_posonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io__Buffered_truncate_impl(self, cls, pos); + return_value = _io__Buffered_truncate_impl((buffered *)self, cls, pos); Py_END_CRITICAL_SECTION(); exit: @@ -1089,7 +1089,7 @@ static PyObject * _io_BufferedWriter_write_impl(buffered *self, Py_buffer *buffer); static PyObject * -_io_BufferedWriter_write(buffered *self, PyObject *arg) +_io_BufferedWriter_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -1098,7 +1098,7 @@ _io_BufferedWriter_write(buffered *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - 
return_value = _io_BufferedWriter_write_impl(self, &buffer); + return_value = _io_BufferedWriter_write_impl((buffered *)self, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -1246,4 +1246,4 @@ _io_BufferedRandom___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=8f28a97987a9fbe1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f019d29701ba2556 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/bytesio.c.h b/Modules/_io/clinic/bytesio.c.h index 98d88698c5e9b7..5528df952c33fb 100644 --- a/Modules/_io/clinic/bytesio.c.h +++ b/Modules/_io/clinic/bytesio.c.h @@ -22,9 +22,9 @@ static PyObject * _io_BytesIO_readable_impl(bytesio *self); static PyObject * -_io_BytesIO_readable(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_readable_impl(self); + return _io_BytesIO_readable_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_writable__doc__, @@ -40,9 +40,9 @@ static PyObject * _io_BytesIO_writable_impl(bytesio *self); static PyObject * -_io_BytesIO_writable(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_writable_impl(self); + return _io_BytesIO_writable_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_seekable__doc__, @@ -58,9 +58,9 @@ static PyObject * _io_BytesIO_seekable_impl(bytesio *self); static PyObject * -_io_BytesIO_seekable(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_seekable_impl(self); + return _io_BytesIO_seekable_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_flush__doc__, @@ -76,9 +76,9 @@ static PyObject * _io_BytesIO_flush_impl(bytesio *self); static PyObject * -_io_BytesIO_flush(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_flush_impl(self); + return _io_BytesIO_flush_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_getbuffer__doc__, @@ -94,13 +94,13 @@ static PyObject * _io_BytesIO_getbuffer_impl(bytesio *self, PyTypeObject *cls); static PyObject * -_io_BytesIO_getbuffer(bytesio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_BytesIO_getbuffer(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "getbuffer() takes no arguments"); return NULL; } - return _io_BytesIO_getbuffer_impl(self, cls); + return _io_BytesIO_getbuffer_impl((bytesio *)self, cls); } PyDoc_STRVAR(_io_BytesIO_getvalue__doc__, @@ -116,9 +116,9 @@ static PyObject * _io_BytesIO_getvalue_impl(bytesio *self); static PyObject * -_io_BytesIO_getvalue(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_getvalue(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_getvalue_impl(self); + return _io_BytesIO_getvalue_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_isatty__doc__, @@ -136,9 +136,9 @@ static PyObject * _io_BytesIO_isatty_impl(bytesio *self); static PyObject * -_io_BytesIO_isatty(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_isatty_impl(self); + return _io_BytesIO_isatty_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_tell__doc__, @@ -154,9 +154,9 @@ static PyObject * 
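Every bufferedio.c.h hunk above follows the same mechanical shape: the generated wrapper now takes PyObject *self, matching the method-table typedefs exactly, and downcasts only at the call into the _impl function. A hand-written sketch of that shape, with placeholder names (mytype_object, mytype_method) and without the locking that some wrappers add:

    #include "Python.h"

    /* Placeholder object type for the sketch. */
    typedef struct {
        PyObject_HEAD
        long value;
    } mytype_object;

    /* The impl keeps the concrete struct type... */
    static PyObject *
    mytype_method_impl(mytype_object *self)
    {
        return PyLong_FromLong(self->value);
    }

    /* ...while the table-facing wrapper uses the generic PyObject *self
     * signature and casts exactly once, as in the generated code above. */
    static PyObject *
    mytype_method(PyObject *self, PyObject *Py_UNUSED(ignored))
    {
        return mytype_method_impl((mytype_object *)self);
    }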
_io_BytesIO_tell_impl(bytesio *self); static PyObject * -_io_BytesIO_tell(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_tell_impl(self); + return _io_BytesIO_tell_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO_read__doc__, @@ -175,7 +175,7 @@ static PyObject * _io_BytesIO_read_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_read(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -190,7 +190,7 @@ _io_BytesIO_read(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_read_impl(self, size); + return_value = _io_BytesIO_read_impl((bytesio *)self, size); exit: return return_value; @@ -212,7 +212,7 @@ static PyObject * _io_BytesIO_read1_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_read1(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_read1(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -227,7 +227,7 @@ _io_BytesIO_read1(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_read1_impl(self, size); + return_value = _io_BytesIO_read1_impl((bytesio *)self, size); exit: return return_value; @@ -250,7 +250,7 @@ static PyObject * _io_BytesIO_readline_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_readline(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -265,7 +265,7 @@ _io_BytesIO_readline(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_readline_impl(self, size); + return_value = _io_BytesIO_readline_impl((bytesio *)self, size); exit: return return_value; @@ -288,7 +288,7 @@ static PyObject * _io_BytesIO_readlines_impl(bytesio *self, PyObject *arg); static PyObject * -_io_BytesIO_readlines(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_readlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *arg = Py_None; @@ -301,7 +301,7 @@ _io_BytesIO_readlines(bytesio *self, PyObject *const *args, Py_ssize_t nargs) } arg = args[0]; skip_optional: - return_value = _io_BytesIO_readlines_impl(self, arg); + return_value = _io_BytesIO_readlines_impl((bytesio *)self, arg); exit: return return_value; @@ -323,7 +323,7 @@ static PyObject * _io_BytesIO_readinto_impl(bytesio *self, Py_buffer *buffer); static PyObject * -_io_BytesIO_readinto(bytesio *self, PyObject *arg) +_io_BytesIO_readinto(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -332,7 +332,7 @@ _io_BytesIO_readinto(bytesio *self, PyObject *arg) _PyArg_BadArgument("readinto", "argument", "read-write bytes-like object", arg); goto exit; } - return_value = _io_BytesIO_readinto_impl(self, &buffer); + return_value = _io_BytesIO_readinto_impl((bytesio *)self, &buffer); exit: /* Cleanup for buffer */ @@ -359,10 +359,10 @@ static PyObject * _io_BytesIO_truncate_impl(bytesio *self, Py_ssize_t size); static PyObject * -_io_BytesIO_truncate(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_truncate(PyObject *self, PyObject *const 
*args, Py_ssize_t nargs) { PyObject *return_value = NULL; - Py_ssize_t size = self->pos; + Py_ssize_t size = ((bytesio *)self)->pos; if (!_PyArg_CheckPositional("truncate", nargs, 0, 1)) { goto exit; @@ -374,7 +374,7 @@ _io_BytesIO_truncate(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_truncate_impl(self, size); + return_value = _io_BytesIO_truncate_impl((bytesio *)self, size); exit: return return_value; @@ -399,7 +399,7 @@ static PyObject * _io_BytesIO_seek_impl(bytesio *self, Py_ssize_t pos, int whence); static PyObject * -_io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) +_io_BytesIO_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t pos; @@ -428,7 +428,7 @@ _io_BytesIO_seek(bytesio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = _io_BytesIO_seek_impl(self, pos, whence); + return_value = _io_BytesIO_seek_impl((bytesio *)self, pos, whence); exit: return return_value; @@ -471,9 +471,9 @@ static PyObject * _io_BytesIO_close_impl(bytesio *self); static PyObject * -_io_BytesIO_close(bytesio *self, PyObject *Py_UNUSED(ignored)) +_io_BytesIO_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_BytesIO_close_impl(self); + return _io_BytesIO_close_impl((bytesio *)self); } PyDoc_STRVAR(_io_BytesIO___init____doc__, @@ -535,4 +535,4 @@ _io_BytesIO___init__(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=985ff54e89f6036e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8a5e153bc7584b55 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/fileio.c.h b/Modules/_io/clinic/fileio.c.h index 0b8b4a49ac24b6..22d27bce67799e 100644 --- a/Modules/_io/clinic/fileio.c.h +++ b/Modules/_io/clinic/fileio.c.h @@ -25,13 +25,13 @@ static PyObject * _io_FileIO_close_impl(fileio *self, PyTypeObject *cls); static PyObject * -_io_FileIO_close(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_close(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "close() takes no arguments"); return NULL; } - return _io_FileIO_close_impl(self, cls); + return _io_FileIO_close_impl((fileio *)self, cls); } PyDoc_STRVAR(_io_FileIO___init____doc__, @@ -151,9 +151,9 @@ static PyObject * _io_FileIO_fileno_impl(fileio *self); static PyObject * -_io_FileIO_fileno(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_fileno_impl(self); + return _io_FileIO_fileno_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_readable__doc__, @@ -169,9 +169,9 @@ static PyObject * _io_FileIO_readable_impl(fileio *self); static PyObject * -_io_FileIO_readable(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_readable_impl(self); + return _io_FileIO_readable_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_writable__doc__, @@ -187,9 +187,9 @@ static PyObject * _io_FileIO_writable_impl(fileio *self); static PyObject * -_io_FileIO_writable(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_writable_impl(self); + return _io_FileIO_writable_impl((fileio 
*)self); } PyDoc_STRVAR(_io_FileIO_seekable__doc__, @@ -205,9 +205,9 @@ static PyObject * _io_FileIO_seekable_impl(fileio *self); static PyObject * -_io_FileIO_seekable(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_seekable_impl(self); + return _io_FileIO_seekable_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_readinto__doc__, @@ -223,7 +223,7 @@ static PyObject * _io_FileIO_readinto_impl(fileio *self, PyTypeObject *cls, Py_buffer *buffer); static PyObject * -_io_FileIO_readinto(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_readinto(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -251,7 +251,7 @@ _io_FileIO_readinto(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_s _PyArg_BadArgument("readinto", "argument 1", "read-write bytes-like object", args[0]); goto exit; } - return_value = _io_FileIO_readinto_impl(self, cls, &buffer); + return_value = _io_FileIO_readinto_impl((fileio *)self, cls, &buffer); exit: /* Cleanup for buffer */ @@ -278,9 +278,9 @@ static PyObject * _io_FileIO_readall_impl(fileio *self); static PyObject * -_io_FileIO_readall(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_readall_impl(self); + return _io_FileIO_readall_impl((fileio *)self); } PyDoc_STRVAR(_io_FileIO_read__doc__, @@ -300,7 +300,7 @@ static PyObject * _io_FileIO_read_impl(fileio *self, PyTypeObject *cls, Py_ssize_t size); static PyObject * -_io_FileIO_read(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_read(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -331,7 +331,7 @@ _io_FileIO_read(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize goto exit; } skip_optional_posonly: - return_value = _io_FileIO_read_impl(self, cls, size); + return_value = _io_FileIO_read_impl((fileio *)self, cls, size); exit: return return_value; @@ -354,7 +354,7 @@ static PyObject * _io_FileIO_write_impl(fileio *self, PyTypeObject *cls, Py_buffer *b); static PyObject * -_io_FileIO_write(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -381,7 +381,7 @@ _io_FileIO_write(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssiz if (PyObject_GetBuffer(args[0], &b, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _io_FileIO_write_impl(self, cls, &b); + return_value = _io_FileIO_write_impl((fileio *)self, cls, &b); exit: /* Cleanup for b */ @@ -413,7 +413,7 @@ static PyObject * _io_FileIO_seek_impl(fileio *self, PyObject *pos, int whence); static PyObject * -_io_FileIO_seek(fileio *self, PyObject *const *args, Py_ssize_t nargs) +_io_FileIO_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *pos; @@ -431,7 +431,7 @@ _io_FileIO_seek(fileio *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = 
_io_FileIO_seek_impl(self, pos, whence); + return_value = _io_FileIO_seek_impl((fileio *)self, pos, whence); exit: return return_value; @@ -452,9 +452,9 @@ static PyObject * _io_FileIO_tell_impl(fileio *self); static PyObject * -_io_FileIO_tell(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_tell_impl(self); + return _io_FileIO_tell_impl((fileio *)self); } #if defined(HAVE_FTRUNCATE) @@ -475,7 +475,7 @@ static PyObject * _io_FileIO_truncate_impl(fileio *self, PyTypeObject *cls, PyObject *posobj); static PyObject * -_io_FileIO_truncate(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_FileIO_truncate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -504,7 +504,7 @@ _io_FileIO_truncate(fileio *self, PyTypeObject *cls, PyObject *const *args, Py_s } posobj = args[0]; skip_optional_posonly: - return_value = _io_FileIO_truncate_impl(self, cls, posobj); + return_value = _io_FileIO_truncate_impl((fileio *)self, cls, posobj); exit: return return_value; @@ -525,12 +525,12 @@ static PyObject * _io_FileIO_isatty_impl(fileio *self); static PyObject * -_io_FileIO_isatty(fileio *self, PyObject *Py_UNUSED(ignored)) +_io_FileIO_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_FileIO_isatty_impl(self); + return _io_FileIO_isatty_impl((fileio *)self); } #ifndef _IO_FILEIO_TRUNCATE_METHODDEF #define _IO_FILEIO_TRUNCATE_METHODDEF #endif /* !defined(_IO_FILEIO_TRUNCATE_METHODDEF) */ -/*[clinic end generated code: output=1c262ae135da4dcb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=dcbeb6a0b13e4b1f input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/stringio.c.h b/Modules/_io/clinic/stringio.c.h index 6f9205af32f010..bc571698806bde 100644 --- a/Modules/_io/clinic/stringio.c.h +++ b/Modules/_io/clinic/stringio.c.h @@ -23,12 +23,12 @@ static PyObject * _io_StringIO_getvalue_impl(stringio *self); static PyObject * -_io_StringIO_getvalue(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_getvalue(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_getvalue_impl(self); + return_value = _io_StringIO_getvalue_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -47,12 +47,12 @@ static PyObject * _io_StringIO_tell_impl(stringio *self); static PyObject * -_io_StringIO_tell(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_tell_impl(self); + return_value = _io_StringIO_tell_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -74,7 +74,7 @@ static PyObject * _io_StringIO_read_impl(stringio *self, Py_ssize_t size); static PyObject * -_io_StringIO_read(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -90,7 +90,7 @@ _io_StringIO_read(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_read_impl(self, size); + return_value = _io_StringIO_read_impl((stringio *)self, size); Py_END_CRITICAL_SECTION(); 
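Several of the fileio.c.h wrappers above additionally thread a PyTypeObject *cls argument (the "defining class" calling convention) through to the impl. A sketch of why that parameter is useful — reaching per-module state without C globals; the module-state struct and every name here are hypothetical:

    #include "Python.h"

    /* Hypothetical per-module state. */
    typedef struct {
        PyObject *unsupported_operation;   /* e.g. an exception type */
    } mymodule_state;

    static PyObject *
    myobj_close_impl(PyObject *self, PyTypeObject *cls)
    {
        /* cls is the defining class, so its module (and that module's state)
         * is reachable even when called on a subclass instance. */
        mymodule_state *state = PyType_GetModuleState(cls);
        if (state == NULL) {
            return NULL;   /* type not created with PyType_FromModuleAndSpec() */
        }
        /* ... use state->unsupported_operation to raise module errors ... */
        Py_RETURN_NONE;
    }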
exit: @@ -112,7 +112,7 @@ static PyObject * _io_StringIO_readline_impl(stringio *self, Py_ssize_t size); static PyObject * -_io_StringIO_readline(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -128,7 +128,7 @@ _io_StringIO_readline(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_readline_impl(self, size); + return_value = _io_StringIO_readline_impl((stringio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -152,10 +152,10 @@ static PyObject * _io_StringIO_truncate_impl(stringio *self, Py_ssize_t size); static PyObject * -_io_StringIO_truncate(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_truncate(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; - Py_ssize_t size = self->pos; + Py_ssize_t size = ((stringio *)self)->pos; if (!_PyArg_CheckPositional("truncate", nargs, 0, 1)) { goto exit; @@ -168,7 +168,7 @@ _io_StringIO_truncate(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_truncate_impl(self, size); + return_value = _io_StringIO_truncate_impl((stringio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -194,7 +194,7 @@ static PyObject * _io_StringIO_seek_impl(stringio *self, Py_ssize_t pos, int whence); static PyObject * -_io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) +_io_StringIO_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t pos; @@ -224,7 +224,7 @@ _io_StringIO_seek(stringio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_seek_impl(self, pos, whence); + return_value = _io_StringIO_seek_impl((stringio *)self, pos, whence); Py_END_CRITICAL_SECTION(); exit: @@ -252,7 +252,7 @@ _io_StringIO_write(stringio *self, PyObject *obj) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_write_impl(self, obj); + return_value = _io_StringIO_write_impl((stringio *)self, obj); Py_END_CRITICAL_SECTION(); return return_value; @@ -276,12 +276,12 @@ static PyObject * _io_StringIO_close_impl(stringio *self); static PyObject * -_io_StringIO_close(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_close_impl(self); + return_value = _io_StringIO_close_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -371,12 +371,12 @@ static PyObject * _io_StringIO_readable_impl(stringio *self); static PyObject * -_io_StringIO_readable(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_readable_impl(self); + return_value = _io_StringIO_readable_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -395,12 +395,12 @@ static PyObject * _io_StringIO_writable_impl(stringio *self); static PyObject * -_io_StringIO_writable(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_writable_impl(self); + return_value = _io_StringIO_writable_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -419,12 +419,12 @@ static PyObject * _io_StringIO_seekable_impl(stringio *self); static PyObject * -_io_StringIO_seekable(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_seekable_impl(self); + return_value = _io_StringIO_seekable_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -442,12 +442,12 @@ static PyObject * _io_StringIO___getstate___impl(stringio *self); static PyObject * -_io_StringIO___getstate__(stringio *self, PyObject *Py_UNUSED(ignored)) +_io_StringIO___getstate__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO___getstate___impl(self); + return_value = _io_StringIO___getstate___impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -470,7 +470,7 @@ _io_StringIO___setstate__(stringio *self, PyObject *state) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO___setstate___impl(self, state); + return_value = _io_StringIO___setstate___impl((stringio *)self, state); Py_END_CRITICAL_SECTION(); return return_value; @@ -490,12 +490,12 @@ static PyObject * _io_StringIO_closed_get_impl(stringio *self); static PyObject * -_io_StringIO_closed_get(stringio *self, void *Py_UNUSED(context)) +_io_StringIO_closed_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_closed_get_impl(self); + return_value = _io_StringIO_closed_get_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -515,12 +515,12 @@ static PyObject * _io_StringIO_line_buffering_get_impl(stringio *self); static PyObject * -_io_StringIO_line_buffering_get(stringio *self, void *Py_UNUSED(context)) +_io_StringIO_line_buffering_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_line_buffering_get_impl(self); + return_value = _io_StringIO_line_buffering_get_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -540,14 +540,14 @@ static PyObject * _io_StringIO_newlines_get_impl(stringio *self); static PyObject * -_io_StringIO_newlines_get(stringio *self, void *Py_UNUSED(context)) +_io_StringIO_newlines_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_StringIO_newlines_get_impl(self); + return_value = _io_StringIO_newlines_get_impl((stringio *)self); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=9d2b092274469d42 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7796e223e778a214 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/textio.c.h b/Modules/_io/clinic/textio.c.h index 0acc1f060c811b..9ce1d70ad71052 100644 --- a/Modules/_io/clinic/textio.c.h +++ b/Modules/_io/clinic/textio.c.h @@ -379,7 +379,7 @@ _io_IncrementalNewlineDecoder_decode_impl(nldecoder_object *self, PyObject *input, int final); static PyObject * -_io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) 
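The stringio.c.h wrappers above all bracket the impl call with Py_BEGIN_CRITICAL_SECTION(self) / Py_END_CRITICAL_SECTION(). A minimal sketch of that locking pattern outside Argument Clinic, using a hypothetical counter type: on the free-threaded build it locks the object's per-object mutex, on the default (GIL) build it is effectively a no-op.

    #include "Python.h"

    /* Hypothetical object with mutable per-object state. */
    typedef struct {
        PyObject_HEAD
        Py_ssize_t hits;
    } counter_object;

    static PyObject *
    counter_increment(PyObject *self, PyObject *Py_UNUSED(ignored))
    {
        Py_ssize_t n;
        Py_BEGIN_CRITICAL_SECTION(self);
        n = ++((counter_object *)self)->hits;    /* read-modify-write, race-free */
        Py_END_CRITICAL_SECTION();
        return PyLong_FromSsize_t(n);
    }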
+_io_IncrementalNewlineDecoder_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -426,7 +426,7 @@ _io_IncrementalNewlineDecoder_decode(nldecoder_object *self, PyObject *const *ar goto exit; } skip_optional_pos: - return_value = _io_IncrementalNewlineDecoder_decode_impl(self, input, final); + return_value = _io_IncrementalNewlineDecoder_decode_impl((nldecoder_object *)self, input, final); exit: return return_value; @@ -444,9 +444,9 @@ static PyObject * _io_IncrementalNewlineDecoder_getstate_impl(nldecoder_object *self); static PyObject * -_io_IncrementalNewlineDecoder_getstate(nldecoder_object *self, PyObject *Py_UNUSED(ignored)) +_io_IncrementalNewlineDecoder_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_IncrementalNewlineDecoder_getstate_impl(self); + return _io_IncrementalNewlineDecoder_getstate_impl((nldecoder_object *)self); } PyDoc_STRVAR(_io_IncrementalNewlineDecoder_setstate__doc__, @@ -469,9 +469,9 @@ static PyObject * _io_IncrementalNewlineDecoder_reset_impl(nldecoder_object *self); static PyObject * -_io_IncrementalNewlineDecoder_reset(nldecoder_object *self, PyObject *Py_UNUSED(ignored)) +_io_IncrementalNewlineDecoder_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io_IncrementalNewlineDecoder_reset_impl(self); + return _io_IncrementalNewlineDecoder_reset_impl((nldecoder_object *)self); } PyDoc_STRVAR(_io_TextIOWrapper___init____doc__, @@ -654,7 +654,7 @@ _io_TextIOWrapper_reconfigure_impl(textio *self, PyObject *encoding, PyObject *write_through_obj); static PyObject * -_io_TextIOWrapper_reconfigure(textio *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io_TextIOWrapper_reconfigure(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -725,7 +725,7 @@ _io_TextIOWrapper_reconfigure(textio *self, PyObject *const *args, Py_ssize_t na write_through_obj = args[4]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_reconfigure_impl(self, encoding, errors, newline_obj, line_buffering_obj, write_through_obj); + return_value = _io_TextIOWrapper_reconfigure_impl((textio *)self, encoding, errors, newline_obj, line_buffering_obj, write_through_obj); Py_END_CRITICAL_SECTION(); exit: @@ -744,12 +744,12 @@ static PyObject * _io_TextIOWrapper_detach_impl(textio *self); static PyObject * -_io_TextIOWrapper_detach(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_detach_impl(self); + return_value = _io_TextIOWrapper_detach_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -767,7 +767,7 @@ static PyObject * _io_TextIOWrapper_write_impl(textio *self, PyObject *text); static PyObject * -_io_TextIOWrapper_write(textio *self, PyObject *arg) +_io_TextIOWrapper_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *text; @@ -778,7 +778,7 @@ _io_TextIOWrapper_write(textio *self, PyObject *arg) } text = arg; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_write_impl(self, text); + return_value = _io_TextIOWrapper_write_impl((textio *)self, text); Py_END_CRITICAL_SECTION(); exit: @@ -797,7 +797,7 @@ static PyObject * 
_io_TextIOWrapper_read_impl(textio *self, Py_ssize_t n); static PyObject * -_io_TextIOWrapper_read(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = -1; @@ -813,7 +813,7 @@ _io_TextIOWrapper_read(textio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_read_impl(self, n); + return_value = _io_TextIOWrapper_read_impl((textio *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -832,7 +832,7 @@ static PyObject * _io_TextIOWrapper_readline_impl(textio *self, Py_ssize_t size); static PyObject * -_io_TextIOWrapper_readline(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t size = -1; @@ -857,7 +857,7 @@ _io_TextIOWrapper_readline(textio *self, PyObject *const *args, Py_ssize_t nargs } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_readline_impl(self, size); + return_value = _io_TextIOWrapper_readline_impl((textio *)self, size); Py_END_CRITICAL_SECTION(); exit: @@ -894,7 +894,7 @@ static PyObject * _io_TextIOWrapper_seek_impl(textio *self, PyObject *cookieObj, int whence); static PyObject * -_io_TextIOWrapper_seek(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *cookieObj; @@ -913,7 +913,7 @@ _io_TextIOWrapper_seek(textio *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_seek_impl(self, cookieObj, whence); + return_value = _io_TextIOWrapper_seek_impl((textio *)self, cookieObj, whence); Py_END_CRITICAL_SECTION(); exit: @@ -936,12 +936,12 @@ static PyObject * _io_TextIOWrapper_tell_impl(textio *self); static PyObject * -_io_TextIOWrapper_tell(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_tell_impl(self); + return_value = _io_TextIOWrapper_tell_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -959,7 +959,7 @@ static PyObject * _io_TextIOWrapper_truncate_impl(textio *self, PyObject *pos); static PyObject * -_io_TextIOWrapper_truncate(textio *self, PyObject *const *args, Py_ssize_t nargs) +_io_TextIOWrapper_truncate(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *pos = Py_None; @@ -973,7 +973,7 @@ _io_TextIOWrapper_truncate(textio *self, PyObject *const *args, Py_ssize_t nargs pos = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_truncate_impl(self, pos); + return_value = _io_TextIOWrapper_truncate_impl((textio *)self, pos); Py_END_CRITICAL_SECTION(); exit: @@ -992,12 +992,12 @@ static PyObject * _io_TextIOWrapper_fileno_impl(textio *self); static PyObject * -_io_TextIOWrapper_fileno(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_fileno_impl(self); + return_value = _io_TextIOWrapper_fileno_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ 
-1015,12 +1015,12 @@ static PyObject * _io_TextIOWrapper_seekable_impl(textio *self); static PyObject * -_io_TextIOWrapper_seekable(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_seekable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_seekable_impl(self); + return_value = _io_TextIOWrapper_seekable_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1038,12 +1038,12 @@ static PyObject * _io_TextIOWrapper_readable_impl(textio *self); static PyObject * -_io_TextIOWrapper_readable(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_readable_impl(self); + return_value = _io_TextIOWrapper_readable_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1061,12 +1061,12 @@ static PyObject * _io_TextIOWrapper_writable_impl(textio *self); static PyObject * -_io_TextIOWrapper_writable(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_writable_impl(self); + return_value = _io_TextIOWrapper_writable_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1084,12 +1084,12 @@ static PyObject * _io_TextIOWrapper_isatty_impl(textio *self); static PyObject * -_io_TextIOWrapper_isatty(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_isatty_impl(self); + return_value = _io_TextIOWrapper_isatty_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1107,12 +1107,12 @@ static PyObject * _io_TextIOWrapper_flush_impl(textio *self); static PyObject * -_io_TextIOWrapper_flush(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_flush_impl(self); + return_value = _io_TextIOWrapper_flush_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1130,12 +1130,12 @@ static PyObject * _io_TextIOWrapper_close_impl(textio *self); static PyObject * -_io_TextIOWrapper_close(textio *self, PyObject *Py_UNUSED(ignored)) +_io_TextIOWrapper_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_close_impl(self); + return_value = _io_TextIOWrapper_close_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1155,12 +1155,12 @@ static PyObject * _io_TextIOWrapper_name_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_name_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_name_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_name_get_impl(self); + return_value = _io_TextIOWrapper_name_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1180,12 +1180,12 @@ static PyObject * _io_TextIOWrapper_closed_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_closed_get(textio *self, void *Py_UNUSED(context)) 
+_io_TextIOWrapper_closed_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_closed_get_impl(self); + return_value = _io_TextIOWrapper_closed_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1205,12 +1205,12 @@ static PyObject * _io_TextIOWrapper_newlines_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_newlines_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_newlines_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_newlines_get_impl(self); + return_value = _io_TextIOWrapper_newlines_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1230,12 +1230,12 @@ static PyObject * _io_TextIOWrapper_errors_get_impl(textio *self); static PyObject * -_io_TextIOWrapper_errors_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper_errors_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper_errors_get_impl(self); + return_value = _io_TextIOWrapper_errors_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1255,12 +1255,12 @@ static PyObject * _io_TextIOWrapper__CHUNK_SIZE_get_impl(textio *self); static PyObject * -_io_TextIOWrapper__CHUNK_SIZE_get(textio *self, void *Py_UNUSED(context)) +_io_TextIOWrapper__CHUNK_SIZE_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper__CHUNK_SIZE_get_impl(self); + return_value = _io_TextIOWrapper__CHUNK_SIZE_get_impl((textio *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1280,14 +1280,14 @@ static int _io_TextIOWrapper__CHUNK_SIZE_set_impl(textio *self, PyObject *value); static int -_io_TextIOWrapper__CHUNK_SIZE_set(textio *self, PyObject *value, void *Py_UNUSED(context)) +_io_TextIOWrapper__CHUNK_SIZE_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _io_TextIOWrapper__CHUNK_SIZE_set_impl(self, value); + return_value = _io_TextIOWrapper__CHUNK_SIZE_set_impl((textio *)self, value); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=423a320f087792b9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6e64e43113a97340 input=a9049054013a1b77]*/ diff --git a/Modules/_io/clinic/winconsoleio.c.h b/Modules/_io/clinic/winconsoleio.c.h index df281dfeb13fef..ba6dcde6e01064 100644 --- a/Modules/_io/clinic/winconsoleio.c.h +++ b/Modules/_io/clinic/winconsoleio.c.h @@ -27,13 +27,13 @@ static PyObject * _io__WindowsConsoleIO_close_impl(winconsoleio *self, PyTypeObject *cls); static PyObject * -_io__WindowsConsoleIO_close(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__WindowsConsoleIO_close(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "close() takes no arguments"); return NULL; } - return _io__WindowsConsoleIO_close_impl(self, cls); + return _io__WindowsConsoleIO_close_impl((winconsoleio *)self, cls); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -154,9 +154,9 @@ static PyObject * _io__WindowsConsoleIO_fileno_impl(winconsoleio *self); static PyObject * 
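The textio.c.h property hunks above (name_get, closed_get, _CHUNK_SIZE_set, ...) now match the getter/setter typedefs exactly, which lets them sit in a PyGetSetDef table without function-pointer casts — presumably the point of the PyObject *self conversion. A sketch of that wiring with placeholder names:

    #include "Python.h"

    static PyObject *
    myobj_closed_get(PyObject *self, void *Py_UNUSED(context))
    {
        Py_RETURN_FALSE;                      /* placeholder: never closed */
    }

    static int
    myobj_closed_set(PyObject *self, PyObject *value, void *Py_UNUSED(context))
    {
        PyErr_SetString(PyExc_AttributeError, "read-only attribute");
        return -1;
    }

    /* No casts needed: the signatures already are getter/setter. */
    static PyGetSetDef myobj_getset[] = {
        {"closed", myobj_closed_get, myobj_closed_set, PyDoc_STR("closed flag"), NULL},
        {NULL}   /* sentinel */
    };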
-_io__WindowsConsoleIO_fileno(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_fileno_impl(self); + return _io__WindowsConsoleIO_fileno_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -176,9 +176,9 @@ static PyObject * _io__WindowsConsoleIO_readable_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_readable(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_readable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_readable_impl(self); + return _io__WindowsConsoleIO_readable_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -198,9 +198,9 @@ static PyObject * _io__WindowsConsoleIO_writable_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_writable(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_writable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_writable_impl(self); + return _io__WindowsConsoleIO_writable_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -221,7 +221,7 @@ _io__WindowsConsoleIO_readinto_impl(winconsoleio *self, PyTypeObject *cls, Py_buffer *buffer); static PyObject * -_io__WindowsConsoleIO_readinto(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__WindowsConsoleIO_readinto(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -249,7 +249,7 @@ _io__WindowsConsoleIO_readinto(winconsoleio *self, PyTypeObject *cls, PyObject * _PyArg_BadArgument("readinto", "argument 1", "read-write bytes-like object", args[0]); goto exit; } - return_value = _io__WindowsConsoleIO_readinto_impl(self, cls, &buffer); + return_value = _io__WindowsConsoleIO_readinto_impl((winconsoleio *)self, cls, &buffer); exit: /* Cleanup for buffer */ @@ -279,9 +279,9 @@ static PyObject * _io__WindowsConsoleIO_readall_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_readall(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_readall(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_readall_impl(self); + return _io__WindowsConsoleIO_readall_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -306,7 +306,7 @@ _io__WindowsConsoleIO_read_impl(winconsoleio *self, PyTypeObject *cls, Py_ssize_t size); static PyObject * -_io__WindowsConsoleIO_read(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_io__WindowsConsoleIO_read(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -337,7 +337,7 @@ _io__WindowsConsoleIO_read(winconsoleio *self, PyTypeObject *cls, PyObject *cons goto exit; } skip_optional_posonly: - return_value = _io__WindowsConsoleIO_read_impl(self, cls, size); + return_value = _io__WindowsConsoleIO_read_impl((winconsoleio *)self, cls, size); exit: return return_value; @@ -364,7 +364,7 @@ _io__WindowsConsoleIO_write_impl(winconsoleio *self, PyTypeObject *cls, Py_buffer *b); static PyObject * -_io__WindowsConsoleIO_write(winconsoleio *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) +_io__WindowsConsoleIO_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -391,7 +391,7 @@ _io__WindowsConsoleIO_write(winconsoleio *self, PyTypeObject *cls, PyObject *con if (PyObject_GetBuffer(args[0], &b, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _io__WindowsConsoleIO_write_impl(self, cls, &b); + return_value = _io__WindowsConsoleIO_write_impl((winconsoleio *)self, cls, &b); exit: /* Cleanup for b */ @@ -419,9 +419,9 @@ static PyObject * _io__WindowsConsoleIO_isatty_impl(winconsoleio *self); static PyObject * -_io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored)) +_io__WindowsConsoleIO_isatty(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _io__WindowsConsoleIO_isatty_impl(self); + return _io__WindowsConsoleIO_isatty_impl((winconsoleio *)self); } #endif /* defined(HAVE_WINDOWS_CONSOLE_IO) */ @@ -461,4 +461,4 @@ _io__WindowsConsoleIO_isatty(winconsoleio *self, PyObject *Py_UNUSED(ignored)) #ifndef _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #define _IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF #endif /* !defined(_IO__WINDOWSCONSOLEIO_ISATTY_METHODDEF) */ -/*[clinic end generated code: output=78e0f6abf4de2d6d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=edc47f5c49589045 input=a9049054013a1b77]*/ diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c index 65e8d97aa8ac19..d1a71298a9087f 100644 --- a/Modules/_io/stringio.c +++ b/Modules/_io/stringio.c @@ -30,7 +30,7 @@ typedef struct { _PyUnicodeWriter is destroyed. */ int state; - _PyUnicodeWriter writer; + PyUnicodeWriter *writer; char ok; /* initialized? */ char closed; @@ -129,14 +129,18 @@ resize_buffer(stringio *self, size_t size) static PyObject * make_intermediate(stringio *self) { - PyObject *intermediate = _PyUnicodeWriter_Finish(&self->writer); + PyObject *intermediate = PyUnicodeWriter_Finish(self->writer); + self->writer = NULL; self->state = STATE_REALIZED; if (intermediate == NULL) return NULL; - _PyUnicodeWriter_Init(&self->writer); - self->writer.overallocate = 1; - if (_PyUnicodeWriter_WriteStr(&self->writer, intermediate)) { + self->writer = PyUnicodeWriter_Create(0); + if (self->writer == NULL) { + Py_DECREF(intermediate); + return NULL; + } + if (PyUnicodeWriter_WriteStr(self->writer, intermediate)) { Py_DECREF(intermediate); return NULL; } @@ -155,7 +159,8 @@ realize(stringio *self) assert(self->state == STATE_ACCUMULATING); self->state = STATE_REALIZED; - intermediate = _PyUnicodeWriter_Finish(&self->writer); + intermediate = PyUnicodeWriter_Finish(self->writer); + self->writer = NULL; if (intermediate == NULL) return -1; @@ -217,7 +222,7 @@ write_str(stringio *self, PyObject *obj) if (self->state == STATE_ACCUMULATING) { if (self->string_size == self->pos) { - if (_PyUnicodeWriter_WriteStr(&self->writer, decoded)) + if (PyUnicodeWriter_WriteStr(self->writer, decoded)) goto fail; goto success; } @@ -437,7 +442,7 @@ stringio_iternext(stringio *self) /*[clinic input] @critical_section _io.StringIO.truncate - pos as size: Py_ssize_t(accept={int, NoneType}, c_default="self->pos") = None + pos as size: Py_ssize_t(accept={int, NoneType}, c_default="((stringio *)self)->pos") = None / Truncate size to pos. @@ -449,7 +454,7 @@ Returns the new absolute position. 
static PyObject * _io_StringIO_truncate_impl(stringio *self, Py_ssize_t size) -/*[clinic end generated code: output=eb3aef8e06701365 input=461b872dce238452]*/ +/*[clinic end generated code: output=eb3aef8e06701365 input=fa8a6c98bb2ba780]*/ { CHECK_INITIALIZED(self); CHECK_CLOSED(self); @@ -577,7 +582,8 @@ _io_StringIO_close_impl(stringio *self) /* Free up some memory */ if (resize_buffer(self, 0) < 0) return NULL; - _PyUnicodeWriter_Dealloc(&self->writer); + PyUnicodeWriter_Discard(self->writer); + self->writer = NULL; Py_CLEAR(self->readnl); Py_CLEAR(self->writenl); Py_CLEAR(self->decoder); @@ -615,7 +621,7 @@ stringio_dealloc(stringio *self) PyMem_Free(self->buf); self->buf = NULL; } - _PyUnicodeWriter_Dealloc(&self->writer); + PyUnicodeWriter_Discard(self->writer); (void)stringio_clear(self); if (self->weakreflist != NULL) { PyObject_ClearWeakRefs((PyObject *) self); @@ -699,7 +705,8 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, self->ok = 0; - _PyUnicodeWriter_Dealloc(&self->writer); + PyUnicodeWriter_Discard(self->writer); + self->writer = NULL; Py_CLEAR(self->readnl); Py_CLEAR(self->writenl); Py_CLEAR(self->decoder); @@ -754,8 +761,10 @@ _io_StringIO___init___impl(stringio *self, PyObject *value, /* Empty stringio object, we can start by accumulating */ if (resize_buffer(self, 0) < 0) return -1; - _PyUnicodeWriter_Init(&self->writer); - self->writer.overallocate = 1; + self->writer = PyUnicodeWriter_Create(0); + if (self->writer == NULL) { + return -1; + } self->state = STATE_ACCUMULATING; } self->pos = 0; diff --git a/Modules/_json.c b/Modules/_json.c index a99abbe72bf7a0..31a5e935e13ad9 100644 --- a/Modules/_json.c +++ b/Modules/_json.c @@ -353,6 +353,13 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) { return tpl; } +static inline int +_PyUnicodeWriter_IsEmpty(PyUnicodeWriter *writer_pub) +{ + _PyUnicodeWriter *writer = (_PyUnicodeWriter*)writer_pub; + return (writer->pos == 0); +} + static PyObject * scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr) { @@ -371,9 +378,10 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next const void *buf; int kind; - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto bail; + } len = PyUnicode_GET_LENGTH(pystr); buf = PyUnicode_DATA(pystr); @@ -404,11 +412,12 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next if (c == '"') { // Fast path for simple case. 
- if (writer.buffer == NULL) { + if (_PyUnicodeWriter_IsEmpty(writer)) { PyObject *ret = PyUnicode_Substring(pystr, end, next); if (ret == NULL) { goto bail; } + PyUnicodeWriter_Discard(writer); *next_end_ptr = next + 1;; return ret; } @@ -420,7 +429,7 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next /* Pick up this chunk if it's not zero length */ if (next != end) { - if (_PyUnicodeWriter_WriteSubstring(&writer, pystr, end, next) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, pystr, end, next) < 0) { goto bail; } } @@ -511,18 +520,18 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next end -= 6; } } - if (_PyUnicodeWriter_WriteChar(&writer, c) < 0) { + if (PyUnicodeWriter_WriteChar(writer, c) < 0) { goto bail; } } - rval = _PyUnicodeWriter_Finish(&writer); + rval = PyUnicodeWriter_Finish(writer); *next_end_ptr = end; return rval; bail: *next_end_ptr = -1; - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Modules/_multiprocessing/clinic/semaphore.c.h b/Modules/_multiprocessing/clinic/semaphore.c.h index 2702c3369c76ed..e789137ec1e013 100644 --- a/Modules/_multiprocessing/clinic/semaphore.c.h +++ b/Modules/_multiprocessing/clinic/semaphore.c.h @@ -25,7 +25,7 @@ _multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking, PyObject *timeout_obj); static PyObject * -_multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multiprocessing_SemLock_acquire(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -78,7 +78,7 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ timeout_obj = args[1]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_acquire_impl(self, blocking, timeout_obj); + return_value = _multiprocessing_SemLock_acquire_impl((SemLockObject *)self, blocking, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -102,12 +102,12 @@ static PyObject * _multiprocessing_SemLock_release_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock_release(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock_release(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_release_impl(self); + return_value = _multiprocessing_SemLock_release_impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -131,7 +131,7 @@ _multiprocessing_SemLock_acquire_impl(SemLockObject *self, int blocking, PyObject *timeout_obj); static PyObject * -_multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multiprocessing_SemLock_acquire(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -184,7 +184,7 @@ _multiprocessing_SemLock_acquire(SemLockObject *self, PyObject *const *args, Py_ timeout_obj = args[1]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_acquire_impl(self, blocking, timeout_obj); + return_value = _multiprocessing_SemLock_acquire_impl((SemLockObject *)self, blocking, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -208,12 +208,12 @@ static PyObject 
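The stringio.c and _json.c hunks above move from the private _PyUnicodeWriter struct to the public PyUnicodeWriter API. A minimal sketch of that API's lifecycle — create, write, then either Finish (which consumes the writer) or Discard on error; the helper name and the colon separator are purely illustrative, and the two arguments are expected to be str objects:

    #include "Python.h"

    static PyObject *
    join_with_colon(PyObject *left, PyObject *right)
    {
        PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);   /* 0 = no size hint */
        if (writer == NULL) {
            return NULL;
        }
        if (PyUnicodeWriter_WriteStr(writer, left) < 0
            || PyUnicodeWriter_WriteChar(writer, ':') < 0
            || PyUnicodeWriter_WriteStr(writer, right) < 0)
        {
            PyUnicodeWriter_Discard(writer);    /* error path: free, don't finish */
            return NULL;
        }
        return PyUnicodeWriter_Finish(writer);  /* consumes the writer */
    }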
* _multiprocessing_SemLock_release_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock_release(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock_release(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock_release_impl(self); + return_value = _multiprocessing_SemLock_release_impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -358,12 +358,12 @@ static PyObject * _multiprocessing_SemLock__count_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__count(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__count(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock__count_impl(self); + return_value = _multiprocessing_SemLock__count_impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -386,9 +386,9 @@ static PyObject * _multiprocessing_SemLock__is_mine_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__is_mine(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__is_mine(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__is_mine_impl(self); + return _multiprocessing_SemLock__is_mine_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -408,9 +408,9 @@ static PyObject * _multiprocessing_SemLock__get_value_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__get_value(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__get_value(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__get_value_impl(self); + return _multiprocessing_SemLock__get_value_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -430,9 +430,9 @@ static PyObject * _multiprocessing_SemLock__is_zero_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__is_zero(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__is_zero(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__is_zero_impl(self); + return _multiprocessing_SemLock__is_zero_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -452,9 +452,9 @@ static PyObject * _multiprocessing_SemLock__after_fork_impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock__after_fork(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock__after_fork(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multiprocessing_SemLock__after_fork_impl(self); + return _multiprocessing_SemLock__after_fork_impl((SemLockObject *)self); } #endif /* defined(HAVE_MP_SEMAPHORE) */ @@ -474,12 +474,12 @@ static PyObject * _multiprocessing_SemLock___enter___impl(SemLockObject *self); static PyObject * -_multiprocessing_SemLock___enter__(SemLockObject *self, PyObject *Py_UNUSED(ignored)) +_multiprocessing_SemLock___enter__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock___enter___impl(self); + return_value = _multiprocessing_SemLock___enter___impl((SemLockObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -504,7 +504,7 @@ _multiprocessing_SemLock___exit___impl(SemLockObject *self, PyObject *exc_value, PyObject *exc_tb); static 
PyObject * -_multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py_ssize_t nargs) +_multiprocessing_SemLock___exit__(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type = Py_None; @@ -528,7 +528,7 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py exc_tb = args[2]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _multiprocessing_SemLock___exit___impl(self, exc_type, exc_value, exc_tb); + return_value = _multiprocessing_SemLock___exit___impl((SemLockObject *)self, exc_type, exc_value, exc_tb); Py_END_CRITICAL_SECTION(); exit: @@ -576,4 +576,4 @@ _multiprocessing_SemLock___exit__(SemLockObject *self, PyObject *const *args, Py #ifndef _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #define _MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF #endif /* !defined(_MULTIPROCESSING_SEMLOCK___EXIT___METHODDEF) */ -/*[clinic end generated code: output=9023d3e48a24afd2 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e28d0fdbfefd1235 input=a9049054013a1b77]*/ diff --git a/Modules/_multiprocessing/semaphore.c b/Modules/_multiprocessing/semaphore.c index 9eef7c25636899..036db2cd4c6c85 100644 --- a/Modules/_multiprocessing/semaphore.c +++ b/Modules/_multiprocessing/semaphore.c @@ -28,6 +28,8 @@ typedef struct { char *name; } SemLockObject; +#define _SemLockObject_CAST(op) ((SemLockObject *)(op)) + /*[python input] class SEM_HANDLE_converter(CConverter): type = "SEM_HANDLE" @@ -576,8 +578,9 @@ _multiprocessing_SemLock__rebuild_impl(PyTypeObject *type, SEM_HANDLE handle, } static void -semlock_dealloc(SemLockObject* self) +semlock_dealloc(PyObject *op) { + SemLockObject *self = _SemLockObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); if (self->handle != SEM_FAILED) @@ -718,7 +721,7 @@ _multiprocessing_SemLock___exit___impl(SemLockObject *self, } static int -semlock_traverse(SemLockObject *s, visitproc visit, void *arg) +semlock_traverse(PyObject *s, visitproc visit, void *arg) { Py_VISIT(Py_TYPE(s)); return 0; diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 599b5f92c2a1f7..a6cfb2deeb23c1 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -2159,8 +2159,10 @@ save_long(PicklerObject *self, PyObject *obj) unsigned char *pdata; char header[5]; int i; - int sign = _PyLong_Sign(obj); + int sign; + assert(PyLong_Check(obj)); + (void)PyLong_GetSign(obj, &sign); if (sign == 0) { header[0] = LONG1; header[1] = 0; /* It's 0 -- an empty bytestring. 
*/ diff --git a/Modules/_sqlite/clinic/blob.c.h b/Modules/_sqlite/clinic/blob.c.h index b95ba948aaf97f..921e7cbd7ffcab 100644 --- a/Modules/_sqlite/clinic/blob.c.h +++ b/Modules/_sqlite/clinic/blob.c.h @@ -17,9 +17,9 @@ static PyObject * blob_close_impl(pysqlite_Blob *self); static PyObject * -blob_close(pysqlite_Blob *self, PyObject *Py_UNUSED(ignored)) +blob_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return blob_close_impl(self); + return blob_close_impl((pysqlite_Blob *)self); } PyDoc_STRVAR(blob_read__doc__, @@ -42,7 +42,7 @@ static PyObject * blob_read_impl(pysqlite_Blob *self, int length); static PyObject * -blob_read(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) +blob_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int length = -1; @@ -58,7 +58,7 @@ blob_read(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = blob_read_impl(self, length); + return_value = blob_read_impl((pysqlite_Blob *)self, length); exit: return return_value; @@ -80,7 +80,7 @@ static PyObject * blob_write_impl(pysqlite_Blob *self, Py_buffer *data); static PyObject * -blob_write(pysqlite_Blob *self, PyObject *arg) +blob_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer data = {NULL, NULL}; @@ -88,7 +88,7 @@ blob_write(pysqlite_Blob *self, PyObject *arg) if (PyObject_GetBuffer(arg, &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = blob_write_impl(self, &data); + return_value = blob_write_impl((pysqlite_Blob *)self, &data); exit: /* Cleanup for data */ @@ -116,7 +116,7 @@ static PyObject * blob_seek_impl(pysqlite_Blob *self, int offset, int origin); static PyObject * -blob_seek(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) +blob_seek(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int offset; @@ -137,7 +137,7 @@ blob_seek(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = blob_seek_impl(self, offset, origin); + return_value = blob_seek_impl((pysqlite_Blob *)self, offset, origin); exit: return return_value; @@ -156,9 +156,9 @@ static PyObject * blob_tell_impl(pysqlite_Blob *self); static PyObject * -blob_tell(pysqlite_Blob *self, PyObject *Py_UNUSED(ignored)) +blob_tell(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return blob_tell_impl(self); + return blob_tell_impl((pysqlite_Blob *)self); } PyDoc_STRVAR(blob_enter__doc__, @@ -174,9 +174,9 @@ static PyObject * blob_enter_impl(pysqlite_Blob *self); static PyObject * -blob_enter(pysqlite_Blob *self, PyObject *Py_UNUSED(ignored)) +blob_enter(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return blob_enter_impl(self); + return blob_enter_impl((pysqlite_Blob *)self); } PyDoc_STRVAR(blob_exit__doc__, @@ -193,7 +193,7 @@ blob_exit_impl(pysqlite_Blob *self, PyObject *type, PyObject *val, PyObject *tb); static PyObject * -blob_exit(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) +blob_exit(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *type; @@ -206,9 +206,9 @@ blob_exit(pysqlite_Blob *self, PyObject *const *args, Py_ssize_t nargs) type = args[0]; val = args[1]; tb = args[2]; - return_value = blob_exit_impl(self, type, val, tb); + return_value = blob_exit_impl((pysqlite_Blob *)self, type, val, tb); exit: return return_value; } -/*[clinic end generated code: output=31abd55660e0c5af input=a9049054013a1b77]*/ +/*[clinic end 
generated code: output=f03f4ba622b67ae0 input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/clinic/connection.c.h b/Modules/_sqlite/clinic/connection.c.h index 42eb6eb2f12554..82fba44eb1b074 100644 --- a/Modules/_sqlite/clinic/connection.c.h +++ b/Modules/_sqlite/clinic/connection.c.h @@ -182,7 +182,7 @@ static PyObject * pysqlite_connection_cursor_impl(pysqlite_Connection *self, PyObject *factory); static PyObject * -pysqlite_connection_cursor(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_cursor(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -224,7 +224,7 @@ pysqlite_connection_cursor(pysqlite_Connection *self, PyObject *const *args, Py_ } factory = args[0]; skip_optional_pos: - return_value = pysqlite_connection_cursor_impl(self, factory); + return_value = pysqlite_connection_cursor_impl((pysqlite_Connection *)self, factory); exit: return return_value; @@ -255,7 +255,7 @@ blobopen_impl(pysqlite_Connection *self, const char *table, const char *col, sqlite3_int64 row, int readonly, const char *name); static PyObject * -blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +blobopen(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -351,7 +351,7 @@ blobopen(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyO goto exit; } skip_optional_kwonly: - return_value = blobopen_impl(self, table, col, row, readonly, name); + return_value = blobopen_impl((pysqlite_Connection *)self, table, col, row, readonly, name); exit: return return_value; @@ -372,9 +372,9 @@ static PyObject * pysqlite_connection_close_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_close(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_close_impl(self); + return pysqlite_connection_close_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_commit__doc__, @@ -392,9 +392,9 @@ static PyObject * pysqlite_connection_commit_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_commit(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_commit(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_commit_impl(self); + return pysqlite_connection_commit_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_rollback__doc__, @@ -412,9 +412,9 @@ static PyObject * pysqlite_connection_rollback_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_rollback(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_rollback(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_rollback_impl(self); + return pysqlite_connection_rollback_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_create_function__doc__, @@ -449,7 +449,7 @@ pysqlite_connection_create_function_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_create_function(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_create_function(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, 
PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -525,7 +525,7 @@ pysqlite_connection_create_function(pysqlite_Connection *self, PyTypeObject *cls goto exit; } skip_optional_kwonly: - return_value = pysqlite_connection_create_function_impl(self, cls, name, narg, func, deterministic); + return_value = pysqlite_connection_create_function_impl((pysqlite_Connection *)self, cls, name, narg, func, deterministic); exit: return return_value; @@ -557,7 +557,7 @@ create_window_function_impl(pysqlite_Connection *self, PyTypeObject *cls, PyObject *aggregate_class); static PyObject * -create_window_function(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +create_window_function(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -601,7 +601,7 @@ create_window_function(pysqlite_Connection *self, PyTypeObject *cls, PyObject *c goto exit; } aggregate_class = args[2]; - return_value = create_window_function_impl(self, cls, name, num_params, aggregate_class); + return_value = create_window_function_impl((pysqlite_Connection *)self, cls, name, num_params, aggregate_class); exit: return return_value; @@ -642,7 +642,7 @@ pysqlite_connection_create_aggregate_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_create_aggregate(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_create_aggregate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -708,7 +708,7 @@ pysqlite_connection_create_aggregate(pysqlite_Connection *self, PyTypeObject *cl goto exit; } aggregate_class = args[2]; - return_value = pysqlite_connection_create_aggregate_impl(self, cls, name, n_arg, aggregate_class); + return_value = pysqlite_connection_create_aggregate_impl((pysqlite_Connection *)self, cls, name, n_arg, aggregate_class); exit: return return_value; @@ -745,7 +745,7 @@ pysqlite_connection_set_authorizer_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_set_authorizer(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_set_authorizer(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -792,7 +792,7 @@ pysqlite_connection_set_authorizer(pysqlite_Connection *self, PyTypeObject *cls, } } callable = args[0]; - return_value = pysqlite_connection_set_authorizer_impl(self, cls, callable); + return_value = pysqlite_connection_set_authorizer_impl((pysqlite_Connection *)self, cls, callable); exit: return return_value; @@ -839,7 +839,7 @@ pysqlite_connection_set_progress_handler_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_set_progress_handler(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_set_progress_handler(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ 
-891,7 +891,7 @@ pysqlite_connection_set_progress_handler(pysqlite_Connection *self, PyTypeObject if (n == -1 && PyErr_Occurred()) { goto exit; } - return_value = pysqlite_connection_set_progress_handler_impl(self, cls, callable, n); + return_value = pysqlite_connection_set_progress_handler_impl((pysqlite_Connection *)self, cls, callable, n); exit: return return_value; @@ -928,7 +928,7 @@ pysqlite_connection_set_trace_callback_impl(pysqlite_Connection *self, #endif static PyObject * -pysqlite_connection_set_trace_callback(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_set_trace_callback(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -975,7 +975,7 @@ pysqlite_connection_set_trace_callback(pysqlite_Connection *self, PyTypeObject * } } callable = args[0]; - return_value = pysqlite_connection_set_trace_callback_impl(self, cls, callable); + return_value = pysqlite_connection_set_trace_callback_impl((pysqlite_Connection *)self, cls, callable); exit: return return_value; @@ -997,7 +997,7 @@ pysqlite_connection_enable_load_extension_impl(pysqlite_Connection *self, int onoff); static PyObject * -pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *arg) +pysqlite_connection_enable_load_extension(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int onoff; @@ -1006,7 +1006,7 @@ pysqlite_connection_enable_load_extension(pysqlite_Connection *self, PyObject *a if (onoff < 0) { goto exit; } - return_value = pysqlite_connection_enable_load_extension_impl(self, onoff); + return_value = pysqlite_connection_enable_load_extension_impl((pysqlite_Connection *)self, onoff); exit: return return_value; @@ -1031,7 +1031,7 @@ pysqlite_connection_load_extension_impl(pysqlite_Connection *self, const char *entrypoint); static PyObject * -pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_load_extension(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1104,7 +1104,7 @@ pysqlite_connection_load_extension(pysqlite_Connection *self, PyObject *const *a goto exit; } skip_optional_kwonly: - return_value = pysqlite_connection_load_extension_impl(self, extension_name, entrypoint); + return_value = pysqlite_connection_load_extension_impl((pysqlite_Connection *)self, extension_name, entrypoint); exit: return return_value; @@ -1126,7 +1126,7 @@ pysqlite_connection_execute_impl(pysqlite_Connection *self, PyObject *sql, PyObject *parameters); static PyObject * -pysqlite_connection_execute(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_connection_execute(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -1145,7 +1145,7 @@ pysqlite_connection_execute(pysqlite_Connection *self, PyObject *const *args, Py } parameters = args[1]; skip_optional: - return_value = pysqlite_connection_execute_impl(self, sql, parameters); + return_value = pysqlite_connection_execute_impl((pysqlite_Connection *)self, sql, parameters); exit: return return_value; @@ -1165,7 +1165,7 @@ pysqlite_connection_executemany_impl(pysqlite_Connection *self, PyObject *sql, PyObject *parameters); static PyObject * 
-pysqlite_connection_executemany(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_connection_executemany(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -1180,7 +1180,7 @@ pysqlite_connection_executemany(pysqlite_Connection *self, PyObject *const *args } sql = args[0]; parameters = args[1]; - return_value = pysqlite_connection_executemany_impl(self, sql, parameters); + return_value = pysqlite_connection_executemany_impl((pysqlite_Connection *)self, sql, parameters); exit: return return_value; @@ -1208,9 +1208,9 @@ static PyObject * pysqlite_connection_interrupt_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_interrupt(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_interrupt(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_interrupt_impl(self); + return pysqlite_connection_interrupt_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_iterdump__doc__, @@ -1230,7 +1230,7 @@ pysqlite_connection_iterdump_impl(pysqlite_Connection *self, PyObject *filter); static PyObject * -pysqlite_connection_iterdump(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_iterdump(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1272,7 +1272,7 @@ pysqlite_connection_iterdump(pysqlite_Connection *self, PyObject *const *args, P } filter = args[0]; skip_optional_kwonly: - return_value = pysqlite_connection_iterdump_impl(self, filter); + return_value = pysqlite_connection_iterdump_impl((pysqlite_Connection *)self, filter); exit: return return_value; @@ -1295,7 +1295,7 @@ pysqlite_connection_backup_impl(pysqlite_Connection *self, double sleep); static PyObject * -pysqlite_connection_backup(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_backup(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1388,7 +1388,7 @@ pysqlite_connection_backup(pysqlite_Connection *self, PyObject *const *args, Py_ } } skip_optional_kwonly: - return_value = pysqlite_connection_backup_impl(self, target, pages, progress, name, sleep); + return_value = pysqlite_connection_backup_impl((pysqlite_Connection *)self, target, pages, progress, name, sleep); exit: return return_value; @@ -1410,7 +1410,7 @@ pysqlite_connection_create_collation_impl(pysqlite_Connection *self, PyObject *callable); static PyObject * -pysqlite_connection_create_collation(pysqlite_Connection *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_connection_create_collation(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1449,7 +1449,7 @@ pysqlite_connection_create_collation(pysqlite_Connection *self, PyTypeObject *cl goto exit; } callable = args[1]; - return_value = pysqlite_connection_create_collation_impl(self, cls, name, callable); + return_value = pysqlite_connection_create_collation_impl((pysqlite_Connection *)self, cls, name, callable); exit: return return_value; @@ -1478,7 +1478,7 @@ static PyObject * serialize_impl(pysqlite_Connection *self, 
const char *name); static PyObject * -serialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +serialize(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1532,7 +1532,7 @@ serialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, Py goto exit; } skip_optional_kwonly: - return_value = serialize_impl(self, name); + return_value = serialize_impl((pysqlite_Connection *)self, name); exit: return return_value; @@ -1568,7 +1568,7 @@ deserialize_impl(pysqlite_Connection *self, Py_buffer *data, const char *name); static PyObject * -deserialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +deserialize(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1638,7 +1638,7 @@ deserialize(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } skip_optional_kwonly: - return_value = deserialize_impl(self, &data, name); + return_value = deserialize_impl((pysqlite_Connection *)self, &data, name); exit: /* Cleanup for data */ @@ -1666,9 +1666,9 @@ static PyObject * pysqlite_connection_enter_impl(pysqlite_Connection *self); static PyObject * -pysqlite_connection_enter(pysqlite_Connection *self, PyObject *Py_UNUSED(ignored)) +pysqlite_connection_enter(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_connection_enter_impl(self); + return pysqlite_connection_enter_impl((pysqlite_Connection *)self); } PyDoc_STRVAR(pysqlite_connection_exit__doc__, @@ -1687,7 +1687,7 @@ pysqlite_connection_exit_impl(pysqlite_Connection *self, PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb); static PyObject * -pysqlite_connection_exit(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_connection_exit(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type; @@ -1700,7 +1700,7 @@ pysqlite_connection_exit(pysqlite_Connection *self, PyObject *const *args, Py_ss exc_type = args[0]; exc_value = args[1]; exc_tb = args[2]; - return_value = pysqlite_connection_exit_impl(self, exc_type, exc_value, exc_tb); + return_value = pysqlite_connection_exit_impl((pysqlite_Connection *)self, exc_type, exc_value, exc_tb); exit: return return_value; @@ -1729,7 +1729,7 @@ static PyObject * setlimit_impl(pysqlite_Connection *self, int category, int limit); static PyObject * -setlimit(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +setlimit(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int category; @@ -1746,7 +1746,7 @@ setlimit(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) if (limit == -1 && PyErr_Occurred()) { goto exit; } - return_value = setlimit_impl(self, category, limit); + return_value = setlimit_impl((pysqlite_Connection *)self, category, limit); exit: return return_value; @@ -1768,7 +1768,7 @@ static PyObject * getlimit_impl(pysqlite_Connection *self, int category); static PyObject * -getlimit(pysqlite_Connection *self, PyObject *arg) +getlimit(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int category; @@ -1777,7 +1777,7 @@ getlimit(pysqlite_Connection *self, PyObject *arg) if (category == -1 && PyErr_Occurred()) { goto exit; } - return_value = getlimit_impl(self, 
category); + return_value = getlimit_impl((pysqlite_Connection *)self, category); exit: return return_value; @@ -1799,7 +1799,7 @@ static PyObject * setconfig_impl(pysqlite_Connection *self, int op, int enable); static PyObject * -setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) +setconfig(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int op; @@ -1820,7 +1820,7 @@ setconfig(pysqlite_Connection *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = setconfig_impl(self, op, enable); + return_value = setconfig_impl((pysqlite_Connection *)self, op, enable); exit: return return_value; @@ -1842,7 +1842,7 @@ static int getconfig_impl(pysqlite_Connection *self, int op); static PyObject * -getconfig(pysqlite_Connection *self, PyObject *arg) +getconfig(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int op; @@ -1852,7 +1852,7 @@ getconfig(pysqlite_Connection *self, PyObject *arg) if (op == -1 && PyErr_Occurred()) { goto exit; } - _return_value = getconfig_impl(self, op); + _return_value = getconfig_impl((pysqlite_Connection *)self, op); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -1881,4 +1881,4 @@ getconfig(pysqlite_Connection *self, PyObject *arg) #ifndef DESERIALIZE_METHODDEF #define DESERIALIZE_METHODDEF #endif /* !defined(DESERIALIZE_METHODDEF) */ -/*[clinic end generated code: output=a8fd19301c7390cc input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c59effb407b8ea4d input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/clinic/cursor.c.h b/Modules/_sqlite/clinic/cursor.c.h index ca7823cf5aef5b..590e429e9139f1 100644 --- a/Modules/_sqlite/clinic/cursor.c.h +++ b/Modules/_sqlite/clinic/cursor.c.h @@ -52,7 +52,7 @@ pysqlite_cursor_execute_impl(pysqlite_Cursor *self, PyObject *sql, PyObject *parameters); static PyObject * -pysqlite_cursor_execute(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_cursor_execute(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -71,7 +71,7 @@ pysqlite_cursor_execute(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t } parameters = args[1]; skip_optional: - return_value = pysqlite_cursor_execute_impl(self, sql, parameters); + return_value = pysqlite_cursor_execute_impl((pysqlite_Cursor *)self, sql, parameters); exit: return return_value; @@ -91,7 +91,7 @@ pysqlite_cursor_executemany_impl(pysqlite_Cursor *self, PyObject *sql, PyObject *seq_of_parameters); static PyObject * -pysqlite_cursor_executemany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_cursor_executemany(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sql; @@ -106,7 +106,7 @@ pysqlite_cursor_executemany(pysqlite_Cursor *self, PyObject *const *args, Py_ssi } sql = args[0]; seq_of_parameters = args[1]; - return_value = pysqlite_cursor_executemany_impl(self, sql, seq_of_parameters); + return_value = pysqlite_cursor_executemany_impl((pysqlite_Cursor *)self, sql, seq_of_parameters); exit: return return_value; @@ -126,7 +126,7 @@ pysqlite_cursor_executescript_impl(pysqlite_Cursor *self, const char *sql_script); static PyObject * -pysqlite_cursor_executescript(pysqlite_Cursor *self, PyObject *arg) +pysqlite_cursor_executescript(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *sql_script; @@ -144,7 +144,7 @@ pysqlite_cursor_executescript(pysqlite_Cursor *self, 
PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = pysqlite_cursor_executescript_impl(self, sql_script); + return_value = pysqlite_cursor_executescript_impl((pysqlite_Cursor *)self, sql_script); exit: return return_value; @@ -163,9 +163,9 @@ static PyObject * pysqlite_cursor_fetchone_impl(pysqlite_Cursor *self); static PyObject * -pysqlite_cursor_fetchone(pysqlite_Cursor *self, PyObject *Py_UNUSED(ignored)) +pysqlite_cursor_fetchone(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_cursor_fetchone_impl(self); + return pysqlite_cursor_fetchone_impl((pysqlite_Cursor *)self); } PyDoc_STRVAR(pysqlite_cursor_fetchmany__doc__, @@ -184,7 +184,7 @@ static PyObject * pysqlite_cursor_fetchmany_impl(pysqlite_Cursor *self, int maxrows); static PyObject * -pysqlite_cursor_fetchmany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pysqlite_cursor_fetchmany(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -214,7 +214,7 @@ pysqlite_cursor_fetchmany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize #undef KWTUPLE PyObject *argsbuf[1]; Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0; - int maxrows = self->arraysize; + int maxrows = ((pysqlite_Cursor *)self)->arraysize; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, /*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf); @@ -229,7 +229,7 @@ pysqlite_cursor_fetchmany(pysqlite_Cursor *self, PyObject *const *args, Py_ssize goto exit; } skip_optional_pos: - return_value = pysqlite_cursor_fetchmany_impl(self, maxrows); + return_value = pysqlite_cursor_fetchmany_impl((pysqlite_Cursor *)self, maxrows); exit: return return_value; @@ -248,9 +248,9 @@ static PyObject * pysqlite_cursor_fetchall_impl(pysqlite_Cursor *self); static PyObject * -pysqlite_cursor_fetchall(pysqlite_Cursor *self, PyObject *Py_UNUSED(ignored)) +pysqlite_cursor_fetchall(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_cursor_fetchall_impl(self); + return pysqlite_cursor_fetchall_impl((pysqlite_Cursor *)self); } PyDoc_STRVAR(pysqlite_cursor_setinputsizes__doc__, @@ -276,7 +276,7 @@ pysqlite_cursor_setoutputsize_impl(pysqlite_Cursor *self, PyObject *size, PyObject *column); static PyObject * -pysqlite_cursor_setoutputsize(pysqlite_Cursor *self, PyObject *const *args, Py_ssize_t nargs) +pysqlite_cursor_setoutputsize(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *size; @@ -291,7 +291,7 @@ pysqlite_cursor_setoutputsize(pysqlite_Cursor *self, PyObject *const *args, Py_s } column = args[1]; skip_optional: - return_value = pysqlite_cursor_setoutputsize_impl(self, size, column); + return_value = pysqlite_cursor_setoutputsize_impl((pysqlite_Cursor *)self, size, column); exit: return return_value; @@ -310,8 +310,8 @@ static PyObject * pysqlite_cursor_close_impl(pysqlite_Cursor *self); static PyObject * -pysqlite_cursor_close(pysqlite_Cursor *self, PyObject *Py_UNUSED(ignored)) +pysqlite_cursor_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_cursor_close_impl(self); + return pysqlite_cursor_close_impl((pysqlite_Cursor *)self); } -/*[clinic end generated code: output=f0804afc5f8646c1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=82620ca7622b547c input=a9049054013a1b77]*/ diff --git 
a/Modules/_sqlite/clinic/row.c.h b/Modules/_sqlite/clinic/row.c.h index e8d1dbf2ba8bc9..068906744e445f 100644 --- a/Modules/_sqlite/clinic/row.c.h +++ b/Modules/_sqlite/clinic/row.c.h @@ -52,8 +52,8 @@ static PyObject * pysqlite_row_keys_impl(pysqlite_Row *self); static PyObject * -pysqlite_row_keys(pysqlite_Row *self, PyObject *Py_UNUSED(ignored)) +pysqlite_row_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pysqlite_row_keys_impl(self); + return pysqlite_row_keys_impl((pysqlite_Row *)self); } -/*[clinic end generated code: output=788bf817acc02b8e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6c1acbb48f386468 input=a9049054013a1b77]*/ diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c index 0fbd408f18cf6a..24e97fcf1897e9 100644 --- a/Modules/_sqlite/cursor.c +++ b/Modules/_sqlite/cursor.c @@ -1155,7 +1155,7 @@ pysqlite_cursor_fetchone_impl(pysqlite_Cursor *self) /*[clinic input] _sqlite3.Cursor.fetchmany as pysqlite_cursor_fetchmany - size as maxrows: int(c_default='self->arraysize') = 1 + size as maxrows: int(c_default='((pysqlite_Cursor *)self)->arraysize') = 1 The default value is set by the Cursor.arraysize attribute. Fetches several rows from the resultset. @@ -1163,7 +1163,7 @@ Fetches several rows from the resultset. static PyObject * pysqlite_cursor_fetchmany_impl(pysqlite_Cursor *self, int maxrows) -/*[clinic end generated code: output=a8ef31fea64d0906 input=c26e6ca3f34debd0]*/ +/*[clinic end generated code: output=a8ef31fea64d0906 input=035dbe44a1005bf2]*/ { PyObject* row; PyObject* list; diff --git a/Modules/_sre/clinic/sre.c.h b/Modules/_sre/clinic/sre.c.h index 87e4785a428468..cfc6813f37f012 100644 --- a/Modules/_sre/clinic/sre.c.h +++ b/Modules/_sre/clinic/sre.c.h @@ -179,7 +179,7 @@ _sre_SRE_Pattern_match_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_match(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_match(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -252,7 +252,7 @@ _sre_SRE_Pattern_match(PatternObject *self, PyTypeObject *cls, PyObject *const * endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_match_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_match_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -273,7 +273,7 @@ _sre_SRE_Pattern_fullmatch_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_fullmatch(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_fullmatch(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -346,7 +346,7 @@ _sre_SRE_Pattern_fullmatch(PatternObject *self, PyTypeObject *cls, PyObject *con endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_fullmatch_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_fullmatch_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -369,7 +369,7 @@ _sre_SRE_Pattern_search_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_search(PatternObject *self, 
PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_search(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -442,7 +442,7 @@ _sre_SRE_Pattern_search(PatternObject *self, PyTypeObject *cls, PyObject *const endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_search_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_search_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -462,7 +462,7 @@ _sre_SRE_Pattern_findall_impl(PatternObject *self, PyObject *string, Py_ssize_t pos, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_findall(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -535,7 +535,7 @@ _sre_SRE_Pattern_findall(PatternObject *self, PyObject *const *args, Py_ssize_t endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_findall_impl(self, string, pos, endpos); + return_value = _sre_SRE_Pattern_findall_impl((PatternObject *)self, string, pos, endpos); exit: return return_value; @@ -558,7 +558,7 @@ _sre_SRE_Pattern_finditer_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_finditer(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_finditer(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -631,7 +631,7 @@ _sre_SRE_Pattern_finditer(PatternObject *self, PyTypeObject *cls, PyObject *cons endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_finditer_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_finditer_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -651,7 +651,7 @@ _sre_SRE_Pattern_scanner_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t endpos); static PyObject * -_sre_SRE_Pattern_scanner(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_scanner(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -724,7 +724,7 @@ _sre_SRE_Pattern_scanner(PatternObject *self, PyTypeObject *cls, PyObject *const endpos = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_scanner_impl(self, cls, string, pos, endpos); + return_value = _sre_SRE_Pattern_scanner_impl((PatternObject *)self, cls, string, pos, endpos); exit: return return_value; @@ -744,7 +744,7 @@ _sre_SRE_Pattern_split_impl(PatternObject *self, PyObject *string, Py_ssize_t maxsplit); static PyObject * -_sre_SRE_Pattern_split(PatternObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -799,7 +799,7 @@ _sre_SRE_Pattern_split(PatternObject *self, PyObject *const *args, 
Py_ssize_t na maxsplit = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_split_impl(self, string, maxsplit); + return_value = _sre_SRE_Pattern_split_impl((PatternObject *)self, string, maxsplit); exit: return return_value; @@ -819,7 +819,7 @@ _sre_SRE_Pattern_sub_impl(PatternObject *self, PyTypeObject *cls, PyObject *repl, PyObject *string, Py_ssize_t count); static PyObject * -_sre_SRE_Pattern_sub(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_sub(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -876,7 +876,7 @@ _sre_SRE_Pattern_sub(PatternObject *self, PyTypeObject *cls, PyObject *const *ar count = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_sub_impl(self, cls, repl, string, count); + return_value = _sre_SRE_Pattern_sub_impl((PatternObject *)self, cls, repl, string, count); exit: return return_value; @@ -897,7 +897,7 @@ _sre_SRE_Pattern_subn_impl(PatternObject *self, PyTypeObject *cls, Py_ssize_t count); static PyObject * -_sre_SRE_Pattern_subn(PatternObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Pattern_subn(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -954,7 +954,7 @@ _sre_SRE_Pattern_subn(PatternObject *self, PyTypeObject *cls, PyObject *const *a count = ival; } skip_optional_pos: - return_value = _sre_SRE_Pattern_subn_impl(self, cls, repl, string, count); + return_value = _sre_SRE_Pattern_subn_impl((PatternObject *)self, cls, repl, string, count); exit: return return_value; @@ -972,9 +972,9 @@ static PyObject * _sre_SRE_Pattern___copy___impl(PatternObject *self); static PyObject * -_sre_SRE_Pattern___copy__(PatternObject *self, PyObject *Py_UNUSED(ignored)) +_sre_SRE_Pattern___copy__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sre_SRE_Pattern___copy___impl(self); + return _sre_SRE_Pattern___copy___impl((PatternObject *)self); } PyDoc_STRVAR(_sre_SRE_Pattern___deepcopy____doc__, @@ -1001,7 +1001,7 @@ _sre_SRE_Pattern__fail_after_impl(PatternObject *self, int count, PyObject *exception); static PyObject * -_sre_SRE_Pattern__fail_after(PatternObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Pattern__fail_after(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int count; @@ -1015,7 +1015,7 @@ _sre_SRE_Pattern__fail_after(PatternObject *self, PyObject *const *args, Py_ssiz goto exit; } exception = args[1]; - return_value = _sre_SRE_Pattern__fail_after_impl(self, count, exception); + return_value = _sre_SRE_Pattern__fail_after_impl((PatternObject *)self, count, exception); exit: return return_value; @@ -1169,7 +1169,7 @@ static PyObject * _sre_SRE_Match_expand_impl(MatchObject *self, PyObject *template); static PyObject * -_sre_SRE_Match_expand(MatchObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Match_expand(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1206,7 +1206,7 @@ _sre_SRE_Match_expand(MatchObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } template = args[0]; - return_value = 
_sre_SRE_Match_expand_impl(self, template); + return_value = _sre_SRE_Match_expand_impl((MatchObject *)self, template); exit: return return_value; @@ -1228,7 +1228,7 @@ static PyObject * _sre_SRE_Match_groups_impl(MatchObject *self, PyObject *default_value); static PyObject * -_sre_SRE_Match_groups(MatchObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Match_groups(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1270,7 +1270,7 @@ _sre_SRE_Match_groups(MatchObject *self, PyObject *const *args, Py_ssize_t nargs } default_value = args[0]; skip_optional_pos: - return_value = _sre_SRE_Match_groups_impl(self, default_value); + return_value = _sre_SRE_Match_groups_impl((MatchObject *)self, default_value); exit: return return_value; @@ -1292,7 +1292,7 @@ static PyObject * _sre_SRE_Match_groupdict_impl(MatchObject *self, PyObject *default_value); static PyObject * -_sre_SRE_Match_groupdict(MatchObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Match_groupdict(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1334,7 +1334,7 @@ _sre_SRE_Match_groupdict(MatchObject *self, PyObject *const *args, Py_ssize_t na } default_value = args[0]; skip_optional_pos: - return_value = _sre_SRE_Match_groupdict_impl(self, default_value); + return_value = _sre_SRE_Match_groupdict_impl((MatchObject *)self, default_value); exit: return return_value; @@ -1353,7 +1353,7 @@ static Py_ssize_t _sre_SRE_Match_start_impl(MatchObject *self, PyObject *group); static PyObject * -_sre_SRE_Match_start(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Match_start(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *group = NULL; @@ -1367,7 +1367,7 @@ _sre_SRE_Match_start(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) } group = args[0]; skip_optional: - _return_value = _sre_SRE_Match_start_impl(self, group); + _return_value = _sre_SRE_Match_start_impl((MatchObject *)self, group); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -1390,7 +1390,7 @@ static Py_ssize_t _sre_SRE_Match_end_impl(MatchObject *self, PyObject *group); static PyObject * -_sre_SRE_Match_end(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Match_end(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *group = NULL; @@ -1404,7 +1404,7 @@ _sre_SRE_Match_end(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) } group = args[0]; skip_optional: - _return_value = _sre_SRE_Match_end_impl(self, group); + _return_value = _sre_SRE_Match_end_impl((MatchObject *)self, group); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -1427,7 +1427,7 @@ static PyObject * _sre_SRE_Match_span_impl(MatchObject *self, PyObject *group); static PyObject * -_sre_SRE_Match_span(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) +_sre_SRE_Match_span(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *group = NULL; @@ -1440,7 +1440,7 @@ _sre_SRE_Match_span(MatchObject *self, PyObject *const *args, Py_ssize_t nargs) } group = args[0]; skip_optional: - return_value = _sre_SRE_Match_span_impl(self, group); + return_value = 
_sre_SRE_Match_span_impl((MatchObject *)self, group); exit: return return_value; @@ -1458,9 +1458,9 @@ static PyObject * _sre_SRE_Match___copy___impl(MatchObject *self); static PyObject * -_sre_SRE_Match___copy__(MatchObject *self, PyObject *Py_UNUSED(ignored)) +_sre_SRE_Match___copy__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sre_SRE_Match___copy___impl(self); + return _sre_SRE_Match___copy___impl((MatchObject *)self); } PyDoc_STRVAR(_sre_SRE_Match___deepcopy____doc__, @@ -1483,13 +1483,13 @@ static PyObject * _sre_SRE_Scanner_match_impl(ScannerObject *self, PyTypeObject *cls); static PyObject * -_sre_SRE_Scanner_match(ScannerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Scanner_match(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "match() takes no arguments"); return NULL; } - return _sre_SRE_Scanner_match_impl(self, cls); + return _sre_SRE_Scanner_match_impl((ScannerObject *)self, cls); } PyDoc_STRVAR(_sre_SRE_Scanner_search__doc__, @@ -1504,16 +1504,16 @@ static PyObject * _sre_SRE_Scanner_search_impl(ScannerObject *self, PyTypeObject *cls); static PyObject * -_sre_SRE_Scanner_search(ScannerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_sre_SRE_Scanner_search(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "search() takes no arguments"); return NULL; } - return _sre_SRE_Scanner_search_impl(self, cls); + return _sre_SRE_Scanner_search_impl((ScannerObject *)self, cls); } #ifndef _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF #define _SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF #endif /* !defined(_SRE_SRE_PATTERN__FAIL_AFTER_METHODDEF) */ -/*[clinic end generated code: output=f8cb77f2261f0b2e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=3654103c87eb4830 input=a9049054013a1b77]*/ diff --git a/Modules/_ssl.c b/Modules/_ssl.c index a7d0f509aed3d1..c15a582a92aa4a 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -965,13 +965,13 @@ newPySSLSocket(PySSLContext *sslctx, PySocketSockObject *sock, } } if (owner && owner != Py_None) { - if (_ssl__SSLSocket_owner_set(self, owner, NULL) == -1) { + if (_ssl__SSLSocket_owner_set((PyObject *)self, owner, NULL) < 0) { Py_DECREF(self); return NULL; } } if (session && session != Py_None) { - if (_ssl__SSLSocket_session_set(self, session, NULL) == -1) { + if (_ssl__SSLSocket_session_set((PyObject *)self, session, NULL) < 0) { Py_DECREF(self); return NULL; } diff --git a/Modules/_ssl/clinic/cert.c.h b/Modules/_ssl/clinic/cert.c.h index 19559442cd9b88..3e0c5b405092db 100644 --- a/Modules/_ssl/clinic/cert.c.h +++ b/Modules/_ssl/clinic/cert.c.h @@ -20,7 +20,7 @@ static PyObject * _ssl_Certificate_public_bytes_impl(PySSLCertificate *self, int format); static PyObject * -_ssl_Certificate_public_bytes(PySSLCertificate *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl_Certificate_public_bytes(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -65,7 +65,7 @@ _ssl_Certificate_public_bytes(PySSLCertificate *self, PyObject *const *args, Py_ goto exit; } skip_optional_pos: - return_value = _ssl_Certificate_public_bytes_impl(self, 
format); + return_value = _ssl_Certificate_public_bytes_impl((PySSLCertificate *)self, format); exit: return return_value; @@ -83,8 +83,8 @@ static PyObject * _ssl_Certificate_get_info_impl(PySSLCertificate *self); static PyObject * -_ssl_Certificate_get_info(PySSLCertificate *self, PyObject *Py_UNUSED(ignored)) +_ssl_Certificate_get_info(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _ssl_Certificate_get_info_impl(self); + return _ssl_Certificate_get_info_impl((PySSLCertificate *)self); } -/*[clinic end generated code: output=e5fa354db5fc56b4 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=51365b498b975ee0 input=a9049054013a1b77]*/ diff --git a/Modules/_testcapi/mem.c b/Modules/_testcapi/mem.c index ab4ad934644c38..ecae5ba26226a6 100644 --- a/Modules/_testcapi/mem.c +++ b/Modules/_testcapi/mem.c @@ -557,8 +557,9 @@ tracemalloc_untrack(PyObject *self, PyObject *args) { unsigned int domain; PyObject *ptr_obj; + int release_gil = 0; - if (!PyArg_ParseTuple(args, "IO", &domain, &ptr_obj)) { + if (!PyArg_ParseTuple(args, "IO|i", &domain, &ptr_obj, &release_gil)) { return NULL; } void *ptr = PyLong_AsVoidPtr(ptr_obj); @@ -566,7 +567,15 @@ tracemalloc_untrack(PyObject *self, PyObject *args) return NULL; } - int res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr); + int res; + if (release_gil) { + Py_BEGIN_ALLOW_THREADS + res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr); + Py_END_ALLOW_THREADS + } + else { + res = PyTraceMalloc_Untrack(domain, (uintptr_t)ptr); + } if (res < 0) { PyErr_SetString(PyExc_RuntimeError, "PyTraceMalloc_Untrack error"); return NULL; diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index a0a1f8af6710a3..c405a352ed74a1 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -3059,52 +3059,6 @@ function_set_closure(PyObject *self, PyObject *args) Py_RETURN_NONE; } -static PyObject * -check_pyimport_addmodule(PyObject *self, PyObject *args) -{ - const char *name; - if (!PyArg_ParseTuple(args, "s", &name)) { - return NULL; - } - - // test PyImport_AddModuleRef() - PyObject *module = PyImport_AddModuleRef(name); - if (module == NULL) { - return NULL; - } - assert(PyModule_Check(module)); - // module is a strong reference - - // test PyImport_AddModule() - PyObject *module2 = PyImport_AddModule(name); - if (module2 == NULL) { - goto error; - } - assert(PyModule_Check(module2)); - assert(module2 == module); - // module2 is a borrowed ref - - // test PyImport_AddModuleObject() - PyObject *name_obj = PyUnicode_FromString(name); - if (name_obj == NULL) { - goto error; - } - PyObject *module3 = PyImport_AddModuleObject(name_obj); - Py_DECREF(name_obj); - if (module3 == NULL) { - goto error; - } - assert(PyModule_Check(module3)); - assert(module3 == module); - // module3 is a borrowed ref - - return module; - -error: - Py_DECREF(module); - return NULL; -} - static PyObject * test_weakref_capi(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) @@ -3435,6 +3389,105 @@ code_offset_to_line(PyObject* self, PyObject* const* args, Py_ssize_t nargsf) return PyLong_FromInt32(PyCode_Addr2Line(code, offset)); } + +static void +tracemalloc_track_race_thread(void *data) +{ + PyTraceMalloc_Track(123, 10, 1); + PyTraceMalloc_Untrack(123, 10); + + PyThread_type_lock lock = (PyThread_type_lock)data; + PyThread_release_lock(lock); +} + +// gh-128679: Test fix for tracemalloc.stop() race condition +static PyObject * +tracemalloc_track_race(PyObject *self, PyObject *args) +{ +#define NTHREAD 50 + PyObject *tracemalloc = NULL; + 
PyObject *stop = NULL; + PyThread_type_lock locks[NTHREAD]; + memset(locks, 0, sizeof(locks)); + + // Call tracemalloc.start() + tracemalloc = PyImport_ImportModule("tracemalloc"); + if (tracemalloc == NULL) { + goto error; + } + PyObject *start = PyObject_GetAttrString(tracemalloc, "start"); + if (start == NULL) { + goto error; + } + PyObject *res = PyObject_CallNoArgs(start); + Py_DECREF(start); + if (res == NULL) { + goto error; + } + Py_DECREF(res); + + stop = PyObject_GetAttrString(tracemalloc, "stop"); + Py_CLEAR(tracemalloc); + if (stop == NULL) { + goto error; + } + + // Start threads + for (size_t i = 0; i < NTHREAD; i++) { + PyThread_type_lock lock = PyThread_allocate_lock(); + if (!lock) { + PyErr_NoMemory(); + goto error; + } + locks[i] = lock; + PyThread_acquire_lock(lock, 1); + + unsigned long thread; + thread = PyThread_start_new_thread(tracemalloc_track_race_thread, + (void*)lock); + if (thread == (unsigned long)-1) { + PyErr_SetString(PyExc_RuntimeError, "can't start new thread"); + goto error; + } + } + + // Call tracemalloc.stop() while threads are running + res = PyObject_CallNoArgs(stop); + Py_CLEAR(stop); + if (res == NULL) { + goto error; + } + Py_DECREF(res); + + // Wait until threads complete with the GIL released + Py_BEGIN_ALLOW_THREADS + for (size_t i = 0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + PyThread_acquire_lock(lock, 1); + PyThread_release_lock(lock); + } + Py_END_ALLOW_THREADS + + // Free threads locks + for (size_t i=0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + PyThread_free_lock(lock); + } + Py_RETURN_NONE; + +error: + Py_CLEAR(tracemalloc); + Py_CLEAR(stop); + for (size_t i=0; i < NTHREAD; i++) { + PyThread_type_lock lock = locks[i]; + if (lock) { + PyThread_free_lock(lock); + } + } + return NULL; +#undef NTHREAD +} + static PyMethodDef TestMethods[] = { {"set_errno", set_errno, METH_VARARGS}, {"test_config", test_config, METH_NOARGS}, @@ -3570,7 +3623,6 @@ static PyMethodDef TestMethods[] = { {"function_set_kw_defaults", function_set_kw_defaults, METH_VARARGS, NULL}, {"function_get_closure", function_get_closure, METH_O, NULL}, {"function_set_closure", function_set_closure, METH_VARARGS, NULL}, - {"check_pyimport_addmodule", check_pyimport_addmodule, METH_VARARGS}, {"test_weakref_capi", test_weakref_capi, METH_NOARGS}, {"function_set_warning", function_set_warning, METH_NOARGS}, {"test_critical_sections", test_critical_sections, METH_NOARGS}, @@ -3578,6 +3630,7 @@ static PyMethodDef TestMethods[] = { {"type_freeze", type_freeze, METH_VARARGS}, {"test_atexit", test_atexit, METH_NOARGS}, {"code_offset_to_line", _PyCFunction_CAST(code_offset_to_line), METH_FASTCALL}, + {"tracemalloc_track_race", tracemalloc_track_race, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; @@ -4050,6 +4103,61 @@ static PyTypeObject ContainerNoGC_type = { .tp_new = ContainerNoGC_new, }; +/* Manually allocated heap type */ + +typedef struct { + PyObject_HEAD + PyObject *dict; +} ManualHeapType; + +static int +ManualHeapType_traverse(PyObject *self, visitproc visit, void *arg) +{ + ManualHeapType *mht = (ManualHeapType *)self; + Py_VISIT(mht->dict); + return 0; +} + +static void +ManualHeapType_dealloc(PyObject *self) +{ + ManualHeapType *mht = (ManualHeapType *)self; + PyObject_GC_UnTrack(self); + Py_XDECREF(mht->dict); + PyTypeObject *type = Py_TYPE(self); + Py_TYPE(self)->tp_free(self); + Py_DECREF(type); +} + +static PyObject * +create_manual_heap_type(void) +{ + // gh-128923: Ensure that a heap type allocated through PyType_Type.tp_alloc + 
// with minimal initialization works correctly. + PyHeapTypeObject *heap_type = (PyHeapTypeObject *)PyType_Type.tp_alloc(&PyType_Type, 0); + if (heap_type == NULL) { + return NULL; + } + PyTypeObject* type = &heap_type->ht_type; + type->tp_basicsize = sizeof(ManualHeapType); + type->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HEAPTYPE | Py_TPFLAGS_HAVE_GC; + type->tp_new = PyType_GenericNew; + type->tp_name = "ManualHeapType"; + type->tp_dictoffset = offsetof(ManualHeapType, dict); + type->tp_traverse = ManualHeapType_traverse; + type->tp_dealloc = ManualHeapType_dealloc; + heap_type->ht_name = PyUnicode_FromString(type->tp_name); + if (!heap_type->ht_name) { + Py_DECREF(type); + return NULL; + } + heap_type->ht_qualname = Py_NewRef(heap_type->ht_name); + if (PyType_Ready(type) < 0) { + Py_DECREF(type); + return NULL; + } + return (PyObject *)type; +} static struct PyModuleDef _testcapimodule = { PyModuleDef_HEAD_INIT, @@ -4184,6 +4292,15 @@ PyInit__testcapi(void) (PyObject *) &ContainerNoGC_type) < 0) return NULL; + PyObject *manual_heap_type = create_manual_heap_type(); + if (manual_heap_type == NULL) { + return NULL; + } + if (PyModule_Add(m, "ManualHeapType", manual_heap_type) < 0) { + return NULL; + } + + /* Include tests from the _testcapi/ directory */ if (_PyTestCapi_Init_Vectorcall(m) < 0) { return NULL; diff --git a/Modules/_testexternalinspection.c b/Modules/_testexternalinspection.c index 0807d1e47b6736..5a28c0e266226b 100644 --- a/Modules/_testexternalinspection.c +++ b/Modules/_testexternalinspection.c @@ -59,10 +59,30 @@ # define HAVE_PROCESS_VM_READV 0 #endif +struct _Py_AsyncioModuleDebugOffsets { + struct _asyncio_task_object { + uint64_t size; + uint64_t task_name; + uint64_t task_awaited_by; + uint64_t task_is_task; + uint64_t task_awaited_by_is_set; + uint64_t task_coro; + } asyncio_task_object; + struct _asyncio_thread_state { + uint64_t size; + uint64_t asyncio_running_loop; + uint64_t asyncio_running_task; + } asyncio_thread_state; +}; + #if defined(__APPLE__) && TARGET_OS_OSX -static void* -analyze_macho64(mach_port_t proc_ref, void* base, void* map) -{ +static uintptr_t +return_section_address( + const char* section, + mach_port_t proc_ref, + uintptr_t base, + void* map +) { struct mach_header_64* hdr = (struct mach_header_64*)map; int ncmds = hdr->ncmds; @@ -72,35 +92,40 @@ analyze_macho64(mach_port_t proc_ref, void* base, void* map) mach_vm_size_t size = 0; mach_msg_type_number_t count = sizeof(vm_region_basic_info_data_64_t); mach_vm_address_t address = (mach_vm_address_t)base; - vm_region_basic_info_data_64_t region_info; + vm_region_basic_info_data_64_t r_info; mach_port_t object_name; + uintptr_t vmaddr = 0; for (int i = 0; cmd_cnt < 2 && i < ncmds; i++) { + if (cmd->cmd == LC_SEGMENT_64 && strcmp(cmd->segname, "__TEXT") == 0) { + vmaddr = cmd->vmaddr; + } if (cmd->cmd == LC_SEGMENT_64 && strcmp(cmd->segname, "__DATA") == 0) { while (cmd->filesize != size) { address += size; - if (mach_vm_region( - proc_ref, - &address, - &size, - VM_REGION_BASIC_INFO_64, - (vm_region_info_t)®ion_info, // cppcheck-suppress [uninitvar] - &count, - &object_name) - != KERN_SUCCESS) - { - PyErr_SetString(PyExc_RuntimeError, "Cannot get any more VM maps.\n"); - return NULL; + kern_return_t ret = mach_vm_region( + proc_ref, + &address, + &size, + VM_REGION_BASIC_INFO_64, + (vm_region_info_t)&r_info, // cppcheck-suppress [uninitvar] + &count, + &object_name + ); + if (ret != KERN_SUCCESS) { + PyErr_SetString( + PyExc_RuntimeError, "Cannot get any more VM maps.\n"); + return 0; } } - 
base = (void*)address - cmd->vmaddr; int nsects = cmd->nsects; - struct section_64* sec = - (struct section_64*)((void*)cmd + sizeof(struct segment_command_64)); + struct section_64* sec = (struct section_64*)( + (void*)cmd + sizeof(struct segment_command_64) + ); for (int j = 0; j < nsects; j++) { - if (strcmp(sec[j].sectname, "PyRuntime") == 0) { - return base + sec[j].addr; + if (strcmp(sec[j].sectname, section) == 0) { + return base + sec[j].addr - vmaddr; } } cmd_cnt++; @@ -108,33 +133,39 @@ analyze_macho64(mach_port_t proc_ref, void* base, void* map) cmd = (struct segment_command_64*)((void*)cmd + cmd->cmdsize); } - return NULL; + return 0; } -static void* -analyze_macho(char* path, void* base, mach_vm_size_t size, mach_port_t proc_ref) -{ +static uintptr_t +search_section_in_file( + const char* secname, + char* path, + uintptr_t base, + mach_vm_size_t size, + mach_port_t proc_ref +) { int fd = open(path, O_RDONLY); if (fd == -1) { PyErr_Format(PyExc_RuntimeError, "Cannot open binary %s\n", path); - return NULL; + return 0; } struct stat fs; if (fstat(fd, &fs) == -1) { - PyErr_Format(PyExc_RuntimeError, "Cannot get size of binary %s\n", path); + PyErr_Format( + PyExc_RuntimeError, "Cannot get size of binary %s\n", path); close(fd); - return NULL; + return 0; } void* map = mmap(0, fs.st_size, PROT_READ, MAP_SHARED, fd, 0); if (map == MAP_FAILED) { PyErr_Format(PyExc_RuntimeError, "Cannot map binary %s\n", path); close(fd); - return NULL; + return 0; } - void* result = NULL; + uintptr_t result = 0; struct mach_header_64* hdr = (struct mach_header_64*)map; switch (hdr->magic) { @@ -142,11 +173,13 @@ analyze_macho(char* path, void* base, mach_vm_size_t size, mach_port_t proc_ref) case MH_CIGAM: case FAT_MAGIC: case FAT_CIGAM: - PyErr_SetString(PyExc_RuntimeError, "32-bit Mach-O binaries are not supported"); + PyErr_SetString( + PyExc_RuntimeError, + "32-bit Mach-O binaries are not supported"); break; case MH_MAGIC_64: case MH_CIGAM_64: - result = analyze_macho64(proc_ref, base, map); + result = return_section_address(secname, proc_ref, base, map); break; default: PyErr_SetString(PyExc_RuntimeError, "Unknown Mach-O magic"); @@ -174,9 +207,8 @@ pid_to_task(pid_t pid) return task; } -static void* -get_py_runtime_macos(pid_t pid) -{ +static uintptr_t +search_map_for_section(pid_t pid, const char* secname, const char* substr) { mach_vm_address_t address = 0; mach_vm_size_t size = 0; mach_msg_type_number_t count = sizeof(vm_region_basic_info_data_64_t); @@ -186,12 +218,11 @@ get_py_runtime_macos(pid_t pid) mach_port_t proc_ref = pid_to_task(pid); if (proc_ref == 0) { PyErr_SetString(PyExc_PermissionError, "Cannot get task for PID"); - return NULL; + return 0; } int match_found = 0; char map_filename[MAXPATHLEN + 1]; - void* result_address = NULL; while (mach_vm_region( proc_ref, &address, @@ -199,15 +230,21 @@ get_py_runtime_macos(pid_t pid) VM_REGION_BASIC_INFO_64, (vm_region_info_t)®ion_info, &count, - &object_name) - == KERN_SUCCESS) + &object_name) == KERN_SUCCESS) { - int path_len = proc_regionfilename(pid, address, map_filename, MAXPATHLEN); + int path_len = proc_regionfilename( + pid, address, map_filename, MAXPATHLEN); if (path_len == 0) { address += size; continue; } + if ((region_info.protection & VM_PROT_READ) == 0 + || (region_info.protection & VM_PROT_EXECUTE) == 0) { + address += size; + continue; + } + char* filename = strrchr(map_filename, '/'); if (filename != NULL) { filename++; // Move past the '/' @@ -215,26 +252,20 @@ get_py_runtime_macos(pid_t pid) filename = 
map_filename; // No path, use the whole string } - // Check if the filename starts with "python" or "libpython" - if (!match_found && strncmp(filename, "python", 6) == 0) { - match_found = 1; - result_address = analyze_macho(map_filename, (void*)address, size, proc_ref); - } - if (strncmp(filename, "libpython", 9) == 0) { + if (!match_found && strncmp(filename, substr, strlen(substr)) == 0) { match_found = 1; - result_address = analyze_macho(map_filename, (void*)address, size, proc_ref); - break; + return search_section_in_file( + secname, map_filename, address, size, proc_ref); } address += size; } - return result_address; + return 0; } -#endif -#ifdef __linux__ -void* -find_python_map_start_address(pid_t pid, char* result_filename) +#elif defined(__linux__) +static uintptr_t +find_map_start_address(pid_t pid, char* result_filename, const char* map) { char maps_file_path[64]; sprintf(maps_file_path, "/proc/%d/maps", pid); @@ -242,17 +273,20 @@ find_python_map_start_address(pid_t pid, char* result_filename) FILE* maps_file = fopen(maps_file_path, "r"); if (maps_file == NULL) { PyErr_SetFromErrno(PyExc_OSError); - return NULL; + return 0; } int match_found = 0; char line[256]; char map_filename[PATH_MAX]; - void* result_address = 0; + uintptr_t result_address = 0; while (fgets(line, sizeof(line), maps_file) != NULL) { unsigned long start_address = 0; - sscanf(line, "%lx-%*x %*s %*s %*s %*s %s", &start_address, map_filename); + sscanf( + line, "%lx-%*x %*s %*s %*s %*s %s", + &start_address, map_filename + ); char* filename = strrchr(map_filename, '/'); if (filename != NULL) { filename++; // Move past the '/' @@ -260,15 +294,9 @@ find_python_map_start_address(pid_t pid, char* result_filename) filename = map_filename; // No path, use the whole string } - // Check if the filename starts with "python" or "libpython" - if (!match_found && strncmp(filename, "python", 6) == 0) { - match_found = 1; - result_address = (void*)start_address; - strcpy(result_filename, map_filename); - } - if (strncmp(filename, "libpython", 9) == 0) { + if (!match_found && strncmp(filename, map, strlen(map)) == 0) { match_found = 1; - result_address = (void*)start_address; + result_address = start_address; strcpy(result_filename, map_filename); break; } @@ -283,18 +311,17 @@ find_python_map_start_address(pid_t pid, char* result_filename) return result_address; } -void* -get_py_runtime_linux(pid_t pid) +static uintptr_t +search_map_for_section(pid_t pid, const char* secname, const char* map) { char elf_file[256]; - void* start_address = (void*)find_python_map_start_address(pid, elf_file); + uintptr_t start_address = find_map_start_address(pid, elf_file, map); if (start_address == 0) { - PyErr_SetString(PyExc_RuntimeError, "No memory map associated with python or libpython found"); - return NULL; + return 0; } - void* result = NULL; + uintptr_t result = 0; void* file_memory = NULL; int fd = open(elf_file, O_RDONLY); @@ -317,20 +344,29 @@ get_py_runtime_linux(pid_t pid) Elf_Ehdr* elf_header = (Elf_Ehdr*)file_memory; - Elf_Shdr* section_header_table = (Elf_Shdr*)(file_memory + elf_header->e_shoff); + Elf_Shdr* section_header_table = + (Elf_Shdr*)(file_memory + elf_header->e_shoff); Elf_Shdr* shstrtab_section = §ion_header_table[elf_header->e_shstrndx]; char* shstrtab = (char*)(file_memory + shstrtab_section->sh_offset); - Elf_Shdr* py_runtime_section = NULL; + Elf_Shdr* section = NULL; for (int i = 0; i < elf_header->e_shnum; i++) { - if (strcmp(".PyRuntime", shstrtab + section_header_table[i].sh_name) == 0) { - 
py_runtime_section = §ion_header_table[i]; + const char* this_sec_name = ( + shstrtab + + section_header_table[i].sh_name + + 1 // "+1" accounts for the leading "." + ); + + if (strcmp(secname, this_sec_name) == 0) { + section = §ion_header_table[i]; break; } } - Elf_Phdr* program_header_table = (Elf_Phdr*)(file_memory + elf_header->e_phoff); + Elf_Phdr* program_header_table = + (Elf_Phdr*)(file_memory + elf_header->e_phoff); + // Find the first PT_LOAD segment Elf_Phdr* first_load_segment = NULL; for (int i = 0; i < elf_header->e_phnum; i++) { @@ -340,10 +376,16 @@ get_py_runtime_linux(pid_t pid) } } - if (py_runtime_section != NULL && first_load_segment != NULL) { - uintptr_t elf_load_addr = first_load_segment->p_vaddr - - (first_load_segment->p_vaddr % first_load_segment->p_align); - result = start_address + py_runtime_section->sh_addr - elf_load_addr; + if (section != NULL && first_load_segment != NULL) { + uintptr_t elf_load_addr = + first_load_segment->p_vaddr - ( + first_load_segment->p_vaddr % first_load_segment->p_align + ); + result = start_address + (uintptr_t)section->sh_addr - elf_load_addr; + } + else { + PyErr_Format(PyExc_KeyError, + "cannot find map for section %s", secname); } exit: @@ -355,10 +397,37 @@ get_py_runtime_linux(pid_t pid) } return result; } +#else +static uintptr_t +search_map_for_section(pid_t pid, const char* secname, const char* map) +{ + return 0; +} #endif -ssize_t -read_memory(pid_t pid, void* remote_address, size_t len, void* dst) +static uintptr_t +get_py_runtime(pid_t pid) +{ + uintptr_t address = search_map_for_section(pid, "PyRuntime", "libpython"); + if (address == 0) { + address = search_map_for_section(pid, "PyRuntime", "python"); + } + return address; +} + +static uintptr_t +get_async_debug(pid_t pid) +{ + uintptr_t result = search_map_for_section(pid, "AsyncioDebug", "_asyncio.cpython"); + if (result == 0 && !PyErr_Occurred()) { + PyErr_SetString(PyExc_RuntimeError, "Cannot find AsyncioDebug section"); + } + return result; +} + + +static ssize_t +read_memory(pid_t pid, uintptr_t remote_address, size_t len, void* dst) { ssize_t total_bytes_read = 0; #if defined(__linux__) && HAVE_PROCESS_VM_READV @@ -394,13 +463,19 @@ read_memory(pid_t pid, void* remote_address, size_t len, void* dst) if (kr != KERN_SUCCESS) { switch (kr) { case KERN_PROTECTION_FAILURE: - PyErr_SetString(PyExc_PermissionError, "Not enough permissions to read memory"); + PyErr_SetString( + PyExc_PermissionError, + "Not enough permissions to read memory"); break; case KERN_INVALID_ARGUMENT: - PyErr_SetString(PyExc_PermissionError, "Invalid argument to mach_vm_read_overwrite"); + PyErr_SetString( + PyExc_PermissionError, + "Invalid argument to mach_vm_read_overwrite"); break; default: - PyErr_SetString(PyExc_RuntimeError, "Unknown error reading memory"); + PyErr_SetString( + PyExc_RuntimeError, + "Unknown error reading memory"); } return -1; } @@ -411,13 +486,22 @@ read_memory(pid_t pid, void* remote_address, size_t len, void* dst) return total_bytes_read; } -int -read_string(pid_t pid, _Py_DebugOffsets* debug_offsets, void* address, char* buffer, Py_ssize_t size) -{ +static int +read_string( + pid_t pid, + _Py_DebugOffsets* debug_offsets, + uintptr_t address, + char* buffer, + Py_ssize_t size +) { Py_ssize_t len; - ssize_t bytes_read = - read_memory(pid, address + debug_offsets->unicode_object.length, sizeof(Py_ssize_t), &len); - if (bytes_read == -1) { + ssize_t bytes_read = read_memory( + pid, + address + debug_offsets->unicode_object.length, + sizeof(Py_ssize_t), + &len + ); 
+ if (bytes_read < 0) { return -1; } if (len >= size) { @@ -426,51 +510,652 @@ read_string(pid_t pid, _Py_DebugOffsets* debug_offsets, void* address, char* buf } size_t offset = debug_offsets->unicode_object.asciiobject_size; bytes_read = read_memory(pid, address + offset, len, buffer); - if (bytes_read == -1) { + if (bytes_read < 0) { return -1; } buffer[len] = '\0'; return 0; } -void* -get_py_runtime(pid_t pid) + +static inline int +read_ptr(pid_t pid, uintptr_t address, uintptr_t *ptr_addr) { -#if defined(__linux__) - return get_py_runtime_linux(pid); -#elif defined(__APPLE__) && TARGET_OS_OSX - return get_py_runtime_macos(pid); -#else - return NULL; -#endif + int bytes_read = read_memory(pid, address, sizeof(void*), ptr_addr); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static inline int +read_ssize_t(pid_t pid, uintptr_t address, Py_ssize_t *size) +{ + int bytes_read = read_memory(pid, address, sizeof(Py_ssize_t), size); + if (bytes_read < 0) { + return -1; + } + return 0; } static int -parse_code_object( - int pid, - PyObject* result, - struct _Py_DebugOffsets* offsets, - void* address, - void** previous_frame) +read_py_ptr(pid_t pid, uintptr_t address, uintptr_t *ptr_addr) +{ + if (read_ptr(pid, address, ptr_addr)) { + return -1; + } + *ptr_addr &= ~Py_TAG_BITS; + return 0; +} + +static int +read_char(pid_t pid, uintptr_t address, char *result) +{ + int bytes_read = read_memory(pid, address, sizeof(char), result); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_int(pid_t pid, uintptr_t address, int *result) +{ + int bytes_read = read_memory(pid, address, sizeof(int), result); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_unsigned_long(pid_t pid, uintptr_t address, unsigned long *result) +{ + int bytes_read = read_memory(pid, address, sizeof(unsigned long), result); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_pyobj(pid_t pid, uintptr_t address, PyObject *ptr_addr) { - void* address_of_function_name; - read_memory( + int bytes_read = read_memory(pid, address, sizeof(PyObject), ptr_addr); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static PyObject * +read_py_str( + pid_t pid, + _Py_DebugOffsets* debug_offsets, + uintptr_t address, + ssize_t max_len +) { + assert(max_len > 0); + + PyObject *result = NULL; + + char *buf = (char *)PyMem_RawMalloc(max_len); + if (buf == NULL) { + PyErr_NoMemory(); + return NULL; + } + if (read_string(pid, debug_offsets, address, buf, max_len)) { + goto err; + } + + result = PyUnicode_FromString(buf); + if (result == NULL) { + goto err; + } + + PyMem_RawFree(buf); + assert(result != NULL); + return result; + +err: + PyMem_RawFree(buf); + return NULL; +} + +static long +read_py_long(pid_t pid, _Py_DebugOffsets* offsets, uintptr_t address) +{ + unsigned int shift = PYLONG_BITS_IN_DIGIT; + + ssize_t size; + uintptr_t lv_tag; + + int bytes_read = read_memory( + pid, address + offsets->long_object.lv_tag, + sizeof(uintptr_t), + &lv_tag); + if (bytes_read < 0) { + return -1; + } + + int negative = (lv_tag & 3) == 2; + size = lv_tag >> 3; + + if (size == 0) { + return 0; + } + + digit *digits = (digit *)PyMem_RawMalloc(size * sizeof(digit)); + if (!digits) { + PyErr_NoMemory(); + return -1; + } + + bytes_read = read_memory( + pid, + address + offsets->long_object.ob_digit, + sizeof(digit) * size, + digits + ); + if (bytes_read < 0) { + goto error; + } + + long value = 0; + + // In theory this can overflow, but because of llvm/llvm-project#16778 + 
// we can't use __builtin_mul_overflow because it fails to link with + // __muloti4 on aarch64. In practice this is fine because all we're + // testing here are task numbers that would fit in a single byte. + for (ssize_t i = 0; i < size; ++i) { + long long factor = digits[i] * (1UL << (ssize_t)(shift * i)); + value += factor; + } + PyMem_RawFree(digits); + if (negative) { + value *= -1; + } + return value; +error: + PyMem_RawFree(digits); + return -1; +} + +static PyObject * +parse_task_name( + int pid, + _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address +) { + uintptr_t task_name_addr; + int err = read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_name, + &task_name_addr); + if (err) { + return NULL; + } + + // The task name can be a long or a string so we need to check the type + + PyObject task_name_obj; + err = read_pyobj( + pid, + task_name_addr, + &task_name_obj); + if (err) { + return NULL; + } + + unsigned long flags; + err = read_unsigned_long( + pid, + (uintptr_t)task_name_obj.ob_type + offsets->type_object.tp_flags, + &flags); + if (err) { + return NULL; + } + + if ((flags & Py_TPFLAGS_LONG_SUBCLASS)) { + long res = read_py_long(pid, offsets, task_name_addr); + if (res == -1) { + PyErr_SetString(PyExc_RuntimeError, "Failed to get task name"); + return NULL; + } + return PyUnicode_FromFormat("Task-%d", res); + } + + if(!(flags & Py_TPFLAGS_UNICODE_SUBCLASS)) { + PyErr_SetString(PyExc_RuntimeError, "Invalid task name object"); + return NULL; + } + + return read_py_str( + pid, + offsets, + task_name_addr, + 255 + ); +} + +static int +parse_coro_chain( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t coro_address, + PyObject *render_to +) { + assert((void*)coro_address != NULL); + + uintptr_t gen_type_addr; + int err = read_ptr( + pid, + coro_address + sizeof(void*), + &gen_type_addr); + if (err) { + return -1; + } + + uintptr_t gen_name_addr; + err = read_py_ptr( + pid, + coro_address + offsets->gen_object.gi_name, + &gen_name_addr); + if (err) { + return -1; + } + + PyObject *name = read_py_str( + pid, + offsets, + gen_name_addr, + 255 + ); + if (name == NULL) { + return -1; + } + + if (PyList_Append(render_to, name)) { + return -1; + } + Py_DECREF(name); + + int gi_frame_state; + err = read_int( + pid, + coro_address + offsets->gen_object.gi_frame_state, + &gi_frame_state); + + if (gi_frame_state == FRAME_SUSPENDED_YIELD_FROM) { + char owner; + err = read_char( pid, - (void*)(address + offsets->code_object.name), - sizeof(void*), - &address_of_function_name); + coro_address + offsets->gen_object.gi_iframe + + offsets->interpreter_frame.owner, + &owner + ); + if (err) { + return -1; + } + if (owner != FRAME_OWNED_BY_GENERATOR) { + PyErr_SetString( + PyExc_RuntimeError, + "generator doesn't own its frame \\_o_/"); + return -1; + } - if (address_of_function_name == NULL) { - PyErr_SetString(PyExc_RuntimeError, "No function name found"); + uintptr_t stackpointer_addr; + err = read_py_ptr( + pid, + coro_address + offsets->gen_object.gi_iframe + + offsets->interpreter_frame.stackpointer, + &stackpointer_addr); + if (err) { + return -1; + } + + if ((void*)stackpointer_addr != NULL) { + uintptr_t gi_await_addr; + err = read_py_ptr( + pid, + stackpointer_addr - sizeof(void*), + &gi_await_addr); + if (err) { + return -1; + } + + if ((void*)gi_await_addr != NULL) { + uintptr_t gi_await_addr_type_addr; + int err = read_ptr( + pid, + 
gi_await_addr + sizeof(void*), + &gi_await_addr_type_addr); + if (err) { + return -1; + } + + if (gen_type_addr == gi_await_addr_type_addr) { + /* This needs an explanation. We always start with parsing + native coroutine / generator frames. Ultimately they + are awaiting on something. That something can be + a native coroutine frame or... an iterator. + If it's the latter -- we can't continue building + our chain. So the condition to bail out of this is + to do that when the type of the current coroutine + doesn't match the type of whatever it points to + in its cr_await. + */ + err = parse_coro_chain( + pid, + offsets, + async_offsets, + gi_await_addr, + render_to + ); + if (err) { + return -1; + } + } + } + } + + } + + return 0; +} + + +static int +parse_task_awaited_by( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address, + PyObject *awaited_by +); + + +static int +parse_task( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address, + PyObject *render_to +) { + char is_task; + int err = read_char( + pid, + task_address + async_offsets->asyncio_task_object.task_is_task, + &is_task); + if (err) { + return -1; + } + + uintptr_t refcnt; + read_ptr(pid, task_address + sizeof(Py_ssize_t), &refcnt); + + PyObject* result = PyList_New(0); + if (result == NULL) { + return -1; + } + + PyObject *call_stack = PyList_New(0); + if (call_stack == NULL) { + goto err; + } + if (PyList_Append(result, call_stack)) { + Py_DECREF(call_stack); + goto err; + } + /* we can operate on a borrowed one to simplify cleanup */ + Py_DECREF(call_stack); + + if (is_task) { + PyObject *tn = parse_task_name( + pid, offsets, async_offsets, task_address); + if (tn == NULL) { + goto err; + } + if (PyList_Append(result, tn)) { + Py_DECREF(tn); + goto err; + } + Py_DECREF(tn); + + uintptr_t coro_addr; + err = read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_coro, + &coro_addr); + if (err) { + goto err; + } + + if ((void*)coro_addr != NULL) { + err = parse_coro_chain( + pid, + offsets, + async_offsets, + coro_addr, + call_stack + ); + if (err) { + goto err; + } + + if (PyList_Reverse(call_stack)) { + goto err; + } + } + } + + if (PyList_Append(render_to, result)) { + goto err; + } + Py_DECREF(result); + + PyObject *awaited_by = PyList_New(0); + if (awaited_by == NULL) { + goto err; + } + if (PyList_Append(result, awaited_by)) { + Py_DECREF(awaited_by); + goto err; + } + /* we can operate on a borrowed one to simplify cleanup */ + Py_DECREF(awaited_by); + + if (parse_task_awaited_by(pid, offsets, async_offsets, + task_address, awaited_by) + ) { + goto err; + } + + return 0; + +err: + Py_DECREF(result); + return -1; +} + +static int +parse_tasks_in_set( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t set_addr, + PyObject *awaited_by +) { + uintptr_t set_obj; + if (read_py_ptr( + pid, + set_addr, + &set_obj) + ) { + return -1; + } + + Py_ssize_t num_els; + if (read_ssize_t( + pid, + set_obj + offsets->set_object.used, + &num_els) + ) { + return -1; + } + + Py_ssize_t set_len; + if (read_ssize_t( + pid, + set_obj + offsets->set_object.mask, + &set_len) + ) { + return -1; + } + set_len++; // The set contains the `mask+1` element slots. 
+ + uintptr_t table_ptr; + if (read_ptr( + pid, + set_obj + offsets->set_object.table, + &table_ptr) + ) { + return -1; + } + + Py_ssize_t i = 0; + Py_ssize_t els = 0; + while (i < set_len) { + uintptr_t key_addr; + if (read_py_ptr(pid, table_ptr, &key_addr)) { + return -1; + } + + if ((void*)key_addr != NULL) { + Py_ssize_t ref_cnt; + if (read_ssize_t(pid, table_ptr, &ref_cnt)) { + return -1; + } + + if (ref_cnt) { + // if 'ref_cnt=0' it's a set dummy marker + + if (parse_task( + pid, + offsets, + async_offsets, + key_addr, + awaited_by) + ) { + return -1; + } + + if (++els == num_els) { + break; + } + } + } + + table_ptr += sizeof(void*) * 2; + i++; + } + return 0; +} + + +static int +parse_task_awaited_by( + int pid, + struct _Py_DebugOffsets* offsets, + struct _Py_AsyncioModuleDebugOffsets* async_offsets, + uintptr_t task_address, + PyObject *awaited_by +) { + uintptr_t task_ab_addr; + int err = read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_awaited_by, + &task_ab_addr); + if (err) { + return -1; + } + + if ((void*)task_ab_addr == NULL) { + return 0; + } + + char awaited_by_is_a_set; + err = read_char( + pid, + task_address + async_offsets->asyncio_task_object.task_awaited_by_is_set, + &awaited_by_is_a_set); + if (err) { + return -1; + } + + if (awaited_by_is_a_set) { + if (parse_tasks_in_set( + pid, + offsets, + async_offsets, + task_address + async_offsets->asyncio_task_object.task_awaited_by, + awaited_by) + ) { + return -1; + } + } else { + uintptr_t sub_task; + if (read_py_ptr( + pid, + task_address + async_offsets->asyncio_task_object.task_awaited_by, + &sub_task) + ) { + return -1; + } + + if (parse_task( + pid, + offsets, + async_offsets, + sub_task, + awaited_by) + ) { + return -1; + } + } + + return 0; +} + +static int +parse_code_object( + int pid, + PyObject* result, + struct _Py_DebugOffsets* offsets, + uintptr_t address, + uintptr_t* previous_frame +) { + uintptr_t address_of_function_name; + int bytes_read = read_memory( + pid, + address + offsets->code_object.name, + sizeof(void*), + &address_of_function_name + ); + if (bytes_read < 0) { return -1; } - char function_name[256]; - if (read_string(pid, offsets, address_of_function_name, function_name, sizeof(function_name)) != 0) { + if ((void*)address_of_function_name == NULL) { + PyErr_SetString(PyExc_RuntimeError, "No function name found"); return -1; } - PyObject* py_function_name = PyUnicode_FromString(function_name); + PyObject* py_function_name = read_py_str( + pid, offsets, address_of_function_name, 256); if (py_function_name == NULL) { return -1; } @@ -486,54 +1171,283 @@ parse_code_object( static int parse_frame_object( - int pid, - PyObject* result, - struct _Py_DebugOffsets* offsets, - void* address, - void** previous_frame) -{ + int pid, + PyObject* result, + struct _Py_DebugOffsets* offsets, + uintptr_t address, + uintptr_t* previous_frame +) { + int err; + + ssize_t bytes_read = read_memory( + pid, + address + offsets->interpreter_frame.previous, + sizeof(void*), + previous_frame + ); + if (bytes_read < 0) { + return -1; + } + + char owner; + if (read_char(pid, address + offsets->interpreter_frame.owner, &owner)) { + return -1; + } + + if (owner >= FRAME_OWNED_BY_INTERPRETER) { + return 0; + } + + uintptr_t address_of_code_object; + err = read_py_ptr( + pid, + address + offsets->interpreter_frame.executable, + &address_of_code_object + ); + if (err) { + return -1; + } + + if ((void*)address_of_code_object == NULL) { + return 0; + } + + return parse_code_object( + pid, result, 
offsets, address_of_code_object, previous_frame); +} + +static int +parse_async_frame_object( + int pid, + PyObject* result, + struct _Py_DebugOffsets* offsets, + uintptr_t address, + uintptr_t* previous_frame, + uintptr_t* code_object +) { + int err; + + ssize_t bytes_read = read_memory( + pid, + address + offsets->interpreter_frame.previous, + sizeof(void*), + previous_frame + ); + if (bytes_read < 0) { + return -1; + } + + char owner; + bytes_read = read_memory( + pid, address + offsets->interpreter_frame.owner, sizeof(char), &owner); + if (bytes_read < 0) { + return -1; + } + + if (owner == FRAME_OWNED_BY_CSTACK || owner == FRAME_OWNED_BY_INTERPRETER) { + return 0; // C frame + } + + if (owner != FRAME_OWNED_BY_GENERATOR + && owner != FRAME_OWNED_BY_THREAD) { + PyErr_Format(PyExc_RuntimeError, "Unhandled frame owner %d.\n", owner); + return -1; + } + + err = read_py_ptr( + pid, + address + offsets->interpreter_frame.executable, + code_object + ); + if (err) { + return -1; + } + + assert(code_object != NULL); + if ((void*)*code_object == NULL) { + return 0; + } + + if (parse_code_object( + pid, result, offsets, *code_object, previous_frame)) { + return -1; + } + + return 1; +} + +static int +read_offsets( + int pid, + uintptr_t *runtime_start_address, + _Py_DebugOffsets* debug_offsets +) { + *runtime_start_address = get_py_runtime(pid); + if ((void*)*runtime_start_address == NULL) { + if (!PyErr_Occurred()) { + PyErr_SetString( + PyExc_RuntimeError, "Failed to get .PyRuntime address"); + } + return -1; + } + size_t size = sizeof(struct _Py_DebugOffsets); + ssize_t bytes_read = read_memory( + pid, *runtime_start_address, size, debug_offsets); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +read_async_debug( + int pid, + struct _Py_AsyncioModuleDebugOffsets* async_debug +) { + uintptr_t async_debug_addr = get_async_debug(pid); + if (!async_debug_addr) { + return -1; + } + size_t size = sizeof(struct _Py_AsyncioModuleDebugOffsets); ssize_t bytes_read = read_memory( + pid, async_debug_addr, size, async_debug); + if (bytes_read < 0) { + return -1; + } + return 0; +} + +static int +find_running_frame( + int pid, + uintptr_t runtime_start_address, + _Py_DebugOffsets* local_debug_offsets, + uintptr_t *frame +) { + off_t interpreter_state_list_head = + local_debug_offsets->runtime_state.interpreters_head; + + uintptr_t address_of_interpreter_state; + int bytes_read = read_memory( pid, - (void*)(address + offsets->interpreter_frame.previous), + runtime_start_address + interpreter_state_list_head, sizeof(void*), - previous_frame); - if (bytes_read == -1) { + &address_of_interpreter_state); + if (bytes_read < 0) { return -1; } - char owner; - bytes_read = - read_memory(pid, (void*)(address + offsets->interpreter_frame.owner), sizeof(char), &owner); + if (address_of_interpreter_state == 0) { + PyErr_SetString(PyExc_RuntimeError, "No interpreter state found"); + return -1; + } + + uintptr_t address_of_thread; + bytes_read = read_memory( + pid, + address_of_interpreter_state + + local_debug_offsets->interpreter_state.threads_head, + sizeof(void*), + &address_of_thread); if (bytes_read < 0) { return -1; } - if (owner == FRAME_OWNED_BY_CSTACK) { + // No Python frames are available for us (can happen at tear-down). 
+ if ((void*)address_of_thread != NULL) { + int err = read_ptr( + pid, + address_of_thread + local_debug_offsets->thread_state.current_frame, + frame); + if (err) { + return -1; + } return 0; } - uintptr_t address_of_code_object; + *frame = (uintptr_t)NULL; + return 0; +} + +static int +find_running_task( + int pid, + uintptr_t runtime_start_address, + _Py_DebugOffsets *local_debug_offsets, + struct _Py_AsyncioModuleDebugOffsets *async_offsets, + uintptr_t *running_task_addr +) { + *running_task_addr = (uintptr_t)NULL; + + off_t interpreter_state_list_head = + local_debug_offsets->runtime_state.interpreters_head; + + uintptr_t address_of_interpreter_state; + int bytes_read = read_memory( + pid, + runtime_start_address + interpreter_state_list_head, + sizeof(void*), + &address_of_interpreter_state); + if (bytes_read < 0) { + return -1; + } + + if (address_of_interpreter_state == 0) { + PyErr_SetString(PyExc_RuntimeError, "No interpreter state found"); + return -1; + } + + uintptr_t address_of_thread; bytes_read = read_memory( pid, - (void*)(address + offsets->interpreter_frame.executable), + address_of_interpreter_state + + local_debug_offsets->interpreter_state.threads_head, sizeof(void*), - &address_of_code_object); + &address_of_thread); + if (bytes_read < 0) { + return -1; + } + + uintptr_t address_of_running_loop; + // No Python frames are available for us (can happen at tear-down). + if ((void*)address_of_thread == NULL) { + return 0; + } + + bytes_read = read_py_ptr( + pid, + address_of_thread + + async_offsets->asyncio_thread_state.asyncio_running_loop, + &address_of_running_loop); if (bytes_read == -1) { return -1; } - if (address_of_code_object == 0) { + // no asyncio loop is now running + if ((void*)address_of_running_loop == NULL) { return 0; } - address_of_code_object &= ~Py_TAG_BITS; - return parse_code_object(pid, result, offsets, (void *)address_of_code_object, previous_frame); + + int err = read_ptr( + pid, + address_of_thread + + async_offsets->asyncio_thread_state.asyncio_running_task, + running_task_addr); + if (err) { + return -1; + } + + return 0; } static PyObject* get_stack_trace(PyObject* self, PyObject* args) { -#if (!defined(__linux__) && !defined(__APPLE__)) || (defined(__linux__) && !HAVE_PROCESS_VM_READV) - PyErr_SetString(PyExc_RuntimeError, "get_stack_trace is not supported on this platform"); +#if (!defined(__linux__) && !defined(__APPLE__)) || \ + (defined(__linux__) && !HAVE_PROCESS_VM_READV) + PyErr_SetString( + PyExc_RuntimeError, + "get_stack_trace is not supported on this platform"); return NULL; #endif int pid; @@ -542,88 +1456,205 @@ get_stack_trace(PyObject* self, PyObject* args) return NULL; } - void* runtime_start_address = get_py_runtime(pid); - if (runtime_start_address == NULL) { - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_RuntimeError, "Failed to get .PyRuntime address"); - } + uintptr_t runtime_start_address = get_py_runtime(pid); + struct _Py_DebugOffsets local_debug_offsets; + + if (read_offsets(pid, &runtime_start_address, &local_debug_offsets)) { return NULL; } - size_t size = sizeof(struct _Py_DebugOffsets); - struct _Py_DebugOffsets local_debug_offsets; - ssize_t bytes_read = read_memory(pid, runtime_start_address, size, &local_debug_offsets); - if (bytes_read == -1) { + uintptr_t address_of_current_frame; + if (find_running_frame( + pid, runtime_start_address, &local_debug_offsets, + &address_of_current_frame) + ) { return NULL; } - off_t interpreter_state_list_head = local_debug_offsets.runtime_state.interpreters_head; - void* 
address_of_interpreter_state; - bytes_read = read_memory( - pid, - (void*)(runtime_start_address + interpreter_state_list_head), - sizeof(void*), - &address_of_interpreter_state); - if (bytes_read == -1) { + PyObject* result = PyList_New(0); + if (result == NULL) { return NULL; } - if (address_of_interpreter_state == NULL) { - PyErr_SetString(PyExc_RuntimeError, "No interpreter state found"); + while ((void*)address_of_current_frame != NULL) { + if (parse_frame_object( + pid, + result, + &local_debug_offsets, + address_of_current_frame, + &address_of_current_frame) + < 0) + { + Py_DECREF(result); + return NULL; + } + } + + return result; +} + +static PyObject* +get_async_stack_trace(PyObject* self, PyObject* args) +{ +#if (!defined(__linux__) && !defined(__APPLE__)) || \ + (defined(__linux__) && !HAVE_PROCESS_VM_READV) + PyErr_SetString( + PyExc_RuntimeError, + "get_stack_trace is not supported on this platform"); + return NULL; +#endif + int pid; + + if (!PyArg_ParseTuple(args, "i", &pid)) { return NULL; } - void* address_of_thread; - bytes_read = read_memory( - pid, - (void*)(address_of_interpreter_state + local_debug_offsets.interpreter_state.threads_head), - sizeof(void*), - &address_of_thread); - if (bytes_read == -1) { + uintptr_t runtime_start_address = get_py_runtime(pid); + struct _Py_DebugOffsets local_debug_offsets; + + if (read_offsets(pid, &runtime_start_address, &local_debug_offsets)) { return NULL; } - PyObject* result = PyList_New(0); + struct _Py_AsyncioModuleDebugOffsets local_async_debug; + if (read_async_debug(pid, &local_async_debug)) { + return NULL; + } + + PyObject* result = PyList_New(1); if (result == NULL) { return NULL; } + PyObject* calls = PyList_New(0); + if (calls == NULL) { + return NULL; + } + if (PyList_SetItem(result, 0, calls)) { /* steals ref to 'calls' */ + Py_DECREF(result); + Py_DECREF(calls); + return NULL; + } - // No Python frames are available for us (can happen at tear-down). 
- if (address_of_thread != NULL) { - void* address_of_current_frame; - (void)read_memory( - pid, - (void*)(address_of_thread + local_debug_offsets.thread_state.current_frame), - sizeof(void*), - &address_of_current_frame); - while (address_of_current_frame != NULL) { - if (parse_frame_object( - pid, - result, - &local_debug_offsets, - address_of_current_frame, - &address_of_current_frame) - < 0) - { - Py_DECREF(result); - return NULL; - } + uintptr_t running_task_addr = (uintptr_t)NULL; + if (find_running_task( + pid, runtime_start_address, &local_debug_offsets, &local_async_debug, + &running_task_addr) + ) { + goto result_err; + } + + if ((void*)running_task_addr == NULL) { + PyErr_SetString(PyExc_RuntimeError, "No running task found"); + goto result_err; + } + + uintptr_t running_coro_addr; + if (read_py_ptr( + pid, + running_task_addr + local_async_debug.asyncio_task_object.task_coro, + &running_coro_addr + )) { + goto result_err; + } + + if ((void*)running_coro_addr == NULL) { + PyErr_SetString(PyExc_RuntimeError, "Running task coro is NULL"); + goto result_err; + } + + // note: genobject's gi_iframe is an embedded struct so the address to + // the offset leads directly to its first field: f_executable + uintptr_t address_of_running_task_code_obj; + if (read_py_ptr( + pid, + running_coro_addr + local_debug_offsets.gen_object.gi_iframe, + &address_of_running_task_code_obj + )) { + goto result_err; + } + + if ((void*)address_of_running_task_code_obj == NULL) { + PyErr_SetString(PyExc_RuntimeError, "Running task code object is NULL"); + goto result_err; + } + + uintptr_t address_of_current_frame; + if (find_running_frame( + pid, runtime_start_address, &local_debug_offsets, + &address_of_current_frame) + ) { + goto result_err; + } + + uintptr_t address_of_code_object; + while ((void*)address_of_current_frame != NULL) { + int res = parse_async_frame_object( + pid, + calls, + &local_debug_offsets, + address_of_current_frame, + &address_of_current_frame, + &address_of_code_object + ); + + if (res < 0) { + goto result_err; + } + + if (address_of_code_object == address_of_running_task_code_obj) { + break; } } + PyObject *tn = parse_task_name( + pid, &local_debug_offsets, &local_async_debug, running_task_addr); + if (tn == NULL) { + goto result_err; + } + if (PyList_Append(result, tn)) { + Py_DECREF(tn); + goto result_err; + } + Py_DECREF(tn); + + PyObject* awaited_by = PyList_New(0); + if (awaited_by == NULL) { + goto result_err; + } + if (PyList_Append(result, awaited_by)) { + Py_DECREF(awaited_by); + goto result_err; + } + Py_DECREF(awaited_by); + + if (parse_task_awaited_by( + pid, &local_debug_offsets, &local_async_debug, + running_task_addr, awaited_by) + ) { + goto result_err; + } + return result; + +result_err: + Py_DECREF(result); + return NULL; } + static PyMethodDef methods[] = { - {"get_stack_trace", get_stack_trace, METH_VARARGS, "Get the Python stack from a given PID"}, - {NULL, NULL, 0, NULL}, + {"get_stack_trace", get_stack_trace, METH_VARARGS, + "Get the Python stack from a given PID"}, + {"get_async_stack_trace", get_async_stack_trace, METH_VARARGS, + "Get the asyncio stack from a given PID"}, + {NULL, NULL, 0, NULL}, }; static struct PyModuleDef module = { - .m_base = PyModuleDef_HEAD_INIT, - .m_name = "_testexternalinspection", - .m_size = -1, - .m_methods = methods, + .m_base = PyModuleDef_HEAD_INIT, + .m_name = "_testexternalinspection", + .m_size = -1, + .m_methods = methods, }; PyMODINIT_FUNC @@ -636,7 +1667,8 @@ PyInit__testexternalinspection(void) #ifdef Py_GIL_DISABLED 
PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED); #endif - int rc = PyModule_AddIntConstant(mod, "PROCESS_VM_READV_SUPPORTED", HAVE_PROCESS_VM_READV); + int rc = PyModule_AddIntConstant( + mod, "PROCESS_VM_READV_SUPPORTED", HAVE_PROCESS_VM_READV); if (rc < 0) { Py_DECREF(mod); return NULL; diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 150d34d168f5e4..f3d234a7f9595e 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -25,10 +25,8 @@ #include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() #include "pycore_instruction_sequence.h" // _PyInstructionSequence_New() -#include "pycore_interp.h" // _PyInterpreterState_GetConfigCopy() -#include "pycore_long.h" // _PyLong_Sign() #include "pycore_object.h" // _PyObject_IsFreed() -#include "pycore_optimizer.h" // _Py_UopsSymbol, etc. +#include "pycore_optimizer.h" // JitOptSymbol, etc. #include "pycore_pathconfig.h" // _PyPathConfig_ClearGlobal() #include "pycore_pyerrors.h" // _PyErr_ChainExceptions1() #include "pycore_pylifecycle.h" // _PyInterpreterConfig_AsDict() @@ -318,41 +316,6 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) } -static PyObject * -test_get_config(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(args)) -{ - PyConfig config; - PyConfig_InitIsolatedConfig(&config); - if (_PyInterpreterState_GetConfigCopy(&config) < 0) { - PyConfig_Clear(&config); - return NULL; - } - PyObject *dict = _PyConfig_AsDict(&config); - PyConfig_Clear(&config); - return dict; -} - - -static PyObject * -test_set_config(PyObject *Py_UNUSED(self), PyObject *dict) -{ - PyConfig config; - PyConfig_InitIsolatedConfig(&config); - if (_PyConfig_FromDict(&config, dict) < 0) { - goto error; - } - if (_PyInterpreterState_SetConfig(&config) < 0) { - goto error; - } - PyConfig_Clear(&config); - Py_RETURN_NONE; - -error: - PyConfig_Clear(&config); - return NULL; -} - - static PyObject * test_reset_path_config(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(arg)) { @@ -989,12 +952,6 @@ get_co_framesize(PyObject *self, PyObject *arg) #ifdef _Py_TIER2 -static PyObject * -new_counter_optimizer(PyObject *self, PyObject *arg) -{ - return _PyOptimizer_NewCounter(); -} - static PyObject * new_uop_optimizer(PyObject *self, PyObject *arg) { @@ -1034,12 +991,6 @@ add_executor_dependency(PyObject *self, PyObject *args) if (!PyArg_ParseTuple(args, "OO", &exec, &obj)) { return NULL; } - /* No way to tell in general if exec is an executor, so we only accept - * counting_executor */ - if (strcmp(Py_TYPE(exec)->tp_name, "counting_executor")) { - PyErr_SetString(PyExc_TypeError, "argument must be a counting_executor"); - return NULL; - } _Py_Executor_DependsOn((_PyExecutorObject *)exec, obj); Py_RETURN_NONE; } @@ -1846,14 +1797,14 @@ _testinternalcapi_test_long_numbits_impl(PyObject *module) for (i = 0; i < Py_ARRAY_LENGTH(testcases); ++i) { uint64_t nbits; - int sign; + int sign = -7; PyObject *plong; plong = PyLong_FromLong(testcases[i].input); if (plong == NULL) return NULL; nbits = _PyLong_NumBits(plong); - sign = _PyLong_Sign(plong); + (void)PyLong_GetSign(plong, &sign); Py_DECREF(plong); if (nbits != testcases[i].nbits) @@ -1861,7 +1812,7 @@ _testinternalcapi_test_long_numbits_impl(PyObject *module) "wrong result for _PyLong_NumBits"); if (sign != testcases[i].sign) return raiseTestError("test_long_numbits", - "wrong result for _PyLong_Sign"); + "wrong result for PyLong_GetSign()"); } Py_RETURN_NONE; } @@ -2074,8 +2025,6 @@ static PyMethodDef module_functions[] = 
{ {"test_popcount", test_popcount, METH_NOARGS}, {"test_bit_length", test_bit_length, METH_NOARGS}, {"test_hashtable", test_hashtable, METH_NOARGS}, - {"get_config", test_get_config, METH_NOARGS}, - {"set_config", test_set_config, METH_O}, {"reset_path_config", test_reset_path_config, METH_NOARGS}, {"test_edit_cost", test_edit_cost, METH_NOARGS}, {"test_bytes_find", test_bytes_find, METH_NOARGS}, @@ -2101,7 +2050,6 @@ static PyMethodDef module_functions[] = { #ifdef _Py_TIER2 {"get_optimizer", get_optimizer, METH_NOARGS, NULL}, {"set_optimizer", set_optimizer, METH_O, NULL}, - {"new_counter_optimizer", new_counter_optimizer, METH_NOARGS, NULL}, {"new_uop_optimizer", new_uop_optimizer, METH_NOARGS, NULL}, {"add_executor_dependency", add_executor_dependency, METH_VARARGS, NULL}, {"invalidate_executors", invalidate_executors, METH_O, NULL}, diff --git a/Modules/_testlimitedcapi.c b/Modules/_testlimitedcapi.c index bcc69a339ec5c4..82dac1c999470f 100644 --- a/Modules/_testlimitedcapi.c +++ b/Modules/_testlimitedcapi.c @@ -56,6 +56,9 @@ PyInit__testlimitedcapi(void) if (_PyTestLimitedCAPI_Init_HeaptypeRelative(mod) < 0) { return NULL; } + if (_PyTestLimitedCAPI_Init_Import(mod) < 0) { + return NULL; + } if (_PyTestLimitedCAPI_Init_List(mod) < 0) { return NULL; } diff --git a/Modules/_testlimitedcapi/import.c b/Modules/_testlimitedcapi/import.c new file mode 100644 index 00000000000000..3707dbedeea0d9 --- /dev/null +++ b/Modules/_testlimitedcapi/import.c @@ -0,0 +1,306 @@ +// Need limited C API version 3.13 for PyImport_AddModuleRef() +#include "pyconfig.h" // Py_GIL_DISABLED +#if !defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API) +# define Py_LIMITED_API 0x030d0000 +#endif + +#include "parts.h" +#include "util.h" + + +/* Test PyImport_GetMagicNumber() */ +static PyObject * +pyimport_getmagicnumber(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + long magic = PyImport_GetMagicNumber(); + return PyLong_FromLong(magic); +} + + +/* Test PyImport_GetMagicTag() */ +static PyObject * +pyimport_getmagictag(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + const char *tag = PyImport_GetMagicTag(); + return PyUnicode_FromString(tag); +} + + +/* Test PyImport_GetModuleDict() */ +static PyObject * +pyimport_getmoduledict(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args)) +{ + return Py_XNewRef(PyImport_GetModuleDict()); +} + + +/* Test PyImport_GetModule() */ +static PyObject * +pyimport_getmodule(PyObject *Py_UNUSED(module), PyObject *name) +{ + assert(!PyErr_Occurred()); + NULLABLE(name); + PyObject *module = PyImport_GetModule(name); + if (module == NULL && !PyErr_Occurred()) { + return Py_NewRef(PyExc_KeyError); + } + return module; +} + + +/* Test PyImport_AddModuleObject() */ +static PyObject * +pyimport_addmoduleobject(PyObject *Py_UNUSED(module), PyObject *name) +{ + NULLABLE(name); + return Py_XNewRef(PyImport_AddModuleObject(name)); +} + + +/* Test PyImport_AddModule() */ +static PyObject * +pyimport_addmodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + return Py_XNewRef(PyImport_AddModule(name)); +} + + +/* Test PyImport_AddModuleRef() */ +static PyObject * +pyimport_addmoduleref(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + return PyImport_AddModuleRef(name); +} + + +/* Test PyImport_Import() */ +static PyObject * 
+pyimport_import(PyObject *Py_UNUSED(module), PyObject *name) +{ + NULLABLE(name); + return PyImport_Import(name); +} + + +/* Test PyImport_ImportModule() */ +static PyObject * +pyimport_importmodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + return PyImport_ImportModule(name); +} + + +/* Test PyImport_ImportModuleNoBlock() */ +static PyObject * +pyimport_importmodulenoblock(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + return PyImport_ImportModuleNoBlock(name); + _Py_COMP_DIAG_POP +} + + +/* Test PyImport_ImportModuleEx() */ +static PyObject * +pyimport_importmoduleex(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *globals, *locals, *fromlist; + if (!PyArg_ParseTuple(args, "z#OOO", + &name, &size, &globals, &locals, &fromlist)) { + return NULL; + } + NULLABLE(globals); + NULLABLE(locals); + NULLABLE(fromlist); + + return PyImport_ImportModuleEx(name, globals, locals, fromlist); +} + + +/* Test PyImport_ImportModuleLevel() */ +static PyObject * +pyimport_importmodulelevel(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *globals, *locals, *fromlist; + int level; + if (!PyArg_ParseTuple(args, "z#OOOi", + &name, &size, &globals, &locals, &fromlist, &level)) { + return NULL; + } + NULLABLE(globals); + NULLABLE(locals); + NULLABLE(fromlist); + + return PyImport_ImportModuleLevel(name, globals, locals, fromlist, level); +} + + +/* Test PyImport_ImportModuleLevelObject() */ +static PyObject * +pyimport_importmodulelevelobject(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *name, *globals, *locals, *fromlist; + int level; + if (!PyArg_ParseTuple(args, "OOOOi", + &name, &globals, &locals, &fromlist, &level)) { + return NULL; + } + NULLABLE(name); + NULLABLE(globals); + NULLABLE(locals); + NULLABLE(fromlist); + + return PyImport_ImportModuleLevelObject(name, globals, locals, fromlist, level); +} + + +/* Test PyImport_ImportFrozenModule() */ +static PyObject * +pyimport_importfrozenmodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + if (!PyArg_ParseTuple(args, "z#", &name, &size)) { + return NULL; + } + + RETURN_INT(PyImport_ImportFrozenModule(name)); +} + + +/* Test PyImport_ImportFrozenModuleObject() */ +static PyObject * +pyimport_importfrozenmoduleobject(PyObject *Py_UNUSED(module), PyObject *name) +{ + NULLABLE(name); + RETURN_INT(PyImport_ImportFrozenModuleObject(name)); +} + + +/* Test PyImport_ExecCodeModule() */ +static PyObject * +pyimport_executecodemodule(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *code; + if (!PyArg_ParseTuple(args, "z#O", &name, &size, &code)) { + return NULL; + } + NULLABLE(code); + + return PyImport_ExecCodeModule(name, code); +} + + +/* Test PyImport_ExecCodeModuleEx() */ +static PyObject * +pyimport_executecodemoduleex(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *code; + const char *pathname; + if (!PyArg_ParseTuple(args, "z#Oz#", &name, &size, &code, &pathname, &size)) { + return NULL; + } + NULLABLE(code); + + return PyImport_ExecCodeModuleEx(name, code, pathname); +} + + +/* Test 
PyImport_ExecCodeModuleWithPathnames() */ +static PyObject * +pyimport_executecodemodulewithpathnames(PyObject *Py_UNUSED(module), PyObject *args) +{ + const char *name; + Py_ssize_t size; + PyObject *code; + const char *pathname; + const char *cpathname; + if (!PyArg_ParseTuple(args, "z#Oz#z#", &name, &size, &code, &pathname, &size, &cpathname, &size)) { + return NULL; + } + NULLABLE(code); + + return PyImport_ExecCodeModuleWithPathnames(name, code, + pathname, cpathname); +} + + +/* Test PyImport_ExecCodeModuleObject() */ +static PyObject * +pyimport_executecodemoduleobject(PyObject *Py_UNUSED(module), PyObject *args) +{ + PyObject *name, *code, *pathname, *cpathname; + if (!PyArg_ParseTuple(args, "OOOO", &name, &code, &pathname, &cpathname)) { + return NULL; + } + NULLABLE(name); + NULLABLE(code); + NULLABLE(pathname); + NULLABLE(cpathname); + + return PyImport_ExecCodeModuleObject(name, code, pathname, cpathname); +} + + +static PyMethodDef test_methods[] = { + {"PyImport_GetMagicNumber", pyimport_getmagicnumber, METH_NOARGS}, + {"PyImport_GetMagicTag", pyimport_getmagictag, METH_NOARGS}, + {"PyImport_GetModuleDict", pyimport_getmoduledict, METH_NOARGS}, + {"PyImport_GetModule", pyimport_getmodule, METH_O}, + {"PyImport_AddModuleObject", pyimport_addmoduleobject, METH_O}, + {"PyImport_AddModule", pyimport_addmodule, METH_VARARGS}, + {"PyImport_AddModuleRef", pyimport_addmoduleref, METH_VARARGS}, + {"PyImport_Import", pyimport_import, METH_O}, + {"PyImport_ImportModule", pyimport_importmodule, METH_VARARGS}, + {"PyImport_ImportModuleNoBlock", pyimport_importmodulenoblock, METH_VARARGS}, + {"PyImport_ImportModuleEx", pyimport_importmoduleex, METH_VARARGS}, + {"PyImport_ImportModuleLevel", pyimport_importmodulelevel, METH_VARARGS}, + {"PyImport_ImportModuleLevelObject", pyimport_importmodulelevelobject, METH_VARARGS}, + {"PyImport_ImportFrozenModule", pyimport_importfrozenmodule, METH_VARARGS}, + {"PyImport_ImportFrozenModuleObject", pyimport_importfrozenmoduleobject, METH_O}, + {"PyImport_ExecCodeModule", pyimport_executecodemodule, METH_VARARGS}, + {"PyImport_ExecCodeModuleEx", pyimport_executecodemoduleex, METH_VARARGS}, + {"PyImport_ExecCodeModuleWithPathnames", pyimport_executecodemodulewithpathnames, METH_VARARGS}, + {"PyImport_ExecCodeModuleObject", pyimport_executecodemoduleobject, METH_VARARGS}, + {NULL}, +}; + + +int +_PyTestLimitedCAPI_Init_Import(PyObject *module) +{ + return PyModule_AddFunctions(module, test_methods); +} diff --git a/Modules/_testlimitedcapi/parts.h b/Modules/_testlimitedcapi/parts.h index 56d566b66565a3..9efcd8dcb71e5b 100644 --- a/Modules/_testlimitedcapi/parts.h +++ b/Modules/_testlimitedcapi/parts.h @@ -31,6 +31,7 @@ int _PyTestLimitedCAPI_Init_Dict(PyObject *module); int _PyTestLimitedCAPI_Init_Eval(PyObject *module); int _PyTestLimitedCAPI_Init_Float(PyObject *module); int _PyTestLimitedCAPI_Init_HeaptypeRelative(PyObject *module); +int _PyTestLimitedCAPI_Init_Import(PyObject *module); int _PyTestLimitedCAPI_Init_Object(PyObject *module); int _PyTestLimitedCAPI_Init_List(PyObject *module); int _PyTestLimitedCAPI_Init_Long(PyObject *module); diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c index d19ae326bd6b48..dbc574f7816b85 100644 --- a/Modules/_threadmodule.c +++ b/Modules/_threadmodule.c @@ -47,6 +47,14 @@ get_thread_state(PyObject *module) } +#ifdef MS_WINDOWS +typedef HRESULT (WINAPI *PF_GET_THREAD_DESCRIPTION)(HANDLE, PCWSTR*); +typedef HRESULT (WINAPI *PF_SET_THREAD_DESCRIPTION)(HANDLE, PCWSTR); +static PF_GET_THREAD_DESCRIPTION 
pGetThreadDescription = NULL; +static PF_SET_THREAD_DESCRIPTION pSetThreadDescription = NULL; +#endif + + /*[clinic input] module _thread [clinic start generated code]*/ @@ -2368,7 +2376,7 @@ Internal only. Return a non-zero integer that uniquely identifies the main threa of the main interpreter."); -#ifdef HAVE_PTHREAD_GETNAME_NP +#if defined(HAVE_PTHREAD_GETNAME_NP) || defined(MS_WINDOWS) /*[clinic input] _thread._get_name @@ -2379,6 +2387,7 @@ static PyObject * _thread__get_name_impl(PyObject *module) /*[clinic end generated code: output=20026e7ee3da3dd7 input=35cec676833d04c8]*/ { +#ifndef MS_WINDOWS // Linux and macOS are limited to respectively 16 and 64 bytes char name[100]; pthread_t thread = pthread_self(); @@ -2393,11 +2402,26 @@ _thread__get_name_impl(PyObject *module) #else return PyUnicode_DecodeFSDefault(name); #endif +#else + // Windows implementation + assert(pGetThreadDescription != NULL); + + wchar_t *name; + HRESULT hr = pGetThreadDescription(GetCurrentThread(), &name); + if (FAILED(hr)) { + PyErr_SetFromWindowsErr(0); + return NULL; + } + + PyObject *name_obj = PyUnicode_FromWideChar(name, -1); + LocalFree(name); + return name_obj; +#endif } #endif // HAVE_PTHREAD_GETNAME_NP -#ifdef HAVE_PTHREAD_SETNAME_NP +#if defined(HAVE_PTHREAD_SETNAME_NP) || defined(MS_WINDOWS) /*[clinic input] _thread.set_name @@ -2410,6 +2434,7 @@ static PyObject * _thread_set_name_impl(PyObject *module, PyObject *name_obj) /*[clinic end generated code: output=402b0c68e0c0daed input=7e7acd98261be82f]*/ { +#ifndef MS_WINDOWS #ifdef __sun // Solaris always uses UTF-8 const char *encoding = "utf-8"; @@ -2425,12 +2450,12 @@ _thread_set_name_impl(PyObject *module, PyObject *name_obj) return NULL; } -#ifdef PYTHREAD_NAME_MAXLEN - // Truncate to PYTHREAD_NAME_MAXLEN bytes + the NUL byte if needed - if (PyBytes_GET_SIZE(name_encoded) > PYTHREAD_NAME_MAXLEN) { +#ifdef _PYTHREAD_NAME_MAXLEN + // Truncate to _PYTHREAD_NAME_MAXLEN bytes + the NUL byte if needed + if (PyBytes_GET_SIZE(name_encoded) > _PYTHREAD_NAME_MAXLEN) { PyObject *truncated; truncated = PyBytes_FromStringAndSize(PyBytes_AS_STRING(name_encoded), - PYTHREAD_NAME_MAXLEN); + _PYTHREAD_NAME_MAXLEN); if (truncated == NULL) { Py_DECREF(name_encoded); return NULL; @@ -2455,6 +2480,35 @@ _thread_set_name_impl(PyObject *module, PyObject *name_obj) return PyErr_SetFromErrno(PyExc_OSError); } Py_RETURN_NONE; +#else + // Windows implementation + assert(pSetThreadDescription != NULL); + + Py_ssize_t len; + wchar_t *name = PyUnicode_AsWideCharString(name_obj, &len); + if (name == NULL) { + return NULL; + } + + if (len > _PYTHREAD_NAME_MAXLEN) { + // Truncate the name + Py_UCS4 ch = name[_PYTHREAD_NAME_MAXLEN-1]; + if (Py_UNICODE_IS_HIGH_SURROGATE(ch)) { + name[_PYTHREAD_NAME_MAXLEN-1] = 0; + } + else { + name[_PYTHREAD_NAME_MAXLEN] = 0; + } + } + + HRESULT hr = pSetThreadDescription(GetCurrentThread(), name); + PyMem_Free(name); + if (FAILED(hr)) { + PyErr_SetFromWindowsErr((int)hr); + return NULL; + } + Py_RETURN_NONE; +#endif } #endif // HAVE_PTHREAD_SETNAME_NP @@ -2591,13 +2645,38 @@ thread_module_exec(PyObject *module) llist_init(&state->shutdown_handles); -#ifdef PYTHREAD_NAME_MAXLEN +#ifdef _PYTHREAD_NAME_MAXLEN if (PyModule_AddIntConstant(module, "_NAME_MAXLEN", - PYTHREAD_NAME_MAXLEN) < 0) { + _PYTHREAD_NAME_MAXLEN) < 0) { return -1; } #endif +#ifdef MS_WINDOWS + HMODULE kernelbase = GetModuleHandleW(L"kernelbase.dll"); + if (kernelbase != NULL) { + if (pGetThreadDescription == NULL) { + pGetThreadDescription = 
(PF_GET_THREAD_DESCRIPTION)GetProcAddress( + kernelbase, "GetThreadDescription"); + } + if (pSetThreadDescription == NULL) { + pSetThreadDescription = (PF_SET_THREAD_DESCRIPTION)GetProcAddress( + kernelbase, "SetThreadDescription"); + } + } + + if (pGetThreadDescription == NULL) { + if (PyObject_DelAttrString(module, "_get_name") < 0) { + return -1; + } + } + if (pSetThreadDescription == NULL) { + if (PyObject_DelAttrString(module, "set_name") < 0) { + return -1; + } + } +#endif + return 0; } diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 887a1e820e250e..be71fc9fc9c341 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -215,18 +215,14 @@ static struct PyModuleDef module_def = { PyMODINIT_FUNC PyInit__tracemalloc(void) { - PyObject *m; - m = PyModule_Create(&module_def); - if (m == NULL) + PyObject *mod = PyModule_Create(&module_def); + if (mod == NULL) { return NULL; + } + #ifdef Py_GIL_DISABLED - PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED); + PyUnstable_Module_SetGIL(mod, Py_MOD_GIL_NOT_USED); #endif - if (_PyTraceMalloc_Init() < 0) { - Py_DECREF(m); - return NULL; - } - - return m; + return mod; } diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index b80c964f20d65e..28c6a0ae05c598 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -1557,7 +1557,7 @@ array_array_fromfile_impl(arrayobject *self, PyTypeObject *cls, PyObject *f, not_enough_bytes = (PyBytes_GET_SIZE(b) != nbytes); - res = array_array_frombytes(self, b); + res = array_array_frombytes((PyObject *)self, b); Py_DECREF(b); if (res == NULL) return NULL; @@ -2797,8 +2797,7 @@ array_new(PyTypeObject *type, PyObject *args, PyObject *kwds) else if (initial != NULL && (PyByteArray_Check(initial) || PyBytes_Check(initial))) { PyObject *v; - v = array_array_frombytes((arrayobject *)a, - initial); + v = array_array_frombytes((PyObject *)a, initial); if (v == NULL) { Py_DECREF(a); return NULL; @@ -3090,11 +3089,16 @@ array_arrayiterator___setstate__(arrayiterobject *self, PyObject *state) Py_ssize_t index = PyLong_AsSsize_t(state); if (index == -1 && PyErr_Occurred()) return NULL; - if (index < 0) - index = 0; - else if (index > Py_SIZE(self->ao)) - index = Py_SIZE(self->ao); /* iterator exhausted */ - self->index = index; + arrayobject *ao = self->ao; + if (ao != NULL) { + if (index < 0) { + index = 0; + } + else if (index > Py_SIZE(ao)) { + index = Py_SIZE(ao); /* iterator exhausted */ + } + self->index = index; + } Py_RETURN_NONE; } diff --git a/Modules/cjkcodecs/clinic/multibytecodec.c.h b/Modules/cjkcodecs/clinic/multibytecodec.c.h index 7e7ea9e0fdfa8e..d77bbd48066354 100644 --- a/Modules/cjkcodecs/clinic/multibytecodec.c.h +++ b/Modules/cjkcodecs/clinic/multibytecodec.c.h @@ -28,7 +28,7 @@ _multibytecodec_MultibyteCodec_encode_impl(MultibyteCodecObject *self, const char *errors); static PyObject * -_multibytecodec_MultibyteCodec_encode(MultibyteCodecObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteCodec_encode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -89,7 +89,7 @@ _multibytecodec_MultibyteCodec_encode(MultibyteCodecObject *self, PyObject *cons goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteCodec_encode_impl(self, input, errors); + return_value = _multibytecodec_MultibyteCodec_encode_impl((MultibyteCodecObject *)self, input, errors); exit: return return_value; 
@@ -115,7 +115,7 @@ _multibytecodec_MultibyteCodec_decode_impl(MultibyteCodecObject *self, const char *errors); static PyObject * -_multibytecodec_MultibyteCodec_decode(MultibyteCodecObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteCodec_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -178,7 +178,7 @@ _multibytecodec_MultibyteCodec_decode(MultibyteCodecObject *self, PyObject *cons goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteCodec_decode_impl(self, &input, errors); + return_value = _multibytecodec_MultibyteCodec_decode_impl((MultibyteCodecObject *)self, &input, errors); exit: /* Cleanup for input */ @@ -203,7 +203,7 @@ _multibytecodec_MultibyteIncrementalEncoder_encode_impl(MultibyteIncrementalEnco int final); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteIncrementalEncoder_encode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -250,7 +250,7 @@ _multibytecodec_MultibyteIncrementalEncoder_encode(MultibyteIncrementalEncoderOb goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteIncrementalEncoder_encode_impl(self, input, final); + return_value = _multibytecodec_MultibyteIncrementalEncoder_encode_impl((MultibyteIncrementalEncoderObject *)self, input, final); exit: return return_value; @@ -268,9 +268,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalEncoder_getstate_impl(MultibyteIncrementalEncoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_getstate(MultibyteIncrementalEncoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalEncoder_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalEncoder_getstate_impl(self); + return _multibytecodec_MultibyteIncrementalEncoder_getstate_impl((MultibyteIncrementalEncoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteIncrementalEncoder_setstate__doc__, @@ -286,7 +286,7 @@ _multibytecodec_MultibyteIncrementalEncoder_setstate_impl(MultibyteIncrementalEn PyLongObject *statelong); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_setstate(MultibyteIncrementalEncoderObject *self, PyObject *arg) +_multibytecodec_MultibyteIncrementalEncoder_setstate(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyLongObject *statelong; @@ -296,7 +296,7 @@ _multibytecodec_MultibyteIncrementalEncoder_setstate(MultibyteIncrementalEncoder goto exit; } statelong = (PyLongObject *)arg; - return_value = _multibytecodec_MultibyteIncrementalEncoder_setstate_impl(self, statelong); + return_value = _multibytecodec_MultibyteIncrementalEncoder_setstate_impl((MultibyteIncrementalEncoderObject *)self, statelong); exit: return return_value; @@ -314,9 +314,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalEncoder_reset_impl(MultibyteIncrementalEncoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalEncoder_reset(MultibyteIncrementalEncoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalEncoder_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return 
_multibytecodec_MultibyteIncrementalEncoder_reset_impl(self); + return _multibytecodec_MultibyteIncrementalEncoder_reset_impl((MultibyteIncrementalEncoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteIncrementalDecoder_decode__doc__, @@ -333,7 +333,7 @@ _multibytecodec_MultibyteIncrementalDecoder_decode_impl(MultibyteIncrementalDeco int final); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteIncrementalDecoder_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -382,7 +382,7 @@ _multibytecodec_MultibyteIncrementalDecoder_decode(MultibyteIncrementalDecoderOb goto exit; } skip_optional_pos: - return_value = _multibytecodec_MultibyteIncrementalDecoder_decode_impl(self, &input, final); + return_value = _multibytecodec_MultibyteIncrementalDecoder_decode_impl((MultibyteIncrementalDecoderObject *)self, &input, final); exit: /* Cleanup for input */ @@ -405,9 +405,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalDecoder_getstate_impl(MultibyteIncrementalDecoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_getstate(MultibyteIncrementalDecoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalDecoder_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalDecoder_getstate_impl(self); + return _multibytecodec_MultibyteIncrementalDecoder_getstate_impl((MultibyteIncrementalDecoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteIncrementalDecoder_setstate__doc__, @@ -423,7 +423,7 @@ _multibytecodec_MultibyteIncrementalDecoder_setstate_impl(MultibyteIncrementalDe PyObject *state); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_setstate(MultibyteIncrementalDecoderObject *self, PyObject *arg) +_multibytecodec_MultibyteIncrementalDecoder_setstate(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *state; @@ -433,7 +433,7 @@ _multibytecodec_MultibyteIncrementalDecoder_setstate(MultibyteIncrementalDecoder goto exit; } state = arg; - return_value = _multibytecodec_MultibyteIncrementalDecoder_setstate_impl(self, state); + return_value = _multibytecodec_MultibyteIncrementalDecoder_setstate_impl((MultibyteIncrementalDecoderObject *)self, state); exit: return return_value; @@ -451,9 +451,9 @@ static PyObject * _multibytecodec_MultibyteIncrementalDecoder_reset_impl(MultibyteIncrementalDecoderObject *self); static PyObject * -_multibytecodec_MultibyteIncrementalDecoder_reset(MultibyteIncrementalDecoderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteIncrementalDecoder_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteIncrementalDecoder_reset_impl(self); + return _multibytecodec_MultibyteIncrementalDecoder_reset_impl((MultibyteIncrementalDecoderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteStreamReader_read__doc__, @@ -469,7 +469,7 @@ _multibytecodec_MultibyteStreamReader_read_impl(MultibyteStreamReaderObject *sel PyObject *sizeobj); static PyObject * -_multibytecodec_MultibyteStreamReader_read(MultibyteStreamReaderObject *self, PyObject *const *args, Py_ssize_t nargs) +_multibytecodec_MultibyteStreamReader_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; 
PyObject *sizeobj = Py_None; @@ -482,7 +482,7 @@ _multibytecodec_MultibyteStreamReader_read(MultibyteStreamReaderObject *self, Py } sizeobj = args[0]; skip_optional: - return_value = _multibytecodec_MultibyteStreamReader_read_impl(self, sizeobj); + return_value = _multibytecodec_MultibyteStreamReader_read_impl((MultibyteStreamReaderObject *)self, sizeobj); exit: return return_value; @@ -501,7 +501,7 @@ _multibytecodec_MultibyteStreamReader_readline_impl(MultibyteStreamReaderObject PyObject *sizeobj); static PyObject * -_multibytecodec_MultibyteStreamReader_readline(MultibyteStreamReaderObject *self, PyObject *const *args, Py_ssize_t nargs) +_multibytecodec_MultibyteStreamReader_readline(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sizeobj = Py_None; @@ -514,7 +514,7 @@ _multibytecodec_MultibyteStreamReader_readline(MultibyteStreamReaderObject *self } sizeobj = args[0]; skip_optional: - return_value = _multibytecodec_MultibyteStreamReader_readline_impl(self, sizeobj); + return_value = _multibytecodec_MultibyteStreamReader_readline_impl((MultibyteStreamReaderObject *)self, sizeobj); exit: return return_value; @@ -533,7 +533,7 @@ _multibytecodec_MultibyteStreamReader_readlines_impl(MultibyteStreamReaderObject PyObject *sizehintobj); static PyObject * -_multibytecodec_MultibyteStreamReader_readlines(MultibyteStreamReaderObject *self, PyObject *const *args, Py_ssize_t nargs) +_multibytecodec_MultibyteStreamReader_readlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sizehintobj = Py_None; @@ -546,7 +546,7 @@ _multibytecodec_MultibyteStreamReader_readlines(MultibyteStreamReaderObject *sel } sizehintobj = args[0]; skip_optional: - return_value = _multibytecodec_MultibyteStreamReader_readlines_impl(self, sizehintobj); + return_value = _multibytecodec_MultibyteStreamReader_readlines_impl((MultibyteStreamReaderObject *)self, sizehintobj); exit: return return_value; @@ -564,9 +564,9 @@ static PyObject * _multibytecodec_MultibyteStreamReader_reset_impl(MultibyteStreamReaderObject *self); static PyObject * -_multibytecodec_MultibyteStreamReader_reset(MultibyteStreamReaderObject *self, PyObject *Py_UNUSED(ignored)) +_multibytecodec_MultibyteStreamReader_reset(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _multibytecodec_MultibyteStreamReader_reset_impl(self); + return _multibytecodec_MultibyteStreamReader_reset_impl((MultibyteStreamReaderObject *)self); } PyDoc_STRVAR(_multibytecodec_MultibyteStreamWriter_write__doc__, @@ -583,7 +583,7 @@ _multibytecodec_MultibyteStreamWriter_write_impl(MultibyteStreamWriterObject *se PyObject *strobj); static PyObject * -_multibytecodec_MultibyteStreamWriter_write(MultibyteStreamWriterObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteStreamWriter_write(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -608,7 +608,7 @@ _multibytecodec_MultibyteStreamWriter_write(MultibyteStreamWriterObject *self, P goto exit; } strobj = args[0]; - return_value = _multibytecodec_MultibyteStreamWriter_write_impl(self, cls, strobj); + return_value = _multibytecodec_MultibyteStreamWriter_write_impl((MultibyteStreamWriterObject *)self, cls, strobj); exit: return return_value; @@ -628,7 +628,7 @@ 
_multibytecodec_MultibyteStreamWriter_writelines_impl(MultibyteStreamWriterObjec PyObject *lines); static PyObject * -_multibytecodec_MultibyteStreamWriter_writelines(MultibyteStreamWriterObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteStreamWriter_writelines(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -653,7 +653,7 @@ _multibytecodec_MultibyteStreamWriter_writelines(MultibyteStreamWriterObject *se goto exit; } lines = args[0]; - return_value = _multibytecodec_MultibyteStreamWriter_writelines_impl(self, cls, lines); + return_value = _multibytecodec_MultibyteStreamWriter_writelines_impl((MultibyteStreamWriterObject *)self, cls, lines); exit: return return_value; @@ -672,13 +672,13 @@ _multibytecodec_MultibyteStreamWriter_reset_impl(MultibyteStreamWriterObject *se PyTypeObject *cls); static PyObject * -_multibytecodec_MultibyteStreamWriter_reset(MultibyteStreamWriterObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_multibytecodec_MultibyteStreamWriter_reset(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "reset() takes no arguments"); return NULL; } - return _multibytecodec_MultibyteStreamWriter_reset_impl(self, cls); + return _multibytecodec_MultibyteStreamWriter_reset_impl((MultibyteStreamWriterObject *)self, cls); } PyDoc_STRVAR(_multibytecodec___create_codec__doc__, @@ -688,4 +688,4 @@ PyDoc_STRVAR(_multibytecodec___create_codec__doc__, #define _MULTIBYTECODEC___CREATE_CODEC_METHODDEF \ {"__create_codec", (PyCFunction)_multibytecodec___create_codec, METH_O, _multibytecodec___create_codec__doc__}, -/*[clinic end generated code: output=60e1fa3a7615c148 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6571941b8e45b013 input=a9049054013a1b77]*/ diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c index 53135ae4aa7968..08b74740bda4bf 100644 --- a/Modules/cjkcodecs/multibytecodec.c +++ b/Modules/cjkcodecs/multibytecodec.c @@ -56,6 +56,27 @@ class _multibytecodec.MultibyteStreamWriter "MultibyteStreamWriterObject *" "cli /*[clinic end generated code: output=da39a3ee5e6b4b0d input=305a76dfdd24b99c]*/ #undef clinic_get_state +#define _MultibyteCodec_CAST(op) ((MultibyteCodec *)(op)) +#define _MultibyteCodecObject_CAST(op) ((MultibyteCodecObject *)(op)) + +#define _MultibyteStatefulCodecContext_CAST(op) \ + ((MultibyteStatefulCodecContext *)(op)) + +#define _MultibyteStatefulEncoderContext_CAST(op) \ + ((MultibyteStatefulEncoderContext *)(op)) +#define _MultibyteStatefulDecoderContext_CAST(op) \ + ((MultibyteStatefulDecoderContext *)(op)) + +#define _MultibyteIncrementalEncoderObject_CAST(op) \ + ((MultibyteIncrementalEncoderObject *)(op)) +#define _MultibyteIncrementalDecoderObject_CAST(op) \ + ((MultibyteIncrementalDecoderObject *)(op)) + +#define _MultibyteStreamReaderObject_CAST(op) \ + ((MultibyteStreamReaderObject *)(op)) +#define _MultibyteStreamWriterObject_CAST(op) \ + ((MultibyteStreamWriterObject *)(op)) + typedef struct { PyObject *inobj; Py_ssize_t inpos, inlen; @@ -136,9 +157,10 @@ call_error_callback(PyObject *errors, PyObject *exc) } static PyObject * -codecctx_errors_get(MultibyteStatefulCodecContext *self, void *Py_UNUSED(ignored)) 
+codecctx_errors_get(PyObject *op, void *Py_UNUSED(closure)) { const char *errors; + MultibyteStatefulCodecContext *self = _MultibyteStatefulCodecContext_CAST(op); if (self->errors == ERROR_STRICT) errors = "strict"; @@ -154,11 +176,11 @@ codecctx_errors_get(MultibyteStatefulCodecContext *self, void *Py_UNUSED(ignored } static int -codecctx_errors_set(MultibyteStatefulCodecContext *self, PyObject *value, - void *closure) +codecctx_errors_set(PyObject *op, PyObject *value, void *Py_UNUSED(closure)) { PyObject *cb; const char *str; + MultibyteStatefulCodecContext *self = _MultibyteStatefulCodecContext_CAST(op); if (value == NULL) { PyErr_SetString(PyExc_AttributeError, "cannot delete attribute"); @@ -184,9 +206,8 @@ codecctx_errors_set(MultibyteStatefulCodecContext *self, PyObject *value, /* This getset handlers list is used by all the stateful codec objects */ static PyGetSetDef codecctx_getsets[] = { - {"errors", (getter)codecctx_errors_get, - (setter)codecctx_errors_set, - PyDoc_STR("how to treat errors")}, + {"errors", codecctx_errors_get, codecctx_errors_set, + PyDoc_STR("how to treat errors")}, {NULL,} }; @@ -719,22 +740,24 @@ static struct PyMethodDef multibytecodec_methods[] = { }; static int -multibytecodec_clear(MultibyteCodecObject *self) +multibytecodec_clear(PyObject *op) { + MultibyteCodecObject *self = _MultibyteCodecObject_CAST(op); Py_CLEAR(self->cjk_module); return 0; } static int -multibytecodec_traverse(MultibyteCodecObject *self, visitproc visit, void *arg) +multibytecodec_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteCodecObject *self = _MultibyteCodecObject_CAST(op); Py_VISIT(Py_TYPE(self)); Py_VISIT(self->cjk_module); return 0; } static void -multibytecodec_dealloc(MultibyteCodecObject *self) +multibytecodec_dealloc(PyObject *self) { PyObject_GC_UnTrack(self); PyTypeObject *tp = Py_TYPE(self); @@ -1106,17 +1129,18 @@ mbiencoder_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbiencoder_traverse(MultibyteIncrementalEncoderObject *self, - visitproc visit, void *arg) +mbiencoder_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteIncrementalEncoderObject *self = _MultibyteIncrementalEncoderObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); return 0; } static void -mbiencoder_dealloc(MultibyteIncrementalEncoderObject *self) +mbiencoder_dealloc(PyObject *op) { + MultibyteIncrementalEncoderObject *self = _MultibyteIncrementalEncoderObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -1388,17 +1412,18 @@ mbidecoder_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbidecoder_traverse(MultibyteIncrementalDecoderObject *self, - visitproc visit, void *arg) +mbidecoder_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteIncrementalDecoderObject *self = _MultibyteIncrementalDecoderObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); return 0; } static void -mbidecoder_dealloc(MultibyteIncrementalDecoderObject *self) +mbidecoder_dealloc(PyObject *op) { + MultibyteIncrementalDecoderObject *self = _MultibyteIncrementalDecoderObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -1704,9 +1729,9 @@ mbstreamreader_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbstreamreader_traverse(MultibyteStreamReaderObject *self, - visitproc visit, void *arg) +mbstreamreader_traverse(PyObject *op, visitproc visit, void *arg) { + 
MultibyteStreamReaderObject *self = _MultibyteStreamReaderObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); Py_VISIT(self->stream); @@ -1714,8 +1739,9 @@ mbstreamreader_traverse(MultibyteStreamReaderObject *self, } static void -mbstreamreader_dealloc(MultibyteStreamReaderObject *self) +mbstreamreader_dealloc(PyObject *op) { + MultibyteStreamReaderObject *self = _MultibyteStreamReaderObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -1927,9 +1953,9 @@ mbstreamwriter_init(PyObject *self, PyObject *args, PyObject *kwds) } static int -mbstreamwriter_traverse(MultibyteStreamWriterObject *self, - visitproc visit, void *arg) +mbstreamwriter_traverse(PyObject *op, visitproc visit, void *arg) { + MultibyteStreamWriterObject *self = _MultibyteStreamWriterObject_CAST(op); if (ERROR_ISCUSTOM(self->errors)) Py_VISIT(self->errors); Py_VISIT(self->stream); @@ -1937,8 +1963,9 @@ mbstreamwriter_traverse(MultibyteStreamWriterObject *self, } static void -mbstreamwriter_dealloc(MultibyteStreamWriterObject *self) +mbstreamwriter_dealloc(PyObject *op) { + MultibyteStreamWriterObject *self = _MultibyteStreamWriterObject_CAST(op); PyTypeObject *tp = Py_TYPE(self); PyObject_GC_UnTrack(self); ERROR_DECREF(self->errors); @@ -2044,7 +2071,7 @@ _multibytecodec_clear(PyObject *mod) static void _multibytecodec_free(void *mod) { - _multibytecodec_clear((PyObject *)mod); + (void)_multibytecodec_clear((PyObject *)mod); } #define CREATE_TYPE(module, type, spec) \ diff --git a/Modules/clinic/_asynciomodule.c.h b/Modules/clinic/_asynciomodule.c.h index 3a37cdd9b5fa83..c6b7e39788be71 100644 --- a/Modules/clinic/_asynciomodule.c.h +++ b/Modules/clinic/_asynciomodule.c.h @@ -9,6 +9,31 @@ preserve #include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() +#if !defined(_asyncio_Future__asyncio_awaited_by_DOCSTR) +# define _asyncio_Future__asyncio_awaited_by_DOCSTR NULL +#endif +#if defined(_ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF) +# undef _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF +# define _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF {"_asyncio_awaited_by", (getter)_asyncio_Future__asyncio_awaited_by_get, (setter)_asyncio_Future__asyncio_awaited_by_set, _asyncio_Future__asyncio_awaited_by_DOCSTR}, +#else +# define _ASYNCIO_FUTURE__ASYNCIO_AWAITED_BY_GETSETDEF {"_asyncio_awaited_by", (getter)_asyncio_Future__asyncio_awaited_by_get, NULL, _asyncio_Future__asyncio_awaited_by_DOCSTR}, +#endif + +static PyObject * +_asyncio_Future__asyncio_awaited_by_get_impl(FutureObj *self); + +static PyObject * +_asyncio_Future__asyncio_awaited_by_get(PyObject *self, void *Py_UNUSED(context)) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _asyncio_Future__asyncio_awaited_by_get_impl((FutureObj *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + PyDoc_STRVAR(_asyncio_Future___init____doc__, "Future(*, loop=None)\n" "--\n" @@ -97,12 +122,12 @@ static PyObject * _asyncio_Future_result_impl(FutureObj *self); static PyObject * -_asyncio_Future_result(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_result(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_result_impl(self); + return_value = _asyncio_Future_result_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -126,7 +151,7 @@ static PyObject * 
_asyncio_Future_exception_impl(FutureObj *self, PyTypeObject *cls); static PyObject * -_asyncio_Future_exception(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_exception(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; @@ -135,7 +160,7 @@ _asyncio_Future_exception(FutureObj *self, PyTypeObject *cls, PyObject *const *a goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_exception_impl(self, cls); + return_value = _asyncio_Future_exception_impl((FutureObj *)self, cls); Py_END_CRITICAL_SECTION(); exit: @@ -159,7 +184,7 @@ _asyncio_Future_set_result_impl(FutureObj *self, PyTypeObject *cls, PyObject *result); static PyObject * -_asyncio_Future_set_result(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_set_result(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -185,7 +210,7 @@ _asyncio_Future_set_result(FutureObj *self, PyTypeObject *cls, PyObject *const * } result = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_set_result_impl(self, cls, result); + return_value = _asyncio_Future_set_result_impl((FutureObj *)self, cls, result); Py_END_CRITICAL_SECTION(); exit: @@ -209,7 +234,7 @@ _asyncio_Future_set_exception_impl(FutureObj *self, PyTypeObject *cls, PyObject *exception); static PyObject * -_asyncio_Future_set_exception(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_set_exception(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -235,7 +260,7 @@ _asyncio_Future_set_exception(FutureObj *self, PyTypeObject *cls, PyObject *cons } exception = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_set_exception_impl(self, cls, exception); + return_value = _asyncio_Future_set_exception_impl((FutureObj *)self, cls, exception); Py_END_CRITICAL_SECTION(); exit: @@ -260,7 +285,7 @@ _asyncio_Future_add_done_callback_impl(FutureObj *self, PyTypeObject *cls, PyObject *fn, PyObject *context); static PyObject * -_asyncio_Future_add_done_callback(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_add_done_callback(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -305,7 +330,7 @@ _asyncio_Future_add_done_callback(FutureObj *self, PyTypeObject *cls, PyObject * context = args[1]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_add_done_callback_impl(self, cls, fn, context); + return_value = _asyncio_Future_add_done_callback_impl((FutureObj *)self, cls, fn, context); Py_END_CRITICAL_SECTION(); exit: @@ -328,7 +353,7 @@ _asyncio_Future_remove_done_callback_impl(FutureObj *self, PyTypeObject *cls, PyObject *fn); static PyObject * -_asyncio_Future_remove_done_callback(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_remove_done_callback(PyObject *self, PyTypeObject *cls, PyObject *const 
*args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -354,7 +379,7 @@ _asyncio_Future_remove_done_callback(FutureObj *self, PyTypeObject *cls, PyObjec } fn = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_remove_done_callback_impl(self, cls, fn); + return_value = _asyncio_Future_remove_done_callback_impl((FutureObj *)self, cls, fn); Py_END_CRITICAL_SECTION(); exit: @@ -379,7 +404,7 @@ _asyncio_Future_cancel_impl(FutureObj *self, PyTypeObject *cls, PyObject *msg); static PyObject * -_asyncio_Future_cancel(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_cancel(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -422,7 +447,7 @@ _asyncio_Future_cancel(FutureObj *self, PyTypeObject *cls, PyObject *const *args msg = args[0]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_cancel_impl(self, cls, msg); + return_value = _asyncio_Future_cancel_impl((FutureObj *)self, cls, msg); Py_END_CRITICAL_SECTION(); exit: @@ -442,12 +467,12 @@ static PyObject * _asyncio_Future_cancelled_impl(FutureObj *self); static PyObject * -_asyncio_Future_cancelled(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_cancelled(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_cancelled_impl(self); + return_value = _asyncio_Future_cancelled_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -469,12 +494,12 @@ static PyObject * _asyncio_Future_done_impl(FutureObj *self); static PyObject * -_asyncio_Future_done(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future_done(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_done_impl(self); + return_value = _asyncio_Future_done_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -493,7 +518,7 @@ static PyObject * _asyncio_Future_get_loop_impl(FutureObj *self, PyTypeObject *cls); static PyObject * -_asyncio_Future_get_loop(FutureObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Future_get_loop(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; @@ -502,7 +527,7 @@ _asyncio_Future_get_loop(FutureObj *self, PyTypeObject *cls, PyObject *const *ar goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future_get_loop_impl(self, cls); + return_value = _asyncio_Future_get_loop_impl((FutureObj *)self, cls); Py_END_CRITICAL_SECTION(); exit: @@ -523,12 +548,12 @@ static PyObject * _asyncio_Future__asyncio_future_blocking_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__asyncio_future_blocking_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__asyncio_future_blocking_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__asyncio_future_blocking_get_impl(self); + return_value = _asyncio_Future__asyncio_future_blocking_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -549,12 +574,12 @@ 
_asyncio_Future__asyncio_future_blocking_set_impl(FutureObj *self, PyObject *value); static int -_asyncio_Future__asyncio_future_blocking_set(FutureObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Future__asyncio_future_blocking_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__asyncio_future_blocking_set_impl(self, value); + return_value = _asyncio_Future__asyncio_future_blocking_set_impl((FutureObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -574,12 +599,12 @@ static PyObject * _asyncio_Future__log_traceback_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__log_traceback_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__log_traceback_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__log_traceback_get_impl(self); + return_value = _asyncio_Future__log_traceback_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -599,12 +624,12 @@ static int _asyncio_Future__log_traceback_set_impl(FutureObj *self, PyObject *value); static int -_asyncio_Future__log_traceback_set(FutureObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Future__log_traceback_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__log_traceback_set_impl(self, value); + return_value = _asyncio_Future__log_traceback_set_impl((FutureObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -624,12 +649,12 @@ static PyObject * _asyncio_Future__loop_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__loop_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__loop_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__loop_get_impl(self); + return_value = _asyncio_Future__loop_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -649,12 +674,12 @@ static PyObject * _asyncio_Future__callbacks_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__callbacks_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__callbacks_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__callbacks_get_impl(self); + return_value = _asyncio_Future__callbacks_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -674,12 +699,12 @@ static PyObject * _asyncio_Future__result_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__result_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__result_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__result_get_impl(self); + return_value = _asyncio_Future__result_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -699,12 +724,12 @@ static PyObject * _asyncio_Future__exception_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__exception_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__exception_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__exception_get_impl(self); + 
return_value = _asyncio_Future__exception_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -724,12 +749,12 @@ static PyObject * _asyncio_Future__source_traceback_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__source_traceback_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__source_traceback_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__source_traceback_get_impl(self); + return_value = _asyncio_Future__source_traceback_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -749,12 +774,12 @@ static PyObject * _asyncio_Future__cancel_message_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__cancel_message_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__cancel_message_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__cancel_message_get_impl(self); + return_value = _asyncio_Future__cancel_message_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -774,12 +799,12 @@ static int _asyncio_Future__cancel_message_set_impl(FutureObj *self, PyObject *value); static int -_asyncio_Future__cancel_message_set(FutureObj *self, PyObject *value, void *Py_UNUSED(context)) +_asyncio_Future__cancel_message_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__cancel_message_set_impl(self, value); + return_value = _asyncio_Future__cancel_message_set_impl((FutureObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -799,12 +824,12 @@ static PyObject * _asyncio_Future__state_get_impl(FutureObj *self); static PyObject * -_asyncio_Future__state_get(FutureObj *self, void *Py_UNUSED(context)) +_asyncio_Future__state_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__state_get_impl(self); + return_value = _asyncio_Future__state_get_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -826,12 +851,12 @@ static PyObject * _asyncio_Future__make_cancelled_error_impl(FutureObj *self); static PyObject * -_asyncio_Future__make_cancelled_error(FutureObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Future__make_cancelled_error(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Future__make_cancelled_error_impl(self); + return_value = _asyncio_Future__make_cancelled_error_impl((FutureObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -939,12 +964,12 @@ static PyObject * _asyncio_Task__log_destroy_pending_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__log_destroy_pending_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__log_destroy_pending_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__log_destroy_pending_get_impl(self); + return_value = _asyncio_Task__log_destroy_pending_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -964,12 +989,12 @@ static int _asyncio_Task__log_destroy_pending_set_impl(TaskObj *self, PyObject *value); static int -_asyncio_Task__log_destroy_pending_set(TaskObj *self, PyObject *value, void 
*Py_UNUSED(context)) +_asyncio_Task__log_destroy_pending_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__log_destroy_pending_set_impl(self, value); + return_value = _asyncio_Task__log_destroy_pending_set_impl((TaskObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -989,12 +1014,12 @@ static PyObject * _asyncio_Task__must_cancel_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__must_cancel_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__must_cancel_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__must_cancel_get_impl(self); + return_value = _asyncio_Task__must_cancel_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1014,12 +1039,12 @@ static PyObject * _asyncio_Task__coro_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__coro_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__coro_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__coro_get_impl(self); + return_value = _asyncio_Task__coro_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1039,12 +1064,12 @@ static PyObject * _asyncio_Task__fut_waiter_get_impl(TaskObj *self); static PyObject * -_asyncio_Task__fut_waiter_get(TaskObj *self, void *Py_UNUSED(context)) +_asyncio_Task__fut_waiter_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__fut_waiter_get_impl(self); + return_value = _asyncio_Task__fut_waiter_get_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1066,12 +1091,12 @@ static PyObject * _asyncio_Task__make_cancelled_error_impl(TaskObj *self); static PyObject * -_asyncio_Task__make_cancelled_error(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task__make_cancelled_error(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task__make_cancelled_error_impl(self); + return_value = _asyncio_Task__make_cancelled_error_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1109,7 +1134,7 @@ static PyObject * _asyncio_Task_cancel_impl(TaskObj *self, PyObject *msg); static PyObject * -_asyncio_Task_cancel(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_cancel(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1152,7 +1177,7 @@ _asyncio_Task_cancel(TaskObj *self, PyObject *const *args, Py_ssize_t nargs, PyO msg = args[0]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_cancel_impl(self, msg); + return_value = _asyncio_Task_cancel_impl((TaskObj *)self, msg); Py_END_CRITICAL_SECTION(); exit: @@ -1175,12 +1200,12 @@ static PyObject * _asyncio_Task_cancelling_impl(TaskObj *self); static PyObject * -_asyncio_Task_cancelling(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_cancelling(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_cancelling_impl(self); + return_value = _asyncio_Task_cancelling_impl((TaskObj *)self); 
Py_END_CRITICAL_SECTION(); return return_value; @@ -1204,12 +1229,12 @@ static PyObject * _asyncio_Task_uncancel_impl(TaskObj *self); static PyObject * -_asyncio_Task_uncancel(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_uncancel(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_uncancel_impl(self); + return_value = _asyncio_Task_uncancel_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1247,7 +1272,7 @@ _asyncio_Task_get_stack_impl(TaskObj *self, PyTypeObject *cls, PyObject *limit); static PyObject * -_asyncio_Task_get_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_get_stack(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1289,7 +1314,7 @@ _asyncio_Task_get_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, } limit = args[0]; skip_optional_kwonly: - return_value = _asyncio_Task_get_stack_impl(self, cls, limit); + return_value = _asyncio_Task_get_stack_impl((TaskObj *)self, cls, limit); exit: return return_value; @@ -1315,7 +1340,7 @@ _asyncio_Task_print_stack_impl(TaskObj *self, PyTypeObject *cls, PyObject *limit, PyObject *file); static PyObject * -_asyncio_Task_print_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_asyncio_Task_print_stack(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1364,7 +1389,7 @@ _asyncio_Task_print_stack(TaskObj *self, PyTypeObject *cls, PyObject *const *arg } file = args[1]; skip_optional_kwonly: - return_value = _asyncio_Task_print_stack_impl(self, cls, limit, file); + return_value = _asyncio_Task_print_stack_impl((TaskObj *)self, cls, limit, file); exit: return return_value; @@ -1398,12 +1423,12 @@ static PyObject * _asyncio_Task_get_coro_impl(TaskObj *self); static PyObject * -_asyncio_Task_get_coro(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_get_coro(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_get_coro_impl(self); + return_value = _asyncio_Task_get_coro_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1421,9 +1446,9 @@ static PyObject * _asyncio_Task_get_context_impl(TaskObj *self); static PyObject * -_asyncio_Task_get_context(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_get_context(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _asyncio_Task_get_context_impl(self); + return _asyncio_Task_get_context_impl((TaskObj *)self); } PyDoc_STRVAR(_asyncio_Task_get_name__doc__, @@ -1438,12 +1463,12 @@ static PyObject * _asyncio_Task_get_name_impl(TaskObj *self); static PyObject * -_asyncio_Task_get_name(TaskObj *self, PyObject *Py_UNUSED(ignored)) +_asyncio_Task_get_name(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_get_name_impl(self); + return_value = _asyncio_Task_get_name_impl((TaskObj *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1466,7 +1491,7 @@ _asyncio_Task_set_name(TaskObj *self, PyObject *value) PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION(self); - return_value = _asyncio_Task_set_name_impl(self, value); + return_value = _asyncio_Task_set_name_impl((TaskObj *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -2088,4 +2113,65 @@ _asyncio_all_tasks(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py exit: return return_value; } -/*[clinic end generated code: output=408e156476ced07f input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_asyncio_future_add_to_awaited_by__doc__, +"future_add_to_awaited_by($module, fut, waiter, /)\n" +"--\n" +"\n" +"Record that `fut` is awaited on by `waiter`."); + +#define _ASYNCIO_FUTURE_ADD_TO_AWAITED_BY_METHODDEF \ + {"future_add_to_awaited_by", _PyCFunction_CAST(_asyncio_future_add_to_awaited_by), METH_FASTCALL, _asyncio_future_add_to_awaited_by__doc__}, + +static PyObject * +_asyncio_future_add_to_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter); + +static PyObject * +_asyncio_future_add_to_awaited_by(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *fut; + PyObject *waiter; + + if (!_PyArg_CheckPositional("future_add_to_awaited_by", nargs, 2, 2)) { + goto exit; + } + fut = args[0]; + waiter = args[1]; + return_value = _asyncio_future_add_to_awaited_by_impl(module, fut, waiter); + +exit: + return return_value; +} + +PyDoc_STRVAR(_asyncio_future_discard_from_awaited_by__doc__, +"future_discard_from_awaited_by($module, fut, waiter, /)\n" +"--\n" +"\n"); + +#define _ASYNCIO_FUTURE_DISCARD_FROM_AWAITED_BY_METHODDEF \ + {"future_discard_from_awaited_by", _PyCFunction_CAST(_asyncio_future_discard_from_awaited_by), METH_FASTCALL, _asyncio_future_discard_from_awaited_by__doc__}, + +static PyObject * +_asyncio_future_discard_from_awaited_by_impl(PyObject *module, PyObject *fut, + PyObject *waiter); + +static PyObject * +_asyncio_future_discard_from_awaited_by(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + PyObject *fut; + PyObject *waiter; + + if (!_PyArg_CheckPositional("future_discard_from_awaited_by", nargs, 2, 2)) { + goto exit; + } + fut = args[0]; + waiter = args[1]; + return_value = _asyncio_future_discard_from_awaited_by_impl(module, fut, waiter); + +exit: + return return_value; +} +/*[clinic end generated code: output=fe4ffe08404ad566 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h index 93988bf48a1b00..a599bd1a8be96a 100644 --- a/Modules/clinic/_bz2module.c.h +++ b/Modules/clinic/_bz2module.c.h @@ -27,7 +27,7 @@ static PyObject * _bz2_BZ2Compressor_compress_impl(BZ2Compressor *self, Py_buffer *data); static PyObject * -_bz2_BZ2Compressor_compress(BZ2Compressor *self, PyObject *arg) +_bz2_BZ2Compressor_compress(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer data = {NULL, NULL}; @@ -35,7 +35,7 @@ _bz2_BZ2Compressor_compress(BZ2Compressor *self, PyObject *arg) if (PyObject_GetBuffer(arg, &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _bz2_BZ2Compressor_compress_impl(self, &data); + return_value = _bz2_BZ2Compressor_compress_impl((BZ2Compressor *)self, &data); exit: /* Cleanup for data */ @@ -63,9 +63,9 @@ static PyObject * _bz2_BZ2Compressor_flush_impl(BZ2Compressor *self); static PyObject * -_bz2_BZ2Compressor_flush(BZ2Compressor *self, PyObject *Py_UNUSED(ignored)) +_bz2_BZ2Compressor_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _bz2_BZ2Compressor_flush_impl(self); + return _bz2_BZ2Compressor_flush_impl((BZ2Compressor 
*)self); } PyDoc_STRVAR(_bz2_BZ2Compressor__doc__, @@ -137,7 +137,7 @@ _bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data, Py_ssize_t max_length); static PyObject * -_bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_bz2_BZ2Decompressor_decompress(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -194,7 +194,7 @@ _bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py max_length = ival; } skip_optional_pos: - return_value = _bz2_BZ2Decompressor_decompress_impl(self, &data, max_length); + return_value = _bz2_BZ2Decompressor_decompress_impl((BZ2Decompressor *)self, &data, max_length); exit: /* Cleanup for data */ @@ -235,4 +235,4 @@ _bz2_BZ2Decompressor(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=701a383434374c36 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=0fc5a6292c5fd2c5 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_collectionsmodule.c.h b/Modules/clinic/_collectionsmodule.c.h index b4e3325e89502b..ddf18c2c77a8cd 100644 --- a/Modules/clinic/_collectionsmodule.c.h +++ b/Modules/clinic/_collectionsmodule.c.h @@ -23,12 +23,12 @@ static PyObject * deque_pop_impl(dequeobject *deque); static PyObject * -deque_pop(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_pop(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_pop_impl(deque); + return_value = deque_pop_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -47,12 +47,12 @@ static PyObject * deque_popleft_impl(dequeobject *deque); static PyObject * -deque_popleft(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_popleft(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_popleft_impl(deque); + return_value = deque_popleft_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -76,7 +76,7 @@ deque_append(dequeobject *deque, PyObject *item) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_append_impl(deque, item); + return_value = deque_append_impl((dequeobject *)deque, item); Py_END_CRITICAL_SECTION(); return return_value; @@ -100,7 +100,7 @@ deque_appendleft(dequeobject *deque, PyObject *item) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_appendleft_impl(deque, item); + return_value = deque_appendleft_impl((dequeobject *)deque, item); Py_END_CRITICAL_SECTION(); return return_value; @@ -124,7 +124,7 @@ deque_extend(dequeobject *deque, PyObject *iterable) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_extend_impl(deque, iterable); + return_value = deque_extend_impl((dequeobject *)deque, iterable); Py_END_CRITICAL_SECTION(); return return_value; @@ -148,7 +148,7 @@ deque_extendleft(dequeobject *deque, PyObject *iterable) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_extendleft_impl(deque, iterable); + return_value = deque_extendleft_impl((dequeobject *)deque, iterable); Py_END_CRITICAL_SECTION(); return return_value; @@ -167,12 +167,12 @@ static PyObject * deque_copy_impl(dequeobject *deque); static PyObject * 
-deque_copy(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_copy(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_copy_impl(deque); + return_value = deque_copy_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -191,12 +191,12 @@ static PyObject * deque___copy___impl(dequeobject *deque); static PyObject * -deque___copy__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___copy__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque___copy___impl(deque); + return_value = deque___copy___impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -215,12 +215,12 @@ static PyObject * deque_clearmethod_impl(dequeobject *deque); static PyObject * -deque_clearmethod(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_clearmethod(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_clearmethod_impl(deque); + return_value = deque_clearmethod_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -239,7 +239,7 @@ static PyObject * deque_rotate_impl(dequeobject *deque, Py_ssize_t n); static PyObject * -deque_rotate(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) +deque_rotate(PyObject *deque, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t n = 1; @@ -264,7 +264,7 @@ deque_rotate(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_rotate_impl(deque, n); + return_value = deque_rotate_impl((dequeobject *)deque, n); Py_END_CRITICAL_SECTION(); exit: @@ -284,12 +284,12 @@ static PyObject * deque_reverse_impl(dequeobject *deque); static PyObject * -deque_reverse(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque_reverse(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_reverse_impl(deque); + return_value = deque_reverse_impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -313,7 +313,7 @@ deque_count(dequeobject *deque, PyObject *v) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_count_impl(deque, v); + return_value = deque_count_impl((dequeobject *)deque, v); Py_END_CRITICAL_SECTION(); return return_value; @@ -335,7 +335,7 @@ deque_index_impl(dequeobject *deque, PyObject *v, Py_ssize_t start, Py_ssize_t stop); static PyObject * -deque_index(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) +deque_index(PyObject *deque, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *v; @@ -360,7 +360,7 @@ deque_index(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_index_impl(deque, v, start, stop); + return_value = deque_index_impl((dequeobject *)deque, v, start, stop); Py_END_CRITICAL_SECTION(); exit: @@ -380,7 +380,7 @@ static PyObject * deque_insert_impl(dequeobject *deque, Py_ssize_t index, PyObject *value); static PyObject * -deque_insert(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) +deque_insert(PyObject *deque, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -403,7 +403,7 @@ 
deque_insert(dequeobject *deque, PyObject *const *args, Py_ssize_t nargs) } value = args[1]; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_insert_impl(deque, index, value); + return_value = deque_insert_impl((dequeobject *)deque, index, value); Py_END_CRITICAL_SECTION(); exit: @@ -428,7 +428,7 @@ deque_remove(dequeobject *deque, PyObject *value) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque_remove_impl(deque, value); + return_value = deque_remove_impl((dequeobject *)deque, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -447,9 +447,9 @@ static PyObject * deque___reduce___impl(dequeobject *deque); static PyObject * -deque___reduce__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___reduce__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { - return deque___reduce___impl(deque); + return deque___reduce___impl((dequeobject *)deque); } PyDoc_STRVAR(deque_init__doc__, @@ -534,12 +534,12 @@ static PyObject * deque___sizeof___impl(dequeobject *deque); static PyObject * -deque___sizeof__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___sizeof__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(deque); - return_value = deque___sizeof___impl(deque); + return_value = deque___sizeof___impl((dequeobject *)deque); Py_END_CRITICAL_SECTION(); return return_value; @@ -558,9 +558,9 @@ static PyObject * deque___reversed___impl(dequeobject *deque); static PyObject * -deque___reversed__(dequeobject *deque, PyObject *Py_UNUSED(ignored)) +deque___reversed__(PyObject *deque, PyObject *Py_UNUSED(ignored)) { - return deque___reversed___impl(deque); + return deque___reversed___impl((dequeobject *)deque); } PyDoc_STRVAR(_collections__count_elements__doc__, @@ -630,4 +630,4 @@ tuplegetter_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=65f896fb13902f6d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2d89c39288fc7389 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_curses_panel.c.h b/Modules/clinic/_curses_panel.c.h index b6bff5274a3a91..6f4966825ec4bf 100644 --- a/Modules/clinic/_curses_panel.c.h +++ b/Modules/clinic/_curses_panel.c.h @@ -20,13 +20,13 @@ static PyObject * _curses_panel_panel_bottom_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_bottom(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_bottom(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "bottom() takes no arguments"); return NULL; } - return _curses_panel_panel_bottom_impl(self, cls); + return _curses_panel_panel_bottom_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_hide__doc__, @@ -44,13 +44,13 @@ static PyObject * _curses_panel_panel_hide_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_hide(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_hide(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "hide() takes no arguments"); return NULL; } - return _curses_panel_panel_hide_impl(self, 
cls); + return _curses_panel_panel_hide_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_show__doc__, @@ -66,13 +66,13 @@ static PyObject * _curses_panel_panel_show_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_show(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_show(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "show() takes no arguments"); return NULL; } - return _curses_panel_panel_show_impl(self, cls); + return _curses_panel_panel_show_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_top__doc__, @@ -88,13 +88,13 @@ static PyObject * _curses_panel_panel_top_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_top(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_top(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "top() takes no arguments"); return NULL; } - return _curses_panel_panel_top_impl(self, cls); + return _curses_panel_panel_top_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_panel_above__doc__, @@ -110,9 +110,9 @@ static PyObject * _curses_panel_panel_above_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_above(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_above(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_above_impl(self); + return _curses_panel_panel_above_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_below__doc__, @@ -128,9 +128,9 @@ static PyObject * _curses_panel_panel_below_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_below(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_below(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_below_impl(self); + return _curses_panel_panel_below_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_hidden__doc__, @@ -146,9 +146,9 @@ static PyObject * _curses_panel_panel_hidden_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_hidden(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_hidden(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_hidden_impl(self); + return _curses_panel_panel_hidden_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_move__doc__, @@ -165,7 +165,7 @@ _curses_panel_panel_move_impl(PyCursesPanelObject *self, PyTypeObject *cls, int y, int x); static PyObject * -_curses_panel_panel_move(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_move(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -198,7 +198,7 @@ _curses_panel_panel_move(PyCursesPanelObject *self, PyTypeObject *cls, PyObject if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_panel_panel_move_impl(self, cls, y, x); + return_value = 
_curses_panel_panel_move_impl((PyCursesPanelObject *)self, cls, y, x); exit: return return_value; @@ -217,9 +217,9 @@ static PyObject * _curses_panel_panel_window_impl(PyCursesPanelObject *self); static PyObject * -_curses_panel_panel_window(PyCursesPanelObject *self, PyObject *Py_UNUSED(ignored)) +_curses_panel_panel_window(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_panel_panel_window_impl(self); + return _curses_panel_panel_window_impl((PyCursesPanelObject *)self); } PyDoc_STRVAR(_curses_panel_panel_replace__doc__, @@ -237,7 +237,7 @@ _curses_panel_panel_replace_impl(PyCursesPanelObject *self, PyCursesWindowObject *win); static PyObject * -_curses_panel_panel_replace(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_replace(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -266,7 +266,7 @@ _curses_panel_panel_replace(PyCursesPanelObject *self, PyTypeObject *cls, PyObje goto exit; } win = (PyCursesWindowObject *)args[0]; - return_value = _curses_panel_panel_replace_impl(self, cls, win); + return_value = _curses_panel_panel_replace_impl((PyCursesPanelObject *)self, cls, win); exit: return return_value; @@ -286,7 +286,7 @@ _curses_panel_panel_set_userptr_impl(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *obj); static PyObject * -_curses_panel_panel_set_userptr(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_set_userptr(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -311,7 +311,7 @@ _curses_panel_panel_set_userptr(PyCursesPanelObject *self, PyTypeObject *cls, Py goto exit; } obj = args[0]; - return_value = _curses_panel_panel_set_userptr_impl(self, cls, obj); + return_value = _curses_panel_panel_set_userptr_impl((PyCursesPanelObject *)self, cls, obj); exit: return return_value; @@ -331,13 +331,13 @@ _curses_panel_panel_userptr_impl(PyCursesPanelObject *self, PyTypeObject *cls); static PyObject * -_curses_panel_panel_userptr(PyCursesPanelObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_curses_panel_panel_userptr(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "userptr() takes no arguments"); return NULL; } - return _curses_panel_panel_userptr_impl(self, cls); + return _curses_panel_panel_userptr_impl((PyCursesPanelObject *)self, cls); } PyDoc_STRVAR(_curses_panel_bottom_panel__doc__, @@ -424,4 +424,4 @@ _curses_panel_update_panels(PyObject *module, PyObject *Py_UNUSED(ignored)) { return _curses_panel_update_panels_impl(module); } -/*[clinic end generated code: output=298e49d54c0b14a0 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=36853ecb4a979814 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h index 524a114aba98bc..8291d5d635c79d 100644 --- a/Modules/clinic/_cursesmodule.c.h +++ b/Modules/clinic/_cursesmodule.c.h @@ -35,7 +35,7 @@ _curses_window_addch_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_addch(PyCursesWindowObject 
*self, PyObject *args) +_curses_window_addch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -74,7 +74,7 @@ _curses_window_addch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.addch requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_addch_impl(self, group_left_1, y, x, ch, group_right_1, attr); + return_value = _curses_window_addch_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, group_right_1, attr); exit: return return_value; @@ -107,7 +107,7 @@ _curses_window_addstr_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_addstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_addstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -146,7 +146,7 @@ _curses_window_addstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.addstr requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_addstr_impl(self, group_left_1, y, x, str, group_right_1, attr); + return_value = _curses_window_addstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, group_right_1, attr); exit: return return_value; @@ -181,7 +181,7 @@ _curses_window_addnstr_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_addnstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_addnstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -221,7 +221,7 @@ _curses_window_addnstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.addnstr requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_addnstr_impl(self, group_left_1, y, x, str, n, group_right_1, attr); + return_value = _curses_window_addnstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, n, group_right_1, attr); exit: return return_value; @@ -245,7 +245,7 @@ static PyObject * _curses_window_bkgd_impl(PyCursesWindowObject *self, PyObject *ch, long attr); static PyObject * -_curses_window_bkgd(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_bkgd(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ch; @@ -263,7 +263,7 @@ _curses_window_bkgd(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_ goto exit; } skip_optional: - return_value = _curses_window_bkgd_impl(self, ch, attr); + return_value = _curses_window_bkgd_impl((PyCursesWindowObject *)self, ch, attr); exit: return return_value; @@ -282,7 +282,7 @@ static PyObject * _curses_window_attroff_impl(PyCursesWindowObject *self, long attr); static PyObject * -_curses_window_attroff(PyCursesWindowObject *self, PyObject *arg) +_curses_window_attroff(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; long attr; @@ -291,7 +291,7 @@ _curses_window_attroff(PyCursesWindowObject *self, PyObject *arg) if (attr == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_attroff_impl(self, attr); + return_value = _curses_window_attroff_impl((PyCursesWindowObject *)self, attr); exit: return return_value; @@ -310,7 +310,7 @@ static PyObject * _curses_window_attron_impl(PyCursesWindowObject *self, long attr); static PyObject * -_curses_window_attron(PyCursesWindowObject *self, PyObject *arg) +_curses_window_attron(PyObject *self, PyObject *arg) { PyObject *return_value = 
NULL; long attr; @@ -319,7 +319,7 @@ _curses_window_attron(PyCursesWindowObject *self, PyObject *arg) if (attr == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_attron_impl(self, attr); + return_value = _curses_window_attron_impl((PyCursesWindowObject *)self, attr); exit: return return_value; @@ -338,7 +338,7 @@ static PyObject * _curses_window_attrset_impl(PyCursesWindowObject *self, long attr); static PyObject * -_curses_window_attrset(PyCursesWindowObject *self, PyObject *arg) +_curses_window_attrset(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; long attr; @@ -347,7 +347,7 @@ _curses_window_attrset(PyCursesWindowObject *self, PyObject *arg) if (attr == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_attrset_impl(self, attr); + return_value = _curses_window_attrset_impl((PyCursesWindowObject *)self, attr); exit: return return_value; @@ -372,7 +372,7 @@ _curses_window_bkgdset_impl(PyCursesWindowObject *self, PyObject *ch, long attr); static PyObject * -_curses_window_bkgdset(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_bkgdset(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ch; @@ -390,7 +390,7 @@ _curses_window_bkgdset(PyCursesWindowObject *self, PyObject *const *args, Py_ssi goto exit; } skip_optional: - return_value = _curses_window_bkgdset_impl(self, ch, attr); + return_value = _curses_window_bkgdset_impl((PyCursesWindowObject *)self, ch, attr); exit: return return_value; @@ -437,7 +437,7 @@ _curses_window_border_impl(PyCursesWindowObject *self, PyObject *ls, PyObject *br); static PyObject * -_curses_window_border(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_border(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ls = NULL; @@ -485,7 +485,7 @@ _curses_window_border(PyCursesWindowObject *self, PyObject *const *args, Py_ssiz } br = args[7]; skip_optional: - return_value = _curses_window_border_impl(self, ls, rs, ts, bs, tl, tr, bl, br); + return_value = _curses_window_border_impl((PyCursesWindowObject *)self, ls, rs, ts, bs, tl, tr, bl, br); exit: return return_value; @@ -511,7 +511,7 @@ _curses_window_box_impl(PyCursesWindowObject *self, int group_right_1, PyObject *verch, PyObject *horch); static PyObject * -_curses_window_box(PyCursesWindowObject *self, PyObject *args) +_curses_window_box(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -531,7 +531,7 @@ _curses_window_box(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.box requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_box_impl(self, group_right_1, verch, horch); + return_value = _curses_window_box_impl((PyCursesWindowObject *)self, group_right_1, verch, horch); exit: return return_value; @@ -554,7 +554,7 @@ _curses_window_delch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_delch(PyCursesWindowObject *self, PyObject *args) +_curses_window_delch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -574,7 +574,7 @@ _curses_window_delch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.delch requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_delch_impl(self, group_right_1, y, x); + return_value = 
_curses_window_delch_impl((PyCursesWindowObject *)self, group_right_1, y, x); exit: return return_value; @@ -605,7 +605,7 @@ _curses_window_derwin_impl(PyCursesWindowObject *self, int group_left_1, int nlines, int ncols, int begin_y, int begin_x); static PyObject * -_curses_window_derwin(PyCursesWindowObject *self, PyObject *args) +_curses_window_derwin(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -630,7 +630,7 @@ _curses_window_derwin(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.derwin requires 2 to 4 arguments"); goto exit; } - return_value = _curses_window_derwin_impl(self, group_left_1, nlines, ncols, begin_y, begin_x); + return_value = _curses_window_derwin_impl((PyCursesWindowObject *)self, group_left_1, nlines, ncols, begin_y, begin_x); exit: return return_value; @@ -655,7 +655,7 @@ _curses_window_echochar_impl(PyCursesWindowObject *self, PyObject *ch, long attr); static PyObject * -_curses_window_echochar(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_echochar(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *ch; @@ -673,7 +673,7 @@ _curses_window_echochar(PyCursesWindowObject *self, PyObject *const *args, Py_ss goto exit; } skip_optional: - return_value = _curses_window_echochar_impl(self, ch, attr); + return_value = _curses_window_echochar_impl((PyCursesWindowObject *)self, ch, attr); exit: return return_value; @@ -699,7 +699,7 @@ static PyObject * _curses_window_enclose_impl(PyCursesWindowObject *self, int y, int x); static PyObject * -_curses_window_enclose(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_enclose(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int y; @@ -716,7 +716,7 @@ _curses_window_enclose(PyCursesWindowObject *self, PyObject *const *args, Py_ssi if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_enclose_impl(self, y, x); + return_value = _curses_window_enclose_impl((PyCursesWindowObject *)self, y, x); exit: return return_value; @@ -737,12 +737,12 @@ static long _curses_window_getbkgd_impl(PyCursesWindowObject *self); static PyObject * -_curses_window_getbkgd(PyCursesWindowObject *self, PyObject *Py_UNUSED(ignored)) +_curses_window_getbkgd(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; long _return_value; - _return_value = _curses_window_getbkgd_impl(self); + _return_value = _curses_window_getbkgd_impl((PyCursesWindowObject *)self); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -773,7 +773,7 @@ _curses_window_getch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_getch(PyCursesWindowObject *self, PyObject *args) +_curses_window_getch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -794,7 +794,7 @@ _curses_window_getch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.getch requires 0 to 2 arguments"); goto exit; } - _return_value = _curses_window_getch_impl(self, group_right_1, y, x); + _return_value = _curses_window_getch_impl((PyCursesWindowObject *)self, group_right_1, y, x); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -825,7 +825,7 @@ _curses_window_getkey_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * 
-_curses_window_getkey(PyCursesWindowObject *self, PyObject *args) +_curses_window_getkey(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -845,7 +845,7 @@ _curses_window_getkey(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.getkey requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_getkey_impl(self, group_right_1, y, x); + return_value = _curses_window_getkey_impl((PyCursesWindowObject *)self, group_right_1, y, x); exit: return return_value; @@ -873,7 +873,7 @@ _curses_window_get_wch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_get_wch(PyCursesWindowObject *self, PyObject *args) +_curses_window_get_wch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -893,7 +893,7 @@ _curses_window_get_wch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.get_wch requires 0 to 2 arguments"); goto exit; } - return_value = _curses_window_get_wch_impl(self, group_right_1, y, x); + return_value = _curses_window_get_wch_impl((PyCursesWindowObject *)self, group_right_1, y, x); exit: return return_value; @@ -925,7 +925,7 @@ _curses_window_hline_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_hline(PyCursesWindowObject *self, PyObject *args) +_curses_window_hline(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -965,7 +965,7 @@ _curses_window_hline(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.hline requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_hline_impl(self, group_left_1, y, x, ch, n, group_right_1, attr); + return_value = _curses_window_hline_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, n, group_right_1, attr); exit: return return_value; @@ -996,7 +996,7 @@ _curses_window_insch_impl(PyCursesWindowObject *self, int group_left_1, long attr); static PyObject * -_curses_window_insch(PyCursesWindowObject *self, PyObject *args) +_curses_window_insch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1035,7 +1035,7 @@ _curses_window_insch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.insch requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_insch_impl(self, group_left_1, y, x, ch, group_right_1, attr); + return_value = _curses_window_insch_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, group_right_1, attr); exit: return return_value; @@ -1060,7 +1060,7 @@ _curses_window_inch_impl(PyCursesWindowObject *self, int group_right_1, int y, int x); static PyObject * -_curses_window_inch(PyCursesWindowObject *self, PyObject *args) +_curses_window_inch(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1081,7 +1081,7 @@ _curses_window_inch(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.inch requires 0 to 2 arguments"); goto exit; } - _return_value = _curses_window_inch_impl(self, group_right_1, y, x); + _return_value = _curses_window_inch_impl((PyCursesWindowObject *)self, group_right_1, y, x); if ((_return_value == (unsigned long)-1) && PyErr_Occurred()) { goto exit; } @@ -1119,7 +1119,7 @@ _curses_window_insstr_impl(PyCursesWindowObject *self, int group_left_1, long attr); static 
PyObject * -_curses_window_insstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_insstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1158,7 +1158,7 @@ _curses_window_insstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.insstr requires 1 to 4 arguments"); goto exit; } - return_value = _curses_window_insstr_impl(self, group_left_1, y, x, str, group_right_1, attr); + return_value = _curses_window_insstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, group_right_1, attr); exit: return return_value; @@ -1195,7 +1195,7 @@ _curses_window_insnstr_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_insnstr(PyCursesWindowObject *self, PyObject *args) +_curses_window_insnstr(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1235,7 +1235,7 @@ _curses_window_insnstr(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.insnstr requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_insnstr_impl(self, group_left_1, y, x, str, n, group_right_1, attr); + return_value = _curses_window_insnstr_impl((PyCursesWindowObject *)self, group_left_1, y, x, str, n, group_right_1, attr); exit: return return_value; @@ -1259,7 +1259,7 @@ static PyObject * _curses_window_is_linetouched_impl(PyCursesWindowObject *self, int line); static PyObject * -_curses_window_is_linetouched(PyCursesWindowObject *self, PyObject *arg) +_curses_window_is_linetouched(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int line; @@ -1268,7 +1268,7 @@ _curses_window_is_linetouched(PyCursesWindowObject *self, PyObject *arg) if (line == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_is_linetouched_impl(self, line); + return_value = _curses_window_is_linetouched_impl((PyCursesWindowObject *)self, line); exit: return return_value; @@ -1294,7 +1294,7 @@ _curses_window_noutrefresh_impl(PyCursesWindowObject *self, int smaxcol); static PyObject * -_curses_window_noutrefresh(PyCursesWindowObject *self, PyObject *args) +_curses_window_noutrefresh(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1318,7 +1318,7 @@ _curses_window_noutrefresh(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.noutrefresh requires 0 to 6 arguments"); goto exit; } - return_value = _curses_window_noutrefresh_impl(self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); + return_value = _curses_window_noutrefresh_impl((PyCursesWindowObject *)self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); exit: return return_value; @@ -1345,9 +1345,9 @@ static PyObject * _curses_window_noutrefresh_impl(PyCursesWindowObject *self); static PyObject * -_curses_window_noutrefresh(PyCursesWindowObject *self, PyObject *Py_UNUSED(ignored)) +_curses_window_noutrefresh(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _curses_window_noutrefresh_impl(self); + return _curses_window_noutrefresh_impl((PyCursesWindowObject *)self); } #endif /* !defined(py_is_pad) */ @@ -1375,7 +1375,7 @@ _curses_window_overlay_impl(PyCursesWindowObject *self, int dmincol, int dmaxrow, int dmaxcol); static PyObject * -_curses_window_overlay(PyCursesWindowObject *self, PyObject *args) +_curses_window_overlay(PyObject *self, PyObject *args) { PyObject *return_value = NULL; 
PyCursesWindowObject *destwin; @@ -1403,7 +1403,7 @@ _curses_window_overlay(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.overlay requires 1 to 7 arguments"); goto exit; } - return_value = _curses_window_overlay_impl(self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); + return_value = _curses_window_overlay_impl((PyCursesWindowObject *)self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); exit: return return_value; @@ -1434,7 +1434,7 @@ _curses_window_overwrite_impl(PyCursesWindowObject *self, int dmaxcol); static PyObject * -_curses_window_overwrite(PyCursesWindowObject *self, PyObject *args) +_curses_window_overwrite(PyObject *self, PyObject *args) { PyObject *return_value = NULL; PyCursesWindowObject *destwin; @@ -1462,7 +1462,7 @@ _curses_window_overwrite(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.overwrite requires 1 to 7 arguments"); goto exit; } - return_value = _curses_window_overwrite_impl(self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); + return_value = _curses_window_overwrite_impl((PyCursesWindowObject *)self, destwin, group_right_1, sminrow, smincol, dminrow, dmincol, dmaxrow, dmaxcol); exit: return return_value; @@ -1499,7 +1499,7 @@ static PyObject * _curses_window_redrawln_impl(PyCursesWindowObject *self, int beg, int num); static PyObject * -_curses_window_redrawln(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_redrawln(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int beg; @@ -1516,7 +1516,7 @@ _curses_window_redrawln(PyCursesWindowObject *self, PyObject *const *args, Py_ss if (num == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_redrawln_impl(self, beg, num); + return_value = _curses_window_redrawln_impl((PyCursesWindowObject *)self, beg, num); exit: return return_value; @@ -1547,7 +1547,7 @@ _curses_window_refresh_impl(PyCursesWindowObject *self, int group_right_1, int smincol, int smaxrow, int smaxcol); static PyObject * -_curses_window_refresh(PyCursesWindowObject *self, PyObject *args) +_curses_window_refresh(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1571,7 +1571,7 @@ _curses_window_refresh(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.refresh requires 0 to 6 arguments"); goto exit; } - return_value = _curses_window_refresh_impl(self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); + return_value = _curses_window_refresh_impl((PyCursesWindowObject *)self, group_right_1, pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol); exit: return return_value; @@ -1598,7 +1598,7 @@ _curses_window_setscrreg_impl(PyCursesWindowObject *self, int top, int bottom); static PyObject * -_curses_window_setscrreg(PyCursesWindowObject *self, PyObject *const *args, Py_ssize_t nargs) +_curses_window_setscrreg(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int top; @@ -1615,7 +1615,7 @@ _curses_window_setscrreg(PyCursesWindowObject *self, PyObject *const *args, Py_s if (bottom == -1 && PyErr_Occurred()) { goto exit; } - return_value = _curses_window_setscrreg_impl(self, top, bottom); + return_value = _curses_window_setscrreg_impl((PyCursesWindowObject *)self, top, bottom); exit: return return_value; @@ -1645,7 +1645,7 @@ 
_curses_window_subwin_impl(PyCursesWindowObject *self, int group_left_1, int nlines, int ncols, int begin_y, int begin_x); static PyObject * -_curses_window_subwin(PyCursesWindowObject *self, PyObject *args) +_curses_window_subwin(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1670,7 +1670,7 @@ _curses_window_subwin(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.subwin requires 2 to 4 arguments"); goto exit; } - return_value = _curses_window_subwin_impl(self, group_left_1, nlines, ncols, begin_y, begin_x); + return_value = _curses_window_subwin_impl((PyCursesWindowObject *)self, group_left_1, nlines, ncols, begin_y, begin_x); exit: return return_value; @@ -1693,7 +1693,7 @@ _curses_window_scroll_impl(PyCursesWindowObject *self, int group_right_1, int lines); static PyObject * -_curses_window_scroll(PyCursesWindowObject *self, PyObject *args) +_curses_window_scroll(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_right_1 = 0; @@ -1712,7 +1712,7 @@ _curses_window_scroll(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.scroll requires 0 to 1 arguments"); goto exit; } - return_value = _curses_window_scroll_impl(self, group_right_1, lines); + return_value = _curses_window_scroll_impl((PyCursesWindowObject *)self, group_right_1, lines); exit: return return_value; @@ -1733,7 +1733,7 @@ _curses_window_touchline_impl(PyCursesWindowObject *self, int start, int count, int group_right_1, int changed); static PyObject * -_curses_window_touchline(PyCursesWindowObject *self, PyObject *args) +_curses_window_touchline(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int start; @@ -1757,7 +1757,7 @@ _curses_window_touchline(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.touchline requires 2 to 3 arguments"); goto exit; } - return_value = _curses_window_touchline_impl(self, start, count, group_right_1, changed); + return_value = _curses_window_touchline_impl((PyCursesWindowObject *)self, start, count, group_right_1, changed); exit: return return_value; @@ -1787,7 +1787,7 @@ _curses_window_vline_impl(PyCursesWindowObject *self, int group_left_1, int group_right_1, long attr); static PyObject * -_curses_window_vline(PyCursesWindowObject *self, PyObject *args) +_curses_window_vline(PyObject *self, PyObject *args) { PyObject *return_value = NULL; int group_left_1 = 0; @@ -1827,7 +1827,7 @@ _curses_window_vline(PyCursesWindowObject *self, PyObject *args) PyErr_SetString(PyExc_TypeError, "_curses.window.vline requires 2 to 5 arguments"); goto exit; } - return_value = _curses_window_vline_impl(self, group_left_1, y, x, ch, n, group_right_1, attr); + return_value = _curses_window_vline_impl((PyCursesWindowObject *)self, group_left_1, y, x, ch, n, group_right_1, attr); exit: return return_value; @@ -4379,4 +4379,4 @@ _curses_has_extended_color_support(PyObject *module, PyObject *Py_UNUSED(ignored #ifndef _CURSES_USE_DEFAULT_COLORS_METHODDEF #define _CURSES_USE_DEFAULT_COLORS_METHODDEF #endif /* !defined(_CURSES_USE_DEFAULT_COLORS_METHODDEF) */ -/*[clinic end generated code: output=26fe38c09ff8ca44 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c4211865ed96c2af input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_datetimemodule.c.h b/Modules/clinic/_datetimemodule.c.h index 72c230fc8aee68..8f33c9e7d4eae5 100644 --- a/Modules/clinic/_datetimemodule.c.h +++ 
b/Modules/clinic/_datetimemodule.c.h @@ -97,7 +97,7 @@ datetime_date_replace_impl(PyDateTime_Date *self, int year, int month, int day); static PyObject * -datetime_date_replace(PyDateTime_Date *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +datetime_date_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -162,7 +162,7 @@ datetime_date_replace(PyDateTime_Date *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_pos: - return_value = datetime_date_replace_impl(self, year, month, day); + return_value = datetime_date_replace_impl((PyDateTime_Date *)self, year, month, day); exit: return return_value; @@ -184,7 +184,7 @@ datetime_time_replace_impl(PyDateTime_Time *self, int hour, int minute, int fold); static PyObject * -datetime_time_replace(PyDateTime_Time *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +datetime_time_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -218,7 +218,7 @@ datetime_time_replace(PyDateTime_Time *self, PyObject *const *args, Py_ssize_t n int minute = TIME_GET_MINUTE(self); int second = TIME_GET_SECOND(self); int microsecond = TIME_GET_MICROSECOND(self); - PyObject *tzinfo = HASTZINFO(self) ? self->tzinfo : Py_None; + PyObject *tzinfo = HASTZINFO(self) ? ((PyDateTime_Time *)self)->tzinfo : Py_None; int fold = TIME_GET_FOLD(self); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, @@ -280,7 +280,7 @@ datetime_time_replace(PyDateTime_Time *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_kwonly: - return_value = datetime_time_replace_impl(self, hour, minute, second, microsecond, tzinfo, fold); + return_value = datetime_time_replace_impl((PyDateTime_Time *)self, hour, minute, second, microsecond, tzinfo, fold); exit: return return_value; @@ -370,7 +370,7 @@ datetime_datetime_replace_impl(PyDateTime_DateTime *self, int year, int fold); static PyObject * -datetime_datetime_replace(PyDateTime_DateTime *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +datetime_datetime_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -407,7 +407,7 @@ datetime_datetime_replace(PyDateTime_DateTime *self, PyObject *const *args, Py_s int minute = DATE_GET_MINUTE(self); int second = DATE_GET_SECOND(self); int microsecond = DATE_GET_MICROSECOND(self); - PyObject *tzinfo = HASTZINFO(self) ? self->tzinfo : Py_None; + PyObject *tzinfo = HASTZINFO(self) ? 
((PyDateTime_DateTime *)self)->tzinfo : Py_None; int fold = DATE_GET_FOLD(self); args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, @@ -496,9 +496,9 @@ datetime_datetime_replace(PyDateTime_DateTime *self, PyObject *const *args, Py_s goto exit; } skip_optional_kwonly: - return_value = datetime_datetime_replace_impl(self, year, month, day, hour, minute, second, microsecond, tzinfo, fold); + return_value = datetime_datetime_replace_impl((PyDateTime_DateTime *)self, year, month, day, hour, minute, second, microsecond, tzinfo, fold); exit: return return_value; } -/*[clinic end generated code: output=203217a61ea14171 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8acf62fbc7328f79 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index 4379b433db3738..5e503194408776 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -20,9 +20,9 @@ static PyObject * _dbm_dbm_close_impl(dbmobject *self); static PyObject * -_dbm_dbm_close(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_dbm_dbm_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _dbm_dbm_close_impl(self); + return _dbm_dbm_close_impl((dbmobject *)self); } PyDoc_STRVAR(_dbm_dbm_keys__doc__, @@ -38,13 +38,13 @@ static PyObject * _dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls); static PyObject * -_dbm_dbm_keys(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_keys(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); return NULL; } - return _dbm_dbm_keys_impl(self, cls); + return _dbm_dbm_keys_impl((dbmobject *)self, cls); } PyDoc_STRVAR(_dbm_dbm_get__doc__, @@ -61,7 +61,7 @@ _dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length, PyObject *default_value); static PyObject * -_dbm_dbm_get(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_get(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -85,7 +85,7 @@ _dbm_dbm_get(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize &key, &key_length, &default_value)) { goto exit; } - return_value = _dbm_dbm_get_impl(self, cls, key, key_length, default_value); + return_value = _dbm_dbm_get_impl((dbmobject *)self, cls, key, key_length, default_value); exit: return return_value; @@ -107,7 +107,7 @@ _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length, PyObject *default_value); static PyObject * -_dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_setdefault(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -131,7 +131,7 @@ _dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, P &key, &key_length, &default_value)) { goto exit; } - return_value = _dbm_dbm_setdefault_impl(self, cls, key, key_length, default_value); + return_value = _dbm_dbm_setdefault_impl((dbmobject *)self, cls, key, key_length, default_value); exit: return return_value; @@ -150,13 
+150,13 @@ static PyObject * _dbm_dbm_clear_impl(dbmobject *self, PyTypeObject *cls); static PyObject * -_dbm_dbm_clear(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_dbm_dbm_clear(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); return NULL; } - return _dbm_dbm_clear_impl(self, cls); + return _dbm_dbm_clear_impl((dbmobject *)self, cls); } PyDoc_STRVAR(dbmopen__doc__, @@ -221,4 +221,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=f7d9a87d80a64278 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=3b456118f231b160 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_elementtree.c.h b/Modules/clinic/_elementtree.c.h index 07045e72040664..78391887b615cf 100644 --- a/Modules/clinic/_elementtree.c.h +++ b/Modules/clinic/_elementtree.c.h @@ -22,7 +22,7 @@ _elementtree_Element_append_impl(ElementObject *self, PyTypeObject *cls, PyObject *subelement); static PyObject * -_elementtree_Element_append(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_append(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -51,7 +51,7 @@ _elementtree_Element_append(ElementObject *self, PyTypeObject *cls, PyObject *co goto exit; } subelement = args[0]; - return_value = _elementtree_Element_append_impl(self, cls, subelement); + return_value = _elementtree_Element_append_impl((ElementObject *)self, cls, subelement); exit: return return_value; @@ -69,9 +69,9 @@ static PyObject * _elementtree_Element_clear_impl(ElementObject *self); static PyObject * -_elementtree_Element_clear(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element_clear_impl(self); + return _elementtree_Element_clear_impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element___copy____doc__, @@ -86,13 +86,13 @@ static PyObject * _elementtree_Element___copy___impl(ElementObject *self, PyTypeObject *cls); static PyObject * -_elementtree_Element___copy__(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element___copy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__copy__() takes no arguments"); return NULL; } - return _elementtree_Element___copy___impl(self, cls); + return _elementtree_Element___copy___impl((ElementObject *)self, cls); } PyDoc_STRVAR(_elementtree_Element___deepcopy____doc__, @@ -107,7 +107,7 @@ static PyObject * _elementtree_Element___deepcopy___impl(ElementObject *self, PyObject *memo); static PyObject * -_elementtree_Element___deepcopy__(ElementObject *self, PyObject *arg) +_elementtree_Element___deepcopy__(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *memo; @@ -117,7 +117,7 @@ _elementtree_Element___deepcopy__(ElementObject *self, PyObject *arg) goto exit; } memo = arg; - return_value = _elementtree_Element___deepcopy___impl(self, memo); + return_value = 
_elementtree_Element___deepcopy___impl((ElementObject *)self, memo); exit: return return_value; @@ -135,12 +135,12 @@ static size_t _elementtree_Element___sizeof___impl(ElementObject *self); static PyObject * -_elementtree_Element___sizeof__(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; size_t _return_value; - _return_value = _elementtree_Element___sizeof___impl(self); + _return_value = _elementtree_Element___sizeof___impl((ElementObject *)self); if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } @@ -162,9 +162,9 @@ static PyObject * _elementtree_Element___getstate___impl(ElementObject *self); static PyObject * -_elementtree_Element___getstate__(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element___getstate__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element___getstate___impl(self); + return _elementtree_Element___getstate___impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element___setstate____doc__, @@ -180,7 +180,7 @@ _elementtree_Element___setstate___impl(ElementObject *self, PyTypeObject *cls, PyObject *state); static PyObject * -_elementtree_Element___setstate__(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element___setstate__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -205,7 +205,7 @@ _elementtree_Element___setstate__(ElementObject *self, PyTypeObject *cls, PyObje goto exit; } state = args[0]; - return_value = _elementtree_Element___setstate___impl(self, cls, state); + return_value = _elementtree_Element___setstate___impl((ElementObject *)self, cls, state); exit: return return_value; @@ -224,7 +224,7 @@ _elementtree_Element_extend_impl(ElementObject *self, PyTypeObject *cls, PyObject *elements); static PyObject * -_elementtree_Element_extend(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_extend(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -249,7 +249,7 @@ _elementtree_Element_extend(ElementObject *self, PyTypeObject *cls, PyObject *co goto exit; } elements = args[0]; - return_value = _elementtree_Element_extend_impl(self, cls, elements); + return_value = _elementtree_Element_extend_impl((ElementObject *)self, cls, elements); exit: return return_value; @@ -268,7 +268,7 @@ _elementtree_Element_find_impl(ElementObject *self, PyTypeObject *cls, PyObject *path, PyObject *namespaces); static PyObject * -_elementtree_Element_find(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_find(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -312,7 +312,7 @@ _elementtree_Element_find(ElementObject *self, PyTypeObject *cls, PyObject *cons } namespaces = args[1]; skip_optional_pos: - return_value = _elementtree_Element_find_impl(self, cls, path, namespaces); + return_value = _elementtree_Element_find_impl((ElementObject *)self, cls, path, namespaces); exit: return 
return_value; @@ -332,7 +332,7 @@ _elementtree_Element_findtext_impl(ElementObject *self, PyTypeObject *cls, PyObject *namespaces); static PyObject * -_elementtree_Element_findtext(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_findtext(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -383,7 +383,7 @@ _elementtree_Element_findtext(ElementObject *self, PyTypeObject *cls, PyObject * } namespaces = args[2]; skip_optional_pos: - return_value = _elementtree_Element_findtext_impl(self, cls, path, default_value, namespaces); + return_value = _elementtree_Element_findtext_impl((ElementObject *)self, cls, path, default_value, namespaces); exit: return return_value; @@ -402,7 +402,7 @@ _elementtree_Element_findall_impl(ElementObject *self, PyTypeObject *cls, PyObject *path, PyObject *namespaces); static PyObject * -_elementtree_Element_findall(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_findall(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -446,7 +446,7 @@ _elementtree_Element_findall(ElementObject *self, PyTypeObject *cls, PyObject *c } namespaces = args[1]; skip_optional_pos: - return_value = _elementtree_Element_findall_impl(self, cls, path, namespaces); + return_value = _elementtree_Element_findall_impl((ElementObject *)self, cls, path, namespaces); exit: return return_value; @@ -465,7 +465,7 @@ _elementtree_Element_iterfind_impl(ElementObject *self, PyTypeObject *cls, PyObject *path, PyObject *namespaces); static PyObject * -_elementtree_Element_iterfind(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_iterfind(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -509,7 +509,7 @@ _elementtree_Element_iterfind(ElementObject *self, PyTypeObject *cls, PyObject * } namespaces = args[1]; skip_optional_pos: - return_value = _elementtree_Element_iterfind_impl(self, cls, path, namespaces); + return_value = _elementtree_Element_iterfind_impl((ElementObject *)self, cls, path, namespaces); exit: return return_value; @@ -528,7 +528,7 @@ _elementtree_Element_get_impl(ElementObject *self, PyObject *key, PyObject *default_value); static PyObject * -_elementtree_Element_get(ElementObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -572,7 +572,7 @@ _elementtree_Element_get(ElementObject *self, PyObject *const *args, Py_ssize_t } default_value = args[1]; skip_optional_pos: - return_value = _elementtree_Element_get_impl(self, key, default_value); + return_value = _elementtree_Element_get_impl((ElementObject *)self, key, default_value); exit: return return_value; @@ -591,7 +591,7 @@ _elementtree_Element_iter_impl(ElementObject *self, PyTypeObject *cls, PyObject *tag); static PyObject * -_elementtree_Element_iter(ElementObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_iter(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -633,7 +633,7 @@ _elementtree_Element_iter(ElementObject *self, PyTypeObject *cls, PyObject *cons } tag = args[0]; skip_optional_pos: - return_value = _elementtree_Element_iter_impl(self, cls, tag); + return_value = _elementtree_Element_iter_impl((ElementObject *)self, cls, tag); exit: return return_value; @@ -651,13 +651,13 @@ static PyObject * _elementtree_Element_itertext_impl(ElementObject *self, PyTypeObject *cls); static PyObject * -_elementtree_Element_itertext(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_itertext(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "itertext() takes no arguments"); return NULL; } - return _elementtree_Element_itertext_impl(self, cls); + return _elementtree_Element_itertext_impl((ElementObject *)self, cls); } PyDoc_STRVAR(_elementtree_Element_insert__doc__, @@ -673,7 +673,7 @@ _elementtree_Element_insert_impl(ElementObject *self, Py_ssize_t index, PyObject *subelement); static PyObject * -_elementtree_Element_insert(ElementObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_Element_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -699,7 +699,7 @@ _elementtree_Element_insert(ElementObject *self, PyObject *const *args, Py_ssize goto exit; } subelement = args[1]; - return_value = _elementtree_Element_insert_impl(self, index, subelement); + return_value = _elementtree_Element_insert_impl((ElementObject *)self, index, subelement); exit: return return_value; @@ -717,9 +717,9 @@ static PyObject * _elementtree_Element_items_impl(ElementObject *self); static PyObject * -_elementtree_Element_items(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element_items(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element_items_impl(self); + return _elementtree_Element_items_impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element_keys__doc__, @@ -734,9 +734,9 @@ static PyObject * _elementtree_Element_keys_impl(ElementObject *self); static PyObject * -_elementtree_Element_keys(ElementObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_Element_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_Element_keys_impl(self); + return _elementtree_Element_keys_impl((ElementObject *)self); } PyDoc_STRVAR(_elementtree_Element_makeelement__doc__, @@ -752,7 +752,7 @@ _elementtree_Element_makeelement_impl(ElementObject *self, PyTypeObject *cls, PyObject *tag, PyObject *attrib); static PyObject * -_elementtree_Element_makeelement(ElementObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_elementtree_Element_makeelement(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -783,7 +783,7 @@ _elementtree_Element_makeelement(ElementObject *self, PyTypeObject *cls, PyObjec goto exit; } attrib = args[1]; - return_value = _elementtree_Element_makeelement_impl(self, cls, 
tag, attrib); + return_value = _elementtree_Element_makeelement_impl((ElementObject *)self, cls, tag, attrib); exit: return return_value; @@ -801,7 +801,7 @@ static PyObject * _elementtree_Element_remove_impl(ElementObject *self, PyObject *subelement); static PyObject * -_elementtree_Element_remove(ElementObject *self, PyObject *arg) +_elementtree_Element_remove(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *subelement; @@ -811,7 +811,7 @@ _elementtree_Element_remove(ElementObject *self, PyObject *arg) goto exit; } subelement = arg; - return_value = _elementtree_Element_remove_impl(self, subelement); + return_value = _elementtree_Element_remove_impl((ElementObject *)self, subelement); exit: return return_value; @@ -830,7 +830,7 @@ _elementtree_Element_set_impl(ElementObject *self, PyObject *key, PyObject *value); static PyObject * -_elementtree_Element_set(ElementObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_Element_set(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -841,7 +841,7 @@ _elementtree_Element_set(ElementObject *self, PyObject *const *args, Py_ssize_t } key = args[0]; value = args[1]; - return_value = _elementtree_Element_set_impl(self, key, value); + return_value = _elementtree_Element_set_impl((ElementObject *)self, key, value); exit: return return_value; @@ -1013,7 +1013,7 @@ _elementtree_TreeBuilder_pi_impl(TreeBuilderObject *self, PyObject *target, PyObject *text); static PyObject * -_elementtree_TreeBuilder_pi(TreeBuilderObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_TreeBuilder_pi(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *target; @@ -1028,7 +1028,7 @@ _elementtree_TreeBuilder_pi(TreeBuilderObject *self, PyObject *const *args, Py_s } text = args[1]; skip_optional: - return_value = _elementtree_TreeBuilder_pi_impl(self, target, text); + return_value = _elementtree_TreeBuilder_pi_impl((TreeBuilderObject *)self, target, text); exit: return return_value; @@ -1046,9 +1046,9 @@ static PyObject * _elementtree_TreeBuilder_close_impl(TreeBuilderObject *self); static PyObject * -_elementtree_TreeBuilder_close(TreeBuilderObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_TreeBuilder_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_TreeBuilder_close_impl(self); + return _elementtree_TreeBuilder_close_impl((TreeBuilderObject *)self); } PyDoc_STRVAR(_elementtree_TreeBuilder_start__doc__, @@ -1064,7 +1064,7 @@ _elementtree_TreeBuilder_start_impl(TreeBuilderObject *self, PyObject *tag, PyObject *attrs); static PyObject * -_elementtree_TreeBuilder_start(TreeBuilderObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_TreeBuilder_start(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *tag; @@ -1079,7 +1079,7 @@ _elementtree_TreeBuilder_start(TreeBuilderObject *self, PyObject *const *args, P goto exit; } attrs = args[1]; - return_value = _elementtree_TreeBuilder_start_impl(self, tag, attrs); + return_value = _elementtree_TreeBuilder_start_impl((TreeBuilderObject *)self, tag, attrs); exit: return return_value; @@ -1176,9 +1176,9 @@ static PyObject * _elementtree_XMLParser_close_impl(XMLParserObject *self); static PyObject * -_elementtree_XMLParser_close(XMLParserObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_XMLParser_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return 
_elementtree_XMLParser_close_impl(self); + return _elementtree_XMLParser_close_impl((XMLParserObject *)self); } PyDoc_STRVAR(_elementtree_XMLParser_flush__doc__, @@ -1193,9 +1193,9 @@ static PyObject * _elementtree_XMLParser_flush_impl(XMLParserObject *self); static PyObject * -_elementtree_XMLParser_flush(XMLParserObject *self, PyObject *Py_UNUSED(ignored)) +_elementtree_XMLParser_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _elementtree_XMLParser_flush_impl(self); + return _elementtree_XMLParser_flush_impl((XMLParserObject *)self); } PyDoc_STRVAR(_elementtree_XMLParser_feed__doc__, @@ -1228,7 +1228,7 @@ _elementtree_XMLParser__setevents_impl(XMLParserObject *self, PyObject *events_to_report); static PyObject * -_elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, Py_ssize_t nargs) +_elementtree_XMLParser__setevents(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *events_queue; @@ -1243,9 +1243,9 @@ _elementtree_XMLParser__setevents(XMLParserObject *self, PyObject *const *args, } events_to_report = args[1]; skip_optional: - return_value = _elementtree_XMLParser__setevents_impl(self, events_queue, events_to_report); + return_value = _elementtree_XMLParser__setevents_impl((XMLParserObject *)self, events_queue, events_to_report); exit: return return_value; } -/*[clinic end generated code: output=b713bf59fd0fef9b input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e5c758958f14f102 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index bbf4365114c0aa..00950f18e53541 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -20,7 +20,7 @@ static PyObject * _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value); static PyObject * -_gdbm_gdbm_get(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_gdbm_gdbm_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -35,7 +35,7 @@ _gdbm_gdbm_get(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - return_value = _gdbm_gdbm_get_impl(self, key, default_value); + return_value = _gdbm_gdbm_get_impl((gdbmobject *)self, key, default_value); exit: return return_value; @@ -55,7 +55,7 @@ _gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, PyObject *default_value); static PyObject * -_gdbm_gdbm_setdefault(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_gdbm_gdbm_setdefault(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -70,7 +70,7 @@ _gdbm_gdbm_setdefault(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - return_value = _gdbm_gdbm_setdefault_impl(self, key, default_value); + return_value = _gdbm_gdbm_setdefault_impl((gdbmobject *)self, key, default_value); exit: return return_value; @@ -89,9 +89,9 @@ static PyObject * _gdbm_gdbm_close_impl(gdbmobject *self); static PyObject * -_gdbm_gdbm_close(gdbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _gdbm_gdbm_close_impl(self); + return _gdbm_gdbm_close_impl((gdbmobject *)self); } PyDoc_STRVAR(_gdbm_gdbm_keys__doc__, @@ -107,13 +107,13 @@ static PyObject * _gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_keys(gdbmobject *self, PyTypeObject *cls, PyObject 
*const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_keys(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); return NULL; } - return _gdbm_gdbm_keys_impl(self, cls); + return _gdbm_gdbm_keys_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, @@ -133,13 +133,13 @@ static PyObject * _gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_firstkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_firstkey(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "firstkey() takes no arguments"); return NULL; } - return _gdbm_gdbm_firstkey_impl(self, cls); + return _gdbm_gdbm_firstkey_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, @@ -164,7 +164,7 @@ _gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length); static PyObject * -_gdbm_gdbm_nextkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_nextkey(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -187,7 +187,7 @@ _gdbm_gdbm_nextkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, P &key, &key_length)) { goto exit; } - return_value = _gdbm_gdbm_nextkey_impl(self, cls, key, key_length); + return_value = _gdbm_gdbm_nextkey_impl((gdbmobject *)self, cls, key, key_length); exit: return return_value; @@ -212,13 +212,13 @@ static PyObject * _gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_reorganize(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_reorganize(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "reorganize() takes no arguments"); return NULL; } - return _gdbm_gdbm_reorganize_impl(self, cls); + return _gdbm_gdbm_reorganize_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, @@ -237,13 +237,13 @@ static PyObject * _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_sync(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_sync(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "sync() takes no arguments"); return NULL; } - return _gdbm_gdbm_sync_impl(self, cls); + return _gdbm_gdbm_sync_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(_gdbm_gdbm_clear__doc__, @@ -259,13 +259,13 @@ static PyObject * _gdbm_gdbm_clear_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_clear(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_gdbm_gdbm_clear(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { 
PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); return NULL; } - return _gdbm_gdbm_clear_impl(self, cls); + return _gdbm_gdbm_clear_impl((gdbmobject *)self, cls); } PyDoc_STRVAR(dbmopen__doc__, @@ -343,4 +343,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=07bdeb4a8ecb328e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d974cb39e4ee5d67 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h index f54f065f7d2b71..d219b80b791a66 100644 --- a/Modules/clinic/_hashopenssl.c.h +++ b/Modules/clinic/_hashopenssl.c.h @@ -22,9 +22,9 @@ static PyObject * EVP_copy_impl(EVPobject *self); static PyObject * -EVP_copy(EVPobject *self, PyObject *Py_UNUSED(ignored)) +EVP_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EVP_copy_impl(self); + return EVP_copy_impl((EVPobject *)self); } PyDoc_STRVAR(EVP_digest__doc__, @@ -40,9 +40,9 @@ static PyObject * EVP_digest_impl(EVPobject *self); static PyObject * -EVP_digest(EVPobject *self, PyObject *Py_UNUSED(ignored)) +EVP_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EVP_digest_impl(self); + return EVP_digest_impl((EVPobject *)self); } PyDoc_STRVAR(EVP_hexdigest__doc__, @@ -58,9 +58,9 @@ static PyObject * EVP_hexdigest_impl(EVPobject *self); static PyObject * -EVP_hexdigest(EVPobject *self, PyObject *Py_UNUSED(ignored)) +EVP_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EVP_hexdigest_impl(self); + return EVP_hexdigest_impl((EVPobject *)self); } PyDoc_STRVAR(EVP_update__doc__, @@ -87,7 +87,7 @@ static PyObject * EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length); static PyObject * -EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +EVPXOF_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -135,7 +135,7 @@ EVPXOF_digest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } length = ival; } - return_value = EVPXOF_digest_impl(self, length); + return_value = EVPXOF_digest_impl((EVPobject *)self, length); exit: return return_value; @@ -158,7 +158,7 @@ static PyObject * EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length); static PyObject * -EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +EVPXOF_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -206,7 +206,7 @@ EVPXOF_hexdigest(EVPobject *self, PyObject *const *args, Py_ssize_t nargs, PyObj } length = ival; } - return_value = EVPXOF_hexdigest_impl(self, length); + return_value = EVPXOF_hexdigest_impl((EVPobject *)self, length); exit: return return_value; @@ -1634,9 +1634,9 @@ static PyObject * _hashlib_HMAC_copy_impl(HMACobject *self); static PyObject * -_hashlib_HMAC_copy(HMACobject *self, PyObject *Py_UNUSED(ignored)) +_hashlib_HMAC_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _hashlib_HMAC_copy_impl(self); + return _hashlib_HMAC_copy_impl((HMACobject *)self); } PyDoc_STRVAR(_hashlib_HMAC_update__doc__, @@ -1652,7 +1652,7 @@ static PyObject * _hashlib_HMAC_update_impl(HMACobject *self, PyObject *msg); static PyObject * -_hashlib_HMAC_update(HMACobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject 
*kwnames) +_hashlib_HMAC_update(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1689,7 +1689,7 @@ _hashlib_HMAC_update(HMACobject *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } msg = args[0]; - return_value = _hashlib_HMAC_update_impl(self, msg); + return_value = _hashlib_HMAC_update_impl((HMACobject *)self, msg); exit: return return_value; @@ -1708,9 +1708,9 @@ static PyObject * _hashlib_HMAC_digest_impl(HMACobject *self); static PyObject * -_hashlib_HMAC_digest(HMACobject *self, PyObject *Py_UNUSED(ignored)) +_hashlib_HMAC_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _hashlib_HMAC_digest_impl(self); + return _hashlib_HMAC_digest_impl((HMACobject *)self); } PyDoc_STRVAR(_hashlib_HMAC_hexdigest__doc__, @@ -1729,9 +1729,9 @@ static PyObject * _hashlib_HMAC_hexdigest_impl(HMACobject *self); static PyObject * -_hashlib_HMAC_hexdigest(HMACobject *self, PyObject *Py_UNUSED(ignored)) +_hashlib_HMAC_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _hashlib_HMAC_hexdigest_impl(self); + return _hashlib_HMAC_hexdigest_impl((HMACobject *)self); } PyDoc_STRVAR(_hashlib_get_fips_mode__doc__, @@ -1844,4 +1844,4 @@ _hashlib_compare_digest(PyObject *module, PyObject *const *args, Py_ssize_t narg #ifndef _HASHLIB_SCRYPT_METHODDEF #define _HASHLIB_SCRYPT_METHODDEF #endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */ -/*[clinic end generated code: output=c3ef67e4a573cc7a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=811a8b50beae1018 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_lsprof.c.h b/Modules/clinic/_lsprof.c.h index e19840f97e5c69..6a75a8f9833d1e 100644 --- a/Modules/clinic/_lsprof.c.h +++ b/Modules/clinic/_lsprof.c.h @@ -43,13 +43,13 @@ static PyObject * _lsprof_Profiler_getstats_impl(ProfilerObject *self, PyTypeObject *cls); static PyObject * -_lsprof_Profiler_getstats(ProfilerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_lsprof_Profiler_getstats(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "getstats() takes no arguments"); return NULL; } - return _lsprof_Profiler_getstats_impl(self, cls); + return _lsprof_Profiler_getstats_impl((ProfilerObject *)self, cls); } PyDoc_STRVAR(_lsprof_Profiler__pystart_callback__doc__, @@ -65,7 +65,7 @@ _lsprof_Profiler__pystart_callback_impl(ProfilerObject *self, PyObject *code, PyObject *instruction_offset); static PyObject * -_lsprof_Profiler__pystart_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__pystart_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -76,7 +76,7 @@ _lsprof_Profiler__pystart_callback(ProfilerObject *self, PyObject *const *args, } code = args[0]; instruction_offset = args[1]; - return_value = _lsprof_Profiler__pystart_callback_impl(self, code, instruction_offset); + return_value = _lsprof_Profiler__pystart_callback_impl((ProfilerObject *)self, code, instruction_offset); exit: return return_value; @@ -97,7 +97,7 @@ _lsprof_Profiler__pyreturn_callback_impl(ProfilerObject *self, PyObject *retval); static PyObject * -_lsprof_Profiler__pyreturn_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__pyreturn_callback(PyObject 
*self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -110,7 +110,7 @@ _lsprof_Profiler__pyreturn_callback(ProfilerObject *self, PyObject *const *args, code = args[0]; instruction_offset = args[1]; retval = args[2]; - return_value = _lsprof_Profiler__pyreturn_callback_impl(self, code, instruction_offset, retval); + return_value = _lsprof_Profiler__pyreturn_callback_impl((ProfilerObject *)self, code, instruction_offset, retval); exit: return return_value; @@ -130,7 +130,7 @@ _lsprof_Profiler__ccall_callback_impl(ProfilerObject *self, PyObject *code, PyObject *callable, PyObject *self_arg); static PyObject * -_lsprof_Profiler__ccall_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__ccall_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -145,7 +145,7 @@ _lsprof_Profiler__ccall_callback(ProfilerObject *self, PyObject *const *args, Py instruction_offset = args[1]; callable = args[2]; self_arg = args[3]; - return_value = _lsprof_Profiler__ccall_callback_impl(self, code, instruction_offset, callable, self_arg); + return_value = _lsprof_Profiler__ccall_callback_impl((ProfilerObject *)self, code, instruction_offset, callable, self_arg); exit: return return_value; @@ -167,7 +167,7 @@ _lsprof_Profiler__creturn_callback_impl(ProfilerObject *self, PyObject *code, PyObject *self_arg); static PyObject * -_lsprof_Profiler__creturn_callback(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs) +_lsprof_Profiler__creturn_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *code; @@ -182,7 +182,7 @@ _lsprof_Profiler__creturn_callback(ProfilerObject *self, PyObject *const *args, instruction_offset = args[1]; callable = args[2]; self_arg = args[3]; - return_value = _lsprof_Profiler__creturn_callback_impl(self, code, instruction_offset, callable, self_arg); + return_value = _lsprof_Profiler__creturn_callback_impl((ProfilerObject *)self, code, instruction_offset, callable, self_arg); exit: return return_value; @@ -209,7 +209,7 @@ _lsprof_Profiler_enable_impl(ProfilerObject *self, int subcalls, int builtins); static PyObject * -_lsprof_Profiler_enable(ProfilerObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_lsprof_Profiler_enable(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -264,7 +264,7 @@ _lsprof_Profiler_enable(ProfilerObject *self, PyObject *const *args, Py_ssize_t goto exit; } skip_optional_pos: - return_value = _lsprof_Profiler_enable_impl(self, subcalls, builtins); + return_value = _lsprof_Profiler_enable_impl((ProfilerObject *)self, subcalls, builtins); exit: return return_value; @@ -283,9 +283,9 @@ static PyObject * _lsprof_Profiler_disable_impl(ProfilerObject *self); static PyObject * -_lsprof_Profiler_disable(ProfilerObject *self, PyObject *Py_UNUSED(ignored)) +_lsprof_Profiler_disable(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _lsprof_Profiler_disable_impl(self); + return _lsprof_Profiler_disable_impl((ProfilerObject *)self); } PyDoc_STRVAR(_lsprof_Profiler_clear__doc__, @@ -301,9 +301,9 @@ static PyObject * _lsprof_Profiler_clear_impl(ProfilerObject *self); static PyObject * -_lsprof_Profiler_clear(ProfilerObject *self, PyObject *Py_UNUSED(ignored)) +_lsprof_Profiler_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) 
{ - return _lsprof_Profiler_clear_impl(self); + return _lsprof_Profiler_clear_impl((ProfilerObject *)self); } PyDoc_STRVAR(profiler_init__doc__, @@ -407,4 +407,4 @@ profiler_init(PyObject *self, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=e56d849e35d005a5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d983dbf23fd8ac3b input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_lzmamodule.c.h b/Modules/clinic/_lzmamodule.c.h index 187f7b183dca84..c7c81d8d1f1b9d 100644 --- a/Modules/clinic/_lzmamodule.c.h +++ b/Modules/clinic/_lzmamodule.c.h @@ -27,7 +27,7 @@ static PyObject * _lzma_LZMACompressor_compress_impl(Compressor *self, Py_buffer *data); static PyObject * -_lzma_LZMACompressor_compress(Compressor *self, PyObject *arg) +_lzma_LZMACompressor_compress(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer data = {NULL, NULL}; @@ -35,7 +35,7 @@ _lzma_LZMACompressor_compress(Compressor *self, PyObject *arg) if (PyObject_GetBuffer(arg, &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _lzma_LZMACompressor_compress_impl(self, &data); + return_value = _lzma_LZMACompressor_compress_impl((Compressor *)self, &data); exit: /* Cleanup for data */ @@ -63,9 +63,9 @@ static PyObject * _lzma_LZMACompressor_flush_impl(Compressor *self); static PyObject * -_lzma_LZMACompressor_flush(Compressor *self, PyObject *Py_UNUSED(ignored)) +_lzma_LZMACompressor_flush(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _lzma_LZMACompressor_flush_impl(self); + return _lzma_LZMACompressor_flush_impl((Compressor *)self); } PyDoc_STRVAR(_lzma_LZMADecompressor_decompress__doc__, @@ -95,7 +95,7 @@ _lzma_LZMADecompressor_decompress_impl(Decompressor *self, Py_buffer *data, Py_ssize_t max_length); static PyObject * -_lzma_LZMADecompressor_decompress(Decompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_lzma_LZMADecompressor_decompress(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -152,7 +152,7 @@ _lzma_LZMADecompressor_decompress(Decompressor *self, PyObject *const *args, Py_ max_length = ival; } skip_optional_pos: - return_value = _lzma_LZMADecompressor_decompress_impl(self, &data, max_length); + return_value = _lzma_LZMADecompressor_decompress_impl((Decompressor *)self, &data, max_length); exit: /* Cleanup for data */ @@ -329,4 +329,4 @@ _lzma__decode_filter_properties(PyObject *module, PyObject *const *args, Py_ssiz return return_value; } -/*[clinic end generated code: output=52e1b68d0886cebb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=19ed9b1182f5ddf9 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_pickle.c.h b/Modules/clinic/_pickle.c.h index 2e84bb83e21ca6..91d355c5afb353 100644 --- a/Modules/clinic/_pickle.c.h +++ b/Modules/clinic/_pickle.c.h @@ -26,9 +26,9 @@ static PyObject * _pickle_Pickler_clear_memo_impl(PicklerObject *self); static PyObject * -_pickle_Pickler_clear_memo(PicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Pickler_clear_memo(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_Pickler_clear_memo_impl(self); + return _pickle_Pickler_clear_memo_impl((PicklerObject *)self); } PyDoc_STRVAR(_pickle_Pickler_dump__doc__, @@ -45,7 +45,7 @@ _pickle_Pickler_dump_impl(PicklerObject *self, PyTypeObject *cls, PyObject *obj); static PyObject * -_pickle_Pickler_dump(PicklerObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Pickler_dump(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -70,7 +70,7 @@ _pickle_Pickler_dump(PicklerObject *self, PyTypeObject *cls, PyObject *const *ar goto exit; } obj = args[0]; - return_value = _pickle_Pickler_dump_impl(self, cls, obj); + return_value = _pickle_Pickler_dump_impl((PicklerObject *)self, cls, obj); exit: return return_value; @@ -89,12 +89,12 @@ static size_t _pickle_Pickler___sizeof___impl(PicklerObject *self); static PyObject * -_pickle_Pickler___sizeof__(PicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Pickler___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; size_t _return_value; - _return_value = _pickle_Pickler___sizeof___impl(self); + _return_value = _pickle_Pickler___sizeof___impl((PicklerObject *)self); if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } @@ -227,9 +227,9 @@ static PyObject * _pickle_PicklerMemoProxy_clear_impl(PicklerMemoProxyObject *self); static PyObject * -_pickle_PicklerMemoProxy_clear(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_PicklerMemoProxy_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_PicklerMemoProxy_clear_impl(self); + return _pickle_PicklerMemoProxy_clear_impl((PicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_PicklerMemoProxy_copy__doc__, @@ -245,9 +245,9 @@ static PyObject * _pickle_PicklerMemoProxy_copy_impl(PicklerMemoProxyObject *self); static PyObject * -_pickle_PicklerMemoProxy_copy(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_PicklerMemoProxy_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_PicklerMemoProxy_copy_impl(self); + return _pickle_PicklerMemoProxy_copy_impl((PicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_PicklerMemoProxy___reduce____doc__, @@ -263,9 +263,9 @@ static PyObject * _pickle_PicklerMemoProxy___reduce___impl(PicklerMemoProxyObject *self); static PyObject * -_pickle_PicklerMemoProxy___reduce__(PicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_PicklerMemoProxy___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_PicklerMemoProxy___reduce___impl(self); + return _pickle_PicklerMemoProxy___reduce___impl((PicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_Unpickler_persistent_load__doc__, @@ -281,7 +281,7 @@ _pickle_Unpickler_persistent_load_impl(UnpicklerObject *self, PyTypeObject *cls, PyObject *pid); static PyObject * -_pickle_Unpickler_persistent_load(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Unpickler_persistent_load(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -306,7 +306,7 @@ _pickle_Unpickler_persistent_load(UnpicklerObject *self, PyTypeObject *cls, PyOb goto exit; } pid = args[0]; - return_value = _pickle_Unpickler_persistent_load_impl(self, cls, pid); + return_value = _pickle_Unpickler_persistent_load_impl((UnpicklerObject *)self, cls, pid); exit: return return_value; @@ -329,13 +329,13 @@ static PyObject * _pickle_Unpickler_load_impl(UnpicklerObject *self, PyTypeObject *cls); static PyObject * -_pickle_Unpickler_load(UnpicklerObject *self, PyTypeObject *cls, 
PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Unpickler_load(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "load() takes no arguments"); return NULL; } - return _pickle_Unpickler_load_impl(self, cls); + return _pickle_Unpickler_load_impl((UnpicklerObject *)self, cls); } PyDoc_STRVAR(_pickle_Unpickler_find_class__doc__, @@ -360,7 +360,7 @@ _pickle_Unpickler_find_class_impl(UnpicklerObject *self, PyTypeObject *cls, PyObject *global_name); static PyObject * -_pickle_Unpickler_find_class(UnpicklerObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_pickle_Unpickler_find_class(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -387,7 +387,7 @@ _pickle_Unpickler_find_class(UnpicklerObject *self, PyTypeObject *cls, PyObject } module_name = args[0]; global_name = args[1]; - return_value = _pickle_Unpickler_find_class_impl(self, cls, module_name, global_name); + return_value = _pickle_Unpickler_find_class_impl((UnpicklerObject *)self, cls, module_name, global_name); exit: return return_value; @@ -406,12 +406,12 @@ static size_t _pickle_Unpickler___sizeof___impl(UnpicklerObject *self); static PyObject * -_pickle_Unpickler___sizeof__(UnpicklerObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_Unpickler___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; size_t _return_value; - _return_value = _pickle_Unpickler___sizeof___impl(self); + _return_value = _pickle_Unpickler___sizeof___impl((UnpicklerObject *)self); if ((_return_value == (size_t)-1) && PyErr_Occurred()) { goto exit; } @@ -566,9 +566,9 @@ static PyObject * _pickle_UnpicklerMemoProxy_clear_impl(UnpicklerMemoProxyObject *self); static PyObject * -_pickle_UnpicklerMemoProxy_clear(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_UnpicklerMemoProxy_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_UnpicklerMemoProxy_clear_impl(self); + return _pickle_UnpicklerMemoProxy_clear_impl((UnpicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_UnpicklerMemoProxy_copy__doc__, @@ -584,9 +584,9 @@ static PyObject * _pickle_UnpicklerMemoProxy_copy_impl(UnpicklerMemoProxyObject *self); static PyObject * -_pickle_UnpicklerMemoProxy_copy(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_UnpicklerMemoProxy_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_UnpicklerMemoProxy_copy_impl(self); + return _pickle_UnpicklerMemoProxy_copy_impl((UnpicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_UnpicklerMemoProxy___reduce____doc__, @@ -602,9 +602,9 @@ static PyObject * _pickle_UnpicklerMemoProxy___reduce___impl(UnpicklerMemoProxyObject *self); static PyObject * -_pickle_UnpicklerMemoProxy___reduce__(UnpicklerMemoProxyObject *self, PyObject *Py_UNUSED(ignored)) +_pickle_UnpicklerMemoProxy___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _pickle_UnpicklerMemoProxy___reduce___impl(self); + return _pickle_UnpicklerMemoProxy___reduce___impl((UnpicklerMemoProxyObject *)self); } PyDoc_STRVAR(_pickle_dump__doc__, @@ -1086,4 +1086,4 @@ _pickle_loads(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObjec exit: return return_value; } -/*[clinic end generated code: 
output=48ceb6687a8e716c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d71dc73af298ebe8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_queuemodule.c.h b/Modules/clinic/_queuemodule.c.h index f0d4a3a164cd5f..2dfc3e6be1984e 100644 --- a/Modules/clinic/_queuemodule.c.h +++ b/Modules/clinic/_queuemodule.c.h @@ -55,7 +55,7 @@ _queue_SimpleQueue_put_impl(simplequeueobject *self, PyObject *item, int block, PyObject *timeout); static PyObject * -_queue_SimpleQueue_put(simplequeueobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_put(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -110,7 +110,7 @@ _queue_SimpleQueue_put(simplequeueobject *self, PyObject *const *args, Py_ssize_ timeout = args[2]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_put_impl(self, item, block, timeout); + return_value = _queue_SimpleQueue_put_impl((simplequeueobject *)self, item, block, timeout); Py_END_CRITICAL_SECTION(); exit: @@ -133,7 +133,7 @@ static PyObject * _queue_SimpleQueue_put_nowait_impl(simplequeueobject *self, PyObject *item); static PyObject * -_queue_SimpleQueue_put_nowait(simplequeueobject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_put_nowait(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -171,7 +171,7 @@ _queue_SimpleQueue_put_nowait(simplequeueobject *self, PyObject *const *args, Py } item = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_put_nowait_impl(self, item); + return_value = _queue_SimpleQueue_put_nowait_impl((simplequeueobject *)self, item); Py_END_CRITICAL_SECTION(); exit: @@ -200,7 +200,7 @@ _queue_SimpleQueue_get_impl(simplequeueobject *self, PyTypeObject *cls, int block, PyObject *timeout_obj); static PyObject * -_queue_SimpleQueue_get(simplequeueobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_get(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -253,7 +253,7 @@ _queue_SimpleQueue_get(simplequeueobject *self, PyTypeObject *cls, PyObject *con timeout_obj = args[1]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_get_impl(self, cls, block, timeout_obj); + return_value = _queue_SimpleQueue_get_impl((simplequeueobject *)self, cls, block, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -277,7 +277,7 @@ _queue_SimpleQueue_get_nowait_impl(simplequeueobject *self, PyTypeObject *cls); static PyObject * -_queue_SimpleQueue_get_nowait(simplequeueobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_queue_SimpleQueue_get_nowait(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; @@ -286,7 +286,7 @@ _queue_SimpleQueue_get_nowait(simplequeueobject *self, PyTypeObject *cls, PyObje goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _queue_SimpleQueue_get_nowait_impl(self, cls); + return_value = _queue_SimpleQueue_get_nowait_impl((simplequeueobject *)self, cls); Py_END_CRITICAL_SECTION(); exit: @@ -306,13 
+306,13 @@ static int _queue_SimpleQueue_empty_impl(simplequeueobject *self); static PyObject * -_queue_SimpleQueue_empty(simplequeueobject *self, PyObject *Py_UNUSED(ignored)) +_queue_SimpleQueue_empty(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; int _return_value; Py_BEGIN_CRITICAL_SECTION(self); - _return_value = _queue_SimpleQueue_empty_impl(self); + _return_value = _queue_SimpleQueue_empty_impl((simplequeueobject *)self); Py_END_CRITICAL_SECTION(); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; @@ -336,13 +336,13 @@ static Py_ssize_t _queue_SimpleQueue_qsize_impl(simplequeueobject *self); static PyObject * -_queue_SimpleQueue_qsize(simplequeueobject *self, PyObject *Py_UNUSED(ignored)) +_queue_SimpleQueue_qsize(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_ssize_t _return_value; Py_BEGIN_CRITICAL_SECTION(self); - _return_value = _queue_SimpleQueue_qsize_impl(self); + _return_value = _queue_SimpleQueue_qsize_impl((simplequeueobject *)self); Py_END_CRITICAL_SECTION(); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; @@ -352,4 +352,4 @@ _queue_SimpleQueue_qsize(simplequeueobject *self, PyObject *Py_UNUSED(ignored)) exit: return return_value; } -/*[clinic end generated code: output=07b5742dca7692d9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e04e15a1b959c700 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h index 6193acac67e7ac..b2d67e11c63595 100644 --- a/Modules/clinic/_randommodule.c.h +++ b/Modules/clinic/_randommodule.c.h @@ -18,12 +18,12 @@ static PyObject * _random_Random_random_impl(RandomObject *self); static PyObject * -_random_Random_random(RandomObject *self, PyObject *Py_UNUSED(ignored)) +_random_Random_random(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_random_impl(self); + return_value = _random_Random_random_impl((RandomObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -45,7 +45,7 @@ static PyObject * _random_Random_seed_impl(RandomObject *self, PyObject *n); static PyObject * -_random_Random_seed(RandomObject *self, PyObject *const *args, Py_ssize_t nargs) +_random_Random_seed(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *n = Py_None; @@ -59,7 +59,7 @@ _random_Random_seed(RandomObject *self, PyObject *const *args, Py_ssize_t nargs) n = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_seed_impl(self, n); + return_value = _random_Random_seed_impl((RandomObject *)self, n); Py_END_CRITICAL_SECTION(); exit: @@ -79,12 +79,12 @@ static PyObject * _random_Random_getstate_impl(RandomObject *self); static PyObject * -_random_Random_getstate(RandomObject *self, PyObject *Py_UNUSED(ignored)) +_random_Random_getstate(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_getstate_impl(self); + return_value = _random_Random_getstate_impl((RandomObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -108,7 +108,7 @@ _random_Random_setstate(RandomObject *self, PyObject *state) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_setstate_impl(self, state); + return_value = _random_Random_setstate_impl((RandomObject *)self, state); Py_END_CRITICAL_SECTION(); return 
return_value; @@ -127,7 +127,7 @@ static PyObject * _random_Random_getrandbits_impl(RandomObject *self, int k); static PyObject * -_random_Random_getrandbits(RandomObject *self, PyObject *arg) +_random_Random_getrandbits(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int k; @@ -137,10 +137,10 @@ _random_Random_getrandbits(RandomObject *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _random_Random_getrandbits_impl(self, k); + return_value = _random_Random_getrandbits_impl((RandomObject *)self, k); Py_END_CRITICAL_SECTION(); exit: return return_value; } -/*[clinic end generated code: output=bf49ece1d341b1b6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=859cfbf59c133a4e input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_ssl.c.h b/Modules/clinic/_ssl.c.h index becdb9cc1831fa..73c5d304f1a141 100644 --- a/Modules/clinic/_ssl.c.h +++ b/Modules/clinic/_ssl.c.h @@ -21,12 +21,12 @@ static PyObject * _ssl__SSLSocket_do_handshake_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_do_handshake(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_do_handshake(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_do_handshake_impl(self); + return_value = _ssl__SSLSocket_do_handshake_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -79,7 +79,7 @@ static PyObject * _ssl__SSLSocket_getpeercert_impl(PySSLSocket *self, int binary_mode); static PyObject * -_ssl__SSLSocket_getpeercert(PySSLSocket *self, PyObject *const *args, Py_ssize_t nargs) +_ssl__SSLSocket_getpeercert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int binary_mode = 0; @@ -96,7 +96,7 @@ _ssl__SSLSocket_getpeercert(PySSLSocket *self, PyObject *const *args, Py_ssize_t } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_getpeercert_impl(self, binary_mode); + return_value = _ssl__SSLSocket_getpeercert_impl((PySSLSocket *)self, binary_mode); Py_END_CRITICAL_SECTION(); exit: @@ -115,12 +115,12 @@ static PyObject * _ssl__SSLSocket_get_verified_chain_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_get_verified_chain(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_get_verified_chain(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_get_verified_chain_impl(self); + return_value = _ssl__SSLSocket_get_verified_chain_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -138,12 +138,12 @@ static PyObject * _ssl__SSLSocket_get_unverified_chain_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_get_unverified_chain(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_get_unverified_chain(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_get_unverified_chain_impl(self); + return_value = _ssl__SSLSocket_get_unverified_chain_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -161,12 +161,12 @@ static PyObject * _ssl__SSLSocket_shared_ciphers_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_shared_ciphers(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_shared_ciphers(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; 
Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_shared_ciphers_impl(self); + return_value = _ssl__SSLSocket_shared_ciphers_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -184,12 +184,12 @@ static PyObject * _ssl__SSLSocket_cipher_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_cipher(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_cipher(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_cipher_impl(self); + return_value = _ssl__SSLSocket_cipher_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -207,12 +207,12 @@ static PyObject * _ssl__SSLSocket_version_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_version(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_version(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_version_impl(self); + return_value = _ssl__SSLSocket_version_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -230,12 +230,12 @@ static PyObject * _ssl__SSLSocket_selected_alpn_protocol_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_selected_alpn_protocol(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_selected_alpn_protocol(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_selected_alpn_protocol_impl(self); + return_value = _ssl__SSLSocket_selected_alpn_protocol_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -253,9 +253,9 @@ static PyObject * _ssl__SSLSocket_compression_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_compression(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_compression(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _ssl__SSLSocket_compression_impl(self); + return _ssl__SSLSocket_compression_impl((PySSLSocket *)self); } PyDoc_STRVAR(_ssl__SSLSocket_context__doc__, @@ -283,12 +283,12 @@ static PyObject * _ssl__SSLSocket_context_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_context_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_context_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_context_get_impl(self); + return_value = _ssl__SSLSocket_context_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -308,12 +308,12 @@ static int _ssl__SSLSocket_context_set_impl(PySSLSocket *self, PyObject *value); static int -_ssl__SSLSocket_context_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLSocket_context_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_context_set_impl(self, value); + return_value = _ssl__SSLSocket_context_set_impl((PySSLSocket *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -340,12 +340,12 @@ static PyObject * _ssl__SSLSocket_server_side_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_server_side_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_server_side_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - 
return_value = _ssl__SSLSocket_server_side_get_impl(self); + return_value = _ssl__SSLSocket_server_side_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -372,12 +372,12 @@ static PyObject * _ssl__SSLSocket_server_hostname_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_server_hostname_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_server_hostname_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_server_hostname_get_impl(self); + return_value = _ssl__SSLSocket_server_hostname_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -406,12 +406,12 @@ static PyObject * _ssl__SSLSocket_owner_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_owner_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_owner_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_owner_get_impl(self); + return_value = _ssl__SSLSocket_owner_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -431,12 +431,12 @@ static int _ssl__SSLSocket_owner_set_impl(PySSLSocket *self, PyObject *value); static int -_ssl__SSLSocket_owner_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLSocket_owner_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_owner_set_impl(self, value); + return_value = _ssl__SSLSocket_owner_set_impl((PySSLSocket *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -457,7 +457,7 @@ static PyObject * _ssl__SSLSocket_write_impl(PySSLSocket *self, Py_buffer *b); static PyObject * -_ssl__SSLSocket_write(PySSLSocket *self, PyObject *arg) +_ssl__SSLSocket_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer b = {NULL, NULL}; @@ -466,7 +466,7 @@ _ssl__SSLSocket_write(PySSLSocket *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_write_impl(self, &b); + return_value = _ssl__SSLSocket_write_impl((PySSLSocket *)self, &b); Py_END_CRITICAL_SECTION(); exit: @@ -491,12 +491,12 @@ static PyObject * _ssl__SSLSocket_pending_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_pending(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_pending(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_pending_impl(self); + return_value = _ssl__SSLSocket_pending_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -514,7 +514,7 @@ _ssl__SSLSocket_read_impl(PySSLSocket *self, Py_ssize_t len, int group_right_1, Py_buffer *buffer); static PyObject * -_ssl__SSLSocket_read(PySSLSocket *self, PyObject *args) +_ssl__SSLSocket_read(PyObject *self, PyObject *args) { PyObject *return_value = NULL; Py_ssize_t len; @@ -538,7 +538,7 @@ _ssl__SSLSocket_read(PySSLSocket *self, PyObject *args) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_read_impl(self, len, group_right_1, &buffer); + return_value = _ssl__SSLSocket_read_impl((PySSLSocket *)self, len, group_right_1, &buffer); Py_END_CRITICAL_SECTION(); exit: @@ -563,12 +563,12 @@ static PyObject * _ssl__SSLSocket_shutdown_impl(PySSLSocket *self); static PyObject * 
-_ssl__SSLSocket_shutdown(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_shutdown(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_shutdown_impl(self); + return_value = _ssl__SSLSocket_shutdown_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -592,7 +592,7 @@ _ssl__SSLSocket_get_channel_binding_impl(PySSLSocket *self, const char *cb_type); static PyObject * -_ssl__SSLSocket_get_channel_binding(PySSLSocket *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLSocket_get_channel_binding(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -647,7 +647,7 @@ _ssl__SSLSocket_get_channel_binding(PySSLSocket *self, PyObject *const *args, Py } skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_get_channel_binding_impl(self, cb_type); + return_value = _ssl__SSLSocket_get_channel_binding_impl((PySSLSocket *)self, cb_type); Py_END_CRITICAL_SECTION(); exit: @@ -667,12 +667,12 @@ static PyObject * _ssl__SSLSocket_verify_client_post_handshake_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_verify_client_post_handshake(PySSLSocket *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLSocket_verify_client_post_handshake(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_verify_client_post_handshake_impl(self); + return_value = _ssl__SSLSocket_verify_client_post_handshake_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -699,12 +699,12 @@ static PyObject * _ssl__SSLSocket_session_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_session_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_session_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_session_get_impl(self); + return_value = _ssl__SSLSocket_session_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -724,12 +724,12 @@ static int _ssl__SSLSocket_session_set_impl(PySSLSocket *self, PyObject *value); static int -_ssl__SSLSocket_session_set(PySSLSocket *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLSocket_session_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_session_set_impl(self, value); + return_value = _ssl__SSLSocket_session_set_impl((PySSLSocket *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -756,12 +756,12 @@ static PyObject * _ssl__SSLSocket_session_reused_get_impl(PySSLSocket *self); static PyObject * -_ssl__SSLSocket_session_reused_get(PySSLSocket *self, void *Py_UNUSED(context)) +_ssl__SSLSocket_session_reused_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLSocket_session_reused_get_impl(self); + return_value = _ssl__SSLSocket_session_reused_get_impl((PySSLSocket *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -808,7 +808,7 @@ static PyObject * _ssl__SSLContext_set_ciphers_impl(PySSLContext *self, const char *cipherlist); static PyObject * -_ssl__SSLContext_set_ciphers(PySSLContext *self, PyObject *arg) 
+_ssl__SSLContext_set_ciphers(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *cipherlist; @@ -827,7 +827,7 @@ _ssl__SSLContext_set_ciphers(PySSLContext *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_ciphers_impl(self, cipherlist); + return_value = _ssl__SSLContext_set_ciphers_impl((PySSLContext *)self, cipherlist); Py_END_CRITICAL_SECTION(); exit: @@ -846,12 +846,12 @@ static PyObject * _ssl__SSLContext_get_ciphers_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_get_ciphers(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_get_ciphers(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_get_ciphers_impl(self); + return_value = _ssl__SSLContext_get_ciphers_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -870,7 +870,7 @@ _ssl__SSLContext__set_alpn_protocols_impl(PySSLContext *self, Py_buffer *protos); static PyObject * -_ssl__SSLContext__set_alpn_protocols(PySSLContext *self, PyObject *arg) +_ssl__SSLContext__set_alpn_protocols(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer protos = {NULL, NULL}; @@ -879,7 +879,7 @@ _ssl__SSLContext__set_alpn_protocols(PySSLContext *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__set_alpn_protocols_impl(self, &protos); + return_value = _ssl__SSLContext__set_alpn_protocols_impl((PySSLContext *)self, &protos); Py_END_CRITICAL_SECTION(); exit: @@ -905,12 +905,12 @@ static PyObject * _ssl__SSLContext_verify_mode_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_verify_mode_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_mode_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_mode_get_impl(self); + return_value = _ssl__SSLContext_verify_mode_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -930,12 +930,12 @@ static int _ssl__SSLContext_verify_mode_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_verify_mode_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_mode_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_mode_set_impl(self, value); + return_value = _ssl__SSLContext_verify_mode_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -955,12 +955,12 @@ static PyObject * _ssl__SSLContext_verify_flags_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_verify_flags_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_flags_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_flags_get_impl(self); + return_value = _ssl__SSLContext_verify_flags_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -980,12 +980,12 @@ static int _ssl__SSLContext_verify_flags_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_verify_flags_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_verify_flags_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) 
{ int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_verify_flags_set_impl(self, value); + return_value = _ssl__SSLContext_verify_flags_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1005,12 +1005,12 @@ static PyObject * _ssl__SSLContext_minimum_version_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_minimum_version_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_minimum_version_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_minimum_version_get_impl(self); + return_value = _ssl__SSLContext_minimum_version_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1031,12 +1031,12 @@ _ssl__SSLContext_minimum_version_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_minimum_version_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_minimum_version_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_minimum_version_set_impl(self, value); + return_value = _ssl__SSLContext_minimum_version_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1056,12 +1056,12 @@ static PyObject * _ssl__SSLContext_maximum_version_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_maximum_version_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_maximum_version_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_maximum_version_get_impl(self); + return_value = _ssl__SSLContext_maximum_version_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1082,12 +1082,12 @@ _ssl__SSLContext_maximum_version_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_maximum_version_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_maximum_version_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_maximum_version_set_impl(self, value); + return_value = _ssl__SSLContext_maximum_version_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1114,12 +1114,12 @@ static PyObject * _ssl__SSLContext_num_tickets_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_num_tickets_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_num_tickets_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_num_tickets_get_impl(self); + return_value = _ssl__SSLContext_num_tickets_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1139,12 +1139,12 @@ static int _ssl__SSLContext_num_tickets_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_num_tickets_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_num_tickets_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_num_tickets_set_impl(self, value); + return_value = _ssl__SSLContext_num_tickets_set_impl((PySSLContext *)self, value); 
Py_END_CRITICAL_SECTION(); return return_value; @@ -1171,12 +1171,12 @@ static PyObject * _ssl__SSLContext_security_level_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_security_level_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_security_level_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_security_level_get_impl(self); + return_value = _ssl__SSLContext_security_level_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1196,12 +1196,12 @@ static PyObject * _ssl__SSLContext_options_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_options_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_options_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_options_get_impl(self); + return_value = _ssl__SSLContext_options_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1221,12 +1221,12 @@ static int _ssl__SSLContext_options_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_options_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_options_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_options_set_impl(self, value); + return_value = _ssl__SSLContext_options_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1246,12 +1246,12 @@ static PyObject * _ssl__SSLContext__host_flags_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext__host_flags_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext__host_flags_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__host_flags_get_impl(self); + return_value = _ssl__SSLContext__host_flags_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1271,12 +1271,12 @@ static int _ssl__SSLContext__host_flags_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext__host_flags_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext__host_flags_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__host_flags_set_impl(self, value); + return_value = _ssl__SSLContext__host_flags_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1296,12 +1296,12 @@ static PyObject * _ssl__SSLContext_check_hostname_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_check_hostname_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_check_hostname_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_check_hostname_get_impl(self); + return_value = _ssl__SSLContext_check_hostname_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1321,12 +1321,12 @@ static int _ssl__SSLContext_check_hostname_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_check_hostname_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) 
+_ssl__SSLContext_check_hostname_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_check_hostname_set_impl(self, value); + return_value = _ssl__SSLContext_check_hostname_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1346,12 +1346,12 @@ static PyObject * _ssl__SSLContext_protocol_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_protocol_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_protocol_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_protocol_get_impl(self); + return_value = _ssl__SSLContext_protocol_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1370,7 +1370,7 @@ _ssl__SSLContext_load_cert_chain_impl(PySSLContext *self, PyObject *certfile, PyObject *keyfile, PyObject *password); static PyObject * -_ssl__SSLContext_load_cert_chain(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_load_cert_chain(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1422,7 +1422,7 @@ _ssl__SSLContext_load_cert_chain(PySSLContext *self, PyObject *const *args, Py_s password = args[2]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_load_cert_chain_impl(self, certfile, keyfile, password); + return_value = _ssl__SSLContext_load_cert_chain_impl((PySSLContext *)self, certfile, keyfile, password); Py_END_CRITICAL_SECTION(); exit: @@ -1444,7 +1444,7 @@ _ssl__SSLContext_load_verify_locations_impl(PySSLContext *self, PyObject *cadata); static PyObject * -_ssl__SSLContext_load_verify_locations(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_load_verify_locations(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1501,7 +1501,7 @@ _ssl__SSLContext_load_verify_locations(PySSLContext *self, PyObject *const *args cadata = args[2]; skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_load_verify_locations_impl(self, cafile, capath, cadata); + return_value = _ssl__SSLContext_load_verify_locations_impl((PySSLContext *)self, cafile, capath, cadata); Py_END_CRITICAL_SECTION(); exit: @@ -1525,7 +1525,7 @@ _ssl__SSLContext_load_dh_params(PySSLContext *self, PyObject *filepath) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_load_dh_params_impl(self, filepath); + return_value = _ssl__SSLContext_load_dh_params_impl((PySSLContext *)self, filepath); Py_END_CRITICAL_SECTION(); return return_value; @@ -1546,7 +1546,7 @@ _ssl__SSLContext__wrap_socket_impl(PySSLContext *self, PyObject *sock, PyObject *owner, PyObject *session); static PyObject * -_ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext__wrap_socket(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1618,7 +1618,7 @@ _ssl__SSLContext__wrap_socket(PySSLContext *self, PyObject *const *args, Py_ssiz session = 
args[4]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__wrap_socket_impl(self, sock, server_side, hostname_obj, owner, session); + return_value = _ssl__SSLContext__wrap_socket_impl((PySSLContext *)self, sock, server_side, hostname_obj, owner, session); Py_END_CRITICAL_SECTION(); exit: @@ -1641,7 +1641,7 @@ _ssl__SSLContext__wrap_bio_impl(PySSLContext *self, PySSLMemoryBIO *incoming, PyObject *session); static PyObject * -_ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext__wrap_bio(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1719,7 +1719,7 @@ _ssl__SSLContext__wrap_bio(PySSLContext *self, PyObject *const *args, Py_ssize_t session = args[5]; skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext__wrap_bio_impl(self, incoming, outgoing, server_side, hostname_obj, owner, session); + return_value = _ssl__SSLContext__wrap_bio_impl((PySSLContext *)self, incoming, outgoing, server_side, hostname_obj, owner, session); Py_END_CRITICAL_SECTION(); exit: @@ -1738,12 +1738,12 @@ static PyObject * _ssl__SSLContext_session_stats_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_session_stats(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_session_stats(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_session_stats_impl(self); + return_value = _ssl__SSLContext_session_stats_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1761,12 +1761,12 @@ static PyObject * _ssl__SSLContext_set_default_verify_paths_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_set_default_verify_paths(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_set_default_verify_paths(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_default_verify_paths_impl(self); + return_value = _ssl__SSLContext_set_default_verify_paths_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1789,7 +1789,7 @@ _ssl__SSLContext_set_ecdh_curve(PySSLContext *self, PyObject *name) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_ecdh_curve_impl(self, name); + return_value = _ssl__SSLContext_set_ecdh_curve_impl((PySSLContext *)self, name); Py_END_CRITICAL_SECTION(); return return_value; @@ -1821,12 +1821,12 @@ static PyObject * _ssl__SSLContext_sni_callback_get_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_sni_callback_get(PySSLContext *self, void *Py_UNUSED(context)) +_ssl__SSLContext_sni_callback_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_sni_callback_get_impl(self); + return_value = _ssl__SSLContext_sni_callback_get_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1846,12 +1846,12 @@ static int _ssl__SSLContext_sni_callback_set_impl(PySSLContext *self, PyObject *value); static int -_ssl__SSLContext_sni_callback_set(PySSLContext *self, PyObject *value, void *Py_UNUSED(context)) +_ssl__SSLContext_sni_callback_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int 
return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_sni_callback_set_impl(self, value); + return_value = _ssl__SSLContext_sni_callback_set_impl((PySSLContext *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -1876,12 +1876,12 @@ static PyObject * _ssl__SSLContext_cert_store_stats_impl(PySSLContext *self); static PyObject * -_ssl__SSLContext_cert_store_stats(PySSLContext *self, PyObject *Py_UNUSED(ignored)) +_ssl__SSLContext_cert_store_stats(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_cert_store_stats_impl(self); + return_value = _ssl__SSLContext_cert_store_stats_impl((PySSLContext *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1906,7 +1906,7 @@ static PyObject * _ssl__SSLContext_get_ca_certs_impl(PySSLContext *self, int binary_form); static PyObject * -_ssl__SSLContext_get_ca_certs(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_get_ca_certs(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1952,7 +1952,7 @@ _ssl__SSLContext_get_ca_certs(PySSLContext *self, PyObject *const *args, Py_ssiz } skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_get_ca_certs_impl(self, binary_form); + return_value = _ssl__SSLContext_get_ca_certs_impl((PySSLContext *)self, binary_form); Py_END_CRITICAL_SECTION(); exit: @@ -1972,7 +1972,7 @@ _ssl__SSLContext_set_psk_client_callback_impl(PySSLContext *self, PyObject *callback); static PyObject * -_ssl__SSLContext_set_psk_client_callback(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_set_psk_client_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -2010,7 +2010,7 @@ _ssl__SSLContext_set_psk_client_callback(PySSLContext *self, PyObject *const *ar } callback = args[0]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_psk_client_callback_impl(self, callback); + return_value = _ssl__SSLContext_set_psk_client_callback_impl((PySSLContext *)self, callback); Py_END_CRITICAL_SECTION(); exit: @@ -2031,7 +2031,7 @@ _ssl__SSLContext_set_psk_server_callback_impl(PySSLContext *self, const char *identity_hint); static PyObject * -_ssl__SSLContext_set_psk_server_callback(PySSLContext *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_ssl__SSLContext_set_psk_server_callback(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -2093,7 +2093,7 @@ _ssl__SSLContext_set_psk_server_callback(PySSLContext *self, PyObject *const *ar } skip_optional_pos: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl__SSLContext_set_psk_server_callback_impl(self, callback, identity_hint); + return_value = _ssl__SSLContext_set_psk_server_callback_impl((PySSLContext *)self, callback, identity_hint); Py_END_CRITICAL_SECTION(); exit: @@ -2146,12 +2146,12 @@ static PyObject * _ssl_MemoryBIO_pending_get_impl(PySSLMemoryBIO *self); static PyObject * -_ssl_MemoryBIO_pending_get(PySSLMemoryBIO *self, void *Py_UNUSED(context)) +_ssl_MemoryBIO_pending_get(PyObject *self, void *Py_UNUSED(context)) { PyObject 
*return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_pending_get_impl(self); + return_value = _ssl_MemoryBIO_pending_get_impl((PySSLMemoryBIO *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2178,12 +2178,12 @@ static PyObject * _ssl_MemoryBIO_eof_get_impl(PySSLMemoryBIO *self); static PyObject * -_ssl_MemoryBIO_eof_get(PySSLMemoryBIO *self, void *Py_UNUSED(context)) +_ssl_MemoryBIO_eof_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_eof_get_impl(self); + return_value = _ssl_MemoryBIO_eof_get_impl((PySSLMemoryBIO *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2207,7 +2207,7 @@ static PyObject * _ssl_MemoryBIO_read_impl(PySSLMemoryBIO *self, int len); static PyObject * -_ssl_MemoryBIO_read(PySSLMemoryBIO *self, PyObject *const *args, Py_ssize_t nargs) +_ssl_MemoryBIO_read(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int len = -1; @@ -2224,7 +2224,7 @@ _ssl_MemoryBIO_read(PySSLMemoryBIO *self, PyObject *const *args, Py_ssize_t narg } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_read_impl(self, len); + return_value = _ssl_MemoryBIO_read_impl((PySSLMemoryBIO *)self, len); Py_END_CRITICAL_SECTION(); exit: @@ -2246,7 +2246,7 @@ static PyObject * _ssl_MemoryBIO_write_impl(PySSLMemoryBIO *self, Py_buffer *b); static PyObject * -_ssl_MemoryBIO_write(PySSLMemoryBIO *self, PyObject *arg) +_ssl_MemoryBIO_write(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer b = {NULL, NULL}; @@ -2255,7 +2255,7 @@ _ssl_MemoryBIO_write(PySSLMemoryBIO *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_write_impl(self, &b); + return_value = _ssl_MemoryBIO_write_impl((PySSLMemoryBIO *)self, &b); Py_END_CRITICAL_SECTION(); exit: @@ -2282,12 +2282,12 @@ static PyObject * _ssl_MemoryBIO_write_eof_impl(PySSLMemoryBIO *self); static PyObject * -_ssl_MemoryBIO_write_eof(PySSLMemoryBIO *self, PyObject *Py_UNUSED(ignored)) +_ssl_MemoryBIO_write_eof(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_MemoryBIO_write_eof_impl(self); + return_value = _ssl_MemoryBIO_write_eof_impl((PySSLMemoryBIO *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2314,12 +2314,12 @@ static PyObject * _ssl_SSLSession_time_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_time_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_time_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_time_get_impl(self); + return_value = _ssl_SSLSession_time_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2346,12 +2346,12 @@ static PyObject * _ssl_SSLSession_timeout_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_timeout_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_timeout_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_timeout_get_impl(self); + return_value = _ssl_SSLSession_timeout_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2378,12 +2378,12 @@ static PyObject * _ssl_SSLSession_ticket_lifetime_hint_get_impl(PySSLSession *self); static PyObject 
* -_ssl_SSLSession_ticket_lifetime_hint_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_ticket_lifetime_hint_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_ticket_lifetime_hint_get_impl(self); + return_value = _ssl_SSLSession_ticket_lifetime_hint_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2410,12 +2410,12 @@ static PyObject * _ssl_SSLSession_id_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_id_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_id_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_id_get_impl(self); + return_value = _ssl_SSLSession_id_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2442,12 +2442,12 @@ static PyObject * _ssl_SSLSession_has_ticket_get_impl(PySSLSession *self); static PyObject * -_ssl_SSLSession_has_ticket_get(PySSLSession *self, void *Py_UNUSED(context)) +_ssl_SSLSession_has_ticket_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = _ssl_SSLSession_has_ticket_get_impl(self); + return_value = _ssl_SSLSession_has_ticket_get_impl((PySSLSession *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -2878,4 +2878,4 @@ _ssl_enum_crls(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje #ifndef _SSL_ENUM_CRLS_METHODDEF #define _SSL_ENUM_CRLS_METHODDEF #endif /* !defined(_SSL_ENUM_CRLS_METHODDEF) */ -/*[clinic end generated code: output=e71f1ef621aead08 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bededfb2b927bd41 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_struct.c.h b/Modules/clinic/_struct.c.h index cfc2fe7fc1dd58..7cf179f7a69d55 100644 --- a/Modules/clinic/_struct.c.h +++ b/Modules/clinic/_struct.c.h @@ -87,7 +87,7 @@ static PyObject * Struct_unpack_impl(PyStructObject *self, Py_buffer *buffer); static PyObject * -Struct_unpack(PyStructObject *self, PyObject *arg) +Struct_unpack(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -95,7 +95,7 @@ Struct_unpack(PyStructObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &buffer, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = Struct_unpack_impl(self, &buffer); + return_value = Struct_unpack_impl((PyStructObject *)self, &buffer); exit: /* Cleanup for buffer */ @@ -127,7 +127,7 @@ Struct_unpack_from_impl(PyStructObject *self, Py_buffer *buffer, Py_ssize_t offset); static PyObject * -Struct_unpack_from(PyStructObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +Struct_unpack_from(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -184,7 +184,7 @@ Struct_unpack_from(PyStructObject *self, PyObject *const *args, Py_ssize_t nargs offset = ival; } skip_optional_pos: - return_value = Struct_unpack_from_impl(self, &buffer, offset); + return_value = Struct_unpack_from_impl((PyStructObject *)self, &buffer, offset); exit: /* Cleanup for buffer */ @@ -439,4 +439,4 @@ iter_unpack(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } -/*[clinic end generated code: output=faff90f99c6bd09f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ec540c21be08e1d0 
input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_testmultiphase.c.h b/Modules/clinic/_testmultiphase.c.h index 5a432a6f70386d..01c29c0753ae13 100644 --- a/Modules/clinic/_testmultiphase.c.h +++ b/Modules/clinic/_testmultiphase.c.h @@ -25,13 +25,13 @@ _testmultiphase_StateAccessType_get_defining_module_impl(StateAccessTypeObject * PyTypeObject *cls); static PyObject * -_testmultiphase_StateAccessType_get_defining_module(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_get_defining_module(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "get_defining_module() takes no arguments"); return NULL; } - return _testmultiphase_StateAccessType_get_defining_module_impl(self, cls); + return _testmultiphase_StateAccessType_get_defining_module_impl((StateAccessTypeObject *)self, cls); } PyDoc_STRVAR(_testmultiphase_StateAccessType_getmodulebydef_bad_def__doc__, @@ -48,13 +48,13 @@ _testmultiphase_StateAccessType_getmodulebydef_bad_def_impl(StateAccessTypeObjec PyTypeObject *cls); static PyObject * -_testmultiphase_StateAccessType_getmodulebydef_bad_def(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_getmodulebydef_bad_def(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "getmodulebydef_bad_def() takes no arguments"); return NULL; } - return _testmultiphase_StateAccessType_getmodulebydef_bad_def_impl(self, cls); + return _testmultiphase_StateAccessType_getmodulebydef_bad_def_impl((StateAccessTypeObject *)self, cls); } PyDoc_STRVAR(_testmultiphase_StateAccessType_increment_count_clinic__doc__, @@ -76,7 +76,7 @@ _testmultiphase_StateAccessType_increment_count_clinic_impl(StateAccessTypeObjec int n, int twice); static PyObject * -_testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_increment_count_clinic(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -135,7 +135,7 @@ _testmultiphase_StateAccessType_increment_count_clinic(StateAccessTypeObject *se goto exit; } skip_optional_kwonly: - return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl(self, cls, n, twice); + return_value = _testmultiphase_StateAccessType_increment_count_clinic_impl((StateAccessTypeObject *)self, cls, n, twice); exit: return return_value; @@ -155,12 +155,12 @@ _testmultiphase_StateAccessType_get_count_impl(StateAccessTypeObject *self, PyTypeObject *cls); static PyObject * -_testmultiphase_StateAccessType_get_count(StateAccessTypeObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +_testmultiphase_StateAccessType_get_count(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "get_count() takes no arguments"); return NULL; } - return _testmultiphase_StateAccessType_get_count_impl(self, cls); + return 
_testmultiphase_StateAccessType_get_count_impl((StateAccessTypeObject *)self, cls); } -/*[clinic end generated code: output=c1aa0af3572bf059 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ea0ca98e467e53c2 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_threadmodule.c.h b/Modules/clinic/_threadmodule.c.h index 8f0507d40285b3..09b7afebd6d8d9 100644 --- a/Modules/clinic/_threadmodule.c.h +++ b/Modules/clinic/_threadmodule.c.h @@ -8,7 +8,7 @@ preserve #endif #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() -#if defined(HAVE_PTHREAD_GETNAME_NP) +#if (defined(HAVE_PTHREAD_GETNAME_NP) || defined(MS_WINDOWS)) PyDoc_STRVAR(_thread__get_name__doc__, "_get_name($module, /)\n" @@ -28,9 +28,9 @@ _thread__get_name(PyObject *module, PyObject *Py_UNUSED(ignored)) return _thread__get_name_impl(module); } -#endif /* defined(HAVE_PTHREAD_GETNAME_NP) */ +#endif /* (defined(HAVE_PTHREAD_GETNAME_NP) || defined(MS_WINDOWS)) */ -#if defined(HAVE_PTHREAD_SETNAME_NP) +#if (defined(HAVE_PTHREAD_SETNAME_NP) || defined(MS_WINDOWS)) PyDoc_STRVAR(_thread_set_name__doc__, "set_name($module, /, name)\n" @@ -92,7 +92,7 @@ _thread_set_name(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb return return_value; } -#endif /* defined(HAVE_PTHREAD_SETNAME_NP) */ +#endif /* (defined(HAVE_PTHREAD_SETNAME_NP) || defined(MS_WINDOWS)) */ #ifndef _THREAD__GET_NAME_METHODDEF #define _THREAD__GET_NAME_METHODDEF @@ -101,4 +101,4 @@ _thread_set_name(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb #ifndef _THREAD_SET_NAME_METHODDEF #define _THREAD_SET_NAME_METHODDEF #endif /* !defined(_THREAD_SET_NAME_METHODDEF) */ -/*[clinic end generated code: output=b5cb85aaccc45bf6 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6e88ef6b126cece8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_tkinter.c.h b/Modules/clinic/_tkinter.c.h index 2b1ac954b4d570..d6e783b04fe968 100644 --- a/Modules/clinic/_tkinter.c.h +++ b/Modules/clinic/_tkinter.c.h @@ -16,7 +16,7 @@ static PyObject * _tkinter_tkapp_eval_impl(TkappObject *self, const char *script); static PyObject * -_tkinter_tkapp_eval(TkappObject *self, PyObject *arg) +_tkinter_tkapp_eval(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *script; @@ -34,7 +34,7 @@ _tkinter_tkapp_eval(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_eval_impl(self, script); + return_value = _tkinter_tkapp_eval_impl((TkappObject *)self, script); exit: return return_value; @@ -52,7 +52,7 @@ static PyObject * _tkinter_tkapp_evalfile_impl(TkappObject *self, const char *fileName); static PyObject * -_tkinter_tkapp_evalfile(TkappObject *self, PyObject *arg) +_tkinter_tkapp_evalfile(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *fileName; @@ -70,7 +70,7 @@ _tkinter_tkapp_evalfile(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_evalfile_impl(self, fileName); + return_value = _tkinter_tkapp_evalfile_impl((TkappObject *)self, fileName); exit: return return_value; @@ -88,7 +88,7 @@ static PyObject * _tkinter_tkapp_record_impl(TkappObject *self, const char *script); static PyObject * -_tkinter_tkapp_record(TkappObject *self, PyObject *arg) +_tkinter_tkapp_record(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *script; @@ -106,7 +106,7 @@ _tkinter_tkapp_record(TkappObject *self, PyObject 
*arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_record_impl(self, script); + return_value = _tkinter_tkapp_record_impl((TkappObject *)self, script); exit: return return_value; @@ -124,7 +124,7 @@ static PyObject * _tkinter_tkapp_adderrorinfo_impl(TkappObject *self, const char *msg); static PyObject * -_tkinter_tkapp_adderrorinfo(TkappObject *self, PyObject *arg) +_tkinter_tkapp_adderrorinfo(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *msg; @@ -142,7 +142,7 @@ _tkinter_tkapp_adderrorinfo(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_adderrorinfo_impl(self, msg); + return_value = _tkinter_tkapp_adderrorinfo_impl((TkappObject *)self, msg); exit: return return_value; @@ -184,7 +184,7 @@ static PyObject * _tkinter_tkapp_exprstring_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprstring(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprstring(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -202,7 +202,7 @@ _tkinter_tkapp_exprstring(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprstring_impl(self, s); + return_value = _tkinter_tkapp_exprstring_impl((TkappObject *)self, s); exit: return return_value; @@ -220,7 +220,7 @@ static PyObject * _tkinter_tkapp_exprlong_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprlong(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprlong(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -238,7 +238,7 @@ _tkinter_tkapp_exprlong(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprlong_impl(self, s); + return_value = _tkinter_tkapp_exprlong_impl((TkappObject *)self, s); exit: return return_value; @@ -256,7 +256,7 @@ static PyObject * _tkinter_tkapp_exprdouble_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprdouble(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprdouble(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -274,7 +274,7 @@ _tkinter_tkapp_exprdouble(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprdouble_impl(self, s); + return_value = _tkinter_tkapp_exprdouble_impl((TkappObject *)self, s); exit: return return_value; @@ -292,7 +292,7 @@ static PyObject * _tkinter_tkapp_exprboolean_impl(TkappObject *self, const char *s); static PyObject * -_tkinter_tkapp_exprboolean(TkappObject *self, PyObject *arg) +_tkinter_tkapp_exprboolean(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *s; @@ -310,7 +310,7 @@ _tkinter_tkapp_exprboolean(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_exprboolean_impl(self, s); + return_value = _tkinter_tkapp_exprboolean_impl((TkappObject *)self, s); exit: return return_value; @@ -337,7 +337,7 @@ _tkinter_tkapp_createcommand_impl(TkappObject *self, const char *name, PyObject *func); static PyObject * -_tkinter_tkapp_createcommand(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_createcommand(PyObject *self, PyObject *const *args, 
Py_ssize_t nargs) { PyObject *return_value = NULL; const char *name; @@ -360,7 +360,7 @@ _tkinter_tkapp_createcommand(TkappObject *self, PyObject *const *args, Py_ssize_ goto exit; } func = args[1]; - return_value = _tkinter_tkapp_createcommand_impl(self, name, func); + return_value = _tkinter_tkapp_createcommand_impl((TkappObject *)self, name, func); exit: return return_value; @@ -378,7 +378,7 @@ static PyObject * _tkinter_tkapp_deletecommand_impl(TkappObject *self, const char *name); static PyObject * -_tkinter_tkapp_deletecommand(TkappObject *self, PyObject *arg) +_tkinter_tkapp_deletecommand(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *name; @@ -396,7 +396,7 @@ _tkinter_tkapp_deletecommand(TkappObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _tkinter_tkapp_deletecommand_impl(self, name); + return_value = _tkinter_tkapp_deletecommand_impl((TkappObject *)self, name); exit: return return_value; @@ -417,7 +417,7 @@ _tkinter_tkapp_createfilehandler_impl(TkappObject *self, PyObject *file, int mask, PyObject *func); static PyObject * -_tkinter_tkapp_createfilehandler(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_createfilehandler(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *file; @@ -433,7 +433,7 @@ _tkinter_tkapp_createfilehandler(TkappObject *self, PyObject *const *args, Py_ss goto exit; } func = args[2]; - return_value = _tkinter_tkapp_createfilehandler_impl(self, file, mask, func); + return_value = _tkinter_tkapp_createfilehandler_impl((TkappObject *)self, file, mask, func); exit: return return_value; @@ -465,9 +465,9 @@ static PyObject * _tkinter_tktimertoken_deletetimerhandler_impl(TkttObject *self); static PyObject * -_tkinter_tktimertoken_deletetimerhandler(TkttObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tktimertoken_deletetimerhandler(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tktimertoken_deletetimerhandler_impl(self); + return _tkinter_tktimertoken_deletetimerhandler_impl((TkttObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_createtimerhandler__doc__, @@ -483,7 +483,7 @@ _tkinter_tkapp_createtimerhandler_impl(TkappObject *self, int milliseconds, PyObject *func); static PyObject * -_tkinter_tkapp_createtimerhandler(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_createtimerhandler(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int milliseconds; @@ -497,7 +497,7 @@ _tkinter_tkapp_createtimerhandler(TkappObject *self, PyObject *const *args, Py_s goto exit; } func = args[1]; - return_value = _tkinter_tkapp_createtimerhandler_impl(self, milliseconds, func); + return_value = _tkinter_tkapp_createtimerhandler_impl((TkappObject *)self, milliseconds, func); exit: return return_value; @@ -515,7 +515,7 @@ static PyObject * _tkinter_tkapp_mainloop_impl(TkappObject *self, int threshold); static PyObject * -_tkinter_tkapp_mainloop(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_mainloop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int threshold = 0; @@ -531,7 +531,7 @@ _tkinter_tkapp_mainloop(TkappObject *self, PyObject *const *args, Py_ssize_t nar goto exit; } skip_optional: - return_value = _tkinter_tkapp_mainloop_impl(self, threshold); + return_value = _tkinter_tkapp_mainloop_impl((TkappObject *)self, threshold); exit: return 
return_value; @@ -549,7 +549,7 @@ static PyObject * _tkinter_tkapp_dooneevent_impl(TkappObject *self, int flags); static PyObject * -_tkinter_tkapp_dooneevent(TkappObject *self, PyObject *const *args, Py_ssize_t nargs) +_tkinter_tkapp_dooneevent(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int flags = 0; @@ -565,7 +565,7 @@ _tkinter_tkapp_dooneevent(TkappObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional: - return_value = _tkinter_tkapp_dooneevent_impl(self, flags); + return_value = _tkinter_tkapp_dooneevent_impl((TkappObject *)self, flags); exit: return return_value; @@ -583,9 +583,9 @@ static PyObject * _tkinter_tkapp_quit_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_quit(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_quit(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_quit_impl(self); + return _tkinter_tkapp_quit_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_interpaddr__doc__, @@ -600,9 +600,9 @@ static PyObject * _tkinter_tkapp_interpaddr_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_interpaddr(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_interpaddr(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_interpaddr_impl(self); + return _tkinter_tkapp_interpaddr_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_loadtk__doc__, @@ -617,9 +617,9 @@ static PyObject * _tkinter_tkapp_loadtk_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_loadtk(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_loadtk(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_loadtk_impl(self); + return _tkinter_tkapp_loadtk_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_settrace__doc__, @@ -644,9 +644,9 @@ static PyObject * _tkinter_tkapp_gettrace_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_gettrace(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_gettrace(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_gettrace_impl(self); + return _tkinter_tkapp_gettrace_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter_tkapp_willdispatch__doc__, @@ -661,9 +661,9 @@ static PyObject * _tkinter_tkapp_willdispatch_impl(TkappObject *self); static PyObject * -_tkinter_tkapp_willdispatch(TkappObject *self, PyObject *Py_UNUSED(ignored)) +_tkinter_tkapp_willdispatch(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _tkinter_tkapp_willdispatch_impl(self); + return _tkinter_tkapp_willdispatch_impl((TkappObject *)self); } PyDoc_STRVAR(_tkinter__flatten__doc__, @@ -888,4 +888,4 @@ _tkinter_getbusywaitinterval(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #define _TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF #endif /* !defined(_TKINTER_TKAPP_DELETEFILEHANDLER_METHODDEF) */ -/*[clinic end generated code: output=d90c1a9850c63249 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=172a98df5f209a84 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index 8bbecc44dc9c11..6a2f8d45cd4e0c 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -21,7 +21,7 @@ static PyObject * _winapi_Overlapped_GetOverlappedResult_impl(OverlappedObject *self, int wait); static PyObject * -_winapi_Overlapped_GetOverlappedResult(OverlappedObject *self, PyObject *arg) +_winapi_Overlapped_GetOverlappedResult(PyObject *self, PyObject *arg) { 
PyObject *return_value = NULL; int wait; @@ -30,7 +30,7 @@ _winapi_Overlapped_GetOverlappedResult(OverlappedObject *self, PyObject *arg) if (wait < 0) { goto exit; } - return_value = _winapi_Overlapped_GetOverlappedResult_impl(self, wait); + return_value = _winapi_Overlapped_GetOverlappedResult_impl((OverlappedObject *)self, wait); exit: return return_value; @@ -48,9 +48,9 @@ static PyObject * _winapi_Overlapped_getbuffer_impl(OverlappedObject *self); static PyObject * -_winapi_Overlapped_getbuffer(OverlappedObject *self, PyObject *Py_UNUSED(ignored)) +_winapi_Overlapped_getbuffer(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _winapi_Overlapped_getbuffer_impl(self); + return _winapi_Overlapped_getbuffer_impl((OverlappedObject *)self); } PyDoc_STRVAR(_winapi_Overlapped_cancel__doc__, @@ -65,9 +65,9 @@ static PyObject * _winapi_Overlapped_cancel_impl(OverlappedObject *self); static PyObject * -_winapi_Overlapped_cancel(OverlappedObject *self, PyObject *Py_UNUSED(ignored)) +_winapi_Overlapped_cancel(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _winapi_Overlapped_cancel_impl(self); + return _winapi_Overlapped_cancel_impl((OverlappedObject *)self); } PyDoc_STRVAR(_winapi_CloseHandle__doc__, @@ -2127,4 +2127,4 @@ _winapi_CopyFile2(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO return return_value; } -/*[clinic end generated code: output=b2a178bde6868e88 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=06b56212b2186250 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/arraymodule.c.h b/Modules/clinic/arraymodule.c.h index 4a7266ecb8b84f..c5b62b16699d06 100644 --- a/Modules/clinic/arraymodule.c.h +++ b/Modules/clinic/arraymodule.c.h @@ -21,9 +21,9 @@ static PyObject * array_array_clear_impl(arrayobject *self); static PyObject * -array_array_clear(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_clear_impl(self); + return array_array_clear_impl((arrayobject *)self); } PyDoc_STRVAR(array_array___copy____doc__, @@ -39,9 +39,9 @@ static PyObject * array_array___copy___impl(arrayobject *self); static PyObject * -array_array___copy__(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array___copy__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array___copy___impl(self); + return array_array___copy___impl((arrayobject *)self); } PyDoc_STRVAR(array_array___deepcopy____doc__, @@ -78,7 +78,7 @@ array_array_index_impl(arrayobject *self, PyObject *v, Py_ssize_t start, Py_ssize_t stop); static PyObject * -array_array_index(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) +array_array_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *v; @@ -102,7 +102,7 @@ array_array_index(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = array_array_index_impl(self, v, start, stop); + return_value = array_array_index_impl((arrayobject *)self, v, start, stop); exit: return return_value; @@ -132,7 +132,7 @@ static PyObject * array_array_pop_impl(arrayobject *self, Py_ssize_t i); static PyObject * -array_array_pop(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) +array_array_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t i = -1; @@ -156,7 +156,7 @@ array_array_pop(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) i = ival; } skip_optional: - return_value = 
array_array_pop_impl(self, i); + return_value = array_array_pop_impl((arrayobject *)self, i); exit: return return_value; @@ -175,7 +175,7 @@ static PyObject * array_array_extend_impl(arrayobject *self, PyTypeObject *cls, PyObject *bb); static PyObject * -array_array_extend(arrayobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_array_extend(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -200,7 +200,7 @@ array_array_extend(arrayobject *self, PyTypeObject *cls, PyObject *const *args, goto exit; } bb = args[0]; - return_value = array_array_extend_impl(self, cls, bb); + return_value = array_array_extend_impl((arrayobject *)self, cls, bb); exit: return return_value; @@ -219,7 +219,7 @@ static PyObject * array_array_insert_impl(arrayobject *self, Py_ssize_t i, PyObject *v); static PyObject * -array_array_insert(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) +array_array_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t i; @@ -241,7 +241,7 @@ array_array_insert(arrayobject *self, PyObject *const *args, Py_ssize_t nargs) i = ival; } v = args[1]; - return_value = array_array_insert_impl(self, i, v); + return_value = array_array_insert_impl((arrayobject *)self, i, v); exit: return return_value; @@ -263,9 +263,9 @@ static PyObject * array_array_buffer_info_impl(arrayobject *self); static PyObject * -array_array_buffer_info(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_buffer_info(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_buffer_info_impl(self); + return array_array_buffer_info_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_append__doc__, @@ -293,9 +293,9 @@ static PyObject * array_array_byteswap_impl(arrayobject *self); static PyObject * -array_array_byteswap(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_byteswap(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_byteswap_impl(self); + return array_array_byteswap_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_reverse__doc__, @@ -311,9 +311,9 @@ static PyObject * array_array_reverse_impl(arrayobject *self); static PyObject * -array_array_reverse(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_reverse(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_reverse_impl(self); + return array_array_reverse_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_fromfile__doc__, @@ -330,7 +330,7 @@ array_array_fromfile_impl(arrayobject *self, PyTypeObject *cls, PyObject *f, Py_ssize_t n); static PyObject * -array_array_fromfile(arrayobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_array_fromfile(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -368,7 +368,7 @@ array_array_fromfile(arrayobject *self, PyTypeObject *cls, PyObject *const *args } n = ival; } - return_value = array_array_fromfile_impl(self, cls, f, n); + return_value = array_array_fromfile_impl((arrayobject *)self, cls, f, n); exit: return return_value; @@ -387,7 +387,7 @@ static PyObject * array_array_tofile_impl(arrayobject *self, PyTypeObject *cls, PyObject *f); static PyObject * -array_array_tofile(arrayobject *self, 
PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_array_tofile(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -412,7 +412,7 @@ array_array_tofile(arrayobject *self, PyTypeObject *cls, PyObject *const *args, goto exit; } f = args[0]; - return_value = array_array_tofile_impl(self, cls, f); + return_value = array_array_tofile_impl((arrayobject *)self, cls, f); exit: return return_value; @@ -440,9 +440,9 @@ static PyObject * array_array_tolist_impl(arrayobject *self); static PyObject * -array_array_tolist(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_tolist(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_tolist_impl(self); + return array_array_tolist_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_frombytes__doc__, @@ -458,7 +458,7 @@ static PyObject * array_array_frombytes_impl(arrayobject *self, Py_buffer *buffer); static PyObject * -array_array_frombytes(arrayobject *self, PyObject *arg) +array_array_frombytes(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer buffer = {NULL, NULL}; @@ -466,7 +466,7 @@ array_array_frombytes(arrayobject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &buffer, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = array_array_frombytes_impl(self, &buffer); + return_value = array_array_frombytes_impl((arrayobject *)self, &buffer); exit: /* Cleanup for buffer */ @@ -490,9 +490,9 @@ static PyObject * array_array_tobytes_impl(arrayobject *self); static PyObject * -array_array_tobytes(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_tobytes(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_tobytes_impl(self); + return array_array_tobytes_impl((arrayobject *)self); } PyDoc_STRVAR(array_array_fromunicode__doc__, @@ -512,7 +512,7 @@ static PyObject * array_array_fromunicode_impl(arrayobject *self, PyObject *ustr); static PyObject * -array_array_fromunicode(arrayobject *self, PyObject *arg) +array_array_fromunicode(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *ustr; @@ -522,7 +522,7 @@ array_array_fromunicode(arrayobject *self, PyObject *arg) goto exit; } ustr = arg; - return_value = array_array_fromunicode_impl(self, ustr); + return_value = array_array_fromunicode_impl((arrayobject *)self, ustr); exit: return return_value; @@ -545,9 +545,9 @@ static PyObject * array_array_tounicode_impl(arrayobject *self); static PyObject * -array_array_tounicode(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array_tounicode(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array_tounicode_impl(self); + return array_array_tounicode_impl((arrayobject *)self); } PyDoc_STRVAR(array_array___sizeof____doc__, @@ -563,9 +563,9 @@ static PyObject * array_array___sizeof___impl(arrayobject *self); static PyObject * -array_array___sizeof__(arrayobject *self, PyObject *Py_UNUSED(ignored)) +array_array___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return array_array___sizeof___impl(self); + return array_array___sizeof___impl((arrayobject *)self); } PyDoc_STRVAR(array__array_reconstructor__doc__, @@ -634,7 +634,7 @@ array_array___reduce_ex___impl(arrayobject *self, PyTypeObject *cls, PyObject *value); static PyObject * -array_array___reduce_ex__(arrayobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) 
+array_array___reduce_ex__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -659,7 +659,7 @@ array_array___reduce_ex__(arrayobject *self, PyTypeObject *cls, PyObject *const goto exit; } value = args[0]; - return_value = array_array___reduce_ex___impl(self, cls, value); + return_value = array_array___reduce_ex___impl((arrayobject *)self, cls, value); exit: return return_value; @@ -678,13 +678,13 @@ static PyObject * array_arrayiterator___reduce___impl(arrayiterobject *self, PyTypeObject *cls); static PyObject * -array_arrayiterator___reduce__(arrayiterobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +array_arrayiterator___reduce__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__reduce__() takes no arguments"); return NULL; } - return array_arrayiterator___reduce___impl(self, cls); + return array_arrayiterator___reduce___impl((arrayiterobject *)self, cls); } PyDoc_STRVAR(array_arrayiterator___setstate____doc__, @@ -695,4 +695,4 @@ PyDoc_STRVAR(array_arrayiterator___setstate____doc__, #define ARRAY_ARRAYITERATOR___SETSTATE___METHODDEF \ {"__setstate__", (PyCFunction)array_arrayiterator___setstate__, METH_O, array_arrayiterator___setstate____doc__}, -/*[clinic end generated code: output=22dbe12826bfa86f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8120dc5c4fa414b9 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/blake2module.c.h b/Modules/clinic/blake2module.c.h index f695f27e9e6c42..b5ac90143a1740 100644 --- a/Modules/clinic/blake2module.c.h +++ b/Modules/clinic/blake2module.c.h @@ -412,9 +412,9 @@ static PyObject * _blake2_blake2b_copy_impl(Blake2Object *self); static PyObject * -_blake2_blake2b_copy(Blake2Object *self, PyObject *Py_UNUSED(ignored)) +_blake2_blake2b_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _blake2_blake2b_copy_impl(self); + return _blake2_blake2b_copy_impl((Blake2Object *)self); } PyDoc_STRVAR(_blake2_blake2b_update__doc__, @@ -439,9 +439,9 @@ static PyObject * _blake2_blake2b_digest_impl(Blake2Object *self); static PyObject * -_blake2_blake2b_digest(Blake2Object *self, PyObject *Py_UNUSED(ignored)) +_blake2_blake2b_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _blake2_blake2b_digest_impl(self); + return _blake2_blake2b_digest_impl((Blake2Object *)self); } PyDoc_STRVAR(_blake2_blake2b_hexdigest__doc__, @@ -457,8 +457,8 @@ static PyObject * _blake2_blake2b_hexdigest_impl(Blake2Object *self); static PyObject * -_blake2_blake2b_hexdigest(Blake2Object *self, PyObject *Py_UNUSED(ignored)) +_blake2_blake2b_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _blake2_blake2b_hexdigest_impl(self); + return _blake2_blake2b_hexdigest_impl((Blake2Object *)self); } -/*[clinic end generated code: output=e0aaaf112d023b79 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6e03c947b7e0d973 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/md5module.c.h b/Modules/clinic/md5module.c.h index 7721616862ed0d..1f0acebf47b6ff 100644 --- a/Modules/clinic/md5module.c.h +++ b/Modules/clinic/md5module.c.h @@ -21,13 +21,13 @@ static PyObject * MD5Type_copy_impl(MD5object *self, PyTypeObject *cls); static PyObject * -MD5Type_copy(MD5object *self, PyTypeObject *cls, PyObject *const *args, 
Py_ssize_t nargs, PyObject *kwnames) +MD5Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return MD5Type_copy_impl(self, cls); + return MD5Type_copy_impl((MD5object *)self, cls); } PyDoc_STRVAR(MD5Type_digest__doc__, @@ -43,9 +43,9 @@ static PyObject * MD5Type_digest_impl(MD5object *self); static PyObject * -MD5Type_digest(MD5object *self, PyObject *Py_UNUSED(ignored)) +MD5Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return MD5Type_digest_impl(self); + return MD5Type_digest_impl((MD5object *)self); } PyDoc_STRVAR(MD5Type_hexdigest__doc__, @@ -61,9 +61,9 @@ static PyObject * MD5Type_hexdigest_impl(MD5object *self); static PyObject * -MD5Type_hexdigest(MD5object *self, PyObject *Py_UNUSED(ignored)) +MD5Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return MD5Type_hexdigest_impl(self); + return MD5Type_hexdigest_impl((MD5object *)self); } PyDoc_STRVAR(MD5Type_update__doc__, @@ -149,4 +149,4 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw exit: return return_value; } -/*[clinic end generated code: output=62ebf28802ae8b5f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=a4292eab710dcb60 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/overlapped.c.h b/Modules/clinic/overlapped.c.h index 9d5adb5193f297..7e5715660022c1 100644 --- a/Modules/clinic/overlapped.c.h +++ b/Modules/clinic/overlapped.c.h @@ -516,9 +516,9 @@ static PyObject * _overlapped_Overlapped_cancel_impl(OverlappedObject *self); static PyObject * -_overlapped_Overlapped_cancel(OverlappedObject *self, PyObject *Py_UNUSED(ignored)) +_overlapped_Overlapped_cancel(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _overlapped_Overlapped_cancel_impl(self); + return _overlapped_Overlapped_cancel_impl((OverlappedObject *)self); } PyDoc_STRVAR(_overlapped_Overlapped_getresult__doc__, @@ -537,7 +537,7 @@ static PyObject * _overlapped_Overlapped_getresult_impl(OverlappedObject *self, BOOL wait); static PyObject * -_overlapped_Overlapped_getresult(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_getresult(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; BOOL wait = FALSE; @@ -553,7 +553,7 @@ _overlapped_Overlapped_getresult(OverlappedObject *self, PyObject *const *args, goto exit; } skip_optional: - return_value = _overlapped_Overlapped_getresult_impl(self, wait); + return_value = _overlapped_Overlapped_getresult_impl((OverlappedObject *)self, wait); exit: return return_value; @@ -573,7 +573,7 @@ _overlapped_Overlapped_ReadFile_impl(OverlappedObject *self, HANDLE handle, DWORD size); static PyObject * -_overlapped_Overlapped_ReadFile(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_ReadFile(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -589,7 +589,7 @@ _overlapped_Overlapped_ReadFile(OverlappedObject *self, PyObject *const *args, P if (!_PyLong_UnsignedLong_Converter(args[1], &size)) { goto exit; } - return_value = _overlapped_Overlapped_ReadFile_impl(self, handle, size); + return_value = _overlapped_Overlapped_ReadFile_impl((OverlappedObject *)self, handle, size); exit: return return_value; @@ -609,7 +609,7 @@ _overlapped_Overlapped_ReadFileInto_impl(OverlappedObject *self, 
HANDLE handle, Py_buffer *bufobj); static PyObject * -_overlapped_Overlapped_ReadFileInto(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_ReadFileInto(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -625,7 +625,7 @@ _overlapped_Overlapped_ReadFileInto(OverlappedObject *self, PyObject *const *arg if (PyObject_GetBuffer(args[1], &bufobj, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _overlapped_Overlapped_ReadFileInto_impl(self, handle, &bufobj); + return_value = _overlapped_Overlapped_ReadFileInto_impl((OverlappedObject *)self, handle, &bufobj); exit: /* Cleanup for bufobj */ @@ -650,7 +650,7 @@ _overlapped_Overlapped_WSARecv_impl(OverlappedObject *self, HANDLE handle, DWORD size, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecv(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecv(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -674,7 +674,7 @@ _overlapped_Overlapped_WSARecv(OverlappedObject *self, PyObject *const *args, Py goto exit; } skip_optional: - return_value = _overlapped_Overlapped_WSARecv_impl(self, handle, size, flags); + return_value = _overlapped_Overlapped_WSARecv_impl((OverlappedObject *)self, handle, size, flags); exit: return return_value; @@ -695,7 +695,7 @@ _overlapped_Overlapped_WSARecvInto_impl(OverlappedObject *self, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecvInto(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecvInto(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -715,7 +715,7 @@ _overlapped_Overlapped_WSARecvInto(OverlappedObject *self, PyObject *const *args if (!_PyLong_UnsignedLong_Converter(args[2], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_WSARecvInto_impl(self, handle, &bufobj, flags); + return_value = _overlapped_Overlapped_WSARecvInto_impl((OverlappedObject *)self, handle, &bufobj, flags); exit: /* Cleanup for bufobj */ @@ -740,7 +740,7 @@ _overlapped_Overlapped_WriteFile_impl(OverlappedObject *self, HANDLE handle, Py_buffer *bufobj); static PyObject * -_overlapped_Overlapped_WriteFile(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WriteFile(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -756,7 +756,7 @@ _overlapped_Overlapped_WriteFile(OverlappedObject *self, PyObject *const *args, if (PyObject_GetBuffer(args[1], &bufobj, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _overlapped_Overlapped_WriteFile_impl(self, handle, &bufobj); + return_value = _overlapped_Overlapped_WriteFile_impl((OverlappedObject *)self, handle, &bufobj); exit: /* Cleanup for bufobj */ @@ -781,7 +781,7 @@ _overlapped_Overlapped_WSASend_impl(OverlappedObject *self, HANDLE handle, Py_buffer *bufobj, DWORD flags); static PyObject * -_overlapped_Overlapped_WSASend(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSASend(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -801,7 +801,7 @@ _overlapped_Overlapped_WSASend(OverlappedObject *self, PyObject *const *args, Py if (!_PyLong_UnsignedLong_Converter(args[2], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_WSASend_impl(self, handle, &bufobj, flags); + 
return_value = _overlapped_Overlapped_WSASend_impl((OverlappedObject *)self, handle, &bufobj, flags); exit: /* Cleanup for bufobj */ @@ -827,7 +827,7 @@ _overlapped_Overlapped_AcceptEx_impl(OverlappedObject *self, HANDLE AcceptSocket); static PyObject * -_overlapped_Overlapped_AcceptEx(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_AcceptEx(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE ListenSocket; @@ -844,7 +844,7 @@ _overlapped_Overlapped_AcceptEx(OverlappedObject *self, PyObject *const *args, P if (!AcceptSocket && PyErr_Occurred()) { goto exit; } - return_value = _overlapped_Overlapped_AcceptEx_impl(self, ListenSocket, AcceptSocket); + return_value = _overlapped_Overlapped_AcceptEx_impl((OverlappedObject *)self, ListenSocket, AcceptSocket); exit: return return_value; @@ -867,7 +867,7 @@ _overlapped_Overlapped_ConnectEx_impl(OverlappedObject *self, PyObject *AddressObj); static PyObject * -_overlapped_Overlapped_ConnectEx(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_ConnectEx(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE ConnectSocket; @@ -885,7 +885,7 @@ _overlapped_Overlapped_ConnectEx(OverlappedObject *self, PyObject *const *args, goto exit; } AddressObj = args[1]; - return_value = _overlapped_Overlapped_ConnectEx_impl(self, ConnectSocket, AddressObj); + return_value = _overlapped_Overlapped_ConnectEx_impl((OverlappedObject *)self, ConnectSocket, AddressObj); exit: return return_value; @@ -904,7 +904,7 @@ _overlapped_Overlapped_DisconnectEx_impl(OverlappedObject *self, HANDLE Socket, DWORD flags); static PyObject * -_overlapped_Overlapped_DisconnectEx(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_DisconnectEx(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE Socket; @@ -920,7 +920,7 @@ _overlapped_Overlapped_DisconnectEx(OverlappedObject *self, PyObject *const *arg if (!_PyLong_UnsignedLong_Converter(args[1], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_DisconnectEx_impl(self, Socket, flags); + return_value = _overlapped_Overlapped_DisconnectEx_impl((OverlappedObject *)self, Socket, flags); exit: return return_value; @@ -944,7 +944,7 @@ _overlapped_Overlapped_TransmitFile_impl(OverlappedObject *self, DWORD count_per_send, DWORD flags); static PyObject * -_overlapped_Overlapped_TransmitFile(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_TransmitFile(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE Socket; @@ -981,7 +981,7 @@ _overlapped_Overlapped_TransmitFile(OverlappedObject *self, PyObject *const *arg if (!_PyLong_UnsignedLong_Converter(args[6], &flags)) { goto exit; } - return_value = _overlapped_Overlapped_TransmitFile_impl(self, Socket, File, offset, offset_high, count_to_write, count_per_send, flags); + return_value = _overlapped_Overlapped_TransmitFile_impl((OverlappedObject *)self, Socket, File, offset, offset_high, count_to_write, count_per_send, flags); exit: return return_value; @@ -1001,7 +1001,7 @@ _overlapped_Overlapped_ConnectNamedPipe_impl(OverlappedObject *self, HANDLE Pipe); static PyObject * -_overlapped_Overlapped_ConnectNamedPipe(OverlappedObject *self, PyObject *arg) +_overlapped_Overlapped_ConnectNamedPipe(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; HANDLE 
Pipe; @@ -1010,7 +1010,7 @@ _overlapped_Overlapped_ConnectNamedPipe(OverlappedObject *self, PyObject *arg) if (!Pipe && PyErr_Occurred()) { goto exit; } - return_value = _overlapped_Overlapped_ConnectNamedPipe_impl(self, Pipe); + return_value = _overlapped_Overlapped_ConnectNamedPipe_impl((OverlappedObject *)self, Pipe); exit: return return_value; @@ -1030,7 +1030,7 @@ _overlapped_Overlapped_ConnectPipe_impl(OverlappedObject *self, const wchar_t *Address); static PyObject * -_overlapped_Overlapped_ConnectPipe(OverlappedObject *self, PyObject *arg) +_overlapped_Overlapped_ConnectPipe(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const wchar_t *Address = NULL; @@ -1043,7 +1043,7 @@ _overlapped_Overlapped_ConnectPipe(OverlappedObject *self, PyObject *arg) if (Address == NULL) { goto exit; } - return_value = _overlapped_Overlapped_ConnectPipe_impl(self, Address); + return_value = _overlapped_Overlapped_ConnectPipe_impl((OverlappedObject *)self, Address); exit: /* Cleanup for Address */ @@ -1105,7 +1105,7 @@ _overlapped_Overlapped_WSASendTo_impl(OverlappedObject *self, HANDLE handle, PyObject *AddressObj); static PyObject * -_overlapped_Overlapped_WSASendTo(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSASendTo(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -1131,7 +1131,7 @@ _overlapped_Overlapped_WSASendTo(OverlappedObject *self, PyObject *const *args, goto exit; } AddressObj = args[3]; - return_value = _overlapped_Overlapped_WSASendTo_impl(self, handle, &bufobj, flags, AddressObj); + return_value = _overlapped_Overlapped_WSASendTo_impl((OverlappedObject *)self, handle, &bufobj, flags, AddressObj); exit: /* Cleanup for bufobj */ @@ -1157,7 +1157,7 @@ _overlapped_Overlapped_WSARecvFrom_impl(OverlappedObject *self, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecvFrom(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecvFrom(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -1181,7 +1181,7 @@ _overlapped_Overlapped_WSARecvFrom(OverlappedObject *self, PyObject *const *args goto exit; } skip_optional: - return_value = _overlapped_Overlapped_WSARecvFrom_impl(self, handle, size, flags); + return_value = _overlapped_Overlapped_WSARecvFrom_impl((OverlappedObject *)self, handle, size, flags); exit: return return_value; @@ -1202,7 +1202,7 @@ _overlapped_Overlapped_WSARecvFromInto_impl(OverlappedObject *self, DWORD size, DWORD flags); static PyObject * -_overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const *args, Py_ssize_t nargs) +_overlapped_Overlapped_WSARecvFromInto(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; HANDLE handle; @@ -1230,7 +1230,7 @@ _overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const * goto exit; } skip_optional: - return_value = _overlapped_Overlapped_WSARecvFromInto_impl(self, handle, &bufobj, size, flags); + return_value = _overlapped_Overlapped_WSARecvFromInto_impl((OverlappedObject *)self, handle, &bufobj, size, flags); exit: /* Cleanup for bufobj */ @@ -1240,4 +1240,4 @@ _overlapped_Overlapped_WSARecvFromInto(OverlappedObject *self, PyObject *const * return return_value; } -/*[clinic end generated code: output=14c4f87906f28dc5 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d009cc9e53d9732a input=a9049054013a1b77]*/ diff --git 
a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 4e6c5b068c42b7..abeb9c3e3e12b1 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -7577,6 +7577,62 @@ os_read(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return return_value; } +PyDoc_STRVAR(os_readinto__doc__, +"readinto($module, fd, buffer, /)\n" +"--\n" +"\n" +"Read into a buffer object from a file descriptor.\n" +"\n" +"The buffer should be mutable and bytes-like. On success, returns the number of\n" +"bytes read. Less bytes may be read than the size of the buffer. The underlying\n" +"system call will be retried when interrupted by a signal, unless the signal\n" +"handler raises an exception. Other errors will not be retried and an error will\n" +"be raised.\n" +"\n" +"Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0\n" +"(which can be used to check for errors without reading data). Never returns\n" +"negative."); + +#define OS_READINTO_METHODDEF \ + {"readinto", _PyCFunction_CAST(os_readinto), METH_FASTCALL, os_readinto__doc__}, + +static Py_ssize_t +os_readinto_impl(PyObject *module, int fd, Py_buffer *buffer); + +static PyObject * +os_readinto(PyObject *module, PyObject *const *args, Py_ssize_t nargs) +{ + PyObject *return_value = NULL; + int fd; + Py_buffer buffer = {NULL, NULL}; + Py_ssize_t _return_value; + + if (!_PyArg_CheckPositional("readinto", nargs, 2, 2)) { + goto exit; + } + fd = PyLong_AsInt(args[0]); + if (fd == -1 && PyErr_Occurred()) { + goto exit; + } + if (PyObject_GetBuffer(args[1], &buffer, PyBUF_WRITABLE) < 0) { + _PyArg_BadArgument("readinto", "argument 2", "read-write bytes-like object", args[1]); + goto exit; + } + _return_value = os_readinto_impl(module, fd, &buffer); + if ((_return_value == -1) && PyErr_Occurred()) { + goto exit; + } + return_value = PyLong_FromSsize_t(_return_value); + +exit: + /* Cleanup for buffer */ + if (buffer.obj) { + PyBuffer_Release(&buffer); + } + + return return_value; +} + #if defined(HAVE_READV) PyDoc_STRVAR(os_readv__doc__, @@ -11662,7 +11718,7 @@ static int os_DirEntry_is_symlink_impl(DirEntry *self, PyTypeObject *defining_class); static PyObject * -os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_symlink(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; int _return_value; @@ -11671,7 +11727,7 @@ os_DirEntry_is_symlink(DirEntry *self, PyTypeObject *defining_class, PyObject *c PyErr_SetString(PyExc_TypeError, "is_symlink() takes no arguments"); goto exit; } - _return_value = os_DirEntry_is_symlink_impl(self, defining_class); + _return_value = os_DirEntry_is_symlink_impl((DirEntry *)self, defining_class); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11694,12 +11750,12 @@ static int os_DirEntry_is_junction_impl(DirEntry *self); static PyObject * -os_DirEntry_is_junction(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_is_junction(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; int _return_value; - _return_value = os_DirEntry_is_junction_impl(self); + _return_value = os_DirEntry_is_junction_impl((DirEntry *)self); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11723,7 +11779,7 @@ os_DirEntry_stat_impl(DirEntry *self, PyTypeObject *defining_class, int follow_symlinks); static PyObject * -os_DirEntry_stat(DirEntry *self, 
PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_stat(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -11768,7 +11824,7 @@ os_DirEntry_stat(DirEntry *self, PyTypeObject *defining_class, PyObject *const * goto exit; } skip_optional_kwonly: - return_value = os_DirEntry_stat_impl(self, defining_class, follow_symlinks); + return_value = os_DirEntry_stat_impl((DirEntry *)self, defining_class, follow_symlinks); exit: return return_value; @@ -11788,7 +11844,7 @@ os_DirEntry_is_dir_impl(DirEntry *self, PyTypeObject *defining_class, int follow_symlinks); static PyObject * -os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_dir(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -11834,7 +11890,7 @@ os_DirEntry_is_dir(DirEntry *self, PyTypeObject *defining_class, PyObject *const goto exit; } skip_optional_kwonly: - _return_value = os_DirEntry_is_dir_impl(self, defining_class, follow_symlinks); + _return_value = os_DirEntry_is_dir_impl((DirEntry *)self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11858,7 +11914,7 @@ os_DirEntry_is_file_impl(DirEntry *self, PyTypeObject *defining_class, int follow_symlinks); static PyObject * -os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +os_DirEntry_is_file(PyObject *self, PyTypeObject *defining_class, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -11904,7 +11960,7 @@ os_DirEntry_is_file(DirEntry *self, PyTypeObject *defining_class, PyObject *cons goto exit; } skip_optional_kwonly: - _return_value = os_DirEntry_is_file_impl(self, defining_class, follow_symlinks); + _return_value = os_DirEntry_is_file_impl((DirEntry *)self, defining_class, follow_symlinks); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -11927,9 +11983,9 @@ static PyObject * os_DirEntry_inode_impl(DirEntry *self); static PyObject * -os_DirEntry_inode(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry_inode(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return os_DirEntry_inode_impl(self); + return os_DirEntry_inode_impl((DirEntry *)self); } PyDoc_STRVAR(os_DirEntry___fspath____doc__, @@ -11945,9 +12001,9 @@ static PyObject * os_DirEntry___fspath___impl(DirEntry *self); static PyObject * -os_DirEntry___fspath__(DirEntry *self, PyObject *Py_UNUSED(ignored)) +os_DirEntry___fspath__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return os_DirEntry___fspath___impl(self); + return os_DirEntry___fspath___impl((DirEntry *)self); } PyDoc_STRVAR(os_scandir__doc__, @@ -13140,4 +13196,4 @@ os__emscripten_debugger(PyObject *module, PyObject *Py_UNUSED(ignored)) #ifndef OS__EMSCRIPTEN_DEBUGGER_METHODDEF #define OS__EMSCRIPTEN_DEBUGGER_METHODDEF #endif /* !defined(OS__EMSCRIPTEN_DEBUGGER_METHODDEF) */ -/*[clinic end generated code: output=39b69b279fd637f7 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8318c26fc2cd236c input=a9049054013a1b77]*/ diff --git a/Modules/clinic/pyexpat.c.h 
b/Modules/clinic/pyexpat.c.h index e57aa8a07d78c7..9eba59731c3fba 100644 --- a/Modules/clinic/pyexpat.c.h +++ b/Modules/clinic/pyexpat.c.h @@ -22,7 +22,7 @@ pyexpat_xmlparser_SetReparseDeferralEnabled_impl(xmlparseobject *self, int enabled); static PyObject * -pyexpat_xmlparser_SetReparseDeferralEnabled(xmlparseobject *self, PyObject *arg) +pyexpat_xmlparser_SetReparseDeferralEnabled(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int enabled; @@ -31,7 +31,7 @@ pyexpat_xmlparser_SetReparseDeferralEnabled(xmlparseobject *self, PyObject *arg) if (enabled < 0) { goto exit; } - return_value = pyexpat_xmlparser_SetReparseDeferralEnabled_impl(self, enabled); + return_value = pyexpat_xmlparser_SetReparseDeferralEnabled_impl((xmlparseobject *)self, enabled); exit: return return_value; @@ -50,9 +50,9 @@ static PyObject * pyexpat_xmlparser_GetReparseDeferralEnabled_impl(xmlparseobject *self); static PyObject * -pyexpat_xmlparser_GetReparseDeferralEnabled(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +pyexpat_xmlparser_GetReparseDeferralEnabled(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pyexpat_xmlparser_GetReparseDeferralEnabled_impl(self); + return pyexpat_xmlparser_GetReparseDeferralEnabled_impl((xmlparseobject *)self); } PyDoc_STRVAR(pyexpat_xmlparser_Parse__doc__, @@ -71,7 +71,7 @@ pyexpat_xmlparser_Parse_impl(xmlparseobject *self, PyTypeObject *cls, PyObject *data, int isfinal); static PyObject * -pyexpat_xmlparser_Parse(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_Parse(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -105,7 +105,7 @@ pyexpat_xmlparser_Parse(xmlparseobject *self, PyTypeObject *cls, PyObject *const goto exit; } skip_optional_posonly: - return_value = pyexpat_xmlparser_Parse_impl(self, cls, data, isfinal); + return_value = pyexpat_xmlparser_Parse_impl((xmlparseobject *)self, cls, data, isfinal); exit: return return_value; @@ -125,7 +125,7 @@ pyexpat_xmlparser_ParseFile_impl(xmlparseobject *self, PyTypeObject *cls, PyObject *file); static PyObject * -pyexpat_xmlparser_ParseFile(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_ParseFile(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -150,7 +150,7 @@ pyexpat_xmlparser_ParseFile(xmlparseobject *self, PyTypeObject *cls, PyObject *c goto exit; } file = args[0]; - return_value = pyexpat_xmlparser_ParseFile_impl(self, cls, file); + return_value = pyexpat_xmlparser_ParseFile_impl((xmlparseobject *)self, cls, file); exit: return return_value; @@ -169,7 +169,7 @@ static PyObject * pyexpat_xmlparser_SetBase_impl(xmlparseobject *self, const char *base); static PyObject * -pyexpat_xmlparser_SetBase(xmlparseobject *self, PyObject *arg) +pyexpat_xmlparser_SetBase(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *base; @@ -187,7 +187,7 @@ pyexpat_xmlparser_SetBase(xmlparseobject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = pyexpat_xmlparser_SetBase_impl(self, base); + return_value = pyexpat_xmlparser_SetBase_impl((xmlparseobject *)self, base); exit: return return_value; @@ -206,9 +206,9 @@ 
static PyObject * pyexpat_xmlparser_GetBase_impl(xmlparseobject *self); static PyObject * -pyexpat_xmlparser_GetBase(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +pyexpat_xmlparser_GetBase(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pyexpat_xmlparser_GetBase_impl(self); + return pyexpat_xmlparser_GetBase_impl((xmlparseobject *)self); } PyDoc_STRVAR(pyexpat_xmlparser_GetInputContext__doc__, @@ -227,9 +227,9 @@ static PyObject * pyexpat_xmlparser_GetInputContext_impl(xmlparseobject *self); static PyObject * -pyexpat_xmlparser_GetInputContext(xmlparseobject *self, PyObject *Py_UNUSED(ignored)) +pyexpat_xmlparser_GetInputContext(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return pyexpat_xmlparser_GetInputContext_impl(self); + return pyexpat_xmlparser_GetInputContext_impl((xmlparseobject *)self); } PyDoc_STRVAR(pyexpat_xmlparser_ExternalEntityParserCreate__doc__, @@ -249,7 +249,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate_impl(xmlparseobject *self, const char *encoding); static PyObject * -pyexpat_xmlparser_ExternalEntityParserCreate(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_ExternalEntityParserCreate(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -309,7 +309,7 @@ pyexpat_xmlparser_ExternalEntityParserCreate(xmlparseobject *self, PyTypeObject goto exit; } skip_optional_posonly: - return_value = pyexpat_xmlparser_ExternalEntityParserCreate_impl(self, cls, context, encoding); + return_value = pyexpat_xmlparser_ExternalEntityParserCreate_impl((xmlparseobject *)self, cls, context, encoding); exit: return return_value; @@ -333,7 +333,7 @@ static PyObject * pyexpat_xmlparser_SetParamEntityParsing_impl(xmlparseobject *self, int flag); static PyObject * -pyexpat_xmlparser_SetParamEntityParsing(xmlparseobject *self, PyObject *arg) +pyexpat_xmlparser_SetParamEntityParsing(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int flag; @@ -342,7 +342,7 @@ pyexpat_xmlparser_SetParamEntityParsing(xmlparseobject *self, PyObject *arg) if (flag == -1 && PyErr_Occurred()) { goto exit; } - return_value = pyexpat_xmlparser_SetParamEntityParsing_impl(self, flag); + return_value = pyexpat_xmlparser_SetParamEntityParsing_impl((xmlparseobject *)self, flag); exit: return return_value; @@ -368,7 +368,7 @@ pyexpat_xmlparser_UseForeignDTD_impl(xmlparseobject *self, PyTypeObject *cls, int flag); static PyObject * -pyexpat_xmlparser_UseForeignDTD(xmlparseobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +pyexpat_xmlparser_UseForeignDTD(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -400,7 +400,7 @@ pyexpat_xmlparser_UseForeignDTD(xmlparseobject *self, PyTypeObject *cls, PyObjec goto exit; } skip_optional_posonly: - return_value = pyexpat_xmlparser_UseForeignDTD_impl(self, cls, flag); + return_value = pyexpat_xmlparser_UseForeignDTD_impl((xmlparseobject *)self, cls, flag); exit: return return_value; @@ -550,4 +550,4 @@ pyexpat_ErrorString(PyObject *module, PyObject *arg) #ifndef PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #define PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF #endif /* !defined(PYEXPAT_XMLPARSER_USEFOREIGNDTD_METHODDEF) */ -/*[clinic end generated code: 
output=63be65cb1823b5f8 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7ee30ae5b666d0a8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index 806a888d6b8cd9..d8bdd6f95f3d29 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -91,7 +91,7 @@ static PyObject * select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask); static PyObject * -select_poll_register(pollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_poll_register(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -112,7 +112,7 @@ select_poll_register(pollObject *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_register_impl(self, fd, eventmask); + return_value = select_poll_register_impl((pollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -142,7 +142,7 @@ static PyObject * select_poll_modify_impl(pollObject *self, int fd, unsigned short eventmask); static PyObject * -select_poll_modify(pollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_poll_modify(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -159,7 +159,7 @@ select_poll_modify(pollObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_modify_impl(self, fd, eventmask); + return_value = select_poll_modify_impl((pollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -183,7 +183,7 @@ static PyObject * select_poll_unregister_impl(pollObject *self, int fd); static PyObject * -select_poll_unregister(pollObject *self, PyObject *arg) +select_poll_unregister(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int fd; @@ -193,7 +193,7 @@ select_poll_unregister(pollObject *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_unregister_impl(self, fd); + return_value = select_poll_unregister_impl((pollObject *)self, fd); Py_END_CRITICAL_SECTION(); exit: @@ -224,7 +224,7 @@ static PyObject * select_poll_poll_impl(pollObject *self, PyObject *timeout_obj); static PyObject * -select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_poll_poll(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *timeout_obj = Py_None; @@ -238,7 +238,7 @@ select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) timeout_obj = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_poll_poll_impl(self, timeout_obj); + return_value = select_poll_poll_impl((pollObject *)self, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -270,7 +270,7 @@ select_devpoll_register_impl(devpollObject *self, int fd, unsigned short eventmask); static PyObject * -select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_devpoll_register(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -291,7 +291,7 @@ select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t n } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_register_impl(self, fd, eventmask); + return_value = select_devpoll_register_impl((devpollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -323,7 +323,7 @@ 
select_devpoll_modify_impl(devpollObject *self, int fd, unsigned short eventmask); static PyObject * -select_devpoll_modify(devpollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_devpoll_modify(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int fd; @@ -344,7 +344,7 @@ select_devpoll_modify(devpollObject *self, PyObject *const *args, Py_ssize_t nar } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_modify_impl(self, fd, eventmask); + return_value = select_devpoll_modify_impl((devpollObject *)self, fd, eventmask); Py_END_CRITICAL_SECTION(); exit: @@ -368,7 +368,7 @@ static PyObject * select_devpoll_unregister_impl(devpollObject *self, int fd); static PyObject * -select_devpoll_unregister(devpollObject *self, PyObject *arg) +select_devpoll_unregister(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int fd; @@ -378,7 +378,7 @@ select_devpoll_unregister(devpollObject *self, PyObject *arg) goto exit; } Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_unregister_impl(self, fd); + return_value = select_devpoll_unregister_impl((devpollObject *)self, fd); Py_END_CRITICAL_SECTION(); exit: @@ -409,7 +409,7 @@ static PyObject * select_devpoll_poll_impl(devpollObject *self, PyObject *timeout_obj); static PyObject * -select_devpoll_poll(devpollObject *self, PyObject *const *args, Py_ssize_t nargs) +select_devpoll_poll(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *timeout_obj = Py_None; @@ -423,7 +423,7 @@ select_devpoll_poll(devpollObject *self, PyObject *const *args, Py_ssize_t nargs timeout_obj = args[0]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_poll_impl(self, timeout_obj); + return_value = select_devpoll_poll_impl((devpollObject *)self, timeout_obj); Py_END_CRITICAL_SECTION(); exit: @@ -449,12 +449,12 @@ static PyObject * select_devpoll_close_impl(devpollObject *self); static PyObject * -select_devpoll_close(devpollObject *self, PyObject *Py_UNUSED(ignored)) +select_devpoll_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_close_impl(self); + return_value = select_devpoll_close_impl((devpollObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -477,12 +477,12 @@ static PyObject * select_devpoll_fileno_impl(devpollObject *self); static PyObject * -select_devpoll_fileno(devpollObject *self, PyObject *Py_UNUSED(ignored)) +select_devpoll_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_devpoll_fileno_impl(self); + return_value = select_devpoll_fileno_impl((devpollObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -643,12 +643,12 @@ static PyObject * select_epoll_close_impl(pyEpoll_Object *self); static PyObject * -select_epoll_close(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) +select_epoll_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_epoll_close_impl(self); + return_value = select_epoll_close_impl((pyEpoll_Object *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -671,9 +671,9 @@ static PyObject * select_epoll_fileno_impl(pyEpoll_Object *self); static PyObject * -select_epoll_fileno(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) +select_epoll_fileno(PyObject 
*self, PyObject *Py_UNUSED(ignored)) { - return select_epoll_fileno_impl(self); + return select_epoll_fileno_impl((pyEpoll_Object *)self); } #endif /* defined(HAVE_EPOLL) */ @@ -734,7 +734,7 @@ select_epoll_register_impl(pyEpoll_Object *self, int fd, unsigned int eventmask); static PyObject * -select_epoll_register(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_register(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -784,7 +784,7 @@ select_epoll_register(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t na goto exit; } skip_optional_pos: - return_value = select_epoll_register_impl(self, fd, eventmask); + return_value = select_epoll_register_impl((pyEpoll_Object *)self, fd, eventmask); exit: return return_value; @@ -813,7 +813,7 @@ select_epoll_modify_impl(pyEpoll_Object *self, int fd, unsigned int eventmask); static PyObject * -select_epoll_modify(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_modify(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -858,7 +858,7 @@ select_epoll_modify(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t narg if (eventmask == (unsigned int)-1 && PyErr_Occurred()) { goto exit; } - return_value = select_epoll_modify_impl(self, fd, eventmask); + return_value = select_epoll_modify_impl((pyEpoll_Object *)self, fd, eventmask); exit: return return_value; @@ -884,7 +884,7 @@ static PyObject * select_epoll_unregister_impl(pyEpoll_Object *self, int fd); static PyObject * -select_epoll_unregister(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_unregister(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -924,7 +924,7 @@ select_epoll_unregister(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t if (fd < 0) { goto exit; } - return_value = select_epoll_unregister_impl(self, fd); + return_value = select_epoll_unregister_impl((pyEpoll_Object *)self, fd); exit: return return_value; @@ -957,7 +957,7 @@ select_epoll_poll_impl(pyEpoll_Object *self, PyObject *timeout_obj, int maxevents); static PyObject * -select_epoll_poll(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +select_epoll_poll(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1009,7 +1009,7 @@ select_epoll_poll(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } skip_optional_pos: - return_value = select_epoll_poll_impl(self, timeout_obj, maxevents); + return_value = select_epoll_poll_impl((pyEpoll_Object *)self, timeout_obj, maxevents); exit: return return_value; @@ -1031,9 +1031,9 @@ static PyObject * select_epoll___enter___impl(pyEpoll_Object *self); static PyObject * -select_epoll___enter__(pyEpoll_Object *self, PyObject *Py_UNUSED(ignored)) +select_epoll___enter__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return select_epoll___enter___impl(self); + return select_epoll___enter___impl((pyEpoll_Object *)self); } #endif /* defined(HAVE_EPOLL) */ @@ -1053,7 +1053,7 @@ 
select_epoll___exit___impl(pyEpoll_Object *self, PyObject *exc_type, PyObject *exc_value, PyObject *exc_tb); static PyObject * -select_epoll___exit__(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t nargs) +select_epoll___exit__(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type = Py_None; @@ -1076,7 +1076,7 @@ select_epoll___exit__(pyEpoll_Object *self, PyObject *const *args, Py_ssize_t na } exc_tb = args[2]; skip_optional: - return_value = select_epoll___exit___impl(self, exc_type, exc_value, exc_tb); + return_value = select_epoll___exit___impl((pyEpoll_Object *)self, exc_type, exc_value, exc_tb); exit: return return_value; @@ -1146,12 +1146,12 @@ static PyObject * select_kqueue_close_impl(kqueue_queue_Object *self); static PyObject * -select_kqueue_close(kqueue_queue_Object *self, PyObject *Py_UNUSED(ignored)) +select_kqueue_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = select_kqueue_close_impl(self); + return_value = select_kqueue_close_impl((kqueue_queue_Object *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -1174,9 +1174,9 @@ static PyObject * select_kqueue_fileno_impl(kqueue_queue_Object *self); static PyObject * -select_kqueue_fileno(kqueue_queue_Object *self, PyObject *Py_UNUSED(ignored)) +select_kqueue_fileno(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return select_kqueue_fileno_impl(self); + return select_kqueue_fileno_impl((kqueue_queue_Object *)self); } #endif /* defined(HAVE_KQUEUE) */ @@ -1238,7 +1238,7 @@ select_kqueue_control_impl(kqueue_queue_Object *self, PyObject *changelist, int maxevents, PyObject *otimeout); static PyObject * -select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize_t nargs) +select_kqueue_control(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *changelist; @@ -1258,7 +1258,7 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize } otimeout = args[2]; skip_optional: - return_value = select_kqueue_control_impl(self, changelist, maxevents, otimeout); + return_value = select_kqueue_control_impl((kqueue_queue_Object *)self, changelist, maxevents, otimeout); exit: return return_value; @@ -1365,4 +1365,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=78b4e67f7d401b5e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=c18fd93efc5f4dce input=a9049054013a1b77]*/ diff --git a/Modules/clinic/sha1module.c.h b/Modules/clinic/sha1module.c.h index 6af77ba64ecce6..ddd8e66a41d7ff 100644 --- a/Modules/clinic/sha1module.c.h +++ b/Modules/clinic/sha1module.c.h @@ -21,13 +21,13 @@ static PyObject * SHA1Type_copy_impl(SHA1object *self, PyTypeObject *cls); static PyObject * -SHA1Type_copy(SHA1object *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +SHA1Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return SHA1Type_copy_impl(self, cls); + return SHA1Type_copy_impl((SHA1object *)self, cls); } PyDoc_STRVAR(SHA1Type_digest__doc__, @@ -43,9 +43,9 @@ static 
PyObject * SHA1Type_digest_impl(SHA1object *self); static PyObject * -SHA1Type_digest(SHA1object *self, PyObject *Py_UNUSED(ignored)) +SHA1Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA1Type_digest_impl(self); + return SHA1Type_digest_impl((SHA1object *)self); } PyDoc_STRVAR(SHA1Type_hexdigest__doc__, @@ -61,9 +61,9 @@ static PyObject * SHA1Type_hexdigest_impl(SHA1object *self); static PyObject * -SHA1Type_hexdigest(SHA1object *self, PyObject *Py_UNUSED(ignored)) +SHA1Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA1Type_hexdigest_impl(self); + return SHA1Type_hexdigest_impl((SHA1object *)self); } PyDoc_STRVAR(SHA1Type_update__doc__, @@ -149,4 +149,4 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject * exit: return return_value; } -/*[clinic end generated code: output=917e2789f1f5ebf9 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ad6f3788a6e7ff6f input=a9049054013a1b77]*/ diff --git a/Modules/clinic/sha2module.c.h b/Modules/clinic/sha2module.c.h index fec655a0dfaa58..d86f5510d752e8 100644 --- a/Modules/clinic/sha2module.c.h +++ b/Modules/clinic/sha2module.c.h @@ -21,13 +21,13 @@ static PyObject * SHA256Type_copy_impl(SHA256object *self, PyTypeObject *cls); static PyObject * -SHA256Type_copy(SHA256object *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +SHA256Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return SHA256Type_copy_impl(self, cls); + return SHA256Type_copy_impl((SHA256object *)self, cls); } PyDoc_STRVAR(SHA512Type_copy__doc__, @@ -43,13 +43,13 @@ static PyObject * SHA512Type_copy_impl(SHA512object *self, PyTypeObject *cls); static PyObject * -SHA512Type_copy(SHA512object *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +SHA512Type_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return SHA512Type_copy_impl(self, cls); + return SHA512Type_copy_impl((SHA512object *)self, cls); } PyDoc_STRVAR(SHA256Type_digest__doc__, @@ -65,9 +65,9 @@ static PyObject * SHA256Type_digest_impl(SHA256object *self); static PyObject * -SHA256Type_digest(SHA256object *self, PyObject *Py_UNUSED(ignored)) +SHA256Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA256Type_digest_impl(self); + return SHA256Type_digest_impl((SHA256object *)self); } PyDoc_STRVAR(SHA512Type_digest__doc__, @@ -83,9 +83,9 @@ static PyObject * SHA512Type_digest_impl(SHA512object *self); static PyObject * -SHA512Type_digest(SHA512object *self, PyObject *Py_UNUSED(ignored)) +SHA512Type_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA512Type_digest_impl(self); + return SHA512Type_digest_impl((SHA512object *)self); } PyDoc_STRVAR(SHA256Type_hexdigest__doc__, @@ -101,9 +101,9 @@ static PyObject * SHA256Type_hexdigest_impl(SHA256object *self); static PyObject * -SHA256Type_hexdigest(SHA256object *self, PyObject *Py_UNUSED(ignored)) +SHA256Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA256Type_hexdigest_impl(self); + return SHA256Type_hexdigest_impl((SHA256object *)self); } PyDoc_STRVAR(SHA512Type_hexdigest__doc__, @@ 
-119,9 +119,9 @@ static PyObject * SHA512Type_hexdigest_impl(SHA512object *self); static PyObject * -SHA512Type_hexdigest(SHA512object *self, PyObject *Py_UNUSED(ignored)) +SHA512Type_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return SHA512Type_hexdigest_impl(self); + return SHA512Type_hexdigest_impl((SHA512object *)self); } PyDoc_STRVAR(SHA256Type_update__doc__, @@ -441,4 +441,4 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject exit: return return_value; } -/*[clinic end generated code: output=602a6939b8ec0927 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=1d7fec114eb6b6e3 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/sha3module.c.h b/Modules/clinic/sha3module.c.h index d9f4b66f81a038..729e216ce023cf 100644 --- a/Modules/clinic/sha3module.c.h +++ b/Modules/clinic/sha3module.c.h @@ -92,9 +92,9 @@ static PyObject * _sha3_sha3_224_copy_impl(SHA3object *self); static PyObject * -_sha3_sha3_224_copy(SHA3object *self, PyObject *Py_UNUSED(ignored)) +_sha3_sha3_224_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sha3_sha3_224_copy_impl(self); + return _sha3_sha3_224_copy_impl((SHA3object *)self); } PyDoc_STRVAR(_sha3_sha3_224_digest__doc__, @@ -110,9 +110,9 @@ static PyObject * _sha3_sha3_224_digest_impl(SHA3object *self); static PyObject * -_sha3_sha3_224_digest(SHA3object *self, PyObject *Py_UNUSED(ignored)) +_sha3_sha3_224_digest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sha3_sha3_224_digest_impl(self); + return _sha3_sha3_224_digest_impl((SHA3object *)self); } PyDoc_STRVAR(_sha3_sha3_224_hexdigest__doc__, @@ -128,9 +128,9 @@ static PyObject * _sha3_sha3_224_hexdigest_impl(SHA3object *self); static PyObject * -_sha3_sha3_224_hexdigest(SHA3object *self, PyObject *Py_UNUSED(ignored)) +_sha3_sha3_224_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _sha3_sha3_224_hexdigest_impl(self); + return _sha3_sha3_224_hexdigest_impl((SHA3object *)self); } PyDoc_STRVAR(_sha3_sha3_224_update__doc__, @@ -155,7 +155,7 @@ static PyObject * _sha3_shake_128_digest_impl(SHA3object *self, unsigned long length); static PyObject * -_sha3_shake_128_digest(SHA3object *self, PyObject *arg) +_sha3_shake_128_digest(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; unsigned long length; @@ -163,7 +163,7 @@ _sha3_shake_128_digest(SHA3object *self, PyObject *arg) if (!_PyLong_UnsignedLong_Converter(arg, &length)) { goto exit; } - return_value = _sha3_shake_128_digest_impl(self, length); + return_value = _sha3_shake_128_digest_impl((SHA3object *)self, length); exit: return return_value; @@ -182,7 +182,7 @@ static PyObject * _sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length); static PyObject * -_sha3_shake_128_hexdigest(SHA3object *self, PyObject *arg) +_sha3_shake_128_hexdigest(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; unsigned long length; @@ -190,9 +190,9 @@ _sha3_shake_128_hexdigest(SHA3object *self, PyObject *arg) if (!_PyLong_UnsignedLong_Converter(arg, &length)) { goto exit; } - return_value = _sha3_shake_128_hexdigest_impl(self, length); + return_value = _sha3_shake_128_hexdigest_impl((SHA3object *)self, length); exit: return return_value; } -/*[clinic end generated code: output=5c644eb0ed42b993 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=21da06d9570969d8 input=a9049054013a1b77]*/ diff --git a/Modules/clinic/socketmodule.c.h b/Modules/clinic/socketmodule.c.h index 2152f288a9722f..dc62c4290d3e3b 100644 --- 
a/Modules/clinic/socketmodule.c.h +++ b/Modules/clinic/socketmodule.c.h @@ -23,9 +23,9 @@ static PyObject * _socket_socket_close_impl(PySocketSockObject *s); static PyObject * -_socket_socket_close(PySocketSockObject *s, PyObject *Py_UNUSED(ignored)) +_socket_socket_close(PyObject *s, PyObject *Py_UNUSED(ignored)) { - return _socket_socket_close_impl(s); + return _socket_socket_close_impl((PySocketSockObject *)s); } static int @@ -126,7 +126,7 @@ static PyObject * _socket_socket_ntohs_impl(PySocketSockObject *self, int x); static PyObject * -_socket_socket_ntohs(PySocketSockObject *self, PyObject *arg) +_socket_socket_ntohs(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int x; @@ -135,7 +135,7 @@ _socket_socket_ntohs(PySocketSockObject *self, PyObject *arg) if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _socket_socket_ntohs_impl(self, x); + return_value = _socket_socket_ntohs_impl((PySocketSockObject *)self, x); exit: return return_value; @@ -154,7 +154,7 @@ static PyObject * _socket_socket_htons_impl(PySocketSockObject *self, int x); static PyObject * -_socket_socket_htons(PySocketSockObject *self, PyObject *arg) +_socket_socket_htons(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int x; @@ -163,7 +163,7 @@ _socket_socket_htons(PySocketSockObject *self, PyObject *arg) if (x == -1 && PyErr_Occurred()) { goto exit; } - return_value = _socket_socket_htons_impl(self, x); + return_value = _socket_socket_htons_impl((PySocketSockObject *)self, x); exit: return return_value; @@ -182,7 +182,7 @@ static PyObject * _socket_socket_inet_aton_impl(PySocketSockObject *self, const char *ip_addr); static PyObject * -_socket_socket_inet_aton(PySocketSockObject *self, PyObject *arg) +_socket_socket_inet_aton(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; const char *ip_addr; @@ -200,7 +200,7 @@ _socket_socket_inet_aton(PySocketSockObject *self, PyObject *arg) PyErr_SetString(PyExc_ValueError, "embedded null character"); goto exit; } - return_value = _socket_socket_inet_aton_impl(self, ip_addr); + return_value = _socket_socket_inet_aton_impl((PySocketSockObject *)self, ip_addr); exit: return return_value; @@ -221,7 +221,7 @@ static PyObject * _socket_socket_inet_ntoa_impl(PySocketSockObject *self, Py_buffer *packed_ip); static PyObject * -_socket_socket_inet_ntoa(PySocketSockObject *self, PyObject *arg) +_socket_socket_inet_ntoa(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer packed_ip = {NULL, NULL}; @@ -229,7 +229,7 @@ _socket_socket_inet_ntoa(PySocketSockObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &packed_ip, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = _socket_socket_inet_ntoa_impl(self, &packed_ip); + return_value = _socket_socket_inet_ntoa_impl((PySocketSockObject *)self, &packed_ip); exit: /* Cleanup for packed_ip */ @@ -257,7 +257,7 @@ static PyObject * _socket_socket_if_nametoindex_impl(PySocketSockObject *self, PyObject *oname); static PyObject * -_socket_socket_if_nametoindex(PySocketSockObject *self, PyObject *arg) +_socket_socket_if_nametoindex(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *oname; @@ -265,7 +265,7 @@ _socket_socket_if_nametoindex(PySocketSockObject *self, PyObject *arg) if (!PyUnicode_FSConverter(arg, &oname)) { goto exit; } - return_value = _socket_socket_if_nametoindex_impl(self, oname); + return_value = _socket_socket_if_nametoindex_impl((PySocketSockObject *)self, oname); exit: return return_value; @@ -280,4 +280,4 @@ 
_socket_socket_if_nametoindex(PySocketSockObject *self, PyObject *arg) #ifndef _SOCKET_SOCKET_IF_NAMETOINDEX_METHODDEF #define _SOCKET_SOCKET_IF_NAMETOINDEX_METHODDEF #endif /* !defined(_SOCKET_SOCKET_IF_NAMETOINDEX_METHODDEF) */ -/*[clinic end generated code: output=3e612e8df1c322dd input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d39efc30d811e74b input=a9049054013a1b77]*/ diff --git a/Modules/clinic/zlibmodule.c.h b/Modules/clinic/zlibmodule.c.h index 19906dc328d897..91a3ac76bcf0cc 100644 --- a/Modules/clinic/zlibmodule.c.h +++ b/Modules/clinic/zlibmodule.c.h @@ -439,7 +439,7 @@ zlib_Compress_compress_impl(compobject *self, PyTypeObject *cls, Py_buffer *data); static PyObject * -zlib_Compress_compress(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress_compress(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -466,7 +466,7 @@ zlib_Compress_compress(compobject *self, PyTypeObject *cls, PyObject *const *arg if (PyObject_GetBuffer(args[0], &data, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = zlib_Compress_compress_impl(self, cls, &data); + return_value = zlib_Compress_compress_impl((compobject *)self, cls, &data); exit: /* Cleanup for data */ @@ -502,7 +502,7 @@ zlib_Decompress_decompress_impl(compobject *self, PyTypeObject *cls, Py_buffer *data, Py_ssize_t max_length); static PyObject * -zlib_Decompress_decompress(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress_decompress(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -559,7 +559,7 @@ zlib_Decompress_decompress(compobject *self, PyTypeObject *cls, PyObject *const max_length = ival; } skip_optional_pos: - return_value = zlib_Decompress_decompress_impl(self, cls, &data, max_length); + return_value = zlib_Decompress_decompress_impl((compobject *)self, cls, &data, max_length); exit: /* Cleanup for data */ @@ -589,7 +589,7 @@ static PyObject * zlib_Compress_flush_impl(compobject *self, PyTypeObject *cls, int mode); static PyObject * -zlib_Compress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress_flush(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -621,7 +621,7 @@ zlib_Compress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args, goto exit; } skip_optional_posonly: - return_value = zlib_Compress_flush_impl(self, cls, mode); + return_value = zlib_Compress_flush_impl((compobject *)self, cls, mode); exit: return return_value; @@ -642,13 +642,13 @@ static PyObject * zlib_Compress_copy_impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Compress_copy(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return zlib_Compress_copy_impl(self, cls); + return zlib_Compress_copy_impl((compobject 
*)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -667,13 +667,13 @@ static PyObject * zlib_Compress___copy___impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Compress___copy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress___copy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__copy__() takes no arguments"); return NULL; } - return zlib_Compress___copy___impl(self, cls); + return zlib_Compress___copy___impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -693,7 +693,7 @@ zlib_Compress___deepcopy___impl(compobject *self, PyTypeObject *cls, PyObject *memo); static PyObject * -zlib_Compress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Compress___deepcopy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -718,7 +718,7 @@ zlib_Compress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *const goto exit; } memo = args[0]; - return_value = zlib_Compress___deepcopy___impl(self, cls, memo); + return_value = zlib_Compress___deepcopy___impl((compobject *)self, cls, memo); exit: return return_value; @@ -741,13 +741,13 @@ static PyObject * zlib_Decompress_copy_impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Decompress_copy(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress_copy(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "copy() takes no arguments"); return NULL; } - return zlib_Decompress_copy_impl(self, cls); + return zlib_Decompress_copy_impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -766,13 +766,13 @@ static PyObject * zlib_Decompress___copy___impl(compobject *self, PyTypeObject *cls); static PyObject * -zlib_Decompress___copy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress___copy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "__copy__() takes no arguments"); return NULL; } - return zlib_Decompress___copy___impl(self, cls); + return zlib_Decompress___copy___impl((compobject *)self, cls); } #endif /* defined(HAVE_ZLIB_COPY) */ @@ -792,7 +792,7 @@ zlib_Decompress___deepcopy___impl(compobject *self, PyTypeObject *cls, PyObject *memo); static PyObject * -zlib_Decompress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress___deepcopy__(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -817,7 +817,7 @@ zlib_Decompress___deepcopy__(compobject *self, PyTypeObject *cls, PyObject *cons goto exit; } memo = args[0]; - return_value = zlib_Decompress___deepcopy___impl(self, cls, memo); + return_value = zlib_Decompress___deepcopy___impl((compobject *)self, cls, memo); exit: return 
return_value; @@ -842,7 +842,7 @@ zlib_Decompress_flush_impl(compobject *self, PyTypeObject *cls, Py_ssize_t length); static PyObject * -zlib_Decompress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_Decompress_flush(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -882,7 +882,7 @@ zlib_Decompress_flush(compobject *self, PyTypeObject *cls, PyObject *const *args length = ival; } skip_optional_posonly: - return_value = zlib_Decompress_flush_impl(self, cls, length); + return_value = zlib_Decompress_flush_impl((compobject *)self, cls, length); exit: return return_value; @@ -915,7 +915,7 @@ zlib_ZlibDecompressor_decompress_impl(ZlibDecompressor *self, Py_buffer *data, Py_ssize_t max_length); static PyObject * -zlib_ZlibDecompressor_decompress(ZlibDecompressor *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +zlib_ZlibDecompressor_decompress(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -972,7 +972,7 @@ zlib_ZlibDecompressor_decompress(ZlibDecompressor *self, PyObject *const *args, max_length = ival; } skip_optional_pos: - return_value = zlib_ZlibDecompressor_decompress_impl(self, &data, max_length); + return_value = zlib_ZlibDecompressor_decompress_impl((ZlibDecompressor *)self, &data, max_length); exit: /* Cleanup for data */ @@ -1109,4 +1109,4 @@ zlib_crc32(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #ifndef ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #define ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF #endif /* !defined(ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF) */ -/*[clinic end generated code: output=2fef49f168842b17 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=969872868c303e8a input=a9049054013a1b77]*/ diff --git a/Modules/config.c.in b/Modules/config.c.in index 53b4fb285498d0..c578cd103dc629 100644 --- a/Modules/config.c.in +++ b/Modules/config.c.in @@ -1,13 +1,3 @@ -/* -*- C -*- *********************************************** -Copyright (c) 2000, BeOpen.com. -Copyright (c) 1995-2000, Corporation for National Research Initiatives. -Copyright (c) 1990-1995, Stichting Mathematisch Centrum. -All rights reserved. - -See the file "Misc/COPYRIGHT" for information on usage and -redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES. -******************************************************************/ - /* Module configuration */ /* !!! !!! !!! This file is edited by the makesetup script !!! !!! !!! */ diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 29638114dd94a9..b4c15a143f9838 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -858,12 +858,15 @@ math_lcm_impl(PyObject *module, PyObject * const *args, * true (1), but may return false (0) without setting up an exception. 
*/ static int -is_error(double x) +is_error(double x, int raise_edom) { int result = 1; /* presumption of guilt */ assert(errno); /* non-zero errno is a precondition for calling */ - if (errno == EDOM) - PyErr_SetString(PyExc_ValueError, "math domain error"); + if (errno == EDOM) { + if (raise_edom) { + PyErr_SetString(PyExc_ValueError, "math domain error"); + } + } else if (errno == ERANGE) { /* ANSI C generally requires libm functions to set ERANGE @@ -928,7 +931,8 @@ is_error(double x) */ static PyObject * -math_1(PyObject *arg, double (*func) (double), int can_overflow) +math_1(PyObject *arg, double (*func) (double), int can_overflow, + const char *err_msg) { double x, r; x = PyFloat_AsDouble(arg); @@ -936,25 +940,34 @@ math_1(PyObject *arg, double (*func) (double), int can_overflow) return NULL; errno = 0; r = (*func)(x); - if (isnan(r) && !isnan(x)) { - PyErr_SetString(PyExc_ValueError, - "math domain error"); /* invalid arg */ - return NULL; - } + if (isnan(r) && !isnan(x)) + goto domain_err; /* domain error */ if (isinf(r) && isfinite(x)) { if (can_overflow) PyErr_SetString(PyExc_OverflowError, "math range error"); /* overflow */ else - PyErr_SetString(PyExc_ValueError, - "math domain error"); /* singularity */ + goto domain_err; /* singularity */ return NULL; } - if (isfinite(r) && errno && is_error(r)) + if (isfinite(r) && errno && is_error(r, 1)) /* this branch unnecessary on most platforms */ return NULL; return PyFloat_FromDouble(r); + +domain_err: + if (err_msg) { + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, err_msg, buf); + PyMem_Free(buf); + } + } + else { + PyErr_SetString(PyExc_ValueError, "math domain error"); + } + return NULL; } /* variant of math_1, to be used when the function being wrapped is known to @@ -962,7 +975,7 @@ math_1(PyObject *arg, double (*func) (double), int can_overflow) errno = ERANGE for overflow). */ static PyObject * -math_1a(PyObject *arg, double (*func) (double)) +math_1a(PyObject *arg, double (*func) (double), const char *err_msg) { double x, r; x = PyFloat_AsDouble(arg); @@ -970,8 +983,17 @@ math_1a(PyObject *arg, double (*func) (double)) return NULL; errno = 0; r = (*func)(x); - if (errno && is_error(r)) + if (errno && is_error(r, err_msg ? 
0 : 1)) { + if (err_msg && errno == EDOM) { + assert(!PyErr_Occurred()); /* exception is not set by is_error() */ + char *buf = PyOS_double_to_string(x, 'r', 0, Py_DTSF_ADD_DOT_0, NULL); + if (buf) { + PyErr_Format(PyExc_ValueError, err_msg, buf); + PyMem_Free(buf); + } + } return NULL; + } return PyFloat_FromDouble(r); } @@ -1031,7 +1053,7 @@ math_2(PyObject *const *args, Py_ssize_t nargs, else errno = 0; } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; else return PyFloat_FromDouble(r); @@ -1039,13 +1061,25 @@ math_2(PyObject *const *args, Py_ssize_t nargs, #define FUNC1(funcname, func, can_overflow, docstring) \ static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ - return math_1(args, func, can_overflow); \ + return math_1(args, func, can_overflow, NULL); \ + }\ + PyDoc_STRVAR(math_##funcname##_doc, docstring); + +#define FUNC1D(funcname, func, can_overflow, docstring, err_msg) \ + static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ + return math_1(args, func, can_overflow, err_msg); \ }\ PyDoc_STRVAR(math_##funcname##_doc, docstring); #define FUNC1A(funcname, func, docstring) \ static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ - return math_1a(args, func); \ + return math_1a(args, func, NULL); \ + }\ + PyDoc_STRVAR(math_##funcname##_doc, docstring); + +#define FUNC1AD(funcname, func, docstring, err_msg) \ + static PyObject * math_##funcname(PyObject *self, PyObject *args) { \ + return math_1a(args, func, err_msg); \ }\ PyDoc_STRVAR(math_##funcname##_doc, docstring); @@ -1077,9 +1111,10 @@ FUNC2(atan2, atan2, "atan2($module, y, x, /)\n--\n\n" "Return the arc tangent (measured in radians) of y/x.\n\n" "Unlike atan(y/x), the signs of both x and y are considered.") -FUNC1(atanh, atanh, 0, +FUNC1D(atanh, atanh, 0, "atanh($module, x, /)\n--\n\n" - "Return the inverse hyperbolic tangent of x.") + "Return the inverse hyperbolic tangent of x.", + "expected a number between -1 and 1, got %s") FUNC1(cbrt, cbrt, 0, "cbrt($module, x, /)\n--\n\n" "Return the cube root of x.") @@ -1190,9 +1225,10 @@ math_floor(PyObject *module, PyObject *number) return PyLong_FromDouble(floor(x)); } -FUNC1A(gamma, m_tgamma, +FUNC1AD(gamma, m_tgamma, "gamma($module, x, /)\n--\n\n" - "Gamma function at x.") + "Gamma function at x.", + "expected a float or nonnegative integer, got %s") FUNC1A(lgamma, m_lgamma, "lgamma($module, x, /)\n--\n\n" "Natural logarithm of absolute value of Gamma function at x.") @@ -1212,9 +1248,10 @@ FUNC1(sin, sin, 0, FUNC1(sinh, sinh, 1, "sinh($module, x, /)\n--\n\n" "Return the hyperbolic sine of x.") -FUNC1(sqrt, sqrt, 0, +FUNC1D(sqrt, sqrt, 0, "sqrt($module, x, /)\n--\n\n" - "Return the square root of x.") + "Return the square root of x.", + "expected a nonnegative input, got %s") FUNC1(tan, tan, 0, "tan($module, x, /)\n--\n\n" "Return the tangent of x (measured in radians).") @@ -2141,7 +2178,7 @@ math_ldexp_impl(PyObject *module, double x, PyObject *i) errno = ERANGE; } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; return PyFloat_FromDouble(r); } @@ -2195,8 +2232,8 @@ loghelper(PyObject* arg, double (*func)(double)) /* Negative or zero inputs give a ValueError. */ if (!_PyLong_IsPositive((PyLongObject *)arg)) { - PyErr_SetString(PyExc_ValueError, - "math domain error"); + PyErr_Format(PyExc_ValueError, + "expected a positive input, got %S", arg); return NULL; } @@ -2220,7 +2257,7 @@ loghelper(PyObject* arg, double (*func)(double)) } /* Else let libm handle it by itself. 
*/ - return math_1(arg, func, 0); + return math_1(arg, func, 0, "expected a positive input, got %s"); } @@ -2369,7 +2406,7 @@ math_fmod_impl(PyObject *module, double x, double y) else errno = 0; } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; else return PyFloat_FromDouble(r); @@ -3010,7 +3047,7 @@ math_pow_impl(PyObject *module, double x, double y) } } - if (errno && is_error(r)) + if (errno && is_error(r, 1)) return NULL; else return PyFloat_FromDouble(r); diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index bb8d698bfed375..a35a848a7ca4b8 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -9582,42 +9582,33 @@ os_kill_impl(PyObject *module, pid_t pid, Py_ssize_t signal) Py_RETURN_NONE; #else /* !MS_WINDOWS */ - PyObject *result; DWORD sig = (DWORD)signal; - DWORD err; - HANDLE handle; #ifdef HAVE_WINDOWS_CONSOLE_IO /* Console processes which share a common console can be sent CTRL+C or CTRL+BREAK events, provided they handle said events. */ if (sig == CTRL_C_EVENT || sig == CTRL_BREAK_EVENT) { if (GenerateConsoleCtrlEvent(sig, (DWORD)pid) == 0) { - err = GetLastError(); - PyErr_SetFromWindowsErr(err); - } - else { - Py_RETURN_NONE; + return PyErr_SetFromWindowsErr(0); } + Py_RETURN_NONE; } #endif /* HAVE_WINDOWS_CONSOLE_IO */ /* If the signal is outside of what GenerateConsoleCtrlEvent can use, attempt to open and terminate the process. */ - handle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, (DWORD)pid); + HANDLE handle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, (DWORD)pid); if (handle == NULL) { - err = GetLastError(); - return PyErr_SetFromWindowsErr(err); + return PyErr_SetFromWindowsErr(0); } - if (TerminateProcess(handle, sig) == 0) { - err = GetLastError(); - result = PyErr_SetFromWindowsErr(err); - } else { - result = Py_NewRef(Py_None); + BOOL res = TerminateProcess(handle, sig); + CloseHandle(handle); + if (res == 0) { + return PyErr_SetFromWindowsErr(0); } - CloseHandle(handle); - return result; + Py_RETURN_NONE; #endif /* !MS_WINDOWS */ } #endif /* HAVE_KILL */ @@ -11442,6 +11433,38 @@ os_read_impl(PyObject *module, int fd, Py_ssize_t length) return buffer; } +/*[clinic input] +os.readinto -> Py_ssize_t + fd: int + buffer: Py_buffer(accept={rwbuffer}) + / + +Read into a buffer object from a file descriptor. + +The buffer should be mutable and bytes-like. On success, returns the number of +bytes read. Less bytes may be read than the size of the buffer. The underlying +system call will be retried when interrupted by a signal, unless the signal +handler raises an exception. Other errors will not be retried and an error will +be raised. + +Returns 0 if *fd* is at end of file or if the provided *buffer* has length 0 +(which can be used to check for errors without reading data). Never returns +negative. +[clinic start generated code]*/ + +static Py_ssize_t +os_readinto_impl(PyObject *module, int fd, Py_buffer *buffer) +/*[clinic end generated code: output=8091a3513c683a80 input=d40074d0a68de575]*/ +{ + assert(buffer->len >= 0); + Py_ssize_t result = _Py_read(fd, buffer->buf, buffer->len); + /* Ensure negative is never returned without an error. Simplifies calling + code. _Py_read should succeed, possibly reading 0 bytes, _or_ set an + error. 
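As a Python-level illustration of the mathmodule changes above (math_1()/math_1a() now take an err_msg format string instead of always reporting "math domain error"), the following sketch shows the expected new ValueError wording; the rendering of the offending value assumes the PyOS_double_to_string() formatting used in the diff, so an integer argument such as -1 is reported as -1.0:

    import math

    try:
        math.sqrt(-1)
    except ValueError as exc:
        print(exc)      # expected a nonnegative input, got -1.0

    try:
        math.atanh(2.0)
    except ValueError as exc:
        print(exc)      # expected a number between -1 and 1, got 2.0

    try:
        math.log(0)     # integer zero takes the loghelper() path
    except ValueError as exc:
        print(exc)      # expected a positive input, got 0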
*/ + assert(result >= 0 || (result == -1 && PyErr_Occurred())); + return result; +} + #if (defined(HAVE_SENDFILE) && (defined(__FreeBSD__) || defined(__DragonFly__) \ || defined(__APPLE__))) \ || defined(HAVE_READV) || defined(HAVE_PREADV) || defined (HAVE_PREADV2) \ @@ -16982,6 +17005,7 @@ static PyMethodDef posix_methods[] = { OS_LOCKF_METHODDEF OS_LSEEK_METHODDEF OS_READ_METHODDEF + OS_READINTO_METHODDEF OS_READV_METHODDEF OS_PREAD_METHODDEF OS_PREADV_METHODDEF diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index e70aa304f2f3a3..5e81253ca4a591 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -110,6 +110,7 @@ Local naming conventions: #include "pycore_fileutils.h" // _Py_set_inheritable() #include "pycore_moduleobject.h" // _PyModule_GetState #include "pycore_time.h" // _PyTime_AsMilliseconds() +#include "pycore_pystate.h" // _Py_AssertHoldsTstate() #include "pycore_pyatomic_ft_wrappers.h" #ifdef _Py_MEMORY_SANITIZER @@ -822,8 +823,8 @@ internal_select(PySocketSockObject *s, int writing, PyTime_t interval, struct timeval tv, *tvp; #endif - /* must be called with the GIL held */ - assert(PyGILState_Check()); + /* must be called with a thread state */ + _Py_AssertHoldsTstate(); /* Error condition is for output only */ assert(!(connect && !writing)); @@ -936,8 +937,8 @@ sock_call_ex(PySocketSockObject *s, int deadline_initialized = 0; int res; - /* sock_call() must be called with the GIL held. */ - assert(PyGILState_Check()); + /* sock_call() must be called with a thread state. */ + _Py_AssertHoldsTstate(); /* outer loop to retry select() when select() is interrupted by a signal or to retry select()+sock_func() on false positive (see above) */ diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c index 14e7ca591a076b..adbd2fcc6ed74d 100644 --- a/Modules/syslogmodule.c +++ b/Modules/syslogmodule.c @@ -258,7 +258,7 @@ syslog_closelog_impl(PyObject *module) // Since the sys.closelog changes the process level state of syslog library, // this operation is only allowed for the main interpreter. 
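A brief illustration of two user-visible effects of the hunks above, given as a minimal Python sketch rather than as part of the patch itself. The first part assumes a hypothetical readable file at the placeholder path "data.bin" and exercises the os.readinto() call added in Modules/posixmodule.c; the second shows the more descriptive ValueError wording wired up in Modules/mathmodule.c (message text taken from the err_msg strings above, so the exact output depends on this patch being applied).

    import math
    import os

    # os.readinto(): read from a file descriptor directly into a mutable buffer.
    # "data.bin" is a placeholder path for this sketch.
    buf = bytearray(4096)
    fd = os.open("data.bin", os.O_RDONLY)
    try:
        n = os.readinto(fd, buf)     # bytes read; 0 at EOF or for a zero-length buffer
        payload = bytes(buf[:n])
    finally:
        os.close(fd)

    # Domain errors from math functions now name the offending value.
    try:
        math.sqrt(-1.0)
    except ValueError as exc:
        print(exc)                   # e.g. "expected a nonnegative input, got -1.0"
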
if (!is_main_interpreter()) { - PyErr_SetString(PyExc_RuntimeError, "sunbinterpreter can't use syslog.closelog()"); + PyErr_SetString(PyExc_RuntimeError, "subinterpreter can't use syslog.closelog()"); return NULL; } diff --git a/Modules/timemodule.c b/Modules/timemodule.c index 340011fc08b551..5d0cd52a93a2d3 100644 --- a/Modules/timemodule.c +++ b/Modules/timemodule.c @@ -913,9 +913,10 @@ time_strftime(PyObject *module, PyObject *args) PyErr_NoMemory(); return NULL; } - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } Py_ssize_t i = 0; while (i < format_size) { fmtlen = 0; @@ -933,7 +934,7 @@ time_strftime(PyObject *module, PyObject *args) if (unicode == NULL) { goto error; } - if (_PyUnicodeWriter_WriteStr(&writer, unicode) < 0) { + if (PyUnicodeWriter_WriteStr(writer, unicode) < 0) { Py_DECREF(unicode); goto error; } @@ -947,18 +948,18 @@ time_strftime(PyObject *module, PyObject *args) break; } } - if (_PyUnicodeWriter_WriteSubstring(&writer, format_arg, start, i) < 0) { + if (PyUnicodeWriter_WriteSubstring(writer, format_arg, start, i) < 0) { goto error; } } PyMem_Free(outbuf); PyMem_Free(format); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: PyMem_Free(outbuf); PyMem_Free(format); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 024653546563e6..b3d1c425ad18b7 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -1205,7 +1205,8 @@ PyObject *PyBytes_DecodeEscape(const char *s, unsigned char c = *first_invalid_escape; if ('4' <= c && c <= '7') { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid octal escape sequence '\\%.3s'", + "b\"\\%.3s\" is an invalid octal escape sequence. " + "Such sequences will not work in the future. ", first_invalid_escape) < 0) { Py_DECREF(result); @@ -1214,7 +1215,8 @@ PyObject *PyBytes_DecodeEscape(const char *s, } else { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid escape sequence '\\%c'", + "b\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. 
", c) < 0) { Py_DECREF(result); @@ -1223,7 +1225,6 @@ PyObject *PyBytes_DecodeEscape(const char *s, } } return result; - } /* -------------------------------------------------------------------- */ /* object api */ diff --git a/Objects/clinic/bytearrayobject.c.h b/Objects/clinic/bytearrayobject.c.h index dee7c1e8bffd25..91cf5363e639d1 100644 --- a/Objects/clinic/bytearrayobject.c.h +++ b/Objects/clinic/bytearrayobject.c.h @@ -123,7 +123,7 @@ bytearray_find_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_find(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_find(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -147,7 +147,7 @@ bytearray_find(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytearray_find_impl(self, sub, start, end); + return_value = bytearray_find_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -172,7 +172,7 @@ bytearray_count_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_count(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_count(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -196,7 +196,7 @@ bytearray_count(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional: - return_value = bytearray_count_impl(self, sub, start, end); + return_value = bytearray_count_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -215,9 +215,9 @@ static PyObject * bytearray_clear_impl(PyByteArrayObject *self); static PyObject * -bytearray_clear(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_clear_impl(self); + return bytearray_clear_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_copy__doc__, @@ -233,9 +233,9 @@ static PyObject * bytearray_copy_impl(PyByteArrayObject *self); static PyObject * -bytearray_copy(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_copy_impl(self); + return bytearray_copy_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_index__doc__, @@ -259,7 +259,7 @@ bytearray_index_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_index(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -283,7 +283,7 @@ bytearray_index(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional: - return_value = bytearray_index_impl(self, sub, start, end); + return_value = bytearray_index_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -310,7 +310,7 @@ bytearray_rfind_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_rfind(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_rfind(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -334,7 +334,7 @@ bytearray_rfind(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional: - 
return_value = bytearray_rfind_impl(self, sub, start, end); + return_value = bytearray_rfind_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -361,7 +361,7 @@ bytearray_rindex_impl(PyByteArrayObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_rindex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_rindex(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -385,7 +385,7 @@ bytearray_rindex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg goto exit; } skip_optional: - return_value = bytearray_rindex_impl(self, sub, start, end); + return_value = bytearray_rindex_impl((PyByteArrayObject *)self, sub, start, end); exit: return return_value; @@ -412,7 +412,7 @@ bytearray_startswith_impl(PyByteArrayObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_startswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_startswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -436,7 +436,7 @@ bytearray_startswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t goto exit; } skip_optional: - return_value = bytearray_startswith_impl(self, subobj, start, end); + return_value = bytearray_startswith_impl((PyByteArrayObject *)self, subobj, start, end); exit: return return_value; @@ -463,7 +463,7 @@ bytearray_endswith_impl(PyByteArrayObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytearray_endswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_endswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -487,7 +487,7 @@ bytearray_endswith(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t na goto exit; } skip_optional: - return_value = bytearray_endswith_impl(self, subobj, start, end); + return_value = bytearray_endswith_impl((PyByteArrayObject *)self, subobj, start, end); exit: return return_value; @@ -510,7 +510,7 @@ static PyObject * bytearray_removeprefix_impl(PyByteArrayObject *self, Py_buffer *prefix); static PyObject * -bytearray_removeprefix(PyByteArrayObject *self, PyObject *arg) +bytearray_removeprefix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer prefix = {NULL, NULL}; @@ -518,7 +518,7 @@ bytearray_removeprefix(PyByteArrayObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &prefix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytearray_removeprefix_impl(self, &prefix); + return_value = bytearray_removeprefix_impl((PyByteArrayObject *)self, &prefix); exit: /* Cleanup for prefix */ @@ -546,7 +546,7 @@ static PyObject * bytearray_removesuffix_impl(PyByteArrayObject *self, Py_buffer *suffix); static PyObject * -bytearray_removesuffix(PyByteArrayObject *self, PyObject *arg) +bytearray_removesuffix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer suffix = {NULL, NULL}; @@ -554,7 +554,7 @@ bytearray_removesuffix(PyByteArrayObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &suffix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytearray_removesuffix_impl(self, &suffix); + return_value = bytearray_removesuffix_impl((PyByteArrayObject *)self, &suffix); exit: /* Cleanup for suffix */ @@ -585,7 +585,7 @@ bytearray_translate_impl(PyByteArrayObject *self, PyObject *table, PyObject *deletechars); 
static PyObject * -bytearray_translate(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_translate(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -629,7 +629,7 @@ bytearray_translate(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t n } deletechars = args[1]; skip_optional_pos: - return_value = bytearray_translate_impl(self, table, deletechars); + return_value = bytearray_translate_impl((PyByteArrayObject *)self, table, deletechars); exit: return return_value; @@ -704,7 +704,7 @@ bytearray_replace_impl(PyByteArrayObject *self, Py_buffer *old, Py_buffer *new, Py_ssize_t count); static PyObject * -bytearray_replace(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_buffer old = {NULL, NULL}; @@ -736,7 +736,7 @@ bytearray_replace(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nar count = ival; } skip_optional: - return_value = bytearray_replace_impl(self, &old, &new, count); + return_value = bytearray_replace_impl((PyByteArrayObject *)self, &old, &new, count); exit: /* Cleanup for old */ @@ -773,7 +773,7 @@ bytearray_split_impl(PyByteArrayObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytearray_split(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -833,7 +833,7 @@ bytearray_split(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs maxsplit = ival; } skip_optional_pos: - return_value = bytearray_split_impl(self, sep, maxsplit); + return_value = bytearray_split_impl((PyByteArrayObject *)self, sep, maxsplit); exit: return return_value; @@ -896,7 +896,7 @@ bytearray_rsplit_impl(PyByteArrayObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytearray_rsplit(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_rsplit(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -956,7 +956,7 @@ bytearray_rsplit(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg maxsplit = ival; } skip_optional_pos: - return_value = bytearray_rsplit_impl(self, sep, maxsplit); + return_value = bytearray_rsplit_impl((PyByteArrayObject *)self, sep, maxsplit); exit: return return_value; @@ -975,9 +975,9 @@ static PyObject * bytearray_reverse_impl(PyByteArrayObject *self); static PyObject * -bytearray_reverse(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_reverse(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_reverse_impl(self); + return bytearray_reverse_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_insert__doc__, @@ -998,7 +998,7 @@ static PyObject * bytearray_insert_impl(PyByteArrayObject *self, Py_ssize_t index, int item); static PyObject * -bytearray_insert(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -1022,7 +1022,7 @@ bytearray_insert(PyByteArrayObject *self, 
PyObject *const *args, Py_ssize_t narg if (!_getbytevalue(args[1], &item)) { goto exit; } - return_value = bytearray_insert_impl(self, index, item); + return_value = bytearray_insert_impl((PyByteArrayObject *)self, index, item); exit: return return_value; @@ -1044,7 +1044,7 @@ static PyObject * bytearray_append_impl(PyByteArrayObject *self, int item); static PyObject * -bytearray_append(PyByteArrayObject *self, PyObject *arg) +bytearray_append(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int item; @@ -1052,7 +1052,7 @@ bytearray_append(PyByteArrayObject *self, PyObject *arg) if (!_getbytevalue(arg, &item)) { goto exit; } - return_value = bytearray_append_impl(self, item); + return_value = bytearray_append_impl((PyByteArrayObject *)self, item); exit: return return_value; @@ -1089,7 +1089,7 @@ static PyObject * bytearray_pop_impl(PyByteArrayObject *self, Py_ssize_t index); static PyObject * -bytearray_pop(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index = -1; @@ -1113,7 +1113,7 @@ bytearray_pop(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) index = ival; } skip_optional: - return_value = bytearray_pop_impl(self, index); + return_value = bytearray_pop_impl((PyByteArrayObject *)self, index); exit: return return_value; @@ -1135,7 +1135,7 @@ static PyObject * bytearray_remove_impl(PyByteArrayObject *self, int value); static PyObject * -bytearray_remove(PyByteArrayObject *self, PyObject *arg) +bytearray_remove(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; int value; @@ -1143,7 +1143,7 @@ bytearray_remove(PyByteArrayObject *self, PyObject *arg) if (!_getbytevalue(arg, &value)) { goto exit; } - return_value = bytearray_remove_impl(self, value); + return_value = bytearray_remove_impl((PyByteArrayObject *)self, value); exit: return return_value; @@ -1164,7 +1164,7 @@ static PyObject * bytearray_strip_impl(PyByteArrayObject *self, PyObject *bytes); static PyObject * -bytearray_strip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_strip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -1177,7 +1177,7 @@ bytearray_strip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs } bytes = args[0]; skip_optional: - return_value = bytearray_strip_impl(self, bytes); + return_value = bytearray_strip_impl((PyByteArrayObject *)self, bytes); exit: return return_value; @@ -1198,7 +1198,7 @@ static PyObject * bytearray_lstrip_impl(PyByteArrayObject *self, PyObject *bytes); static PyObject * -bytearray_lstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_lstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -1211,7 +1211,7 @@ bytearray_lstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg } bytes = args[0]; skip_optional: - return_value = bytearray_lstrip_impl(self, bytes); + return_value = bytearray_lstrip_impl((PyByteArrayObject *)self, bytes); exit: return return_value; @@ -1232,7 +1232,7 @@ static PyObject * bytearray_rstrip_impl(PyByteArrayObject *self, PyObject *bytes); static PyObject * -bytearray_rstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_rstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = 
Py_None; @@ -1245,7 +1245,7 @@ bytearray_rstrip(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg } bytes = args[0]; skip_optional: - return_value = bytearray_rstrip_impl(self, bytes); + return_value = bytearray_rstrip_impl((PyByteArrayObject *)self, bytes); exit: return return_value; @@ -1274,7 +1274,7 @@ bytearray_decode_impl(PyByteArrayObject *self, const char *encoding, const char *errors); static PyObject * -bytearray_decode(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1347,7 +1347,7 @@ bytearray_decode(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t narg goto exit; } skip_optional_pos: - return_value = bytearray_decode_impl(self, encoding, errors); + return_value = bytearray_decode_impl((PyByteArrayObject *)self, encoding, errors); exit: return return_value; @@ -1382,7 +1382,7 @@ static PyObject * bytearray_splitlines_impl(PyByteArrayObject *self, int keepends); static PyObject * -bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1427,7 +1427,7 @@ bytearray_splitlines(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t goto exit; } skip_optional_pos: - return_value = bytearray_splitlines_impl(self, keepends); + return_value = bytearray_splitlines_impl((PyByteArrayObject *)self, keepends); exit: return return_value; @@ -1495,7 +1495,7 @@ static PyObject * bytearray_hex_impl(PyByteArrayObject *self, PyObject *sep, int bytes_per_sep); static PyObject * -bytearray_hex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytearray_hex(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1547,7 +1547,7 @@ bytearray_hex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs, goto exit; } skip_optional_pos: - return_value = bytearray_hex_impl(self, sep, bytes_per_sep); + return_value = bytearray_hex_impl((PyByteArrayObject *)self, sep, bytes_per_sep); exit: return return_value; @@ -1566,9 +1566,9 @@ static PyObject * bytearray_reduce_impl(PyByteArrayObject *self); static PyObject * -bytearray_reduce(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_reduce_impl(self); + return bytearray_reduce_impl((PyByteArrayObject *)self); } PyDoc_STRVAR(bytearray_reduce_ex__doc__, @@ -1584,7 +1584,7 @@ static PyObject * bytearray_reduce_ex_impl(PyByteArrayObject *self, int proto); static PyObject * -bytearray_reduce_ex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t nargs) +bytearray_reduce_ex(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; int proto = 0; @@ -1600,7 +1600,7 @@ bytearray_reduce_ex(PyByteArrayObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional: - return_value = bytearray_reduce_ex_impl(self, proto); + return_value = bytearray_reduce_ex_impl((PyByteArrayObject *)self, proto); exit: return return_value; @@ -1619,8 +1619,8 @@ static PyObject * 
bytearray_sizeof_impl(PyByteArrayObject *self); static PyObject * -bytearray_sizeof(PyByteArrayObject *self, PyObject *Py_UNUSED(ignored)) +bytearray_sizeof(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytearray_sizeof_impl(self); + return bytearray_sizeof_impl((PyByteArrayObject *)self); } -/*[clinic end generated code: output=4488e38e7ffcc6ec input=a9049054013a1b77]*/ +/*[clinic end generated code: output=bc8bec8514102bf3 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/bytesobject.c.h b/Objects/clinic/bytesobject.c.h index d2c6cc88770999..9aef736428ad0e 100644 --- a/Objects/clinic/bytesobject.c.h +++ b/Objects/clinic/bytesobject.c.h @@ -22,9 +22,9 @@ static PyObject * bytes___bytes___impl(PyBytesObject *self); static PyObject * -bytes___bytes__(PyBytesObject *self, PyObject *Py_UNUSED(ignored)) +bytes___bytes__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return bytes___bytes___impl(self); + return bytes___bytes___impl((PyBytesObject *)self); } PyDoc_STRVAR(bytes_split__doc__, @@ -48,7 +48,7 @@ static PyObject * bytes_split_impl(PyBytesObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytes_split(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_split(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -108,7 +108,7 @@ bytes_split(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje maxsplit = ival; } skip_optional_pos: - return_value = bytes_split_impl(self, sep, maxsplit); + return_value = bytes_split_impl((PyBytesObject *)self, sep, maxsplit); exit: return return_value; @@ -134,7 +134,7 @@ static PyObject * bytes_partition_impl(PyBytesObject *self, Py_buffer *sep); static PyObject * -bytes_partition(PyBytesObject *self, PyObject *arg) +bytes_partition(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer sep = {NULL, NULL}; @@ -142,7 +142,7 @@ bytes_partition(PyBytesObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &sep, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_partition_impl(self, &sep); + return_value = bytes_partition_impl((PyBytesObject *)self, &sep); exit: /* Cleanup for sep */ @@ -173,7 +173,7 @@ static PyObject * bytes_rpartition_impl(PyBytesObject *self, Py_buffer *sep); static PyObject * -bytes_rpartition(PyBytesObject *self, PyObject *arg) +bytes_rpartition(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer sep = {NULL, NULL}; @@ -181,7 +181,7 @@ bytes_rpartition(PyBytesObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &sep, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_rpartition_impl(self, &sep); + return_value = bytes_rpartition_impl((PyBytesObject *)self, &sep); exit: /* Cleanup for sep */ @@ -215,7 +215,7 @@ static PyObject * bytes_rsplit_impl(PyBytesObject *self, PyObject *sep, Py_ssize_t maxsplit); static PyObject * -bytes_rsplit(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_rsplit(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -275,7 +275,7 @@ bytes_rsplit(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObj maxsplit = ival; } skip_optional_pos: - return_value = bytes_rsplit_impl(self, sep, maxsplit); + return_value = bytes_rsplit_impl((PyBytesObject *)self, sep, maxsplit); exit: return 
return_value; @@ -317,7 +317,7 @@ bytes_find_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_find(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_find(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -341,7 +341,7 @@ bytes_find(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_find_impl(self, sub, start, end); + return_value = bytes_find_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -368,7 +368,7 @@ bytes_index_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_index(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -392,7 +392,7 @@ bytes_index(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_index_impl(self, sub, start, end); + return_value = bytes_index_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -419,7 +419,7 @@ bytes_rfind_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_rfind(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_rfind(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -443,7 +443,7 @@ bytes_rfind(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_rfind_impl(self, sub, start, end); + return_value = bytes_rfind_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -470,7 +470,7 @@ bytes_rindex_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_rindex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_rindex(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -494,7 +494,7 @@ bytes_rindex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_rindex_impl(self, sub, start, end); + return_value = bytes_rindex_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -515,7 +515,7 @@ static PyObject * bytes_strip_impl(PyBytesObject *self, PyObject *bytes); static PyObject * -bytes_strip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_strip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -528,7 +528,7 @@ bytes_strip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } bytes = args[0]; skip_optional: - return_value = bytes_strip_impl(self, bytes); + return_value = bytes_strip_impl((PyBytesObject *)self, bytes); exit: return return_value; @@ -549,7 +549,7 @@ static PyObject * bytes_lstrip_impl(PyBytesObject *self, PyObject *bytes); static PyObject * -bytes_lstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_lstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -562,7 +562,7 @@ bytes_lstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } bytes = args[0]; skip_optional: - return_value = bytes_lstrip_impl(self, bytes); + return_value = bytes_lstrip_impl((PyBytesObject 
*)self, bytes); exit: return return_value; @@ -583,7 +583,7 @@ static PyObject * bytes_rstrip_impl(PyBytesObject *self, PyObject *bytes); static PyObject * -bytes_rstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_rstrip(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *bytes = Py_None; @@ -596,7 +596,7 @@ bytes_rstrip(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) } bytes = args[0]; skip_optional: - return_value = bytes_rstrip_impl(self, bytes); + return_value = bytes_rstrip_impl((PyBytesObject *)self, bytes); exit: return return_value; @@ -621,7 +621,7 @@ bytes_count_impl(PyBytesObject *self, PyObject *sub, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_count(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_count(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *sub; @@ -645,7 +645,7 @@ bytes_count(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_count_impl(self, sub, start, end); + return_value = bytes_count_impl((PyBytesObject *)self, sub, start, end); exit: return return_value; @@ -671,7 +671,7 @@ bytes_translate_impl(PyBytesObject *self, PyObject *table, PyObject *deletechars); static PyObject * -bytes_translate(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_translate(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -715,7 +715,7 @@ bytes_translate(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, Py } deletechars = args[1]; skip_optional_pos: - return_value = bytes_translate_impl(self, table, deletechars); + return_value = bytes_translate_impl((PyBytesObject *)self, table, deletechars); exit: return return_value; @@ -790,7 +790,7 @@ bytes_replace_impl(PyBytesObject *self, Py_buffer *old, Py_buffer *new, Py_ssize_t count); static PyObject * -bytes_replace(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_buffer old = {NULL, NULL}; @@ -822,7 +822,7 @@ bytes_replace(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) count = ival; } skip_optional: - return_value = bytes_replace_impl(self, &old, &new, count); + return_value = bytes_replace_impl((PyBytesObject *)self, &old, &new, count); exit: /* Cleanup for old */ @@ -853,7 +853,7 @@ static PyObject * bytes_removeprefix_impl(PyBytesObject *self, Py_buffer *prefix); static PyObject * -bytes_removeprefix(PyBytesObject *self, PyObject *arg) +bytes_removeprefix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer prefix = {NULL, NULL}; @@ -861,7 +861,7 @@ bytes_removeprefix(PyBytesObject *self, PyObject *arg) if (PyObject_GetBuffer(arg, &prefix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_removeprefix_impl(self, &prefix); + return_value = bytes_removeprefix_impl((PyBytesObject *)self, &prefix); exit: /* Cleanup for prefix */ @@ -889,7 +889,7 @@ static PyObject * bytes_removesuffix_impl(PyBytesObject *self, Py_buffer *suffix); static PyObject * -bytes_removesuffix(PyBytesObject *self, PyObject *arg) +bytes_removesuffix(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; Py_buffer suffix = {NULL, NULL}; @@ -897,7 +897,7 @@ bytes_removesuffix(PyBytesObject *self, 
PyObject *arg) if (PyObject_GetBuffer(arg, &suffix, PyBUF_SIMPLE) != 0) { goto exit; } - return_value = bytes_removesuffix_impl(self, &suffix); + return_value = bytes_removesuffix_impl((PyBytesObject *)self, &suffix); exit: /* Cleanup for suffix */ @@ -929,7 +929,7 @@ bytes_startswith_impl(PyBytesObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_startswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_startswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -953,7 +953,7 @@ bytes_startswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_startswith_impl(self, subobj, start, end); + return_value = bytes_startswith_impl((PyBytesObject *)self, subobj, start, end); exit: return return_value; @@ -980,7 +980,7 @@ bytes_endswith_impl(PyBytesObject *self, PyObject *subobj, Py_ssize_t start, Py_ssize_t end); static PyObject * -bytes_endswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) +bytes_endswith(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *subobj; @@ -1004,7 +1004,7 @@ bytes_endswith(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = bytes_endswith_impl(self, subobj, start, end); + return_value = bytes_endswith_impl((PyBytesObject *)self, subobj, start, end); exit: return return_value; @@ -1033,7 +1033,7 @@ bytes_decode_impl(PyBytesObject *self, const char *encoding, const char *errors); static PyObject * -bytes_decode(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_decode(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1106,7 +1106,7 @@ bytes_decode(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObj goto exit; } skip_optional_pos: - return_value = bytes_decode_impl(self, encoding, errors); + return_value = bytes_decode_impl((PyBytesObject *)self, encoding, errors); exit: return return_value; @@ -1128,7 +1128,7 @@ static PyObject * bytes_splitlines_impl(PyBytesObject *self, int keepends); static PyObject * -bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_splitlines(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1173,7 +1173,7 @@ bytes_splitlines(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, P goto exit; } skip_optional_pos: - return_value = bytes_splitlines_impl(self, keepends); + return_value = bytes_splitlines_impl((PyBytesObject *)self, keepends); exit: return return_value; @@ -1241,7 +1241,7 @@ static PyObject * bytes_hex_impl(PyBytesObject *self, PyObject *sep, int bytes_per_sep); static PyObject * -bytes_hex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +bytes_hex(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -1293,7 +1293,7 @@ bytes_hex(PyBytesObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject goto exit; } skip_optional_pos: - return_value = bytes_hex_impl(self, sep, bytes_per_sep); + return_value = 
bytes_hex_impl((PyBytesObject *)self, sep, bytes_per_sep); exit: return return_value; @@ -1391,4 +1391,4 @@ bytes_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=fb7939a1983e463a input=a9049054013a1b77]*/ +/*[clinic end generated code: output=96fe2d6ef9ac8f6a input=a9049054013a1b77]*/ diff --git a/Objects/clinic/classobject.c.h b/Objects/clinic/classobject.c.h index 3e149c97324a6a..5934f1c2a41669 100644 --- a/Objects/clinic/classobject.c.h +++ b/Objects/clinic/classobject.c.h @@ -16,9 +16,9 @@ static PyObject * method___reduce___impl(PyMethodObject *self); static PyObject * -method___reduce__(PyMethodObject *self, PyObject *Py_UNUSED(ignored)) +method___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return method___reduce___impl(self); + return method___reduce___impl((PyMethodObject *)self); } PyDoc_STRVAR(method_new__doc__, @@ -82,4 +82,4 @@ instancemethod_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=5a5e3f2d0726f189 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ab546abf90aac94e input=a9049054013a1b77]*/ diff --git a/Objects/clinic/codeobject.c.h b/Objects/clinic/codeobject.c.h index 45738f767df50f..2184742cc0d99d 100644 --- a/Objects/clinic/codeobject.c.h +++ b/Objects/clinic/codeobject.c.h @@ -174,7 +174,7 @@ code_replace_impl(PyCodeObject *self, int co_argcount, PyObject *co_exceptiontable); static PyObject * -code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +code_replace(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -204,24 +204,24 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje #undef KWTUPLE PyObject *argsbuf[18]; Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 0; - int co_argcount = self->co_argcount; - int co_posonlyargcount = self->co_posonlyargcount; - int co_kwonlyargcount = self->co_kwonlyargcount; - int co_nlocals = self->co_nlocals; - int co_stacksize = self->co_stacksize; - int co_flags = self->co_flags; - int co_firstlineno = self->co_firstlineno; + int co_argcount = ((PyCodeObject *)self)->co_argcount; + int co_posonlyargcount = ((PyCodeObject *)self)->co_posonlyargcount; + int co_kwonlyargcount = ((PyCodeObject *)self)->co_kwonlyargcount; + int co_nlocals = ((PyCodeObject *)self)->co_nlocals; + int co_stacksize = ((PyCodeObject *)self)->co_stacksize; + int co_flags = ((PyCodeObject *)self)->co_flags; + int co_firstlineno = ((PyCodeObject *)self)->co_firstlineno; PyObject *co_code = NULL; - PyObject *co_consts = self->co_consts; - PyObject *co_names = self->co_names; + PyObject *co_consts = ((PyCodeObject *)self)->co_consts; + PyObject *co_names = ((PyCodeObject *)self)->co_names; PyObject *co_varnames = NULL; PyObject *co_freevars = NULL; PyObject *co_cellvars = NULL; - PyObject *co_filename = self->co_filename; - PyObject *co_name = self->co_name; - PyObject *co_qualname = self->co_qualname; - PyObject *co_linetable = self->co_linetable; - PyObject *co_exceptiontable = self->co_exceptiontable; + PyObject *co_filename = ((PyCodeObject *)self)->co_filename; + PyObject *co_name = ((PyCodeObject *)self)->co_name; + PyObject *co_qualname = ((PyCodeObject *)self)->co_qualname; + PyObject *co_linetable = ((PyCodeObject *)self)->co_linetable; + PyObject *co_exceptiontable = ((PyCodeObject *)self)->co_exceptiontable; args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, /*minpos*/ 0, /*maxpos*/ 0, /*minkw*/ 0, /*varpos*/ 0, argsbuf); @@ -400,7 +400,7 @@ code_replace(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje } co_exceptiontable = args[17]; skip_optional_kwonly: - return_value = code_replace_impl(self, co_argcount, co_posonlyargcount, co_kwonlyargcount, co_nlocals, co_stacksize, co_flags, co_firstlineno, co_code, co_consts, co_names, co_varnames, co_freevars, co_cellvars, co_filename, co_name, co_qualname, co_linetable, co_exceptiontable); + return_value = code_replace_impl((PyCodeObject *)self, co_argcount, co_posonlyargcount, co_kwonlyargcount, co_nlocals, co_stacksize, co_flags, co_firstlineno, co_code, co_consts, co_names, co_varnames, co_freevars, co_cellvars, co_filename, co_name, co_qualname, co_linetable, co_exceptiontable); exit: return return_value; @@ -421,7 +421,7 @@ static PyObject * code__varname_from_oparg_impl(PyCodeObject *self, int oparg); static PyObject * -code__varname_from_oparg(PyCodeObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +code__varname_from_oparg(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -461,9 +461,9 @@ code__varname_from_oparg(PyCodeObject *self, PyObject *const *args, Py_ssize_t n if (oparg == -1 && PyErr_Occurred()) { goto exit; } - return_value = code__varname_from_oparg_impl(self, oparg); + return_value = code__varname_from_oparg_impl((PyCodeObject *)self, oparg); exit: return return_value; } -/*[clinic end generated code: output=e919ea67a1bcf524 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=73861c79e93aaee5 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/complexobject.c.h b/Objects/clinic/complexobject.c.h index 3c3d1071b6eec3..e00da1d960c54d 100644 --- 
a/Objects/clinic/complexobject.c.h +++ b/Objects/clinic/complexobject.c.h @@ -21,9 +21,9 @@ static PyObject * complex_conjugate_impl(PyComplexObject *self); static PyObject * -complex_conjugate(PyComplexObject *self, PyObject *Py_UNUSED(ignored)) +complex_conjugate(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return complex_conjugate_impl(self); + return complex_conjugate_impl((PyComplexObject *)self); } PyDoc_STRVAR(complex___getnewargs____doc__, @@ -38,9 +38,9 @@ static PyObject * complex___getnewargs___impl(PyComplexObject *self); static PyObject * -complex___getnewargs__(PyComplexObject *self, PyObject *Py_UNUSED(ignored)) +complex___getnewargs__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return complex___getnewargs___impl(self); + return complex___getnewargs___impl((PyComplexObject *)self); } PyDoc_STRVAR(complex___format____doc__, @@ -56,7 +56,7 @@ static PyObject * complex___format___impl(PyComplexObject *self, PyObject *format_spec); static PyObject * -complex___format__(PyComplexObject *self, PyObject *arg) +complex___format__(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *format_spec; @@ -66,7 +66,7 @@ complex___format__(PyComplexObject *self, PyObject *arg) goto exit; } format_spec = arg; - return_value = complex___format___impl(self, format_spec); + return_value = complex___format___impl((PyComplexObject *)self, format_spec); exit: return return_value; @@ -85,9 +85,9 @@ static PyObject * complex___complex___impl(PyComplexObject *self); static PyObject * -complex___complex__(PyComplexObject *self, PyObject *Py_UNUSED(ignored)) +complex___complex__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return complex___complex___impl(self); + return complex___complex___impl((PyComplexObject *)self); } PyDoc_STRVAR(complex_new__doc__, @@ -170,4 +170,4 @@ PyDoc_STRVAR(complex_from_number__doc__, #define COMPLEX_FROM_NUMBER_METHODDEF \ {"from_number", (PyCFunction)complex_from_number, METH_O|METH_CLASS, complex_from_number__doc__}, -/*[clinic end generated code: output=8c49a41c5a7f0aee input=a9049054013a1b77]*/ +/*[clinic end generated code: output=252cddef7f9169a0 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/dictobject.c.h b/Objects/clinic/dictobject.c.h index fb46c4c64334f9..cdf39ce147203b 100644 --- a/Objects/clinic/dictobject.c.h +++ b/Objects/clinic/dictobject.c.h @@ -52,9 +52,9 @@ static PyObject * dict_copy_impl(PyDictObject *self); static PyObject * -dict_copy(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_copy_impl(self); + return dict_copy_impl((PyDictObject *)self); } PyDoc_STRVAR(dict___contains____doc__, @@ -79,7 +79,7 @@ static PyObject * dict_get_impl(PyDictObject *self, PyObject *key, PyObject *default_value); static PyObject * -dict_get(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) +dict_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -95,7 +95,7 @@ dict_get(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) default_value = args[1]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = dict_get_impl(self, key, default_value); + return_value = dict_get_impl((PyDictObject *)self, key, default_value); Py_END_CRITICAL_SECTION(); exit: @@ -118,7 +118,7 @@ dict_setdefault_impl(PyDictObject *self, PyObject *key, PyObject *default_value); static PyObject * -dict_setdefault(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) +dict_setdefault(PyObject 
*self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -134,7 +134,7 @@ dict_setdefault(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) default_value = args[1]; skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = dict_setdefault_impl(self, key, default_value); + return_value = dict_setdefault_impl((PyDictObject *)self, key, default_value); Py_END_CRITICAL_SECTION(); exit: @@ -154,9 +154,9 @@ static PyObject * dict_clear_impl(PyDictObject *self); static PyObject * -dict_clear(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_clear_impl(self); + return dict_clear_impl((PyDictObject *)self); } PyDoc_STRVAR(dict_pop__doc__, @@ -175,7 +175,7 @@ static PyObject * dict_pop_impl(PyDictObject *self, PyObject *key, PyObject *default_value); static PyObject * -dict_pop(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) +dict_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -190,7 +190,7 @@ dict_pop(PyDictObject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: - return_value = dict_pop_impl(self, key, default_value); + return_value = dict_pop_impl((PyDictObject *)self, key, default_value); exit: return return_value; @@ -212,12 +212,12 @@ static PyObject * dict_popitem_impl(PyDictObject *self); static PyObject * -dict_popitem(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_popitem(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = dict_popitem_impl(self); + return_value = dict_popitem_impl((PyDictObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -236,9 +236,9 @@ static PyObject * dict___sizeof___impl(PyDictObject *self); static PyObject * -dict___sizeof__(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict___sizeof___impl(self); + return dict___sizeof___impl((PyDictObject *)self); } PyDoc_STRVAR(dict___reversed____doc__, @@ -254,9 +254,9 @@ static PyObject * dict___reversed___impl(PyDictObject *self); static PyObject * -dict___reversed__(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict___reversed__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict___reversed___impl(self); + return dict___reversed___impl((PyDictObject *)self); } PyDoc_STRVAR(dict_keys__doc__, @@ -272,9 +272,9 @@ static PyObject * dict_keys_impl(PyDictObject *self); static PyObject * -dict_keys(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_keys_impl(self); + return dict_keys_impl((PyDictObject *)self); } PyDoc_STRVAR(dict_items__doc__, @@ -290,9 +290,9 @@ static PyObject * dict_items_impl(PyDictObject *self); static PyObject * -dict_items(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_items(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_items_impl(self); + return dict_items_impl((PyDictObject *)self); } PyDoc_STRVAR(dict_values__doc__, @@ -308,8 +308,8 @@ static PyObject * dict_values_impl(PyDictObject *self); static PyObject * -dict_values(PyDictObject *self, PyObject *Py_UNUSED(ignored)) +dict_values(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return dict_values_impl(self); + return dict_values_impl((PyDictObject *)self); } -/*[clinic end generated code: output=f3dd5f3fb8122aef 
input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4956c5b276ea652f input=a9049054013a1b77]*/ diff --git a/Objects/clinic/exceptions.c.h b/Objects/clinic/exceptions.c.h index caa5b0c63e53c5..8699df07495ad8 100644 --- a/Objects/clinic/exceptions.c.h +++ b/Objects/clinic/exceptions.c.h @@ -17,12 +17,12 @@ static PyObject * BaseException___reduce___impl(PyBaseExceptionObject *self); static PyObject * -BaseException___reduce__(PyBaseExceptionObject *self, PyObject *Py_UNUSED(ignored)) +BaseException___reduce__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___reduce___impl(self); + return_value = BaseException___reduce___impl((PyBaseExceptionObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -45,7 +45,7 @@ BaseException___setstate__(PyBaseExceptionObject *self, PyObject *state) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___setstate___impl(self, state); + return_value = BaseException___setstate___impl((PyBaseExceptionObject *)self, state); Py_END_CRITICAL_SECTION(); return return_value; @@ -69,7 +69,7 @@ BaseException_with_traceback(PyBaseExceptionObject *self, PyObject *tb) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException_with_traceback_impl(self, tb); + return_value = BaseException_with_traceback_impl((PyBaseExceptionObject *)self, tb); Py_END_CRITICAL_SECTION(); return return_value; @@ -88,7 +88,7 @@ static PyObject * BaseException_add_note_impl(PyBaseExceptionObject *self, PyObject *note); static PyObject * -BaseException_add_note(PyBaseExceptionObject *self, PyObject *arg) +BaseException_add_note(PyObject *self, PyObject *arg) { PyObject *return_value = NULL; PyObject *note; @@ -99,7 +99,7 @@ BaseException_add_note(PyBaseExceptionObject *self, PyObject *arg) } note = arg; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException_add_note_impl(self, note); + return_value = BaseException_add_note_impl((PyBaseExceptionObject *)self, note); Py_END_CRITICAL_SECTION(); exit: @@ -120,12 +120,12 @@ static PyObject * BaseException_args_get_impl(PyBaseExceptionObject *self); static PyObject * -BaseException_args_get(PyBaseExceptionObject *self, void *Py_UNUSED(context)) +BaseException_args_get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException_args_get_impl(self); + return_value = BaseException_args_get_impl((PyBaseExceptionObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -145,12 +145,12 @@ static int BaseException_args_set_impl(PyBaseExceptionObject *self, PyObject *value); static int -BaseException_args_set(PyBaseExceptionObject *self, PyObject *value, void *Py_UNUSED(context)) +BaseException_args_set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException_args_set_impl(self, value); + return_value = BaseException_args_set_impl((PyBaseExceptionObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -170,12 +170,12 @@ static PyObject * BaseException___traceback___get_impl(PyBaseExceptionObject *self); static PyObject * -BaseException___traceback___get(PyBaseExceptionObject *self, void *Py_UNUSED(context)) +BaseException___traceback___get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = 
BaseException___traceback___get_impl(self); + return_value = BaseException___traceback___get_impl((PyBaseExceptionObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -196,12 +196,12 @@ BaseException___traceback___set_impl(PyBaseExceptionObject *self, PyObject *value); static int -BaseException___traceback___set(PyBaseExceptionObject *self, PyObject *value, void *Py_UNUSED(context)) +BaseException___traceback___set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___traceback___set_impl(self, value); + return_value = BaseException___traceback___set_impl((PyBaseExceptionObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -221,12 +221,12 @@ static PyObject * BaseException___context___get_impl(PyBaseExceptionObject *self); static PyObject * -BaseException___context___get(PyBaseExceptionObject *self, void *Py_UNUSED(context)) +BaseException___context___get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___context___get_impl(self); + return_value = BaseException___context___get_impl((PyBaseExceptionObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -247,12 +247,12 @@ BaseException___context___set_impl(PyBaseExceptionObject *self, PyObject *value); static int -BaseException___context___set(PyBaseExceptionObject *self, PyObject *value, void *Py_UNUSED(context)) +BaseException___context___set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___context___set_impl(self, value); + return_value = BaseException___context___set_impl((PyBaseExceptionObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -272,12 +272,12 @@ static PyObject * BaseException___cause___get_impl(PyBaseExceptionObject *self); static PyObject * -BaseException___cause___get(PyBaseExceptionObject *self, void *Py_UNUSED(context)) +BaseException___cause___get(PyObject *self, void *Py_UNUSED(context)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___cause___get_impl(self); + return_value = BaseException___cause___get_impl((PyBaseExceptionObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -298,14 +298,86 @@ BaseException___cause___set_impl(PyBaseExceptionObject *self, PyObject *value); static int -BaseException___cause___set(PyBaseExceptionObject *self, PyObject *value, void *Py_UNUSED(context)) +BaseException___cause___set(PyObject *self, PyObject *value, void *Py_UNUSED(context)) { int return_value; Py_BEGIN_CRITICAL_SECTION(self); - return_value = BaseException___cause___set_impl(self, value); + return_value = BaseException___cause___set_impl((PyBaseExceptionObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=58afcfd60057fc39 input=a9049054013a1b77]*/ + +PyDoc_STRVAR(BaseExceptionGroup_derive__doc__, +"derive($self, excs, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTIONGROUP_DERIVE_METHODDEF \ + {"derive", (PyCFunction)BaseExceptionGroup_derive, METH_O, BaseExceptionGroup_derive__doc__}, + +static PyObject * +BaseExceptionGroup_derive_impl(PyBaseExceptionGroupObject *self, + PyObject *excs); + +static PyObject * +BaseExceptionGroup_derive(PyBaseExceptionGroupObject *self, PyObject *excs) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + 
return_value = BaseExceptionGroup_derive_impl((PyBaseExceptionGroupObject *)self, excs); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseExceptionGroup_split__doc__, +"split($self, matcher_value, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTIONGROUP_SPLIT_METHODDEF \ + {"split", (PyCFunction)BaseExceptionGroup_split, METH_O, BaseExceptionGroup_split__doc__}, + +static PyObject * +BaseExceptionGroup_split_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value); + +static PyObject * +BaseExceptionGroup_split(PyBaseExceptionGroupObject *self, PyObject *matcher_value) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseExceptionGroup_split_impl((PyBaseExceptionGroupObject *)self, matcher_value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} + +PyDoc_STRVAR(BaseExceptionGroup_subgroup__doc__, +"subgroup($self, matcher_value, /)\n" +"--\n" +"\n"); + +#define BASEEXCEPTIONGROUP_SUBGROUP_METHODDEF \ + {"subgroup", (PyCFunction)BaseExceptionGroup_subgroup, METH_O, BaseExceptionGroup_subgroup__doc__}, + +static PyObject * +BaseExceptionGroup_subgroup_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value); + +static PyObject * +BaseExceptionGroup_subgroup(PyBaseExceptionGroupObject *self, PyObject *matcher_value) +{ + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = BaseExceptionGroup_subgroup_impl((PyBaseExceptionGroupObject *)self, matcher_value); + Py_END_CRITICAL_SECTION(); + + return return_value; +} +/*[clinic end generated code: output=19aed708dcaf7184 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/listobject.c.h b/Objects/clinic/listobject.c.h index 975f253c096275..a29ed9f7088700 100644 --- a/Objects/clinic/listobject.c.h +++ b/Objects/clinic/listobject.c.h @@ -23,7 +23,7 @@ static PyObject * list_insert_impl(PyListObject *self, Py_ssize_t index, PyObject *object); static PyObject * -list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) +list_insert(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index; @@ -46,7 +46,7 @@ list_insert(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) } object = args[1]; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_insert_impl(self, index, object); + return_value = list_insert_impl((PyListObject *)self, index, object); Py_END_CRITICAL_SECTION(); exit: @@ -66,12 +66,12 @@ static PyObject * py_list_clear_impl(PyListObject *self); static PyObject * -py_list_clear(PyListObject *self, PyObject *Py_UNUSED(ignored)) +py_list_clear(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = py_list_clear_impl(self); + return_value = py_list_clear_impl((PyListObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -90,12 +90,12 @@ static PyObject * list_copy_impl(PyListObject *self); static PyObject * -list_copy(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_copy_impl(self); + return_value = list_copy_impl((PyListObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -119,7 +119,7 @@ list_append(PyListObject *self, PyObject *object) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_append_impl(self, object); + return_value = list_append_impl((PyListObject *)self, 
object); Py_END_CRITICAL_SECTION(); return return_value; @@ -149,7 +149,7 @@ static PyObject * list_pop_impl(PyListObject *self, Py_ssize_t index); static PyObject * -list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) +list_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; Py_ssize_t index = -1; @@ -174,7 +174,7 @@ list_pop(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) } skip_optional: Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_pop_impl(self, index); + return_value = list_pop_impl((PyListObject *)self, index); Py_END_CRITICAL_SECTION(); exit: @@ -202,7 +202,7 @@ static PyObject * list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse); static PyObject * -list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +list_sort(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -255,7 +255,7 @@ list_sort(PyListObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject } skip_optional_kwonly: Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_sort_impl(self, keyfunc, reverse); + return_value = list_sort_impl((PyListObject *)self, keyfunc, reverse); Py_END_CRITICAL_SECTION(); exit: @@ -275,12 +275,12 @@ static PyObject * list_reverse_impl(PyListObject *self); static PyObject * -list_reverse(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list_reverse(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_reverse_impl(self); + return_value = list_reverse_impl((PyListObject *)self); Py_END_CRITICAL_SECTION(); return return_value; @@ -302,7 +302,7 @@ list_index_impl(PyListObject *self, PyObject *value, Py_ssize_t start, Py_ssize_t stop); static PyObject * -list_index(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) +list_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *value; @@ -326,7 +326,7 @@ list_index(PyListObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = list_index_impl(self, value, start, stop); + return_value = list_index_impl((PyListObject *)self, value, start, stop); exit: return return_value; @@ -361,7 +361,7 @@ list_remove(PyListObject *self, PyObject *value) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(self); - return_value = list_remove_impl(self, value); + return_value = list_remove_impl((PyListObject *)self, value); Py_END_CRITICAL_SECTION(); return return_value; @@ -418,9 +418,9 @@ static PyObject * list___sizeof___impl(PyListObject *self); static PyObject * -list___sizeof__(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return list___sizeof___impl(self); + return list___sizeof___impl((PyListObject *)self); } PyDoc_STRVAR(list___reversed____doc__, @@ -436,8 +436,8 @@ static PyObject * list___reversed___impl(PyListObject *self); static PyObject * -list___reversed__(PyListObject *self, PyObject *Py_UNUSED(ignored)) +list___reversed__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return list___reversed___impl(self); + return list___reversed___impl((PyListObject *)self); } -/*[clinic end generated code: output=9357151278d77ea1 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=35c43dc33f9ba521 input=a9049054013a1b77]*/ diff --git 
a/Objects/clinic/memoryobject.c.h b/Objects/clinic/memoryobject.c.h index a6cf1f431a15b0..4706c92051926c 100644 --- a/Objects/clinic/memoryobject.c.h +++ b/Objects/clinic/memoryobject.c.h @@ -137,9 +137,9 @@ static PyObject * memoryview_release_impl(PyMemoryViewObject *self); static PyObject * -memoryview_release(PyMemoryViewObject *self, PyObject *Py_UNUSED(ignored)) +memoryview_release(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return memoryview_release_impl(self); + return memoryview_release_impl((PyMemoryViewObject *)self); } PyDoc_STRVAR(memoryview_cast__doc__, @@ -156,7 +156,7 @@ memoryview_cast_impl(PyMemoryViewObject *self, PyObject *format, PyObject *shape); static PyObject * -memoryview_cast(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +memoryview_cast(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -204,7 +204,7 @@ memoryview_cast(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t narg } shape = args[1]; skip_optional_pos: - return_value = memoryview_cast_impl(self, format, shape); + return_value = memoryview_cast_impl((PyMemoryViewObject *)self, format, shape); exit: return return_value; @@ -223,9 +223,9 @@ static PyObject * memoryview_toreadonly_impl(PyMemoryViewObject *self); static PyObject * -memoryview_toreadonly(PyMemoryViewObject *self, PyObject *Py_UNUSED(ignored)) +memoryview_toreadonly(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return memoryview_toreadonly_impl(self); + return memoryview_toreadonly_impl((PyMemoryViewObject *)self); } PyDoc_STRVAR(memoryview_tolist__doc__, @@ -241,9 +241,9 @@ static PyObject * memoryview_tolist_impl(PyMemoryViewObject *self); static PyObject * -memoryview_tolist(PyMemoryViewObject *self, PyObject *Py_UNUSED(ignored)) +memoryview_tolist(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return memoryview_tolist_impl(self); + return memoryview_tolist_impl((PyMemoryViewObject *)self); } PyDoc_STRVAR(memoryview_tobytes__doc__, @@ -265,7 +265,7 @@ static PyObject * memoryview_tobytes_impl(PyMemoryViewObject *self, const char *order); static PyObject * -memoryview_tobytes(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +memoryview_tobytes(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -324,7 +324,7 @@ memoryview_tobytes(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_pos: - return_value = memoryview_tobytes_impl(self, order); + return_value = memoryview_tobytes_impl((PyMemoryViewObject *)self, order); exit: return return_value; @@ -361,7 +361,7 @@ memoryview_hex_impl(PyMemoryViewObject *self, PyObject *sep, int bytes_per_sep); static PyObject * -memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +memoryview_hex(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -413,7 +413,7 @@ memoryview_hex(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional_pos: - return_value = memoryview_hex_impl(self, sep, bytes_per_sep); + return_value = memoryview_hex_impl((PyMemoryViewObject *)self, sep, bytes_per_sep); exit: return return_value; @@ -444,7 +444,7 @@ 
memoryview_index_impl(PyMemoryViewObject *self, PyObject *value, Py_ssize_t start, Py_ssize_t stop); static PyObject * -memoryview_index(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nargs) +memoryview_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *value; @@ -468,9 +468,9 @@ memoryview_index(PyMemoryViewObject *self, PyObject *const *args, Py_ssize_t nar goto exit; } skip_optional: - return_value = memoryview_index_impl(self, value, start, stop); + return_value = memoryview_index_impl((PyMemoryViewObject *)self, value, start, stop); exit: return return_value; } -/*[clinic end generated code: output=132893ef5f67ad73 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=2ef6c061d9c4e3dc input=a9049054013a1b77]*/ diff --git a/Objects/clinic/odictobject.c.h b/Objects/clinic/odictobject.c.h index 4b97596e5dbd2f..44d89c4e0ef2f7 100644 --- a/Objects/clinic/odictobject.c.h +++ b/Objects/clinic/odictobject.c.h @@ -87,7 +87,7 @@ OrderedDict_setdefault_impl(PyODictObject *self, PyObject *key, PyObject *default_value); static PyObject * -OrderedDict_setdefault(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_setdefault(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -131,7 +131,7 @@ OrderedDict_setdefault(PyODictObject *self, PyObject *const *args, Py_ssize_t na } default_value = args[1]; skip_optional_pos: - return_value = OrderedDict_setdefault_impl(self, key, default_value); + return_value = OrderedDict_setdefault_impl((PyODictObject *)self, key, default_value); exit: return return_value; @@ -154,7 +154,7 @@ OrderedDict_pop_impl(PyODictObject *self, PyObject *key, PyObject *default_value); static PyObject * -OrderedDict_pop(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_pop(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -198,7 +198,7 @@ OrderedDict_pop(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, Py } default_value = args[1]; skip_optional_pos: - return_value = OrderedDict_pop_impl(self, key, default_value); + return_value = OrderedDict_pop_impl((PyODictObject *)self, key, default_value); exit: return return_value; @@ -219,7 +219,7 @@ static PyObject * OrderedDict_popitem_impl(PyODictObject *self, int last); static PyObject * -OrderedDict_popitem(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_popitem(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -264,7 +264,7 @@ OrderedDict_popitem(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs goto exit; } skip_optional_pos: - return_value = OrderedDict_popitem_impl(self, last); + return_value = OrderedDict_popitem_impl((PyODictObject *)self, last); exit: return return_value; @@ -285,7 +285,7 @@ static PyObject * OrderedDict_move_to_end_impl(PyODictObject *self, PyObject *key, int last); static PyObject * -OrderedDict_move_to_end(PyODictObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +OrderedDict_move_to_end(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if 
defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -332,9 +332,9 @@ OrderedDict_move_to_end(PyODictObject *self, PyObject *const *args, Py_ssize_t n goto exit; } skip_optional_pos: - return_value = OrderedDict_move_to_end_impl(self, key, last); + return_value = OrderedDict_move_to_end_impl((PyODictObject *)self, key, last); exit: return return_value; } -/*[clinic end generated code: output=2aa6fc0567c9252c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=55bd390bb516e997 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/setobject.c.h b/Objects/clinic/setobject.c.h index 986993b4aa9bda..bf7e604e4b0a46 100644 --- a/Objects/clinic/setobject.c.h +++ b/Objects/clinic/setobject.c.h @@ -19,12 +19,12 @@ static PyObject * set_pop_impl(PySetObject *so); static PyObject * -set_pop(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set_pop(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_pop_impl(so); + return_value = set_pop_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -44,7 +44,7 @@ set_update_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_update(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -52,7 +52,7 @@ set_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_update_impl(so, others, others_length); + return_value = set_update_impl((PySetObject *)so, others, others_length); return return_value; } @@ -70,12 +70,12 @@ static PyObject * set_copy_impl(PySetObject *so); static PyObject * -set_copy(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set_copy(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_copy_impl(so); + return_value = set_copy_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -94,12 +94,12 @@ static PyObject * frozenset_copy_impl(PySetObject *so); static PyObject * -frozenset_copy(PySetObject *so, PyObject *Py_UNUSED(ignored)) +frozenset_copy(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = frozenset_copy_impl(so); + return_value = frozenset_copy_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -118,12 +118,12 @@ static PyObject * set_clear_impl(PySetObject *so); static PyObject * -set_clear(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set_clear(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_clear_impl(so); + return_value = set_clear_impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -143,7 +143,7 @@ set_union_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_union(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_union(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -151,7 +151,7 @@ set_union(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_union_impl(so, others, others_length); + return_value = set_union_impl((PySetObject *)so, others, others_length); return return_value; } @@ -170,7 
+170,7 @@ set_intersection_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_intersection_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_intersection_multi(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -178,7 +178,7 @@ set_intersection_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_intersection_multi_impl(so, others, others_length); + return_value = set_intersection_multi_impl((PySetObject *)so, others, others_length); return return_value; } @@ -197,7 +197,7 @@ set_intersection_update_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_intersection_update_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_intersection_update_multi(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -205,7 +205,7 @@ set_intersection_update_multi(PySetObject *so, PyObject *const *args, Py_ssize_t others = args; others_length = nargs; - return_value = set_intersection_update_multi_impl(so, others, others_length); + return_value = set_intersection_update_multi_impl((PySetObject *)so, others, others_length); return return_value; } @@ -228,7 +228,7 @@ set_isdisjoint(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_isdisjoint_impl(so, other); + return_value = set_isdisjoint_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -248,7 +248,7 @@ set_difference_update_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_difference_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_difference_update(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -256,7 +256,7 @@ set_difference_update(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_difference_update_impl(so, others, others_length); + return_value = set_difference_update_impl((PySetObject *)so, others, others_length); return return_value; } @@ -275,7 +275,7 @@ set_difference_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t others_length); static PyObject * -set_difference_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) +set_difference_multi(PyObject *so, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject * const *others; @@ -283,7 +283,7 @@ set_difference_multi(PySetObject *so, PyObject *const *args, Py_ssize_t nargs) others = args; others_length = nargs; - return_value = set_difference_multi_impl(so, others, others_length); + return_value = set_difference_multi_impl((PySetObject *)so, others, others_length); return return_value; } @@ -315,7 +315,7 @@ set_symmetric_difference(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_symmetric_difference_impl(so, other); + return_value = set_symmetric_difference_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -339,7 +339,7 @@ set_issubset(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_issubset_impl(so, other); + return_value 
= set_issubset_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -363,7 +363,7 @@ set_issuperset(PySetObject *so, PyObject *other) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION2(so, other); - return_value = set_issuperset_impl(so, other); + return_value = set_issuperset_impl((PySetObject *)so, other); Py_END_CRITICAL_SECTION2(); return return_value; @@ -389,7 +389,7 @@ set_add(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_add_impl(so, key); + return_value = set_add_impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -413,7 +413,7 @@ set___contains__(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set___contains___impl(so, key); + return_value = set___contains___impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -439,7 +439,7 @@ set_remove(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_remove_impl(so, key); + return_value = set_remove_impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -466,7 +466,7 @@ set_discard(PySetObject *so, PyObject *key) PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set_discard_impl(so, key); + return_value = set_discard_impl((PySetObject *)so, key); Py_END_CRITICAL_SECTION(); return return_value; @@ -485,12 +485,12 @@ static PyObject * set___reduce___impl(PySetObject *so); static PyObject * -set___reduce__(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set___reduce__(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set___reduce___impl(so); + return_value = set___reduce___impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; @@ -509,14 +509,14 @@ static PyObject * set___sizeof___impl(PySetObject *so); static PyObject * -set___sizeof__(PySetObject *so, PyObject *Py_UNUSED(ignored)) +set___sizeof__(PyObject *so, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; Py_BEGIN_CRITICAL_SECTION(so); - return_value = set___sizeof___impl(so); + return_value = set___sizeof___impl((PySetObject *)so); Py_END_CRITICAL_SECTION(); return return_value; } -/*[clinic end generated code: output=4b65e7709927f31f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=83b7742a762ce465 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/tupleobject.c.h b/Objects/clinic/tupleobject.c.h index 5d6a2c481a5f2a..40ffd4c1755769 100644 --- a/Objects/clinic/tupleobject.c.h +++ b/Objects/clinic/tupleobject.c.h @@ -20,7 +20,7 @@ tuple_index_impl(PyTupleObject *self, PyObject *value, Py_ssize_t start, Py_ssize_t stop); static PyObject * -tuple_index(PyTupleObject *self, PyObject *const *args, Py_ssize_t nargs) +tuple_index(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *value; @@ -44,7 +44,7 @@ tuple_index(PyTupleObject *self, PyObject *const *args, Py_ssize_t nargs) goto exit; } skip_optional: - return_value = tuple_index_impl(self, value, start, stop); + return_value = tuple_index_impl((PyTupleObject *)self, value, start, stop); exit: return return_value; @@ -110,8 +110,8 @@ static PyObject * tuple___getnewargs___impl(PyTupleObject *self); static PyObject * -tuple___getnewargs__(PyTupleObject *self, PyObject *Py_UNUSED(ignored)) +tuple___getnewargs__(PyObject *self, 
PyObject *Py_UNUSED(ignored)) { - return tuple___getnewargs___impl(self); + return tuple___getnewargs___impl((PyTupleObject *)self); } -/*[clinic end generated code: output=a6a9abba5d121f4c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=779cb4a13db67397 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/typeobject.c.h b/Objects/clinic/typeobject.c.h index 1fa153598db213..5e8187b3f5b748 100644 --- a/Objects/clinic/typeobject.c.h +++ b/Objects/clinic/typeobject.c.h @@ -22,7 +22,7 @@ type___instancecheck__(PyTypeObject *self, PyObject *instance) PyObject *return_value = NULL; int _return_value; - _return_value = type___instancecheck___impl(self, instance); + _return_value = type___instancecheck___impl((PyTypeObject *)self, instance); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -50,7 +50,7 @@ type___subclasscheck__(PyTypeObject *self, PyObject *subclass) PyObject *return_value = NULL; int _return_value; - _return_value = type___subclasscheck___impl(self, subclass); + _return_value = type___subclasscheck___impl((PyTypeObject *)self, subclass); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -73,9 +73,9 @@ static PyObject * type_mro_impl(PyTypeObject *self); static PyObject * -type_mro(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type_mro(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type_mro_impl(self); + return type_mro_impl((PyTypeObject *)self); } PyDoc_STRVAR(type___subclasses____doc__, @@ -91,9 +91,9 @@ static PyObject * type___subclasses___impl(PyTypeObject *self); static PyObject * -type___subclasses__(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type___subclasses__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type___subclasses___impl(self); + return type___subclasses___impl((PyTypeObject *)self); } PyDoc_STRVAR(type___dir____doc__, @@ -109,9 +109,9 @@ static PyObject * type___dir___impl(PyTypeObject *self); static PyObject * -type___dir__(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type___dir__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type___dir___impl(self); + return type___dir___impl((PyTypeObject *)self); } PyDoc_STRVAR(type___sizeof____doc__, @@ -127,9 +127,9 @@ static PyObject * type___sizeof___impl(PyTypeObject *self); static PyObject * -type___sizeof__(PyTypeObject *self, PyObject *Py_UNUSED(ignored)) +type___sizeof__(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return type___sizeof___impl(self); + return type___sizeof___impl((PyTypeObject *)self); } PyDoc_STRVAR(object___getstate____doc__, @@ -262,4 +262,4 @@ object___dir__(PyObject *self, PyObject *Py_UNUSED(ignored)) { return object___dir___impl(self); } -/*[clinic end generated code: output=b56c87f9cace1921 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f7db85fd11818c63 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/typevarobject.c.h b/Objects/clinic/typevarobject.c.h index c17998830b02eb..e50ed7d95b9b2a 100644 --- a/Objects/clinic/typevarobject.c.h +++ b/Objects/clinic/typevarobject.c.h @@ -143,7 +143,7 @@ typevar_typing_prepare_subst_impl(typevarobject *self, PyObject *alias, PyObject *args); static PyObject * -typevar_typing_prepare_subst(typevarobject *self, PyObject *const *args, Py_ssize_t nargs) +typevar_typing_prepare_subst(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *alias; @@ -154,7 +154,7 @@ typevar_typing_prepare_subst(typevarobject *self, PyObject *const *args, Py_ssiz } alias = args[0]; __clinic_args = args[1]; - 
return_value = typevar_typing_prepare_subst_impl(self, alias, __clinic_args); + return_value = typevar_typing_prepare_subst_impl((typevarobject *)self, alias, __clinic_args); exit: return return_value; @@ -172,9 +172,9 @@ static PyObject * typevar_reduce_impl(typevarobject *self); static PyObject * -typevar_reduce(typevarobject *self, PyObject *Py_UNUSED(ignored)) +typevar_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevar_reduce_impl(self); + return typevar_reduce_impl((typevarobject *)self); } PyDoc_STRVAR(typevar_has_default__doc__, @@ -189,9 +189,9 @@ static PyObject * typevar_has_default_impl(typevarobject *self); static PyObject * -typevar_has_default(typevarobject *self, PyObject *Py_UNUSED(ignored)) +typevar_has_default(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevar_has_default_impl(self); + return typevar_has_default_impl((typevarobject *)self); } PyDoc_STRVAR(paramspecargs_new__doc__, @@ -431,7 +431,7 @@ paramspec_typing_prepare_subst_impl(paramspecobject *self, PyObject *alias, PyObject *args); static PyObject * -paramspec_typing_prepare_subst(paramspecobject *self, PyObject *const *args, Py_ssize_t nargs) +paramspec_typing_prepare_subst(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *alias; @@ -442,7 +442,7 @@ paramspec_typing_prepare_subst(paramspecobject *self, PyObject *const *args, Py_ } alias = args[0]; __clinic_args = args[1]; - return_value = paramspec_typing_prepare_subst_impl(self, alias, __clinic_args); + return_value = paramspec_typing_prepare_subst_impl((paramspecobject *)self, alias, __clinic_args); exit: return return_value; @@ -460,9 +460,9 @@ static PyObject * paramspec_reduce_impl(paramspecobject *self); static PyObject * -paramspec_reduce(paramspecobject *self, PyObject *Py_UNUSED(ignored)) +paramspec_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return paramspec_reduce_impl(self); + return paramspec_reduce_impl((paramspecobject *)self); } PyDoc_STRVAR(paramspec_has_default__doc__, @@ -477,9 +477,9 @@ static PyObject * paramspec_has_default_impl(paramspecobject *self); static PyObject * -paramspec_has_default(paramspecobject *self, PyObject *Py_UNUSED(ignored)) +paramspec_has_default(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return paramspec_has_default_impl(self); + return paramspec_has_default_impl((paramspecobject *)self); } PyDoc_STRVAR(typevartuple__doc__, @@ -570,7 +570,7 @@ typevartuple_typing_prepare_subst_impl(typevartupleobject *self, PyObject *alias, PyObject *args); static PyObject * -typevartuple_typing_prepare_subst(typevartupleobject *self, PyObject *const *args, Py_ssize_t nargs) +typevartuple_typing_prepare_subst(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *alias; @@ -581,7 +581,7 @@ typevartuple_typing_prepare_subst(typevartupleobject *self, PyObject *const *arg } alias = args[0]; __clinic_args = args[1]; - return_value = typevartuple_typing_prepare_subst_impl(self, alias, __clinic_args); + return_value = typevartuple_typing_prepare_subst_impl((typevartupleobject *)self, alias, __clinic_args); exit: return return_value; @@ -599,9 +599,9 @@ static PyObject * typevartuple_reduce_impl(typevartupleobject *self); static PyObject * -typevartuple_reduce(typevartupleobject *self, PyObject *Py_UNUSED(ignored)) +typevartuple_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevartuple_reduce_impl(self); + return typevartuple_reduce_impl((typevartupleobject *)self); } 
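/*
 * Illustrative sketch (not part of the patch; names here are hypothetical).
 * The clinic hunks above all follow one pattern: the generated wrapper now
 * takes a plain `PyObject *self` so its signature matches the method
 * function-pointer types exactly, and the cast to the concrete object
 * struct is done once, at the call into the `_impl` function. A minimal
 * hand-written equivalent of that shape, assuming a toy `MyObject` type:
 */
#include <Python.h>

typedef struct {
    PyObject_HEAD
    int payload;                      /* example field on the concrete type */
} MyObject;

static PyObject *
my_method_impl(MyObject *self)
{
    /* the impl works on the concrete struct */
    return PyLong_FromLong(self->payload);
}

static PyObject *
my_method(PyObject *self, PyObject *Py_UNUSED(ignored))
{
    /* wrapper keeps the METH_NOARGS-compatible signature; cast only here */
    return my_method_impl((MyObject *)self);
}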
PyDoc_STRVAR(typevartuple_has_default__doc__, @@ -616,9 +616,9 @@ static PyObject * typevartuple_has_default_impl(typevartupleobject *self); static PyObject * -typevartuple_has_default(typevartupleobject *self, PyObject *Py_UNUSED(ignored)) +typevartuple_has_default(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typevartuple_has_default_impl(self); + return typevartuple_has_default_impl((typevartupleobject *)self); } PyDoc_STRVAR(typealias_reduce__doc__, @@ -633,9 +633,9 @@ static PyObject * typealias_reduce_impl(typealiasobject *self); static PyObject * -typealias_reduce(typealiasobject *self, PyObject *Py_UNUSED(ignored)) +typealias_reduce(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return typealias_reduce_impl(self); + return typealias_reduce_impl((typealiasobject *)self); } PyDoc_STRVAR(typealias_new__doc__, @@ -706,4 +706,4 @@ typealias_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=26351c3549f5ad83 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=f499d959a942c599 input=a9049054013a1b77]*/ diff --git a/Objects/clinic/unicodeobject.c.h b/Objects/clinic/unicodeobject.c.h index 361f20c0fa4c1d..5c6a425b0f803a 100644 --- a/Objects/clinic/unicodeobject.c.h +++ b/Objects/clinic/unicodeobject.c.h @@ -22,9 +22,9 @@ static PyObject * EncodingMap_size_impl(struct encoding_map *self); static PyObject * -EncodingMap_size(struct encoding_map *self, PyObject *Py_UNUSED(ignored)) +EncodingMap_size(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return EncodingMap_size_impl(self); + return EncodingMap_size_impl((struct encoding_map *)self); } PyDoc_STRVAR(unicode_title__doc__, @@ -1895,4 +1895,4 @@ unicode_new(PyTypeObject *type, PyObject *args, PyObject *kwargs) exit: return return_value; } -/*[clinic end generated code: output=30efbf79c5a07dd2 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=4d1cecd6d08498a4 input=a9049054013a1b77]*/ diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 15b36a868a47df..7eea61968bc4d9 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -979,6 +979,9 @@ PyCode_Addr2Line(PyCodeObject *co, int addrq) if (addrq < 0) { return co->co_firstlineno; } + if (co->_co_monitoring && co->_co_monitoring->lines) { + return _Py_Instrumentation_GetLine(co, addrq/sizeof(_Py_CODEUNIT)); + } assert(addrq >= 0 && addrq < _PyCode_NBYTES(co)); PyCodeAddressRange bounds; _PyCode_InitAddressRange(co, &bounds); @@ -1911,7 +1914,7 @@ code_dealloc(PyObject *self) Py_XDECREF(co->co_linetable); Py_XDECREF(co->co_exceptiontable); #ifdef Py_GIL_DISABLED - assert(co->_co_unique_id == -1); + assert(co->_co_unique_id == _Py_INVALID_UNIQUE_ID); #endif if (co->_co_cached != NULL) { Py_XDECREF(co->_co_cached->_co_code); @@ -2212,24 +2215,24 @@ code_branchesiterator(PyObject *self, PyObject *Py_UNUSED(args)) code.replace * - co_argcount: int(c_default="self->co_argcount") = unchanged - co_posonlyargcount: int(c_default="self->co_posonlyargcount") = unchanged - co_kwonlyargcount: int(c_default="self->co_kwonlyargcount") = unchanged - co_nlocals: int(c_default="self->co_nlocals") = unchanged - co_stacksize: int(c_default="self->co_stacksize") = unchanged - co_flags: int(c_default="self->co_flags") = unchanged - co_firstlineno: int(c_default="self->co_firstlineno") = unchanged + co_argcount: int(c_default="((PyCodeObject *)self)->co_argcount") = unchanged + co_posonlyargcount: int(c_default="((PyCodeObject *)self)->co_posonlyargcount") = unchanged + 
co_kwonlyargcount: int(c_default="((PyCodeObject *)self)->co_kwonlyargcount") = unchanged + co_nlocals: int(c_default="((PyCodeObject *)self)->co_nlocals") = unchanged + co_stacksize: int(c_default="((PyCodeObject *)self)->co_stacksize") = unchanged + co_flags: int(c_default="((PyCodeObject *)self)->co_flags") = unchanged + co_firstlineno: int(c_default="((PyCodeObject *)self)->co_firstlineno") = unchanged co_code: object(subclass_of="&PyBytes_Type", c_default="NULL") = unchanged - co_consts: object(subclass_of="&PyTuple_Type", c_default="self->co_consts") = unchanged - co_names: object(subclass_of="&PyTuple_Type", c_default="self->co_names") = unchanged + co_consts: object(subclass_of="&PyTuple_Type", c_default="((PyCodeObject *)self)->co_consts") = unchanged + co_names: object(subclass_of="&PyTuple_Type", c_default="((PyCodeObject *)self)->co_names") = unchanged co_varnames: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged co_freevars: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged co_cellvars: object(subclass_of="&PyTuple_Type", c_default="NULL") = unchanged - co_filename: unicode(c_default="self->co_filename") = unchanged - co_name: unicode(c_default="self->co_name") = unchanged - co_qualname: unicode(c_default="self->co_qualname") = unchanged - co_linetable: object(subclass_of="&PyBytes_Type", c_default="self->co_linetable") = unchanged - co_exceptiontable: object(subclass_of="&PyBytes_Type", c_default="self->co_exceptiontable") = unchanged + co_filename: unicode(c_default="((PyCodeObject *)self)->co_filename") = unchanged + co_name: unicode(c_default="((PyCodeObject *)self)->co_name") = unchanged + co_qualname: unicode(c_default="((PyCodeObject *)self)->co_qualname") = unchanged + co_linetable: object(subclass_of="&PyBytes_Type", c_default="((PyCodeObject *)self)->co_linetable") = unchanged + co_exceptiontable: object(subclass_of="&PyBytes_Type", c_default="((PyCodeObject *)self)->co_exceptiontable") = unchanged Return a copy of the code object with new values for the specified fields. 
[clinic start generated code]*/ @@ -2244,7 +2247,7 @@ code_replace_impl(PyCodeObject *self, int co_argcount, PyObject *co_filename, PyObject *co_name, PyObject *co_qualname, PyObject *co_linetable, PyObject *co_exceptiontable) -/*[clinic end generated code: output=e75c48a15def18b9 input=18e280e07846c122]*/ +/*[clinic end generated code: output=e75c48a15def18b9 input=a455a89c57ac9d42]*/ { #define CHECK_INT_ARG(ARG) \ if (ARG < 0) { \ diff --git a/Objects/descrobject.c b/Objects/descrobject.c index 1852118359f014..238becee241d1d 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -1349,7 +1349,7 @@ wrapper_hash(PyObject *self) wrapperobject *wp = (wrapperobject *)self; Py_hash_t x, y; x = PyObject_GenericHash(wp->self); - y = _Py_HashPointer(wp->descr); + y = Py_HashPointer(wp->descr); x = x ^ y; if (x == -1) x = -2; diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 82789d5e56f523..8fe71123252a75 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -1659,6 +1659,9 @@ _PyDict_EnablePerThreadRefcounting(PyObject *op) assert(PyDict_Check(op)); #ifdef Py_GIL_DISABLED Py_ssize_t id = _PyObject_AssignUniqueId(op); + if (id == _Py_INVALID_UNIQUE_ID) { + return; + } if ((uint64_t)id >= (uint64_t)DICT_UNIQUE_ID_MAX) { _PyObject_ReleaseUniqueId(id); return; @@ -1666,8 +1669,7 @@ _PyDict_EnablePerThreadRefcounting(PyObject *op) PyDictObject *mp = (PyDictObject *)op; assert((mp->_ma_watcher_tag >> DICT_UNIQUE_ID_SHIFT) == 0); - // Plus 1 so that _ma_watcher_tag=0 represents an unassigned id - mp->_ma_watcher_tag += ((uint64_t)id + 1) << DICT_UNIQUE_ID_SHIFT; + mp->_ma_watcher_tag += (uint64_t)id << DICT_UNIQUE_ID_SHIFT; #endif } @@ -3088,8 +3090,8 @@ PyDict_PopString(PyObject *op, const char *key, PyObject **result) } -PyObject * -_PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value) +static PyObject * +dict_pop_default(PyObject *dict, PyObject *key, PyObject *default_value) { PyObject *result; if (PyDict_Pop(dict, key, &result) == 0) { @@ -3102,6 +3104,12 @@ _PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value) return result; } +PyObject * +_PyDict_Pop(PyObject *dict, PyObject *key, PyObject *default_value) +{ + return dict_pop_default(dict, key, default_value); +} + static PyDictObject * dict_dict_fromkeys(PyInterpreterState *interp, PyDictObject *mp, PyObject *iterable, PyObject *value) @@ -4463,7 +4471,7 @@ static PyObject * dict_pop_impl(PyDictObject *self, PyObject *key, PyObject *default_value) /*[clinic end generated code: output=3abb47b89f24c21c input=e221baa01044c44c]*/ { - return _PyDict_Pop((PyObject*)self, key, default_value); + return dict_pop_default((PyObject*)self, key, default_value); } /*[clinic input] diff --git a/Objects/exceptions.c b/Objects/exceptions.c index 4df89edfaf3953..ea2733435fc3ec 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -20,8 +20,9 @@ /*[clinic input] class BaseException "PyBaseExceptionObject *" "&PyExc_BaseException" +class BaseExceptionGroup "PyBaseExceptionGroupObject *" "&PyExc_BaseExceptionGroup" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=90558eb0fbf8a3d0]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=b7c45e78cff8edc3]*/ /* Compatibility aliases */ @@ -1034,10 +1035,18 @@ BaseExceptionGroup_str(PyBaseExceptionGroupObject *self) self->msg, num_excs, num_excs > 1 ? 
"s" : ""); } +/*[clinic input] +@critical_section +BaseExceptionGroup.derive + excs: object + / +[clinic start generated code]*/ + static PyObject * -BaseExceptionGroup_derive(PyObject *self_, PyObject *excs) +BaseExceptionGroup_derive_impl(PyBaseExceptionGroupObject *self, + PyObject *excs) +/*[clinic end generated code: output=4307564218dfbf06 input=f72009d38e98cec1]*/ { - PyBaseExceptionGroupObject *self = _PyBaseExceptionGroupObject_cast(self_); PyObject *init_args = PyTuple_Pack(2, self->msg, excs); if (!init_args) { return NULL; @@ -1330,8 +1339,17 @@ exceptiongroup_split_recursive(PyObject *exc, return retval; } +/*[clinic input] +@critical_section +BaseExceptionGroup.split + matcher_value: object + / +[clinic start generated code]*/ + static PyObject * -BaseExceptionGroup_split(PyObject *self, PyObject *matcher_value) +BaseExceptionGroup_split_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value) +/*[clinic end generated code: output=d74db579da4df6e2 input=0c5cfbfed57e0052]*/ { _exceptiongroup_split_matcher_type matcher_type; if (get_matcher_type(matcher_value, &matcher_type) < 0) { @@ -1341,7 +1359,7 @@ BaseExceptionGroup_split(PyObject *self, PyObject *matcher_value) _exceptiongroup_split_result split_result; bool construct_rest = true; if (exceptiongroup_split_recursive( - self, matcher_type, matcher_value, + (PyObject *)self, matcher_type, matcher_value, construct_rest, &split_result) < 0) { return NULL; } @@ -1356,8 +1374,17 @@ BaseExceptionGroup_split(PyObject *self, PyObject *matcher_value) return result; } +/*[clinic input] +@critical_section +BaseExceptionGroup.subgroup + matcher_value: object + / +[clinic start generated code]*/ + static PyObject * -BaseExceptionGroup_subgroup(PyObject *self, PyObject *matcher_value) +BaseExceptionGroup_subgroup_impl(PyBaseExceptionGroupObject *self, + PyObject *matcher_value) +/*[clinic end generated code: output=07dbec8f77d4dd8e input=988ffdd755a151ce]*/ { _exceptiongroup_split_matcher_type matcher_type; if (get_matcher_type(matcher_value, &matcher_type) < 0) { @@ -1367,7 +1394,7 @@ BaseExceptionGroup_subgroup(PyObject *self, PyObject *matcher_value) _exceptiongroup_split_result split_result; bool construct_rest = false; if (exceptiongroup_split_recursive( - self, matcher_type, matcher_value, + (PyObject *)self, matcher_type, matcher_value, construct_rest, &split_result) < 0) { return NULL; } @@ -1633,9 +1660,9 @@ static PyMemberDef BaseExceptionGroup_members[] = { static PyMethodDef BaseExceptionGroup_methods[] = { {"__class_getitem__", (PyCFunction)Py_GenericAlias, METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, - {"derive", (PyCFunction)BaseExceptionGroup_derive, METH_O}, - {"split", (PyCFunction)BaseExceptionGroup_split, METH_O}, - {"subgroup", (PyCFunction)BaseExceptionGroup_subgroup, METH_O}, + BASEEXCEPTIONGROUP_DERIVE_METHODDEF + BASEEXCEPTIONGROUP_SPLIT_METHODDEF + BASEEXCEPTIONGROUP_SUBGROUP_METHODDEF {NULL} }; @@ -2954,8 +2981,10 @@ unicode_error_set_end_impl(PyObject *self, Py_ssize_t end) * The 'start' can be negative or not, but when adjusting the value, * we clip it in [0, max(0, objlen - 1)] and do not interpret it as * a relative offset. + * + * This function always succeeds. */ -static inline Py_ssize_t +static Py_ssize_t unicode_error_adjust_start(Py_ssize_t start, Py_ssize_t objlen) { assert(objlen >= 0); @@ -2969,14 +2998,34 @@ unicode_error_adjust_start(Py_ssize_t start, Py_ssize_t objlen) } +/* Assert some properties of the adjusted 'start' value. 
*/ +#ifndef NDEBUG +static void +assert_adjusted_unicode_error_start(Py_ssize_t start, Py_ssize_t objlen) +{ + assert(objlen >= 0); + /* in the future, `min_start` may be something else */ + Py_ssize_t min_start = 0; + assert(start >= min_start); + /* in the future, `max_start` may be something else */ + Py_ssize_t max_start = Py_MAX(min_start, objlen - 1); + assert(start <= max_start); +} +#else +#define assert_adjusted_unicode_error_start(...) +#endif + + /* * Adjust the (exclusive) 'end' value of a UnicodeError object. * * The 'end' can be negative or not, but when adjusting the value, * we clip it in [min(1, objlen), max(min(1, objlen), objlen)] and * do not interpret it as a relative offset. + * + * This function always succeeds. */ -static inline Py_ssize_t +static Py_ssize_t unicode_error_adjust_end(Py_ssize_t end, Py_ssize_t objlen) { assert(objlen >= 0); @@ -2990,6 +3039,59 @@ unicode_error_adjust_end(Py_ssize_t end, Py_ssize_t objlen) } +/* Assert some properties of the adjusted 'end' value. */ +#ifndef NDEBUG +static void +assert_adjusted_unicode_error_end(Py_ssize_t end, Py_ssize_t objlen) +{ + assert(objlen >= 0); + /* in the future, `min_end` may be something else */ + Py_ssize_t min_end = Py_MIN(1, objlen); + assert(end >= min_end); + /* in the future, `max_end` may be something else */ + Py_ssize_t max_end = Py_MAX(min_end, objlen); + assert(end <= max_end); +} +#else +#define assert_adjusted_unicode_error_end(...) +#endif + + +/* + * Adjust the length of the range described by a UnicodeError object. + * + * The 'start' and 'end' arguments must have been obtained by + * unicode_error_adjust_start() and unicode_error_adjust_end(). + * + * The result is clipped in [0, objlen]. By construction, it + * will always be smaller than 'objlen' as 'start' and 'end' + * are smaller than 'objlen'. + */ +static Py_ssize_t +unicode_error_adjust_len(Py_ssize_t start, Py_ssize_t end, Py_ssize_t objlen) +{ + assert_adjusted_unicode_error_start(start, objlen); + assert_adjusted_unicode_error_end(end, objlen); + Py_ssize_t ranlen = end - start; + assert(ranlen <= objlen); + return ranlen < 0 ? 0 : ranlen; +} + + +/* Assert some properties of the adjusted range 'len' value. */ +#ifndef NDEBUG +static void +assert_adjusted_unicode_error_len(Py_ssize_t ranlen, Py_ssize_t objlen) +{ + assert(objlen >= 0); + assert(ranlen >= 0); + assert(ranlen <= objlen); +} +#else +#define assert_adjusted_unicode_error_len(...) +#endif + + /* * Get various common parameters of a UnicodeError object. * @@ -3004,22 +3106,24 @@ unicode_error_adjust_end(Py_ssize_t end, Py_ssize_t objlen) * objlen The 'object' length. * start The clipped 'start' attribute. * end The clipped 'end' attribute. + * slen The length of the slice described by the clipped 'start' + * and 'end' values. It always lies in [0, objlen]. * * An output parameter can be NULL to indicate that * the corresponding value does not need to be stored. * * Input parameter: * - * as_bytes If 1, the error's 'object' attribute must be a bytes object, - * i.e. the call is for a `UnicodeDecodeError`. Otherwise, the - * 'object' attribute must be a string. + * as_bytes If true, the error's 'object' attribute must be a `bytes`, + * i.e. 'self' is a `UnicodeDecodeError` instance. Otherwise, + * the 'object' attribute must be a string. * * A TypeError is raised if the 'object' type is incompatible. 
*/ int _PyUnicodeError_GetParams(PyObject *self, PyObject **obj, Py_ssize_t *objlen, - Py_ssize_t *start, Py_ssize_t *end, + Py_ssize_t *start, Py_ssize_t *end, Py_ssize_t *slen, int as_bytes) { assert(self != NULL); @@ -3034,16 +3138,30 @@ _PyUnicodeError_GetParams(PyObject *self, if (objlen != NULL) { *objlen = n; } + + Py_ssize_t start_value = -1; + if (start != NULL || slen != NULL) { + start_value = unicode_error_adjust_start(exc->start, n); + } if (start != NULL) { - *start = unicode_error_adjust_start(exc->start, n); - assert(*start >= 0); - assert(*start <= n); + assert_adjusted_unicode_error_start(start_value, n); + *start = start_value; + } + + Py_ssize_t end_value = -1; + if (end != NULL || slen != NULL) { + end_value = unicode_error_adjust_end(exc->end, n); } if (end != NULL) { - *end = unicode_error_adjust_end(exc->end, n); - assert(*end >= 0); - assert(*end <= n); + assert_adjusted_unicode_error_end(end_value, n); + *end = end_value; } + + if (slen != NULL) { + *slen = unicode_error_adjust_len(start_value, end_value, n); + assert_adjusted_unicode_error_len(*slen, n); + } + if (obj != NULL) { *obj = r; } @@ -3111,7 +3229,9 @@ static inline int unicode_error_get_start_impl(PyObject *self, Py_ssize_t *start, int as_bytes) { assert(self != NULL); - return _PyUnicodeError_GetParams(self, NULL, NULL, start, NULL, as_bytes); + return _PyUnicodeError_GetParams(self, NULL, NULL, + start, NULL, NULL, + as_bytes); } @@ -3177,7 +3297,9 @@ static inline int unicode_error_get_end_impl(PyObject *self, Py_ssize_t *end, int as_bytes) { assert(self != NULL); - return _PyUnicodeError_GetParams(self, NULL, NULL, NULL, end, as_bytes); + return _PyUnicodeError_GetParams(self, NULL, NULL, + NULL, end, NULL, + as_bytes); } @@ -4256,7 +4378,7 @@ _PyException_AddNote(PyObject *exc, PyObject *note) Py_TYPE(exc)->tp_name); return -1; } - PyObject *r = BaseException_add_note(_PyBaseExceptionObject_cast(exc), note); + PyObject *r = BaseException_add_note(exc, note); int res = r == NULL ? -1 : 0; Py_XDECREF(r); return res; diff --git a/Objects/floatobject.c b/Objects/floatobject.c index bcc77287454768..7ca43033d722ab 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -428,9 +428,10 @@ float_richcompare(PyObject *v, PyObject *w, int op) else if (PyLong_Check(w)) { int vsign = i == 0.0 ? 0 : i < 0.0 ? -1 : 1; - int wsign = _PyLong_Sign(w); + int wsign; int exponent; + (void)PyLong_GetSign(w, &wsign); if (vsign != wsign) { /* Magnitudes are irrelevant -- the signs alone * determine the outcome. 
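/*
 * Usage sketch (an aside, not part of the patch). The float_richcompare hunk
 * above replaces the private _PyLong_Sign() with the public PyLong_GetSign(),
 * which reports the sign (-1, 0 or +1) through an out-parameter and returns
 * 0 on success or -1 with an exception set when the argument is not an int.
 * The `(void)` cast in the hunk is safe because `w` was already checked with
 * PyLong_Check(); a stand-alone caller would normally check the result.
 * The helper name below is hypothetical.
 */
#include <Python.h>

static int
sign_of(PyObject *obj)
{
    int sign;
    if (PyLong_GetSign(obj, &sign) < 0) {
        return -2;                    /* not an int; an exception is set */
    }
    return sign;                      /* -1, 0 or +1 */
}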
diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 4f0040df4f3017..15ec4b78235992 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -1681,6 +1681,15 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure) return 0; } +static PyObject * +frame_getgenerator(PyFrameObject *f, void *arg) { + if (f->f_frame->owner == FRAME_OWNED_BY_GENERATOR) { + PyObject *gen = (PyObject *)_PyGen_GetGeneratorFromFrame(f->f_frame); + return Py_NewRef(gen); + } + Py_RETURN_NONE; +} + static PyGetSetDef frame_getsetlist[] = { {"f_back", (getter)frame_getback, NULL, NULL}, @@ -1693,6 +1702,7 @@ static PyGetSetDef frame_getsetlist[] = { {"f_builtins", (getter)frame_getbuiltins, NULL, NULL}, {"f_code", (getter)frame_getcode, NULL, NULL}, {"f_trace_opcodes", (getter)frame_gettrace_opcodes, (setter)frame_settrace_opcodes, NULL}, + {"f_generator", (getter)frame_getgenerator, NULL, NULL}, {0} }; @@ -2142,7 +2152,7 @@ _PyFrame_IsEntryFrame(PyFrameObject *frame) assert(frame != NULL); _PyInterpreterFrame *f = frame->f_frame; assert(!_PyFrame_IsIncomplete(f)); - return f->previous && f->previous->owner == FRAME_OWNED_BY_CSTACK; + return f->previous && f->previous->owner == FRAME_OWNED_BY_INTERPRETER; } PyCodeObject * diff --git a/Objects/genobject.c b/Objects/genobject.c index b32140766c4a38..73bbf86588c457 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -134,6 +134,19 @@ _PyGen_Finalize(PyObject *self) PyErr_SetRaisedException(exc); } +static void +gen_clear_frame(PyGenObject *gen) +{ + if (gen->gi_frame_state == FRAME_CLEARED) + return; + + gen->gi_frame_state = FRAME_CLEARED; + _PyInterpreterFrame *frame = &gen->gi_iframe; + frame->previous = NULL; + _PyFrame_ClearExceptCode(frame); + _PyErr_ClearExcState(&gen->gi_exc_state); +} + static void gen_dealloc(PyObject *self) { @@ -159,13 +172,7 @@ gen_dealloc(PyObject *self) if (PyCoro_CheckExact(gen)) { Py_CLEAR(((PyCoroObject *)gen)->cr_origin_or_finalizer); } - if (gen->gi_frame_state != FRAME_CLEARED) { - _PyInterpreterFrame *frame = &gen->gi_iframe; - gen->gi_frame_state = FRAME_CLEARED; - frame->previous = NULL; - _PyFrame_ClearExceptCode(frame); - _PyErr_ClearExcState(&gen->gi_exc_state); - } + gen_clear_frame(gen); assert(gen->gi_exc_state.exc_value == NULL); PyStackRef_CLEAR(gen->gi_iframe.f_executable); Py_CLEAR(gen->gi_name); @@ -400,7 +407,7 @@ gen_close(PyObject *self, PyObject *args) // RESUME after YIELD_VALUE and exception depth is 1 assert((oparg & RESUME_OPARG_LOCATION_MASK) != RESUME_AT_FUNC_START); gen->gi_frame_state = FRAME_COMPLETED; - _PyFrame_ClearLocals(&gen->gi_iframe); + gen_clear_frame(gen); Py_RETURN_NONE; } } @@ -1146,7 +1153,6 @@ cr_getcode(PyObject *coro, void *Py_UNUSED(ignored)) return _gen_getcode(_PyGen_CAST(coro), "cr_code"); } - static PyGetSetDef coro_getsetlist[] = { {"__name__", gen_get_name, gen_set_name, PyDoc_STR("name of the coroutine")}, diff --git a/Objects/listobject.c b/Objects/listobject.c index bbd53e7de94a31..099e65c0c25fed 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -3190,7 +3190,7 @@ _PyList_AsTupleAndClear(PyListObject *self) } PyObject * -_PyList_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) +_PyList_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n) { if (n == 0) { return PyList_New(0); @@ -3198,9 +3198,6 @@ _PyList_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) PyListObject *list = (PyListObject *)PyList_New(n); if (list == NULL) { - for (Py_ssize_t i = 0; i < n; i++) { - PyStackRef_CLOSE(src[i]); - } return NULL; 
} diff --git a/Objects/longobject.c b/Objects/longobject.c index d449a01cedf886..43be1ab056e0fe 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -152,8 +152,8 @@ long_normalize(PyLongObject *v) # define MAX_LONG_DIGITS ((INT64_MAX-1) / PyLong_SHIFT) #endif -PyLongObject * -_PyLong_New(Py_ssize_t size) +static PyLongObject * +long_alloc(Py_ssize_t size) { assert(size >= 0); PyLongObject *result = NULL; @@ -190,6 +190,12 @@ _PyLong_New(Py_ssize_t size) return result; } +PyLongObject * +_PyLong_New(Py_ssize_t size) +{ + return long_alloc(size); +} + PyLongObject * _PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits) { @@ -197,9 +203,8 @@ _PyLong_FromDigits(int negative, Py_ssize_t digit_count, digit *digits) if (digit_count == 0) { return (PyLongObject *)_PyLong_GetZero(); } - PyLongObject *result = _PyLong_New(digit_count); + PyLongObject *result = long_alloc(digit_count); if (result == NULL) { - PyErr_NoMemory(); return NULL; } _PyLong_SetSignAndDigitCount(result, negative?-1:1, digit_count); @@ -211,15 +216,29 @@ PyObject * _PyLong_Copy(PyLongObject *src) { assert(src != NULL); + int sign; if (_PyLong_IsCompact(src)) { stwodigits ival = medium_value(src); if (IS_SMALL_INT(ival)) { return get_small_int((sdigit)ival); } + sign = _PyLong_CompactSign(src); + } + else { + sign = _PyLong_NonCompactSign(src); } + Py_ssize_t size = _PyLong_DigitCount(src); - return (PyObject *)_PyLong_FromDigits(_PyLong_IsNegative(src), size, src->long_value.ob_digit); + PyLongObject *result = long_alloc(size); + + if (result == NULL) { + return NULL; + } + _PyLong_SetSignAndDigitCount(result, sign, size); + memcpy(result->long_value.ob_digit, src->long_value.ob_digit, + size * sizeof(digit)); + return (PyObject *)result; } static PyObject * @@ -268,7 +287,7 @@ _PyLong_FromLarge(stwodigits ival) ++ndigits; t >>= PyLong_SHIFT; } - PyLongObject *v = _PyLong_New(ndigits); + PyLongObject *v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, sign, ndigits); @@ -341,7 +360,7 @@ PyLong_FromLong(long ival) } /* Construct output value. */ - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, ival < 0 ? -1 : 1, ndigits); @@ -359,14 +378,18 @@ PyLong_FromLong(long ival) if (IS_SMALL_UINT(ival)) { \ return get_small_int((sdigit)(ival)); \ } \ + if ((ival) <= PyLong_MASK) { \ + return _PyLong_FromMedium((sdigit)(ival)); \ + } \ + /* Do shift in two steps to avoid possible undefined behavior. */ \ + INT_TYPE t = (ival) >> PyLong_SHIFT >> PyLong_SHIFT; \ /* Count the number of Python digits. 
*/ \ - Py_ssize_t ndigits = 0; \ - INT_TYPE t = (ival); \ + Py_ssize_t ndigits = 2; \ while (t) { \ ++ndigits; \ t >>= PyLong_SHIFT; \ } \ - PyLongObject *v = _PyLong_New(ndigits); \ + PyLongObject *v = long_alloc(ndigits); \ if (v == NULL) { \ return NULL; \ } \ @@ -443,7 +466,7 @@ PyLong_FromDouble(double dval) frac = frexp(dval, &expo); /* dval = frac*2**expo; 0.0 <= frac < 1.0 */ assert(expo > 0); ndig = (expo-1) / PyLong_SHIFT + 1; /* Number of 'digits' in result */ - v = _PyLong_New(ndig); + v = long_alloc(ndig); if (v == NULL) return NULL; frac = ldexp(frac, (expo-1) % PyLong_SHIFT + 1); @@ -827,19 +850,25 @@ PyLong_IsZero(PyObject *obj) return _PyLong_IsZero((PyLongObject *)obj); } -int -_PyLong_Sign(PyObject *vv) +static int +long_sign(PyObject *vv) { + assert(vv != NULL); + assert(PyLong_Check(vv)); PyLongObject *v = (PyLongObject *)vv; - assert(v != NULL); - assert(PyLong_Check(v)); if (_PyLong_IsCompact(v)) { return _PyLong_CompactSign(v); } return _PyLong_NonCompactSign(v); } +int +_PyLong_Sign(PyObject *vv) +{ + return long_sign(vv); +} + int PyLong_GetSign(PyObject *vv, int *sign) { @@ -848,7 +877,7 @@ PyLong_GetSign(PyObject *vv, int *sign) return -1; } - *sign = _PyLong_Sign(vv); + *sign = long_sign(vv); return 0; } @@ -946,7 +975,7 @@ _PyLong_FromByteArray(const unsigned char* bytes, size_t n, return NULL; } ndigits = (numsignificantbytes * 8 + PyLong_SHIFT - 1) / PyLong_SHIFT; - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v == NULL) return NULL; @@ -1476,7 +1505,7 @@ PyLong_FromLongLong(long long ival) } /* Construct output value. */ - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, ival < 0 ? -1 : 1, ndigits); @@ -1519,7 +1548,7 @@ PyLong_FromSsize_t(Py_ssize_t ival) ++ndigits; t >>= PyLong_SHIFT; } - v = _PyLong_New(ndigits); + v = long_alloc(ndigits); if (v != NULL) { digit *p = v->long_value.ob_digit; _PyLong_SetSignAndDigitCount(v, negative ? -1 : 1, ndigits); @@ -2009,7 +2038,7 @@ divrem1(PyLongObject *a, digit n, digit *prem) PyLongObject *z; assert(n > 0 && n <= PyLong_MASK); - z = _PyLong_New(size); + z = long_alloc(size); if (z == NULL) return NULL; *prem = inplace_divrem1(z->long_value.ob_digit, a->long_value.ob_digit, size, n); @@ -2186,7 +2215,7 @@ long_to_decimal_string_internal(PyObject *aa, (10 * PyLong_SHIFT - 33 * _PyLong_DECIMAL_SHIFT); assert(size_a < PY_SSIZE_T_MAX/2); size = 1 + size_a + size_a / d; - scratch = _PyLong_New(size); + scratch = long_alloc(size); if (scratch == NULL) return -1; @@ -2629,7 +2658,7 @@ long_from_binary_base(const char *start, const char *end, Py_ssize_t digits, int return 0; } n = (digits * bits_per_char + PyLong_SHIFT - 1) / PyLong_SHIFT; - z = _PyLong_New(n); + z = long_alloc(n); if (z == NULL) { *res = NULL; return 0; @@ -2833,7 +2862,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, */ double fsize_z = (double)digits * log_base_BASE[base] + 1.0; if (fsize_z > (double)MAX_LONG_DIGITS) { - /* The same exception as in _PyLong_New(). */ + /* The same exception as in long_alloc(). 
*/ PyErr_SetString(PyExc_OverflowError, "too many digits in integer"); *res = NULL; @@ -2843,7 +2872,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, /* Uncomment next line to test exceedingly rare copy code */ /* size_z = 1; */ assert(size_z > 0); - z = _PyLong_New(size_z); + z = long_alloc(size_z); if (z == NULL) { *res = NULL; return 0; @@ -2906,7 +2935,7 @@ long_from_non_binary_base(const char *start, const char *end, Py_ssize_t digits, PyLongObject *tmp; /* Extremely rare. Get more space. */ assert(_PyLong_DigitCount(z) == size_z); - tmp = _PyLong_New(size_z + 1); + tmp = long_alloc(size_z + 1); if (tmp == NULL) { Py_DECREF(z); *res = NULL; @@ -3327,12 +3356,12 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) size_v = _PyLong_DigitCount(v1); size_w = _PyLong_DigitCount(w1); assert(size_v >= size_w && size_w >= 2); /* Assert checks by div() */ - v = _PyLong_New(size_v+1); + v = long_alloc(size_v+1); if (v == NULL) { *prem = NULL; return NULL; } - w = _PyLong_New(size_w); + w = long_alloc(size_w); if (w == NULL) { Py_DECREF(v); *prem = NULL; @@ -3354,7 +3383,7 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) at most (and usually exactly) k = size_v - size_w digits. */ k = size_v - size_w; assert(k >= 0); - a = _PyLong_New(k); + a = long_alloc(k); if (a == NULL) { Py_DECREF(w); Py_DECREF(v); @@ -3752,7 +3781,7 @@ x_add(PyLongObject *a, PyLongObject *b) size_a = size_b; size_b = size_temp; } } - z = _PyLong_New(size_a+1); + z = long_alloc(size_a+1); if (z == NULL) return NULL; for (i = 0; i < size_b; ++i) { @@ -3801,7 +3830,7 @@ x_sub(PyLongObject *a, PyLongObject *b) } size_a = size_b = i+1; } - z = _PyLong_New(size_a); + z = long_alloc(size_a); if (z == NULL) return NULL; for (i = 0; i < size_b; ++i) { @@ -3926,7 +3955,7 @@ x_mul(PyLongObject *a, PyLongObject *b) Py_ssize_t size_b = _PyLong_DigitCount(b); Py_ssize_t i; - z = _PyLong_New(size_a + size_b); + z = long_alloc(size_a + size_b); if (z == NULL) return NULL; @@ -4036,9 +4065,9 @@ kmul_split(PyLongObject *n, size_lo = Py_MIN(size_n, size); size_hi = size_n - size_lo; - if ((hi = _PyLong_New(size_hi)) == NULL) + if ((hi = long_alloc(size_hi)) == NULL) return -1; - if ((lo = _PyLong_New(size_lo)) == NULL) { + if ((lo = long_alloc(size_lo)) == NULL) { Py_DECREF(hi); return -1; } @@ -4138,7 +4167,7 @@ k_mul(PyLongObject *a, PyLongObject *b) */ /* 1. Allocate result space. */ - ret = _PyLong_New(asize + bsize); + ret = long_alloc(asize + bsize); if (ret == NULL) goto fail; #ifdef Py_DEBUG /* Fill with trash, to catch reference to uninitialized digits. */ @@ -4288,13 +4317,13 @@ k_lopsided_mul(PyLongObject *a, PyLongObject *b) assert(2 * asize <= bsize); /* Allocate result space, and zero it out. */ - ret = _PyLong_New(asize + bsize); + ret = long_alloc(asize + bsize); if (ret == NULL) return NULL; memset(ret->long_value.ob_digit, 0, _PyLong_DigitCount(ret) * sizeof(digit)); /* Successive slices of b are copied into bslice. 
*/ - bslice = _PyLong_New(asize); + bslice = long_alloc(asize); if (bslice == NULL) goto fail; @@ -4760,7 +4789,7 @@ long_true_divide(PyObject *v, PyObject *w) "intermediate overflow during division"); goto error; } - x = _PyLong_New(a_size + shift_digits + 1); + x = long_alloc(a_size + shift_digits + 1); if (x == NULL) goto error; for (i = 0; i < shift_digits; i++) @@ -4774,7 +4803,7 @@ long_true_divide(PyObject *v, PyObject *w) digit rem; /* x = a >> shift */ assert(a_size >= shift_digits); - x = _PyLong_New(a_size - shift_digits); + x = long_alloc(a_size - shift_digits); if (x == NULL) goto error; rem = v_rshift(x->long_value.ob_digit, a->long_value.ob_digit + shift_digits, @@ -5354,7 +5383,7 @@ long_rshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) /* Shifting all the bits of 'a' out gives either -1 or 0. */ return PyLong_FromLong(-a_negative); } - z = _PyLong_New(newsize); + z = long_alloc(newsize); if (z == NULL) { return NULL; } @@ -5469,7 +5498,7 @@ long_lshift1(PyLongObject *a, Py_ssize_t wordshift, digit remshift) newsize = oldsize + wordshift; if (remshift) ++newsize; - z = _PyLong_New(newsize); + z = long_alloc(newsize); if (z == NULL) return NULL; if (_PyLong_IsNegative(a)) { @@ -5584,7 +5613,7 @@ long_bitwise(PyLongObject *a, size_a = _PyLong_DigitCount(a); nega = _PyLong_IsNegative(a); if (nega) { - z = _PyLong_New(size_a); + z = long_alloc(size_a); if (z == NULL) return NULL; v_complement(z->long_value.ob_digit, a->long_value.ob_digit, size_a); @@ -5598,7 +5627,7 @@ long_bitwise(PyLongObject *a, size_b = _PyLong_DigitCount(b); negb = _PyLong_IsNegative(b); if (negb) { - z = _PyLong_New(size_b); + z = long_alloc(size_b); if (z == NULL) { Py_DECREF(a); return NULL; @@ -5642,7 +5671,7 @@ long_bitwise(PyLongObject *a, /* We allow an extra digit if z is negative, to make sure that the final two's complement of z doesn't overflow. 
*/ - z = _PyLong_New(size_z + negz); + z = long_alloc(size_z + negz); if (z == NULL) { Py_DECREF(a); Py_DECREF(b); @@ -5840,7 +5869,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) } else { alloc_a = size_a; - c = _PyLong_New(size_a); + c = long_alloc(size_a); if (c == NULL) goto error; } @@ -5856,7 +5885,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) } else { alloc_b = size_a; - d = _PyLong_New(size_a); + d = long_alloc(size_a); if (d == NULL) goto error; } @@ -6904,7 +6933,7 @@ PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits) } assert(digits != NULL); - PyLongObject *obj = _PyLong_New(ndigits); + PyLongObject *obj = long_alloc(ndigits); if (obj == NULL) { goto error; } @@ -6924,6 +6953,10 @@ PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits) void PyLongWriter_Discard(PyLongWriter *writer) { + if (writer == NULL) { + return; + } + PyLongObject *obj = (PyLongObject *)writer; assert(Py_REFCNT(obj) == 1); Py_DECREF(obj); diff --git a/Objects/methodobject.c b/Objects/methodobject.c index fc77055b0a2af7..1f459dea44192c 100644 --- a/Objects/methodobject.c +++ b/Objects/methodobject.c @@ -345,7 +345,7 @@ meth_hash(PyObject *self) { PyCFunctionObject *a = _PyCFunctionObject_CAST(self); Py_hash_t x = PyObject_GenericHash(a->m_self); - Py_hash_t y = _Py_HashPointer((void*)(a->m_ml->ml_meth)); + Py_hash_t y = Py_HashPointer((void*)(a->m_ml->ml_meth)); x ^= y; if (x == -1) { x = -2; diff --git a/Objects/object.c b/Objects/object.c index 7239cd0f465217..8fe1c69aeaec01 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -2382,8 +2382,6 @@ static PyTypeObject* static_types[] = { &_PyContextTokenMissing_Type, &_PyCoroWrapper_Type, #ifdef _Py_TIER2 - &_PyCounterExecutor_Type, - &_PyCounterOptimizer_Type, &_PyDefaultOptimizer_Type, #endif &_Py_GenericAliasIterType, @@ -3077,14 +3075,14 @@ _Py_SetRefcnt(PyObject *ob, Py_ssize_t refcnt) } int PyRefTracer_SetTracer(PyRefTracer tracer, void *data) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); _PyRuntime.ref_tracer.tracer_func = tracer; _PyRuntime.ref_tracer.tracer_data = data; return 0; } PyRefTracer PyRefTracer_GetTracer(void** data) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); if (data != NULL) { *data = _PyRuntime.ref_tracer.tracer_data; } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index b103deb01ca712..5688049b024696 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -2909,7 +2909,8 @@ _PyMem_DebugRawRealloc(void *ctx, void *p, size_t nbytes) static inline void _PyMem_DebugCheckGIL(const char *func) { - if (!PyGILState_Check()) { + PyThreadState *tstate = _PyThreadState_GET(); + if (tstate == NULL) { #ifndef Py_GIL_DISABLED _Py_FatalErrorFunc(func, "Python memory allocator called " diff --git a/Objects/odictobject.c b/Objects/odictobject.c index e151023dd764bf..f2d8da0c567878 100644 --- a/Objects/odictobject.c +++ b/Objects/odictobject.c @@ -260,7 +260,7 @@ mp_length __len__ - dict_length mp_subscript __getitem__ - dict_subscript mp_ass_subscript __setitem__ - dict_ass_sub __delitem__ -tp_hash __hash__ _Py_HashPointer ..._HashNotImpl +tp_hash __hash__ Py_HashPointer ..._HashNotImpl tp_str __str__ object_str - tp_getattro __getattribute__ ..._GenericGetAttr (repeated) __getattr__ diff --git a/Objects/setobject.c b/Objects/setobject.c index 955ccbebf74b54..26ab352ca6d989 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -1298,7 +1298,7 @@ set_union_impl(PySetObject *so, PyObject * const *others, PyObject *other; Py_ssize_t i; - result = (PySetObject 
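The PyLongWriter_Discard() hunk above adds an explicit NULL check, so callers can now route every failure through one unconditional discard. A minimal caller-side sketch follows; it additionally assumes PyLongWriter_Finish() and PyLong_GetNativeLayout() from the same public PyLong writer API, and the zero-fill helper is purely illustrative.

#include <Python.h>
#include <string.h>

/* Illustrative only: zero every digit. PyLongWriter_Finish() normalizes the
   result, so this yields the int 0 regardless of the build's digit width. */
static int
fill_digits(void *digits, Py_ssize_t ndigits)
{
    size_t digit_size = PyLong_GetNativeLayout()->digit_size;
    memset(digits, 0, (size_t)ndigits * digit_size);
    return 0;
}

static PyObject *
build_int_sketch(Py_ssize_t ndigits)    /* ndigits must be positive */
{
    void *digits = NULL;
    PyLongWriter *writer = PyLongWriter_Create(0, ndigits, &digits);
    if (writer == NULL || fill_digits(digits, ndigits) < 0) {
        PyLongWriter_Discard(writer);    /* now a no-op when writer == NULL */
        return NULL;
    }
    return PyLongWriter_Finish(writer);  /* consumes the writer on success */
}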
*)set_copy(so, NULL); + result = (PySetObject *)set_copy((PyObject *)so, NULL); if (result == NULL) return NULL; @@ -1321,13 +1321,12 @@ set_or(PyObject *self, PyObject *other) if (!PyAnySet_Check(self) || !PyAnySet_Check(other)) Py_RETURN_NOTIMPLEMENTED; - PySetObject *so = _PySet_CAST(self); - result = (PySetObject *)set_copy(so, NULL); + result = (PySetObject *)set_copy(self, NULL); if (result == NULL) { return NULL; } - if (Py_Is((PyObject *)so, other)) { + if (Py_Is(self, other)) { return (PyObject *)result; } if (set_update_local(result, other)) { @@ -1449,7 +1448,7 @@ set_intersection_multi_impl(PySetObject *so, PyObject * const *others, Py_ssize_t i; if (others_length == 0) { - return set_copy(so, NULL); + return set_copy((PyObject *)so, NULL); } PyObject *result = Py_NewRef(so); @@ -1806,7 +1805,7 @@ set_difference_multi_impl(PySetObject *so, PyObject * const *others, PyObject *result, *other; if (others_length == 0) { - return set_copy(so, NULL); + return set_copy((PyObject *)so, NULL); } other = others[0]; @@ -1929,7 +1928,7 @@ set_symmetric_difference_update(PySetObject *so, PyObject *other) /*[clinic end generated code: output=fbb049c0806028de input=a50acf0365e1f0a5]*/ { if (Py_Is((PyObject *)so, other)) { - return set_clear(so, NULL); + return set_clear((PyObject *)so, NULL); } int rv; @@ -2646,7 +2645,7 @@ PySet_Clear(PyObject *set) PyErr_BadInternalCall(); return -1; } - (void)set_clear((PySetObject *)set, NULL); + (void)set_clear(set, NULL); return 0; } @@ -2742,7 +2741,7 @@ PySet_Pop(PyObject *set) PyErr_BadInternalCall(); return NULL; } - return set_pop((PySetObject *)set, NULL); + return set_pop(set, NULL); } int diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index 4fef0af93fe095..1b07c2a2c498b9 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -399,11 +399,14 @@ _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, step_is_negative = 0; } else { - int step_sign; step = evaluate_slice_index(self->step); - if (step == NULL) + if (step == NULL) { goto error; - step_sign = _PyLong_Sign(step); + } + assert(PyLong_Check(step)); + + int step_sign; + (void)PyLong_GetSign(step, &step_sign); if (step_sign == 0) { PyErr_SetString(PyExc_ValueError, "slice step cannot be zero"); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 002002eb455556..7fe8553030a02e 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -391,16 +391,13 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) } PyObject * -_PyTuple_FromStackRefSteal(const _PyStackRef *src, Py_ssize_t n) +_PyTuple_FromStackRefStealOnSuccess(const _PyStackRef *src, Py_ssize_t n) { if (n == 0) { return tuple_get_empty(); } PyTupleObject *tuple = tuple_alloc(n); if (tuple == NULL) { - for (Py_ssize_t i = 0; i < n; i++) { - PyStackRef_CLOSE(src[i]); - } return NULL; } PyObject **dst = tuple->ob_item; diff --git a/Objects/typeobject.c b/Objects/typeobject.c index d8f5f6d9cb2366..93920341a179e8 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -6160,7 +6160,7 @@ type_dealloc(PyObject *self) Py_XDECREF(et->ht_module); PyMem_Free(et->_ht_tpname); #ifdef Py_GIL_DISABLED - assert(et->unique_id == -1); + assert(et->unique_id == _Py_INVALID_UNIQUE_ID); #endif et->ht_token = NULL; Py_TYPE(type)->tp_free((PyObject *)type); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 3eafa2381c1a4d..d9952f764bb178 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -6853,7 +6853,8 @@ _PyUnicode_DecodeUnicodeEscapeStateful(const char *s, 
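In the sliceobject.c hunk above, the private _PyLong_Sign() call is replaced by the public PyLong_GetSign(), which reports misuse through its return value rather than through assertions. A short, hedged sketch of that public call as a stand-alone helper (the function name is illustrative):

#include <Python.h>
#include <stdio.h>

/* Prints -1, 0 or +1 and returns 0 for a Python int; returns -1 with an
   exception set for any other object. */
static int
print_sign(PyObject *obj)
{
    int sign;
    if (PyLong_GetSign(obj, &sign) < 0) {
        return -1;
    }
    printf("sign = %d\n", sign);
    return 0;
}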
unsigned char c = *first_invalid_escape; if ('4' <= c && c <= '7') { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid octal escape sequence '\\%.3s'", + "\"\\%.3s\" is an invalid octal escape sequence. " + "Such sequences will not work in the future. ", first_invalid_escape) < 0) { Py_DECREF(result); @@ -6862,7 +6863,8 @@ _PyUnicode_DecodeUnicodeEscapeStateful(const char *s, } else { if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "invalid escape sequence '\\%c'", + "\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. ", c) < 0) { Py_DECREF(result); diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h index b14913366b77eb..00fa6a75ec113e 100644 --- a/PC/clinic/winreg.c.h +++ b/PC/clinic/winreg.c.h @@ -26,9 +26,9 @@ static PyObject * winreg_HKEYType_Close_impl(PyHKEYObject *self); static PyObject * -winreg_HKEYType_Close(PyHKEYObject *self, PyObject *Py_UNUSED(ignored)) +winreg_HKEYType_Close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return winreg_HKEYType_Close_impl(self); + return winreg_HKEYType_Close_impl((PyHKEYObject *)self); } #endif /* (defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES)) */ @@ -56,9 +56,9 @@ static PyObject * winreg_HKEYType_Detach_impl(PyHKEYObject *self); static PyObject * -winreg_HKEYType_Detach(PyHKEYObject *self, PyObject *Py_UNUSED(ignored)) +winreg_HKEYType_Detach(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return winreg_HKEYType_Detach_impl(self); + return winreg_HKEYType_Detach_impl((PyHKEYObject *)self); } #endif /* (defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) || defined(MS_WINDOWS_GAMES)) */ @@ -77,12 +77,12 @@ static PyHKEYObject * winreg_HKEYType___enter___impl(PyHKEYObject *self); static PyObject * -winreg_HKEYType___enter__(PyHKEYObject *self, PyObject *Py_UNUSED(ignored)) +winreg_HKEYType___enter__(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; PyHKEYObject *_return_value; - _return_value = winreg_HKEYType___enter___impl(self); + _return_value = winreg_HKEYType___enter___impl((PyHKEYObject *)self); return_value = (PyObject *)_return_value; return return_value; @@ -105,7 +105,7 @@ winreg_HKEYType___exit___impl(PyHKEYObject *self, PyObject *exc_type, PyObject *exc_value, PyObject *traceback); static PyObject * -winreg_HKEYType___exit__(PyHKEYObject *self, PyObject *const *args, Py_ssize_t nargs) +winreg_HKEYType___exit__(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *exc_type; @@ -118,7 +118,7 @@ winreg_HKEYType___exit__(PyHKEYObject *self, PyObject *const *args, Py_ssize_t n exc_type = args[0]; exc_value = args[1]; traceback = args[2]; - return_value = winreg_HKEYType___exit___impl(self, exc_type, exc_value, traceback); + return_value = winreg_HKEYType___exit___impl((PyHKEYObject *)self, exc_type, exc_value, traceback); exit: return return_value; @@ -1766,4 +1766,4 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg) #ifndef WINREG_QUERYREFLECTIONKEY_METHODDEF #define WINREG_QUERYREFLECTIONKEY_METHODDEF #endif /* !defined(WINREG_QUERYREFLECTIONKEY_METHODDEF) */ -/*[clinic end generated code: output=aef4aa8ab8ddf38f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=fbe9b075cd2fa833 input=a9049054013a1b77]*/ diff --git a/PC/pyconfig.h.in b/PC/pyconfig.h.in index 010f5fe5646630..f4f57c5d270028 100644 --- a/PC/pyconfig.h.in +++ b/PC/pyconfig.h.in @@ -753,4 +753,7 @@ Py_NO_ENABLE_SHARED to find out. 
Also support MS_NO_COREDLL for b/w compat */ /* Define if libssl has X509_VERIFY_PARAM_set1_host and related function */ #define HAVE_X509_VERIFY_PARAM_SET1_HOST 1 +// Truncate the thread name to 32766 characters. +#define _PYTHREAD_NAME_MAXLEN 32766 + #endif /* !Py_CONFIG_H */ diff --git a/PCbuild/_testlimitedcapi.vcxproj b/PCbuild/_testlimitedcapi.vcxproj index 0ea5edba3aa9a7..87abff52493098 100644 --- a/PCbuild/_testlimitedcapi.vcxproj +++ b/PCbuild/_testlimitedcapi.vcxproj @@ -103,6 +103,7 @@ + diff --git a/PCbuild/_testlimitedcapi.vcxproj.filters b/PCbuild/_testlimitedcapi.vcxproj.filters index b379090eb599f5..a975a508506905 100644 --- a/PCbuild/_testlimitedcapi.vcxproj.filters +++ b/PCbuild/_testlimitedcapi.vcxproj.filters @@ -18,6 +18,7 @@ + diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props index 17abfa85201a90..dbdb6b743bea37 100644 --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -53,7 +53,6 @@ $(PySourcePath)Include;$(PySourcePath)Include\internal;$(PySourcePath)Include\internal\mimalloc;$(GeneratedPyConfigDir);$(PySourcePath)PC;%(AdditionalIncludeDirectories) WIN32;$(_Py3NamePreprocessorDefinition);$(_PlatformPreprocessorDefinition)$(_DebugPreprocessorDefinition)$(_PyStatsPreprocessorDefinition)$(_PydPreprocessorDefinition)%(PreprocessorDefinitions) - _Py_USING_PGO=1;%(PreprocessorDefinitions) MaxSpeed true diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index 5ac1dd7813689c..2fe8d11badcbac 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -969,8 +969,6 @@ _PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) return result_token_with_metadata(p, conv, conv_token->metadata); } -static asdl_expr_seq * -unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions); ResultTokenWithMetadata * _PyPegen_setup_full_format_spec(Parser *p, Token *colon, asdl_expr_seq *spec, int lineno, int col_offset, int end_lineno, int end_col_offset, PyArena *arena) @@ -1279,9 +1277,9 @@ _PyPegen_decode_fstring_part(Parser* p, int is_raw, expr_ty constant, Token* tok p->arena); } -static asdl_expr_seq * -unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) -{ +expr_ty +_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* expr, Token*b) { + /* The parser might put multiple f-string values into an individual * JoinedStr node at the top level due to stuff like f-string debugging * expressions. This function flattens those and promotes them to the @@ -1289,44 +1287,14 @@ unpack_top_level_joined_strs(Parser *p, asdl_expr_seq *raw_expressions) * of the regular output, so this is not necessary if you are not going * to expose the output AST to Python level. 
*/ - Py_ssize_t i, req_size, raw_size; - - req_size = raw_size = asdl_seq_LEN(raw_expressions); - expr_ty expr; - for (i = 0; i < raw_size; i++) { - expr = asdl_seq_GET(raw_expressions, i); - if (expr->kind == JoinedStr_kind) { - req_size += asdl_seq_LEN(expr->v.JoinedStr.values) - 1; - } - } - - asdl_expr_seq *expressions = _Py_asdl_expr_seq_new(req_size, p->arena); - if (expressions == NULL) { - return NULL; - } - - Py_ssize_t raw_index, req_index = 0; - for (raw_index = 0; raw_index < raw_size; raw_index++) { - expr = asdl_seq_GET(raw_expressions, raw_index); - if (expr->kind == JoinedStr_kind) { - asdl_expr_seq *values = expr->v.JoinedStr.values; - for (Py_ssize_t n = 0; n < asdl_seq_LEN(values); n++) { - asdl_seq_SET(expressions, req_index, asdl_seq_GET(values, n)); - req_index++; - } - } else { - asdl_seq_SET(expressions, req_index, expr); - req_index++; + Py_ssize_t n_items = asdl_seq_LEN(expr); + Py_ssize_t total_items = n_items; + for (Py_ssize_t i = 0; i < n_items; i++) { + expr_ty item = asdl_seq_GET(expr, i); + if (item->kind == JoinedStr_kind) { + total_items += asdl_seq_LEN(item->v.JoinedStr.values) - 1; } } - return expressions; -} - -expr_ty -_PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b) { - - asdl_expr_seq *expr = unpack_top_level_joined_strs(p, raw_expressions); - Py_ssize_t n_items = asdl_seq_LEN(expr); const char* quote_str = PyBytes_AsString(a->bytes); if (quote_str == NULL) { @@ -1334,7 +1302,7 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b } int is_raw = strpbrk(quote_str, "rR") != NULL; - asdl_expr_seq *seq = _Py_asdl_expr_seq_new(n_items, p->arena); + asdl_expr_seq *seq = _Py_asdl_expr_seq_new(total_items, p->arena); if (seq == NULL) { return NULL; } @@ -1342,6 +1310,31 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b Py_ssize_t index = 0; for (Py_ssize_t i = 0; i < n_items; i++) { expr_ty item = asdl_seq_GET(expr, i); + + // This should correspond to a JoinedStr node of two elements + // created _PyPegen_formatted_value. This situation can only be the result of + // a f-string debug expression where the first element is a constant with the text and the second + // a formatted value with the expression. 
+ if (item->kind == JoinedStr_kind) { + asdl_expr_seq *values = item->v.JoinedStr.values; + if (asdl_seq_LEN(values) != 2) { + PyErr_Format(PyExc_SystemError, + "unexpected JoinedStr node without debug data in f-string at line %d", + item->lineno); + return NULL; + } + + expr_ty first = asdl_seq_GET(values, 0); + assert(first->kind == Constant_kind); + asdl_seq_SET(seq, index++, first); + + expr_ty second = asdl_seq_GET(values, 1); + assert(second->kind == FormattedValue_kind); + asdl_seq_SET(seq, index++, second); + + continue; + } + if (item->kind == Constant_kind) { item = _PyPegen_decode_fstring_part(p, is_raw, item, b); if (item == NULL) { @@ -1360,7 +1353,7 @@ _PyPegen_joined_str(Parser *p, Token* a, asdl_expr_seq* raw_expressions, Token*b } asdl_expr_seq *resized_exprs; - if (index != n_items) { + if (index != total_items) { resized_exprs = _Py_asdl_expr_seq_new(index, p->arena); if (resized_exprs == NULL) { return NULL; diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py index 853a3e99807bca..7b2df738119115 100755 --- a/Parser/asdl_c.py +++ b/Parser/asdl_c.py @@ -1462,10 +1462,11 @@ def visitModule(self, mod): return PyObject_Repr(list); } - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; PyObject *items[2] = {NULL, NULL}; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } items[0] = PySequence_GetItem(list, 0); if (!items[0]) { @@ -1479,52 +1480,54 @@ def visitModule(self, mod): } bool is_list = PyList_Check(list); - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? '[' : '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? '[' : '(') < 0) { goto error; } for (Py_ssize_t i = 0; i < Py_MIN(length, 2); i++) { - PyObject *item = items[i]; - PyObject *item_repr; + if (i > 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { + goto error; + } + } + PyObject *item = items[i]; if (PyType_IsSubtype(Py_TYPE(item), (PyTypeObject *)state->AST_type)) { + PyObject *item_repr; item_repr = ast_repr_max_depth((AST_object*)item, depth - 1); - } else { - item_repr = PyObject_Repr(item); - } - if (!item_repr) { - goto error; - } - if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (!item_repr) { + goto error; + } + if (PyUnicodeWriter_WriteStr(writer, item_repr) < 0) { + Py_DECREF(item_repr); goto error; } - } - if (_PyUnicodeWriter_WriteStr(&writer, item_repr) < 0) { Py_DECREF(item_repr); - goto error; + } else { + if (PyUnicodeWriter_WriteRepr(writer, item) < 0) { + goto error; + } } + if (i == 0 && length > 2) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ...", 5) < 0) { - Py_DECREF(item_repr); + if (PyUnicodeWriter_WriteUTF8(writer, ", ...", 5) < 0) { goto error; } } - Py_DECREF(item_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? ']' : ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? 
']' : ')') < 0) { goto error; } Py_XDECREF(items[0]); Py_XDECREF(items[1]); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_XDECREF(items[0]); Py_XDECREF(items[1]); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } @@ -1568,14 +1571,15 @@ def visitModule(self, mod): } const char* tp_name = Py_TYPE(self)->tp_name; - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } - if (_PyUnicodeWriter_WriteASCIIString(&writer, tp_name, strlen(tp_name)) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, tp_name, -1) < 0) { goto error; } - if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '(') < 0) { goto error; } @@ -1610,13 +1614,13 @@ def visitModule(self, mod): } if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; } } - if (_PyUnicodeWriter_WriteStr(&writer, name) < 0) { + if (PyUnicodeWriter_WriteStr(writer, name) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; @@ -1624,11 +1628,11 @@ def visitModule(self, mod): Py_DECREF(name); - if (_PyUnicodeWriter_WriteChar(&writer, '=') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '=') < 0) { Py_DECREF(value_repr); goto error; } - if (_PyUnicodeWriter_WriteStr(&writer, value_repr) < 0) { + if (PyUnicodeWriter_WriteStr(writer, value_repr) < 0) { Py_DECREF(value_repr); goto error; } @@ -1636,17 +1640,17 @@ def visitModule(self, mod): Py_DECREF(value_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, ')') < 0) { goto error; } Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Parser/lexer/lexer.c b/Parser/lexer/lexer.c index dbbb94a407c81d..8c5ae37fa90860 100644 --- a/Parser/lexer/lexer.c +++ b/Parser/lexer/lexer.c @@ -212,9 +212,7 @@ _PyLexer_update_fstring_expr(struct tok_state *tok, char cur) case '}': case '!': case ':': - if (tok_mode->last_expr_end == -1) { - tok_mode->last_expr_end = strlen(tok->start); - } + tok_mode->last_expr_end = strlen(tok->start); break; default: Py_UNREACHABLE(); diff --git a/Parser/string_parser.c b/Parser/string_parser.c index 9537c543b0eb93..9dd8f9ef28bd4f 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -28,9 +28,16 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token int octal = ('4' <= c && c <= '7'); PyObject *msg = octal - ? PyUnicode_FromFormat("invalid octal escape sequence '\\%.3s'", - first_invalid_escape) - : PyUnicode_FromFormat("invalid escape sequence '\\%c'", c); + ? PyUnicode_FromFormat( + "\"\\%.3s\" is an invalid octal escape sequence. " + "Such sequences will not work in the future. " + "Did you mean \"\\\\%.3s\"? A raw string is also an option.", + first_invalid_escape, first_invalid_escape) + : PyUnicode_FromFormat( + "\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. " + "Did you mean \"\\\\%c\"? 
A raw string is also an option.", + c, c); if (msg == NULL) { return -1; } @@ -53,11 +60,16 @@ warn_invalid_escape_sequence(Parser *p, const char *first_invalid_escape, Token error location, if p->known_err_token is not set. */ p->known_err_token = t; if (octal) { - RAISE_SYNTAX_ERROR("invalid octal escape sequence '\\%.3s'", - first_invalid_escape); + RAISE_SYNTAX_ERROR( + "\"\\%.3s\" is an invalid octal escape sequence. " + "Did you mean \"\\\\%.3s\"? A raw string is also an option.", + first_invalid_escape, first_invalid_escape); } else { - RAISE_SYNTAX_ERROR("invalid escape sequence '\\%c'", c); + RAISE_SYNTAX_ERROR( + "\"\\%c\" is an invalid escape sequence. " + "Did you mean \"\\\\%c\"? A raw string is also an option.", + c, c); } } Py_DECREF(msg); diff --git a/Parser/tokenizer/helpers.c b/Parser/tokenizer/helpers.c index 9c9d05bbef0f1a..5a416adb875aa1 100644 --- a/Parser/tokenizer/helpers.c +++ b/Parser/tokenizer/helpers.c @@ -113,7 +113,10 @@ _PyTokenizer_warn_invalid_escape_sequence(struct tok_state *tok, int first_inval } PyObject *msg = PyUnicode_FromFormat( - "invalid escape sequence '\\%c'", + "\"\\%c\" is an invalid escape sequence. " + "Such sequences will not work in the future. " + "Did you mean \"\\\\%c\"? A raw string is also an option.", + (char) first_invalid_escape_char, (char) first_invalid_escape_char ); @@ -129,7 +132,12 @@ _PyTokenizer_warn_invalid_escape_sequence(struct tok_state *tok, int first_inval /* Replace the SyntaxWarning exception with a SyntaxError to get a more accurate error report */ PyErr_Clear(); - return _PyTokenizer_syntaxerror(tok, "invalid escape sequence '\\%c'", (char) first_invalid_escape_char); + + return _PyTokenizer_syntaxerror(tok, + "\"\\%c\" is an invalid escape sequence. " + "Did you mean \"\\\\%c\"? 
A raw string is also an option.", + (char) first_invalid_escape_char, + (char) first_invalid_escape_char); } return -1; diff --git a/Programs/_testembed.c b/Programs/_testembed.c index d15dd519dbf6af..6f6d0cae58010e 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -1791,55 +1791,6 @@ static int test_init_warnoptions(void) } -static int tune_config(void) -{ - PyConfig config; - PyConfig_InitPythonConfig(&config); - if (_PyInterpreterState_GetConfigCopy(&config) < 0) { - PyConfig_Clear(&config); - PyErr_Print(); - return -1; - } - - config.bytes_warning = 2; - - if (_PyInterpreterState_SetConfig(&config) < 0) { - PyConfig_Clear(&config); - return -1; - } - PyConfig_Clear(&config); - return 0; -} - - -static int test_init_set_config(void) -{ - // Initialize core - PyConfig config; - PyConfig_InitIsolatedConfig(&config); - config_set_string(&config, &config.program_name, PROGRAM_NAME); - config._init_main = 0; - config.bytes_warning = 0; - init_from_config_clear(&config); - - // Tune the configuration using _PyInterpreterState_SetConfig() - if (tune_config() < 0) { - PyErr_Print(); - return 1; - } - - // Finish initialization: main part - PyStatus status = _Py_InitializeMain(); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - - dump_config(); - Py_Finalize(); - return 0; -} - - static int initconfig_getint(PyInitConfig *config, const char *name) { int64_t value; @@ -2089,33 +2040,6 @@ static int test_init_run_main(void) } -static int test_init_main(void) -{ - PyConfig config; - PyConfig_InitPythonConfig(&config); - - configure_init_main(&config); - config._init_main = 0; - init_from_config_clear(&config); - - /* sys.stdout don't exist yet: it is created by _Py_InitializeMain() */ - int res = PyRun_SimpleString( - "import sys; " - "print('Run Python code before _Py_InitializeMain', " - "file=sys.stderr)"); - if (res < 0) { - exit(1); - } - - PyStatus status = _Py_InitializeMain(); - if (PyStatus_Exception(status)) { - Py_ExitStatusException(status); - } - - return Py_RunMain(); -} - - static int test_run_main(void) { PyConfig config; @@ -2473,14 +2397,12 @@ static struct TestCase TestCases[] = { {"test_preinit_dont_parse_argv", test_preinit_dont_parse_argv}, {"test_init_read_set", test_init_read_set}, {"test_init_run_main", test_init_run_main}, - {"test_init_main", test_init_main}, {"test_init_sys_add", test_init_sys_add}, {"test_init_setpath", test_init_setpath}, {"test_init_setpath_config", test_init_setpath_config}, {"test_init_setpythonhome", test_init_setpythonhome}, {"test_init_is_python_build", test_init_is_python_build}, {"test_init_warnoptions", test_init_warnoptions}, - {"test_init_set_config", test_init_set_config}, {"test_initconfig_api", test_initconfig_api}, {"test_initconfig_get_api", test_initconfig_get_api}, {"test_initconfig_exit", test_initconfig_exit}, diff --git a/Python/Python-ast.c b/Python/Python-ast.c index 41299b29705848..7038e3c92ab8f0 100644 --- a/Python/Python-ast.c +++ b/Python/Python-ast.c @@ -5661,10 +5661,11 @@ ast_repr_list(PyObject *list, int depth) return PyObject_Repr(list); } - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; PyObject *items[2] = {NULL, NULL}; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } items[0] = PySequence_GetItem(list, 0); if (!items[0]) { @@ -5678,52 +5679,54 @@ ast_repr_list(PyObject *list, int depth) } bool is_list = PyList_Check(list); - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? 
'[' : '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? '[' : '(') < 0) { goto error; } for (Py_ssize_t i = 0; i < Py_MIN(length, 2); i++) { - PyObject *item = items[i]; - PyObject *item_repr; + if (i > 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { + goto error; + } + } + PyObject *item = items[i]; if (PyType_IsSubtype(Py_TYPE(item), (PyTypeObject *)state->AST_type)) { + PyObject *item_repr; item_repr = ast_repr_max_depth((AST_object*)item, depth - 1); - } else { - item_repr = PyObject_Repr(item); - } - if (!item_repr) { - goto error; - } - if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (!item_repr) { + goto error; + } + if (PyUnicodeWriter_WriteStr(writer, item_repr) < 0) { + Py_DECREF(item_repr); goto error; } - } - if (_PyUnicodeWriter_WriteStr(&writer, item_repr) < 0) { Py_DECREF(item_repr); - goto error; + } else { + if (PyUnicodeWriter_WriteRepr(writer, item) < 0) { + goto error; + } } + if (i == 0 && length > 2) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ...", 5) < 0) { - Py_DECREF(item_repr); + if (PyUnicodeWriter_WriteUTF8(writer, ", ...", 5) < 0) { goto error; } } - Py_DECREF(item_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, is_list ? ']' : ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, is_list ? ']' : ')') < 0) { goto error; } Py_XDECREF(items[0]); Py_XDECREF(items[1]); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_XDECREF(items[0]); Py_XDECREF(items[1]); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } @@ -5767,14 +5770,15 @@ ast_repr_max_depth(AST_object *self, int depth) } const char* tp_name = Py_TYPE(self)->tp_name; - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.overallocate = 1; + PyUnicodeWriter *writer = PyUnicodeWriter_Create(0); + if (writer == NULL) { + goto error; + } - if (_PyUnicodeWriter_WriteASCIIString(&writer, tp_name, strlen(tp_name)) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, tp_name, -1) < 0) { goto error; } - if (_PyUnicodeWriter_WriteChar(&writer, '(') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '(') < 0) { goto error; } @@ -5809,13 +5813,13 @@ ast_repr_max_depth(AST_object *self, int depth) } if (i > 0) { - if (_PyUnicodeWriter_WriteASCIIString(&writer, ", ", 2) < 0) { + if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; } } - if (_PyUnicodeWriter_WriteStr(&writer, name) < 0) { + if (PyUnicodeWriter_WriteStr(writer, name) < 0) { Py_DECREF(name); Py_DECREF(value_repr); goto error; @@ -5823,11 +5827,11 @@ ast_repr_max_depth(AST_object *self, int depth) Py_DECREF(name); - if (_PyUnicodeWriter_WriteChar(&writer, '=') < 0) { + if (PyUnicodeWriter_WriteChar(writer, '=') < 0) { Py_DECREF(value_repr); goto error; } - if (_PyUnicodeWriter_WriteStr(&writer, value_repr) < 0) { + if (PyUnicodeWriter_WriteStr(writer, value_repr) < 0) { Py_DECREF(value_repr); goto error; } @@ -5835,17 +5839,17 @@ ast_repr_max_depth(AST_object *self, int depth) Py_DECREF(value_repr); } - if (_PyUnicodeWriter_WriteChar(&writer, ')') < 0) { + if (PyUnicodeWriter_WriteChar(writer, ')') < 0) { goto error; } Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); error: Py_ReprLeave((PyObject *)self); Py_DECREF(fields); - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter_Discard(writer); return NULL; } diff --git a/Python/ast_unparse.c b/Python/ast_unparse.c 
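The asdl_c.py and Python-ast.c hunks above, and the ast_unparse.c change that follows, all port repr and unparse building from the private _PyUnicodeWriter struct to the public, heap-allocated PyUnicodeWriter. A minimal sketch of the new calling pattern, using only functions that appear in these hunks; the helper name and the "spam(...)" output are illustrative:

#include <Python.h>

/* Build the string "spam(<repr of obj>)" with the public writer API.
   The writer must be consumed exactly once: Finish() on success or
   Discard() on error. */
static PyObject *
writer_sketch(PyObject *obj)
{
    PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
    if (writer == NULL) {
        return NULL;
    }
    if (PyUnicodeWriter_WriteUTF8(writer, "spam(", -1) < 0
        || PyUnicodeWriter_WriteRepr(writer, obj) < 0
        || PyUnicodeWriter_WriteChar(writer, ')') < 0)
    {
        PyUnicodeWriter_Discard(writer);
        return NULL;
    }
    return PyUnicodeWriter_Finish(writer);
}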
index 8017cfc7fcf268..f3c669c33eb07c 100644 --- a/Python/ast_unparse.c +++ b/Python/ast_unparse.c @@ -16,24 +16,40 @@ _Py_DECLARE_STR(dbl_close_br, "}}"); static PyObject * expr_as_unicode(expr_ty e, int level); static int -append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level); +append_ast_expr(PyUnicodeWriter *writer, expr_ty e, int level); static int -append_joinedstr(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec); +append_joinedstr(PyUnicodeWriter *writer, expr_ty e, bool is_format_spec); static int -append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e); +append_formattedvalue(PyUnicodeWriter *writer, expr_ty e); static int -append_ast_slice(_PyUnicodeWriter *writer, expr_ty e); +append_ast_slice(PyUnicodeWriter *writer, expr_ty e); static int -append_charp(_PyUnicodeWriter *writer, const char *charp) +append_char(PyUnicodeWriter *writer, Py_UCS4 ch) { - return _PyUnicodeWriter_WriteASCIIString(writer, charp, -1); + return PyUnicodeWriter_WriteChar(writer, ch); } +static int +append_charp(PyUnicodeWriter *writer, const char *charp) +{ + return PyUnicodeWriter_WriteUTF8(writer, charp, -1); +} + +#define APPEND_CHAR_FINISH(ch) do { \ + return append_char(writer, (ch)); \ + } while (0) + #define APPEND_STR_FINISH(str) do { \ return append_charp(writer, (str)); \ } while (0) +#define APPEND_CHAR(ch) do { \ + if (-1 == append_char(writer, (ch))) { \ + return -1; \ + } \ + } while (0) + #define APPEND_STR(str) do { \ if (-1 == append_charp(writer, (str))) { \ return -1; \ @@ -64,10 +80,9 @@ append_charp(_PyUnicodeWriter *writer, const char *charp) } while (0) static int -append_repr(_PyUnicodeWriter *writer, PyObject *obj) +append_repr(PyUnicodeWriter *writer, PyObject *obj) { PyObject *repr = PyObject_Repr(obj); - if (!repr) { return -1; } @@ -88,7 +103,8 @@ append_repr(_PyUnicodeWriter *writer, PyObject *obj) } repr = new_repr; } - int ret = _PyUnicodeWriter_WriteStr(writer, repr); + + int ret = PyUnicodeWriter_WriteStr(writer, repr); Py_DECREF(repr); return ret; } @@ -117,7 +133,7 @@ enum { }; static int -append_ast_boolop(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_boolop(PyUnicodeWriter *writer, expr_ty e, int level) { Py_ssize_t i, value_count; asdl_expr_seq *values; @@ -139,7 +155,7 @@ append_ast_boolop(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_binop(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_binop(PyUnicodeWriter *writer, expr_ty e, int level) { const char *op; int pr; @@ -174,7 +190,7 @@ append_ast_binop(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_unaryop(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_unaryop(PyUnicodeWriter *writer, expr_ty e, int level) { const char *op; int pr; @@ -198,9 +214,9 @@ append_ast_unaryop(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_arg(_PyUnicodeWriter *writer, arg_ty arg) +append_ast_arg(PyUnicodeWriter *writer, arg_ty arg) { - if (-1 == _PyUnicodeWriter_WriteStr(writer, arg->arg)) { + if (PyUnicodeWriter_WriteStr(writer, arg->arg) < 0) { return -1; } if (arg->annotation) { @@ -211,7 +227,7 @@ append_ast_arg(_PyUnicodeWriter *writer, arg_ty arg) } static int -append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) +append_ast_args(PyUnicodeWriter *writer, arguments_ty args) { bool first; Py_ssize_t i, di, arg_count, posonlyarg_count, default_count; @@ -232,7 +248,7 @@ append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) di = i - posonlyarg_count - arg_count + default_count; if (di 
>= 0) { - APPEND_STR("="); + APPEND_CHAR('='); APPEND_EXPR((expr_ty)asdl_seq_GET(args->defaults, di), PR_TEST); } if (posonlyarg_count && i + 1 == posonlyarg_count) { @@ -260,7 +276,7 @@ append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) if (di >= 0) { expr_ty default_ = (expr_ty)asdl_seq_GET(args->kw_defaults, di); if (default_) { - APPEND_STR("="); + APPEND_CHAR('='); APPEND_EXPR(default_, PR_TEST); } } @@ -277,7 +293,7 @@ append_ast_args(_PyUnicodeWriter *writer, arguments_ty args) } static int -append_ast_lambda(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_lambda(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_TEST, "("); Py_ssize_t n_positional = (asdl_seq_LEN(e->v.Lambda.args->args) + @@ -291,7 +307,7 @@ append_ast_lambda(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_ifexp(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_ifexp(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_TEST, "("); APPEND_EXPR(e->v.IfExp.body, PR_TEST + 1); @@ -304,12 +320,12 @@ append_ast_ifexp(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_dict(_PyUnicodeWriter *writer, expr_ty e) +append_ast_dict(PyUnicodeWriter *writer, expr_ty e) { Py_ssize_t i, value_count; expr_ty key_node; - APPEND_STR("{"); + APPEND_CHAR('{'); value_count = asdl_seq_LEN(e->v.Dict.values); for (i = 0; i < value_count; i++) { @@ -326,41 +342,41 @@ append_ast_dict(_PyUnicodeWriter *writer, expr_ty e) } } - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_set(_PyUnicodeWriter *writer, expr_ty e) +append_ast_set(PyUnicodeWriter *writer, expr_ty e) { Py_ssize_t i, elem_count; - APPEND_STR("{"); + APPEND_CHAR('{'); elem_count = asdl_seq_LEN(e->v.Set.elts); for (i = 0; i < elem_count; i++) { APPEND_STR_IF(i > 0, ", "); APPEND_EXPR((expr_ty)asdl_seq_GET(e->v.Set.elts, i), PR_TEST); } - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_list(_PyUnicodeWriter *writer, expr_ty e) +append_ast_list(PyUnicodeWriter *writer, expr_ty e) { Py_ssize_t i, elem_count; - APPEND_STR("["); + APPEND_CHAR('['); elem_count = asdl_seq_LEN(e->v.List.elts); for (i = 0; i < elem_count; i++) { APPEND_STR_IF(i > 0, ", "); APPEND_EXPR((expr_ty)asdl_seq_GET(e->v.List.elts, i), PR_TEST); } - APPEND_STR_FINISH("]"); + APPEND_CHAR_FINISH(']'); } static int -append_ast_tuple(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_tuple(PyUnicodeWriter *writer, expr_ty e, int level) { Py_ssize_t i, elem_count; @@ -383,7 +399,7 @@ append_ast_tuple(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_comprehension(_PyUnicodeWriter *writer, comprehension_ty gen) +append_ast_comprehension(PyUnicodeWriter *writer, comprehension_ty gen) { Py_ssize_t i, if_count; @@ -401,7 +417,7 @@ append_ast_comprehension(_PyUnicodeWriter *writer, comprehension_ty gen) } static int -append_ast_comprehensions(_PyUnicodeWriter *writer, asdl_comprehension_seq *comprehensions) +append_ast_comprehensions(PyUnicodeWriter *writer, asdl_comprehension_seq *comprehensions) { Py_ssize_t i, gen_count; gen_count = asdl_seq_LEN(comprehensions); @@ -414,45 +430,45 @@ append_ast_comprehensions(_PyUnicodeWriter *writer, asdl_comprehension_seq *comp } static int -append_ast_genexp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_genexp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("("); + APPEND_CHAR('('); APPEND_EXPR(e->v.GeneratorExp.elt, PR_TEST); APPEND(comprehensions, 
e->v.GeneratorExp.generators); - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static int -append_ast_listcomp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_listcomp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("["); + APPEND_CHAR('['); APPEND_EXPR(e->v.ListComp.elt, PR_TEST); APPEND(comprehensions, e->v.ListComp.generators); - APPEND_STR_FINISH("]"); + APPEND_CHAR_FINISH(']'); } static int -append_ast_setcomp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_setcomp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("{"); + APPEND_CHAR('{'); APPEND_EXPR(e->v.SetComp.elt, PR_TEST); APPEND(comprehensions, e->v.SetComp.generators); - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_dictcomp(_PyUnicodeWriter *writer, expr_ty e) +append_ast_dictcomp(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("{"); + APPEND_CHAR('{'); APPEND_EXPR(e->v.DictComp.key, PR_TEST); APPEND_STR(": "); APPEND_EXPR(e->v.DictComp.value, PR_TEST); APPEND(comprehensions, e->v.DictComp.generators); - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_compare(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_compare(PyUnicodeWriter *writer, expr_ty e, int level) { const char *op; Py_ssize_t i, comparator_count; @@ -516,17 +532,17 @@ append_ast_compare(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_keyword(_PyUnicodeWriter *writer, keyword_ty kw) +append_ast_keyword(PyUnicodeWriter *writer, keyword_ty kw) { if (kw->arg == NULL) { APPEND_STR("**"); } else { - if (-1 == _PyUnicodeWriter_WriteStr(writer, kw->arg)) { + if (-1 == PyUnicodeWriter_WriteStr(writer, kw->arg)) { return -1; } - APPEND_STR("="); + APPEND_CHAR('='); } APPEND_EXPR(kw->value, PR_TEST); @@ -534,7 +550,7 @@ append_ast_keyword(_PyUnicodeWriter *writer, keyword_ty kw) } static int -append_ast_call(_PyUnicodeWriter *writer, expr_ty e) +append_ast_call(PyUnicodeWriter *writer, expr_ty e) { bool first; Py_ssize_t i, arg_count, kw_count; @@ -552,7 +568,7 @@ append_ast_call(_PyUnicodeWriter *writer, expr_ty e) } } - APPEND_STR("("); + APPEND_CHAR('('); first = true; for (i = 0; i < arg_count; i++) { @@ -565,7 +581,7 @@ append_ast_call(_PyUnicodeWriter *writer, expr_ty e) APPEND(keyword, (keyword_ty)asdl_seq_GET(e->v.Call.keywords, i)); } - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static PyObject * @@ -585,20 +601,20 @@ escape_braces(PyObject *orig) } static int -append_fstring_unicode(_PyUnicodeWriter *writer, PyObject *unicode) +append_fstring_unicode(PyUnicodeWriter *writer, PyObject *unicode) { PyObject *escaped; int result = -1; escaped = escape_braces(unicode); if (escaped) { - result = _PyUnicodeWriter_WriteStr(writer, escaped); + result = PyUnicodeWriter_WriteStr(writer, escaped); Py_DECREF(escaped); } return result; } static int -append_fstring_element(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) +append_fstring_element(PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) { switch (e->kind) { case Constant_kind: @@ -619,28 +635,27 @@ append_fstring_element(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) static PyObject * build_fstring_body(asdl_expr_seq *values, bool is_format_spec) { - Py_ssize_t i, value_count; - _PyUnicodeWriter body_writer; - _PyUnicodeWriter_Init(&body_writer); - body_writer.min_length = 256; - body_writer.overallocate = 1; + PyUnicodeWriter *body_writer = PyUnicodeWriter_Create(256); + if (body_writer == NULL) { + return NULL; + } - value_count = asdl_seq_LEN(values); - for (i = 0; i < 
value_count; ++i) { - if (-1 == append_fstring_element(&body_writer, + Py_ssize_t value_count = asdl_seq_LEN(values); + for (Py_ssize_t i = 0; i < value_count; ++i) { + if (-1 == append_fstring_element(body_writer, (expr_ty)asdl_seq_GET(values, i), is_format_spec )) { - _PyUnicodeWriter_Dealloc(&body_writer); + PyUnicodeWriter_Discard(body_writer); return NULL; } } - return _PyUnicodeWriter_Finish(&body_writer); + return PyUnicodeWriter_Finish(body_writer); } static int -append_joinedstr(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) +append_joinedstr(PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) { int result = -1; PyObject *body = build_fstring_body(e->v.JoinedStr.values, is_format_spec); @@ -656,14 +671,14 @@ append_joinedstr(_PyUnicodeWriter *writer, expr_ty e, bool is_format_spec) } } else { - result = _PyUnicodeWriter_WriteStr(writer, body); + result = PyUnicodeWriter_WriteStr(writer, body); } Py_DECREF(body); return result; } static int -append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) +append_formattedvalue(PyUnicodeWriter *writer, expr_ty e) { const char *conversion; const char *outer_brace = "{"; @@ -682,7 +697,7 @@ append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) Py_DECREF(temp_fv_str); return -1; } - if (-1 == _PyUnicodeWriter_WriteStr(writer, temp_fv_str)) { + if (-1 == PyUnicodeWriter_WriteStr(writer, temp_fv_str)) { Py_DECREF(temp_fv_str); return -1; } @@ -707,7 +722,7 @@ append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) APPEND_STR(conversion); } if (e->v.FormattedValue.format_spec) { - if (-1 == _PyUnicodeWriter_WriteASCIIString(writer, ":", 1) || + if (-1 == PyUnicodeWriter_WriteChar(writer, ':') || -1 == append_fstring_element(writer, e->v.FormattedValue.format_spec, true @@ -717,17 +732,17 @@ append_formattedvalue(_PyUnicodeWriter *writer, expr_ty e) } } - APPEND_STR_FINISH("}"); + APPEND_CHAR_FINISH('}'); } static int -append_ast_constant(_PyUnicodeWriter *writer, PyObject *constant) +append_ast_constant(PyUnicodeWriter *writer, PyObject *constant) { if (PyTuple_CheckExact(constant)) { Py_ssize_t i, elem_count; elem_count = PyTuple_GET_SIZE(constant); - APPEND_STR("("); + APPEND_CHAR('('); for (i = 0; i < elem_count; i++) { APPEND_STR_IF(i > 0, ", "); if (append_ast_constant(writer, PyTuple_GET_ITEM(constant, i)) < 0) { @@ -736,14 +751,13 @@ append_ast_constant(_PyUnicodeWriter *writer, PyObject *constant) } APPEND_STR_IF(elem_count == 1, ","); - APPEND_STR(")"); - return 0; + APPEND_CHAR_FINISH(')'); } return append_repr(writer, constant); } static int -append_ast_attribute(_PyUnicodeWriter *writer, expr_ty e) +append_ast_attribute(PyUnicodeWriter *writer, expr_ty e) { const char *period; expr_ty v = e->v.Attribute.value; @@ -759,48 +773,48 @@ append_ast_attribute(_PyUnicodeWriter *writer, expr_ty e) } APPEND_STR(period); - return _PyUnicodeWriter_WriteStr(writer, e->v.Attribute.attr); + return PyUnicodeWriter_WriteStr(writer, e->v.Attribute.attr); } static int -append_ast_slice(_PyUnicodeWriter *writer, expr_ty e) +append_ast_slice(PyUnicodeWriter *writer, expr_ty e) { if (e->v.Slice.lower) { APPEND_EXPR(e->v.Slice.lower, PR_TEST); } - APPEND_STR(":"); + APPEND_CHAR(':'); if (e->v.Slice.upper) { APPEND_EXPR(e->v.Slice.upper, PR_TEST); } if (e->v.Slice.step) { - APPEND_STR(":"); + APPEND_CHAR(':'); APPEND_EXPR(e->v.Slice.step, PR_TEST); } return 0; } static int -append_ast_subscript(_PyUnicodeWriter *writer, expr_ty e) +append_ast_subscript(PyUnicodeWriter *writer, expr_ty e) { APPEND_EXPR(e->v.Subscript.value, 
PR_ATOM); - APPEND_STR("["); + APPEND_CHAR('['); APPEND_EXPR(e->v.Subscript.slice, PR_TUPLE); - APPEND_STR_FINISH("]"); + APPEND_CHAR_FINISH(']'); } static int -append_ast_starred(_PyUnicodeWriter *writer, expr_ty e) +append_ast_starred(PyUnicodeWriter *writer, expr_ty e) { - APPEND_STR("*"); + APPEND_CHAR('*'); APPEND_EXPR(e->v.Starred.value, PR_EXPR); return 0; } static int -append_ast_yield(_PyUnicodeWriter *writer, expr_ty e) +append_ast_yield(PyUnicodeWriter *writer, expr_ty e) { if (!e->v.Yield.value) { APPEND_STR_FINISH("(yield)"); @@ -808,19 +822,19 @@ append_ast_yield(_PyUnicodeWriter *writer, expr_ty e) APPEND_STR("(yield "); APPEND_EXPR(e->v.Yield.value, PR_TEST); - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static int -append_ast_yield_from(_PyUnicodeWriter *writer, expr_ty e) +append_ast_yield_from(PyUnicodeWriter *writer, expr_ty e) { APPEND_STR("(yield from "); APPEND_EXPR(e->v.YieldFrom.value, PR_TEST); - APPEND_STR_FINISH(")"); + APPEND_CHAR_FINISH(')'); } static int -append_ast_await(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_await(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_AWAIT, "("); APPEND_STR("await "); @@ -830,7 +844,7 @@ append_ast_await(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_named_expr(_PyUnicodeWriter *writer, expr_ty e, int level) +append_named_expr(PyUnicodeWriter *writer, expr_ty e, int level) { APPEND_STR_IF(level > PR_TUPLE, "("); APPEND_EXPR(e->v.NamedExpr.target, PR_ATOM); @@ -841,7 +855,7 @@ append_named_expr(_PyUnicodeWriter *writer, expr_ty e, int level) } static int -append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) +append_ast_expr(PyUnicodeWriter *writer, expr_ty e, int level) { switch (e->kind) { case BoolOp_kind: @@ -881,7 +895,7 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) APPEND_STR_FINISH("..."); } if (e->v.Constant.kind != NULL - && -1 == _PyUnicodeWriter_WriteStr(writer, e->v.Constant.kind)) { + && -1 == PyUnicodeWriter_WriteStr(writer, e->v.Constant.kind)) { return -1; } return append_ast_constant(writer, e->v.Constant.value); @@ -899,7 +913,7 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) case Slice_kind: return append_ast_slice(writer, e); case Name_kind: - return _PyUnicodeWriter_WriteStr(writer, e->v.Name.id); + return PyUnicodeWriter_WriteStr(writer, e->v.Name.id); case List_kind: return append_ast_list(writer, e); case Tuple_kind: @@ -916,15 +930,16 @@ append_ast_expr(_PyUnicodeWriter *writer, expr_ty e, int level) static PyObject * expr_as_unicode(expr_ty e, int level) { - _PyUnicodeWriter writer; - _PyUnicodeWriter_Init(&writer); - writer.min_length = 256; - writer.overallocate = 1; - if (-1 == append_ast_expr(&writer, e, level)) { - _PyUnicodeWriter_Dealloc(&writer); + PyUnicodeWriter *writer = PyUnicodeWriter_Create(256); + if (writer == NULL) { + return NULL; + } + + if (-1 == append_ast_expr(writer, e, level)) { + PyUnicodeWriter_Discard(writer); return NULL; } - return _PyUnicodeWriter_Finish(&writer); + return PyUnicodeWriter_Finish(writer); } PyObject * diff --git a/Python/bytecodes.c b/Python/bytecodes.c index a906ded365650c..940ea9d4da6e41 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -53,6 +53,7 @@ #define super(name) static int SUPER_##name #define family(name, ...) 
static int family_##name #define pseudo(name) static int pseudo_##name +#define label(name) name: /* Annotations */ #define guard @@ -103,7 +104,6 @@ dummy_func( PyObject *codeobj; PyObject *cond; PyObject *descr; - _PyInterpreterFrame entry_frame; PyObject *exc; PyObject *exit; PyObject *fget; @@ -522,6 +522,7 @@ dummy_func( BINARY_OP_SUBTRACT_FLOAT, BINARY_OP_ADD_UNICODE, // BINARY_OP_INPLACE_ADD_UNICODE, // See comments at that opcode. + BINARY_OP_EXTEND, }; op(_GUARD_BOTH_INT, (left, right -- left, right)) { @@ -587,11 +588,11 @@ dummy_func( } macro(BINARY_OP_MULTIPLY_INT) = - _GUARD_BOTH_INT + unused/1 + _BINARY_OP_MULTIPLY_INT; + _GUARD_BOTH_INT + unused/5 + _BINARY_OP_MULTIPLY_INT; macro(BINARY_OP_ADD_INT) = - _GUARD_BOTH_INT + unused/1 + _BINARY_OP_ADD_INT; + _GUARD_BOTH_INT + unused/5 + _BINARY_OP_ADD_INT; macro(BINARY_OP_SUBTRACT_INT) = - _GUARD_BOTH_INT + unused/1 + _BINARY_OP_SUBTRACT_INT; + _GUARD_BOTH_INT + unused/5 + _BINARY_OP_SUBTRACT_INT; op(_GUARD_BOTH_FLOAT, (left, right -- left, right)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -659,11 +660,11 @@ dummy_func( } macro(BINARY_OP_MULTIPLY_FLOAT) = - _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_MULTIPLY_FLOAT; + _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_MULTIPLY_FLOAT; macro(BINARY_OP_ADD_FLOAT) = - _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_ADD_FLOAT; + _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_ADD_FLOAT; macro(BINARY_OP_SUBTRACT_FLOAT) = - _GUARD_BOTH_FLOAT + unused/1 + _BINARY_OP_SUBTRACT_FLOAT; + _GUARD_BOTH_FLOAT + unused/5 + _BINARY_OP_SUBTRACT_FLOAT; op(_GUARD_BOTH_UNICODE, (left, right -- left, right)) { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -689,7 +690,7 @@ dummy_func( } macro(BINARY_OP_ADD_UNICODE) = - _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_ADD_UNICODE; + _GUARD_BOTH_UNICODE + unused/5 + _BINARY_OP_ADD_UNICODE; // This is a subtle one. It's a super-instruction for // BINARY_OP_ADD_UNICODE followed by STORE_FAST @@ -741,8 +742,34 @@ dummy_func( #endif } + op(_GUARD_BINARY_OP_EXTEND, (descr/4, left, right -- left, right)) { + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + assert(d && d->guard); + int res = d->guard(left_o, right_o); + EXIT_IF(!res); + } + + pure op(_BINARY_OP_EXTEND, (descr/4, left, right -- res)) { + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + + STAT_INC(BINARY_OP, hit); + + PyObject *res_o = d->action(left_o, right_o); + DECREF_INPUTS(); + res = PyStackRef_FromPyObjectSteal(res_o); + } + + macro(BINARY_OP_EXTEND) = + unused/1 + _GUARD_BINARY_OP_EXTEND + rewind/-4 + _BINARY_OP_EXTEND; + macro(BINARY_OP_INPLACE_ADD_UNICODE) = - _GUARD_BOTH_UNICODE + unused/1 + _BINARY_OP_INPLACE_ADD_UNICODE; + _GUARD_BOTH_UNICODE + unused/5 + _BINARY_OP_INPLACE_ADD_UNICODE; family(BINARY_SUBSCR, INLINE_CACHE_ENTRIES_BINARY_SUBSCR) = { BINARY_SUBSCR_DICT, @@ -1063,7 +1090,7 @@ dummy_func( } tier1 inst(INTERPRETER_EXIT, (retval --)) { - assert(frame == &entry_frame); + assert(frame->owner == FRAME_OWNED_BY_INTERPRETER); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous frame and return. 
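The new BINARY_OP_EXTEND family above drives specialization through a descriptor whose guard and action members are called with the two operand objects. The real _PyBinaryOpSpecializationDescr lives in an internal header not shown here; the sketch below only restates the shape implied by the d->guard(...) and d->action(...) calls and should not be read as the full definition.

#include <Python.h>

/* Shape implied by the uses above: a cheap guard that says whether the
   specialization still applies, plus the specialized binary operation. */
typedef struct {
    int (*guard)(PyObject *lhs, PyObject *rhs);         /* nonzero: applies */
    PyObject *(*action)(PyObject *lhs, PyObject *rhs);  /* perform the op   */
} binary_op_descr_sketch;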
*/ tstate->current_frame = frame->previous; @@ -1078,9 +1105,7 @@ dummy_func( // retval is popped from the stack, but res // is pushed to a different frame, the callers' frame. inst(RETURN_VALUE, (retval -- res)) { - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; DEAD(retval); SAVE_STACK(); @@ -1178,7 +1203,7 @@ dummy_func( PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver); PyObject *retval_o; - assert(frame != &entry_frame); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); if ((tstate->interp->eval_frame == NULL) && (Py_TYPE(receiver_o) == &PyGen_Type || Py_TYPE(receiver_o) == &PyCoro_Type) && ((PyGenObject *)receiver_o)->gi_frame_state < FRAME_EXECUTING) @@ -1251,9 +1276,7 @@ dummy_func( // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -1832,16 +1855,20 @@ dummy_func( } inst(BUILD_TUPLE, (values[oparg] -- tup)) { - PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); + PyObject *tup_o = _PyTuple_FromStackRefStealOnSuccess(values, oparg); + if (tup_o == NULL) { + ERROR_NO_POP(); + } INPUTS_DEAD(); - ERROR_IF(tup_o == NULL, error); tup = PyStackRef_FromPyObjectSteal(tup_o); } inst(BUILD_LIST, (values[oparg] -- list)) { - PyObject *list_o = _PyList_FromStackRefSteal(values, oparg); + PyObject *list_o = _PyList_FromStackRefStealOnSuccess(values, oparg); + if (list_o == NULL) { + ERROR_NO_POP(); + } INPUTS_DEAD(); - ERROR_IF(list_o == NULL, error); list = PyStackRef_FromPyObjectSteal(list_o); } @@ -2690,7 +2717,7 @@ dummy_func( PyObject *match_o = NULL; PyObject *rest_o = NULL; - int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + int res = _PyEval_ExceptionGroupMatch(frame, exc_value, match_type, &match_o, &rest_o); DECREF_INPUTS(); ERROR_IF(res < 0, error); @@ -4742,7 +4769,7 @@ dummy_func( res = PyStackRef_FromPyObjectSteal(res_o); } - macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + _BINARY_OP; + macro(BINARY_OP) = _SPECIALIZE_BINARY_OP + unused/4 + _BINARY_OP; pure inst(SWAP, (bottom_in, unused[oparg-2], top_in -- top_out, unused[oparg-2], bottom_out)) { @@ -4757,7 +4784,8 @@ dummy_func( int original_opcode = 0; if (tstate->tracing) { PyCodeObject *code = _PyFrame_GetCode(frame); - original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyFrame_GetBytecode(frame))].original_opcode; + int index = (int)(this_instr - _PyFrame_GetBytecode(frame)); + original_opcode = code->_co_monitoring->lines->data[index*code->_co_monitoring->lines->bytes_per_entry]; next_instr = this_instr; } else { original_opcode = _Py_call_instrumentation_line( @@ -4934,7 +4962,7 @@ dummy_func( _Py_CODEUNIT *target = _PyFrame_GetBytecode(frame) + exit->target; #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("SIDE EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(", exit %u, temp %d, target %d -> %s]\n", @@ -5049,20 +5077,13 @@ dummy_func( DECREF_INPUTS(); } - /* Internal -- for testing executors */ - op(_INTERNAL_INCREMENT_OPT_COUNTER, (opt --)) { - _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject 
*)PyStackRef_AsPyObjectBorrow(opt); - exe->count++; - DEAD(opt); - } - tier2 op(_DYNAMIC_EXIT, (exit_p/4 --)) { tstate->previous_executor = (PyObject *)current_executor; _PyExitData *exit = (_PyExitData *)exit_p; _Py_CODEUNIT *target = frame->instr_ptr; #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("DYNAMIC EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(", exit %u, temp %d, target %d -> %s]\n", @@ -5146,6 +5167,125 @@ dummy_func( assert(tstate->tracing || eval_breaker == FT_ATOMIC_LOAD_UINTPTR_ACQUIRE(_PyFrame_GetCode(frame)->_co_instrumentation_version)); } + label(pop_4_error) { + STACK_SHRINK(1); + goto pop_3_error; + } + + label(pop_3_error) { + STACK_SHRINK(1); + goto pop_2_error; + } + + label(pop_2_error) { + STACK_SHRINK(1); + goto pop_1_error; + } + + label(pop_1_error) { + STACK_SHRINK(1); + goto error; + } + + label(error) { + /* Double-check exception status. */ +#ifdef NDEBUG + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_SystemError, + "error return without exception set"); + } +#else + assert(_PyErr_Occurred(tstate)); +#endif + + /* Log traceback info. */ + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + if (!_PyFrame_IsIncomplete(frame)) { + PyFrameObject *f = _PyFrame_GetFrameObject(frame); + if (f != NULL) { + PyTraceBack_Here(f); + } + } + _PyEval_MonitorRaise(tstate, frame, next_instr-1); + goto exception_unwind; + } + + label(exception_unwind) { + /* We can't use frame->instr_ptr here, as RERAISE may have set it */ + int offset = INSTR_OFFSET()-1; + int level, handler, lasti; + if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { + // No handlers, so exit. + assert(_PyErr_Occurred(tstate)); + + /* Pop remaining stack entries. */ + _PyStackRef *stackbase = _PyFrame_Stackbase(frame); + while (stack_pointer > stackbase) { + PyStackRef_XCLOSE(POP()); + } + assert(STACK_LEVEL() == 0); + _PyFrame_SetStackPointer(frame, stack_pointer); + monitor_unwind(tstate, frame, next_instr-1); + goto exit_unwind; + } + + assert(STACK_LEVEL() >= level); + _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; + while (stack_pointer > new_top) { + PyStackRef_XCLOSE(POP()); + } + if (lasti) { + int frame_lasti = _PyInterpreterFrame_LASTI(frame); + PyObject *lasti = PyLong_FromLong(frame_lasti); + if (lasti == NULL) { + goto exception_unwind; + } + PUSH(PyStackRef_FromPyObjectSteal(lasti)); + } + + /* Make the raw exception data + available to the handler, + so a program can emulate the + Python main loop. 
*/ + PyObject *exc = _PyErr_GetRaisedException(tstate); + PUSH(PyStackRef_FromPyObjectSteal(exc)); + next_instr = _PyFrame_GetBytecode(frame) + handler; + + if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + goto exception_unwind; + } + /* Resume normal execution */ +#ifdef LLTRACE + if (frame->lltrace >= 5) { + lltrace_resume_frame(frame); + } +#endif + DISPATCH(); + } + + label(exit_unwind) { + assert(_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = tstate->current_frame = dying->previous; + _PyEval_FrameClearAndPop(tstate, dying); + frame->return_offset = 0; + if (frame->owner == FRAME_OWNED_BY_INTERPRETER) { + /* Restore previous frame and exit */ + tstate->current_frame = frame->previous; + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; + return NULL; + } + goto resume_with_error; + } + + label(resume_with_error) { + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + goto error; + } // END BYTECODES // } diff --git a/Python/ceval.c b/Python/ceval.c index e0362c3c89fe6a..10c20faf852479 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -27,6 +27,7 @@ #include "pycore_range.h" // _PyRangeIterObject #include "pycore_setobject.h" // _PySet_Update() #include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs +#include "pycore_traceback.h" // _PyTraceBack_FromFrame #include "pycore_tuple.h" // _PyTuple_ITEMS() #include "pycore_uop_ids.h" // Uops #include "pycore_pyerrors.h" @@ -178,7 +179,7 @@ lltrace_instruction(_PyInterpreterFrame *frame, int opcode, int oparg) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { return; } dump_stack(frame, stack_pointer); @@ -229,12 +230,12 @@ lltrace_resume_frame(_PyInterpreterFrame *frame) } static int -maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, _PyInterpreterFrame *skip_frame, PyObject *globals) +maybe_lltrace_resume_frame(_PyInterpreterFrame *frame, PyObject *globals) { if (globals == NULL) { return 0; } - if (frame == skip_frame) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { return 0; } int r = PyDict_Contains(globals, &_Py_ID(__lltrace__)); @@ -766,23 +767,6 @@ _PyObjectArray_Free(PyObject **array, PyObject **scratch) #define PY_EVAL_C_STACK_UNITS 2 -/* _PyEval_EvalFrameDefault is too large to optimize for speed with PGO on MSVC - when the JIT is enabled or GIL is disabled. Disable that optimization around - this function only. If this is fixed upstream, we should gate this on the - version of MSVC. 
- */ -#if (defined(_MSC_VER) && \ - defined(_Py_USING_PGO) && \ - (defined(_Py_JIT) || \ - defined(Py_GIL_DISABLED))) -#define DO_NOT_OPTIMIZE_INTERP_LOOP -#endif - -#ifdef DO_NOT_OPTIMIZE_INTERP_LOOP -# pragma optimize("t", off) -/* This setting is reversed below following _PyEval_EvalFrameDefault */ -#endif - PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag) { @@ -799,9 +783,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #endif uint8_t opcode; /* Current opcode */ int oparg; /* Current opcode argument, if any */ -#ifdef LLTRACE - int lltrace = 0; -#endif _PyInterpreterFrame entry_frame; @@ -818,9 +799,12 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int entry_frame.f_executable = PyStackRef_None; entry_frame.instr_ptr = (_Py_CODEUNIT *)_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS + 1; entry_frame.stackpointer = entry_frame.localsplus; - entry_frame.owner = FRAME_OWNED_BY_CSTACK; + entry_frame.owner = FRAME_OWNED_BY_INTERPRETER; entry_frame.visited = 0; entry_frame.return_offset = 0; +#ifdef LLTRACE + entry_frame.lltrace = 0; +#endif /* Push frame */ entry_frame.previous = tstate->current_frame; frame->previous = &entry_frame; @@ -846,7 +830,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _Py_CODEUNIT *bytecode = _PyEval_GetExecutableCode(tstate, _PyFrame_GetCode(frame)); if (bytecode == NULL) { - goto error; + goto exit_unwind; } ptrdiff_t off = frame->instr_ptr - _PyFrame_GetBytecode(frame); frame->tlbc_index = ((_PyThreadStateImpl *)tstate)->tlbc_index; @@ -880,9 +864,12 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int stack_pointer = _PyFrame_GetStackPointer(frame); #ifdef LLTRACE - lltrace = maybe_lltrace_resume_frame(frame, &entry_frame, GLOBALS()); - if (lltrace < 0) { - goto exit_unwind; + { + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); + frame->lltrace = lltrace; + if (lltrace < 0) { + goto exit_unwind; + } } #endif @@ -895,143 +882,9 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int DISPATCH(); - { - /* Start instructions */ -#if !USE_COMPUTED_GOTOS - dispatch_opcode: - switch (opcode) -#endif - { - #include "generated_cases.c.h" -#if USE_COMPUTED_GOTOS - _unknown_opcode: -#else - EXTRA_CASES // From pycore_opcode_metadata.h, a 'case' for each unused opcode -#endif - /* Tell C compilers not to hold the opcode variable in the loop. - next_instr points the current instruction without TARGET(). */ - opcode = next_instr->op.code; - _PyErr_Format(tstate, PyExc_SystemError, - "%U:%d: unknown opcode %d", - _PyFrame_GetCode(frame)->co_filename, - PyUnstable_InterpreterFrame_GetLine(frame), - opcode); - goto error; - - } /* End instructions */ - - /* This should never be reached. Every opcode should end with DISPATCH() - or goto error. */ - Py_UNREACHABLE(); - -pop_4_error: - STACK_SHRINK(1); -pop_3_error: - STACK_SHRINK(1); -pop_2_error: - STACK_SHRINK(1); -pop_1_error: - STACK_SHRINK(1); -error: - /* Double-check exception status. */ -#ifdef NDEBUG - if (!_PyErr_Occurred(tstate)) { - _PyErr_SetString(tstate, PyExc_SystemError, - "error return without exception set"); - } -#else - assert(_PyErr_Occurred(tstate)); -#endif - - /* Log traceback info. 
*/ - assert(frame != &entry_frame); - if (!_PyFrame_IsIncomplete(frame)) { - PyFrameObject *f = _PyFrame_GetFrameObject(frame); - if (f != NULL) { - PyTraceBack_Here(f); - } - } - _PyEval_MonitorRaise(tstate, frame, next_instr-1); -exception_unwind: - { - /* We can't use frame->instr_ptr here, as RERAISE may have set it */ - int offset = INSTR_OFFSET()-1; - int level, handler, lasti; - if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { - // No handlers, so exit. - assert(_PyErr_Occurred(tstate)); - - /* Pop remaining stack entries. */ - _PyStackRef *stackbase = _PyFrame_Stackbase(frame); - while (stack_pointer > stackbase) { - PyStackRef_XCLOSE(POP()); - } - assert(STACK_LEVEL() == 0); - _PyFrame_SetStackPointer(frame, stack_pointer); - monitor_unwind(tstate, frame, next_instr-1); - goto exit_unwind; - } - - assert(STACK_LEVEL() >= level); - _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; - while (stack_pointer > new_top) { - PyStackRef_XCLOSE(POP()); - } - if (lasti) { - int frame_lasti = _PyInterpreterFrame_LASTI(frame); - PyObject *lasti = PyLong_FromLong(frame_lasti); - if (lasti == NULL) { - goto exception_unwind; - } - PUSH(PyStackRef_FromPyObjectSteal(lasti)); - } - - /* Make the raw exception data - available to the handler, - so a program can emulate the - Python main loop. */ - PyObject *exc = _PyErr_GetRaisedException(tstate); - PUSH(PyStackRef_FromPyObjectSteal(exc)); - next_instr = _PyFrame_GetBytecode(frame) + handler; - - if (monitor_handled(tstate, frame, next_instr, exc) < 0) { - goto exception_unwind; - } - /* Resume normal execution */ -#ifdef LLTRACE - if (lltrace >= 5) { - lltrace_resume_frame(frame); - } -#endif - DISPATCH(); - } - } - -exit_unwind: - assert(_PyErr_Occurred(tstate)); - _Py_LeaveRecursiveCallPy(tstate); - assert(frame != &entry_frame); - // GH-99729: We need to unlink the frame *before* clearing it: - _PyInterpreterFrame *dying = frame; - frame = tstate->current_frame = dying->previous; - _PyEval_FrameClearAndPop(tstate, dying); - frame->return_offset = 0; - if (frame == &entry_frame) { - /* Restore previous frame and exit */ - tstate->current_frame = frame->previous; - tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; - return NULL; - } - -resume_with_error: - next_instr = frame->instr_ptr; - stack_pointer = _PyFrame_GetStackPointer(frame); - goto error; - - #ifdef _Py_TIER2 // Tier 2 is also here! @@ -1060,13 +913,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int #undef ENABLE_SPECIALIZATION_FT #define ENABLE_SPECIALIZATION_FT 0 -#ifdef Py_DEBUG - #define DPRINTF(level, ...) \ - if (lltrace >= (level)) { printf(__VA_ARGS__); } -#else - #define DPRINTF(level, ...) 
-#endif - ; // dummy statement after a label, before a declaration uint16_t uopcode; #ifdef Py_STATS @@ -1079,7 +925,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int for (;;) { uopcode = next_uop->opcode; #ifdef Py_DEBUG - if (lltrace >= 3) { + if (frame->lltrace >= 3) { dump_stack(frame, stack_pointer); if (next_uop->opcode == _START_EXECUTOR) { printf("%4d uop: ", 0); @@ -1121,7 +967,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int jump_to_error_target: #ifdef Py_DEBUG - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("Error: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(" @ %d -> %s]\n", @@ -1157,7 +1003,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int next_instr = next_uop[-1].target + _PyFrame_GetBytecode(frame); goto_to_tier1: #ifdef Py_DEBUG - if (lltrace >= 2) { + if (frame->lltrace >= 2) { printf("DEOPT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(" -> %s]\n", @@ -1175,10 +1021,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int } -#ifdef DO_NOT_OPTIMIZE_INTERP_LOOP -# pragma optimize("", on) -#endif - #if defined(__GNUC__) # pragma GCC diagnostic pop #elif defined(_MSC_VER) /* MS_WINDOWS */ @@ -1524,7 +1366,12 @@ initialize_locals(PyThreadState *tstate, PyFunctionObject *func, u = (PyObject *)&_Py_SINGLETON(tuple_empty); } else { - u = _PyTuple_FromStackRefSteal(args + n, argcount - n); + u = _PyTuple_FromStackRefStealOnSuccess(args + n, argcount - n); + if (u == NULL) { + for (Py_ssize_t i = n; i < argcount; i++) { + PyStackRef_CLOSE(args[i]); + } + } } if (u == NULL) { goto fail_post_positional; @@ -2094,8 +1941,8 @@ do_raise(PyThreadState *tstate, PyObject *exc, PyObject *cause) */ int -_PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, - PyObject **match, PyObject **rest) +_PyEval_ExceptionGroupMatch(_PyInterpreterFrame *frame, PyObject* exc_value, + PyObject *match_type, PyObject **match, PyObject **rest) { if (Py_IsNone(exc_value)) { *match = Py_NewRef(Py_None); @@ -2121,6 +1968,15 @@ _PyEval_ExceptionGroupMatch(PyObject* exc_value, PyObject *match_type, if (wrapped == NULL) { return -1; } + PyFrameObject *f = _PyFrame_GetFrameObject(frame); + if (f != NULL) { + PyObject *tb = _PyTraceBack_FromFrame(NULL, f); + if (tb == NULL) { + return -1; + } + PyException_SetTraceback(wrapped, tb); + Py_DECREF(tb); + } *match = wrapped; } *rest = Py_NewRef(Py_None); diff --git a/Python/ceval_gil.c b/Python/ceval_gil.c index 1f811e72406130..416eec01052224 100644 --- a/Python/ceval_gil.c +++ b/Python/ceval_gil.c @@ -995,7 +995,7 @@ _Py_unset_eval_breaker_bit_all(PyInterpreterState *interp, uintptr_t bit) void _Py_FinishPendingCalls(PyThreadState *tstate) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); assert(_PyThreadState_CheckConsistency(tstate)); struct _pending_calls *pending = &tstate->interp->ceval.pending; @@ -1056,7 +1056,7 @@ _PyEval_MakePendingCalls(PyThreadState *tstate) int Py_MakePendingCalls(void) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); PyThreadState *tstate = _PyThreadState_GET(); assert(_PyThreadState_CheckConsistency(tstate)); diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index c37e1cf3afa60e..62c80c96e422fd 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -80,7 +80,7 @@ /* PRE_DISPATCH_GOTO() does lltrace if enabled. 
Normally a no-op */ #ifdef LLTRACE -#define PRE_DISPATCH_GOTO() if (lltrace >= 5) { \ +#define PRE_DISPATCH_GOTO() if (frame->lltrace >= 5) { \ lltrace_instruction(frame, stack_pointer, next_instr, opcode, oparg); } #else #define PRE_DISPATCH_GOTO() ((void)0) @@ -89,7 +89,8 @@ #if LLTRACE #define LLTRACE_RESUME_FRAME() \ do { \ - lltrace = maybe_lltrace_resume_frame(frame, &entry_frame, GLOBALS()); \ + int lltrace = maybe_lltrace_resume_frame(frame, GLOBALS()); \ + frame->lltrace = lltrace; \ if (lltrace < 0) { \ goto exit_unwind; \ } \ @@ -165,35 +166,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { #define JUMPBY(x) (next_instr += (x)) #define SKIP_OVER(x) (next_instr += (x)) -/* OpCode prediction macros - Some opcodes tend to come in pairs thus making it possible to - predict the second code when the first is run. For example, - COMPARE_OP is often followed by POP_JUMP_IF_FALSE or POP_JUMP_IF_TRUE. - - Verifying the prediction costs a single high-speed test of a register - variable against a constant. If the pairing was good, then the - processor's own internal branch predication has a high likelihood of - success, resulting in a nearly zero-overhead transition to the - next opcode. A successful prediction saves a trip through the eval-loop - including its unpredictable switch-case branch. Combined with the - processor's internal branch prediction, a successful PREDICT has the - effect of making the two opcodes run as if they were a single new opcode - with the bodies combined. - - If collecting opcode statistics, your choices are to either keep the - predictions turned-on and interpret the results as if some opcodes - had been combined or turn-off predictions so that the opcode frequency - counter updates for both opcodes. - - Opcode prediction is disabled with threaded code, since the latter allows - the CPU to record separate branch prediction information for each - opcode. - -*/ - -#define PREDICT_ID(op) PRED_##op -#define PREDICTED(op) PREDICT_ID(op): - /* Stack manipulation macros */ @@ -238,7 +210,7 @@ GETITEM(PyObject *v, Py_ssize_t i) { #endif #define WITHIN_STACK_BOUNDS() \ - (frame == &entry_frame || (STACK_LEVEL() >= 0 && STACK_LEVEL() <= STACK_SIZE())) + (frame->owner == FRAME_OWNED_BY_INTERPRETER || (STACK_LEVEL() >= 0 && STACK_LEVEL() <= STACK_SIZE())) /* Data access macros */ #define FRAME_CO_CONSTS (_PyFrame_GetCode(frame)->co_consts) @@ -259,8 +231,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { GETLOCAL(i) = value; \ PyStackRef_XCLOSE(tmp); } while (0) -#define GO_TO_INSTRUCTION(op) goto PREDICT_ID(op) - #ifdef Py_STATS #define UPDATE_MISS_STATS(INSTNAME) \ do { \ @@ -280,7 +250,7 @@ GETITEM(PyObject *v, Py_ssize_t i) { /* This is only a single jump on release builds! 
*/ \ UPDATE_MISS_STATS((INSTNAME)); \ assert(_PyOpcode_Deopt[opcode] == (INSTNAME)); \ - GO_TO_INSTRUCTION(INSTNAME); \ + goto PREDICTED_##INSTNAME; \ } diff --git a/Python/clinic/context.c.h b/Python/clinic/context.c.h index 997ac6f63384a9..71f05aa02a51e7 100644 --- a/Python/clinic/context.c.h +++ b/Python/clinic/context.c.h @@ -21,7 +21,7 @@ _contextvars_Context_get_impl(PyContext *self, PyObject *key, PyObject *default_value); static PyObject * -_contextvars_Context_get(PyContext *self, PyObject *const *args, Py_ssize_t nargs) +_contextvars_Context_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -36,7 +36,7 @@ _contextvars_Context_get(PyContext *self, PyObject *const *args, Py_ssize_t narg } default_value = args[1]; skip_optional: - return_value = _contextvars_Context_get_impl(self, key, default_value); + return_value = _contextvars_Context_get_impl((PyContext *)self, key, default_value); exit: return return_value; @@ -57,9 +57,9 @@ static PyObject * _contextvars_Context_items_impl(PyContext *self); static PyObject * -_contextvars_Context_items(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_items(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_items_impl(self); + return _contextvars_Context_items_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_Context_keys__doc__, @@ -75,9 +75,9 @@ static PyObject * _contextvars_Context_keys_impl(PyContext *self); static PyObject * -_contextvars_Context_keys(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_keys(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_keys_impl(self); + return _contextvars_Context_keys_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_Context_values__doc__, @@ -93,9 +93,9 @@ static PyObject * _contextvars_Context_values_impl(PyContext *self); static PyObject * -_contextvars_Context_values(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_values(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_values_impl(self); + return _contextvars_Context_values_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_Context_copy__doc__, @@ -111,9 +111,9 @@ static PyObject * _contextvars_Context_copy_impl(PyContext *self); static PyObject * -_contextvars_Context_copy(PyContext *self, PyObject *Py_UNUSED(ignored)) +_contextvars_Context_copy(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _contextvars_Context_copy_impl(self); + return _contextvars_Context_copy_impl((PyContext *)self); } PyDoc_STRVAR(_contextvars_ContextVar_get__doc__, @@ -135,7 +135,7 @@ static PyObject * _contextvars_ContextVar_get_impl(PyContextVar *self, PyObject *default_value); static PyObject * -_contextvars_ContextVar_get(PyContextVar *self, PyObject *const *args, Py_ssize_t nargs) +_contextvars_ContextVar_get(PyObject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *default_value = NULL; @@ -148,7 +148,7 @@ _contextvars_ContextVar_get(PyContextVar *self, PyObject *const *args, Py_ssize_ } default_value = args[0]; skip_optional: - return_value = _contextvars_ContextVar_get_impl(self, default_value); + return_value = _contextvars_ContextVar_get_impl((PyContextVar *)self, default_value); exit: return return_value; @@ -179,4 +179,4 @@ PyDoc_STRVAR(_contextvars_ContextVar_reset__doc__, #define _CONTEXTVARS_CONTEXTVAR_RESET_METHODDEF \ {"reset", (PyCFunction)_contextvars_ContextVar_reset, METH_O, 
_contextvars_ContextVar_reset__doc__}, -/*[clinic end generated code: output=b667826178444c3f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=444567eaf0df25e0 input=a9049054013a1b77]*/ diff --git a/Python/clinic/instruction_sequence.c.h b/Python/clinic/instruction_sequence.c.h index 45693e5856f8a7..41ab2de44e426e 100644 --- a/Python/clinic/instruction_sequence.c.h +++ b/Python/clinic/instruction_sequence.c.h @@ -51,7 +51,7 @@ InstructionSequenceType_use_label_impl(_PyInstructionSequence *self, int label); static PyObject * -InstructionSequenceType_use_label(_PyInstructionSequence *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +InstructionSequenceType_use_label(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -91,7 +91,7 @@ InstructionSequenceType_use_label(_PyInstructionSequence *self, PyObject *const if (label == -1 && PyErr_Occurred()) { goto exit; } - return_value = InstructionSequenceType_use_label_impl(self, label); + return_value = InstructionSequenceType_use_label_impl((_PyInstructionSequence *)self, label); exit: return return_value; @@ -113,7 +113,7 @@ InstructionSequenceType_addop_impl(_PyInstructionSequence *self, int opcode, int end_lineno, int end_col_offset); static PyObject * -InstructionSequenceType_addop(_PyInstructionSequence *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +InstructionSequenceType_addop(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -178,7 +178,7 @@ InstructionSequenceType_addop(_PyInstructionSequence *self, PyObject *const *arg if (end_col_offset == -1 && PyErr_Occurred()) { goto exit; } - return_value = InstructionSequenceType_addop_impl(self, opcode, oparg, lineno, col_offset, end_lineno, end_col_offset); + return_value = InstructionSequenceType_addop_impl((_PyInstructionSequence *)self, opcode, oparg, lineno, col_offset, end_lineno, end_col_offset); exit: return return_value; @@ -197,12 +197,12 @@ static int InstructionSequenceType_new_label_impl(_PyInstructionSequence *self); static PyObject * -InstructionSequenceType_new_label(_PyInstructionSequence *self, PyObject *Py_UNUSED(ignored)) +InstructionSequenceType_new_label(PyObject *self, PyObject *Py_UNUSED(ignored)) { PyObject *return_value = NULL; int _return_value; - _return_value = InstructionSequenceType_new_label_impl(self); + _return_value = InstructionSequenceType_new_label_impl((_PyInstructionSequence *)self); if ((_return_value == -1) && PyErr_Occurred()) { goto exit; } @@ -226,7 +226,7 @@ InstructionSequenceType_add_nested_impl(_PyInstructionSequence *self, PyObject *nested); static PyObject * -InstructionSequenceType_add_nested(_PyInstructionSequence *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) +InstructionSequenceType_add_nested(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) @@ -263,7 +263,7 @@ InstructionSequenceType_add_nested(_PyInstructionSequence *self, PyObject *const goto exit; } nested = args[0]; - return_value = InstructionSequenceType_add_nested_impl(self, nested); + return_value = InstructionSequenceType_add_nested_impl((_PyInstructionSequence *)self, nested); exit: return return_value; @@ -282,9 +282,9 @@ static PyObject * 
InstructionSequenceType_get_nested_impl(_PyInstructionSequence *self); static PyObject * -InstructionSequenceType_get_nested(_PyInstructionSequence *self, PyObject *Py_UNUSED(ignored)) +InstructionSequenceType_get_nested(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return InstructionSequenceType_get_nested_impl(self); + return InstructionSequenceType_get_nested_impl((_PyInstructionSequence *)self); } PyDoc_STRVAR(InstructionSequenceType_get_instructions__doc__, @@ -300,8 +300,8 @@ static PyObject * InstructionSequenceType_get_instructions_impl(_PyInstructionSequence *self); static PyObject * -InstructionSequenceType_get_instructions(_PyInstructionSequence *self, PyObject *Py_UNUSED(ignored)) +InstructionSequenceType_get_instructions(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return InstructionSequenceType_get_instructions_impl(self); + return InstructionSequenceType_get_instructions_impl((_PyInstructionSequence *)self); } -/*[clinic end generated code: output=35163e5b589b4446 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e6b5d05bde008cc2 input=a9049054013a1b77]*/ diff --git a/Python/codecs.c b/Python/codecs.c index 2cb3875db35058..53680a79082634 100644 --- a/Python/codecs.c +++ b/Python/codecs.c @@ -659,91 +659,109 @@ PyObject *PyCodec_LookupError(const char *name) return handler; } -static void wrong_exception_type(PyObject *exc) + +static inline void +wrong_exception_type(PyObject *exc) { PyErr_Format(PyExc_TypeError, - "don't know how to handle %.200s in error callback", - Py_TYPE(exc)->tp_name); + "don't know how to handle %T in error callback", exc); } + +#define _PyIsUnicodeEncodeError(EXC) \ + PyObject_TypeCheck(EXC, (PyTypeObject *)PyExc_UnicodeEncodeError) +#define _PyIsUnicodeDecodeError(EXC) \ + PyObject_TypeCheck(EXC, (PyTypeObject *)PyExc_UnicodeDecodeError) +#define _PyIsUnicodeTranslateError(EXC) \ + PyObject_TypeCheck(EXC, (PyTypeObject *)PyExc_UnicodeTranslateError) + + +// --- handler: 'strict' ------------------------------------------------------ + PyObject *PyCodec_StrictErrors(PyObject *exc) { - if (PyExceptionInstance_Check(exc)) + if (PyExceptionInstance_Check(exc)) { PyErr_SetObject(PyExceptionInstance_Class(exc), exc); - else + } + else { PyErr_SetString(PyExc_TypeError, "codec must pass exception instance"); + } return NULL; } -PyObject *PyCodec_IgnoreErrors(PyObject *exc) +// --- handler: 'ignore' ------------------------------------------------------ + +static PyObject * +_PyCodec_IgnoreError(PyObject *exc, int as_bytes) { Py_ssize_t end; - - if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; + if (_PyUnicodeError_GetParams(exc, NULL, NULL, NULL, + &end, NULL, as_bytes) < 0) + { + return NULL; } - else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeDecodeError)) { - if (PyUnicodeDecodeError_GetEnd(exc, &end)) - return NULL; + return Py_BuildValue("(Nn)", Py_GetConstant(Py_CONSTANT_EMPTY_STR), end); +} + + +PyObject *PyCodec_IgnoreErrors(PyObject *exc) +{ + if (_PyIsUnicodeEncodeError(exc) || _PyIsUnicodeTranslateError(exc)) { + return _PyCodec_IgnoreError(exc, false); } - else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError)) { - if (PyUnicodeTranslateError_GetEnd(exc, &end)) - return NULL; + else if (_PyIsUnicodeDecodeError(exc)) { + return _PyCodec_IgnoreError(exc, true); } else { wrong_exception_type(exc); return NULL; } - return Py_BuildValue("(Nn)", Py_GetConstant(Py_CONSTANT_EMPTY_STR), end); } PyObject 
*PyCodec_ReplaceErrors(PyObject *exc) { - Py_ssize_t start, end, i, len; + Py_ssize_t start, end, slen; if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - PyObject *res; - Py_UCS1 *outp; - if (PyUnicodeEncodeError_GetStart(exc, &start)) + if (_PyUnicodeError_GetParams(exc, NULL, NULL, + &start, &end, &slen, false) < 0) { return NULL; - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; - len = end - start; - res = PyUnicode_New(len, '?'); - if (res == NULL) + } + PyObject *res = PyUnicode_New(slen, '?'); + if (res == NULL) { return NULL; + } assert(PyUnicode_KIND(res) == PyUnicode_1BYTE_KIND); - outp = PyUnicode_1BYTE_DATA(res); - for (i = 0; i < len; ++i) - outp[i] = '?'; + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + memset(outp, '?', sizeof(Py_UCS1) * slen); assert(_PyUnicode_CheckConsistency(res, 1)); return Py_BuildValue("(Nn)", res, end); } else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeDecodeError)) { - if (PyUnicodeDecodeError_GetEnd(exc, &end)) + if (_PyUnicodeError_GetParams(exc, NULL, NULL, + NULL, &end, NULL, true) < 0) { return NULL; + } return Py_BuildValue("(Cn)", (int)Py_UNICODE_REPLACEMENT_CHARACTER, end); } else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError)) { - PyObject *res; - Py_UCS2 *outp; - if (PyUnicodeTranslateError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeTranslateError_GetEnd(exc, &end)) + if (_PyUnicodeError_GetParams(exc, NULL, NULL, + &start, &end, &slen, false) < 0) { return NULL; - len = end - start; - res = PyUnicode_New(len, Py_UNICODE_REPLACEMENT_CHARACTER); - if (res == NULL) + } + PyObject *res = PyUnicode_New(slen, Py_UNICODE_REPLACEMENT_CHARACTER); + if (res == NULL) { return NULL; - assert(PyUnicode_KIND(res) == PyUnicode_2BYTE_KIND); - outp = PyUnicode_2BYTE_DATA(res); - for (i = 0; i < len; i++) + } + assert(slen == 0 || PyUnicode_KIND(res) == PyUnicode_2BYTE_KIND); + Py_UCS2 *outp = PyUnicode_2BYTE_DATA(res); + for (Py_ssize_t i = 0; i < slen; ++i) { outp[i] = Py_UNICODE_REPLACEMENT_CHARACTER; + } assert(_PyUnicode_CheckConsistency(res, 1)); return Py_BuildValue("(Nn)", res, end); } @@ -755,206 +773,218 @@ PyObject *PyCodec_ReplaceErrors(PyObject *exc) PyObject *PyCodec_XMLCharRefReplaceErrors(PyObject *exc) { - if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - PyObject *restuple; - PyObject *object; - Py_ssize_t i; - Py_ssize_t start; - Py_ssize_t end; - PyObject *res; - Py_UCS1 *outp; - Py_ssize_t ressize; - Py_UCS4 ch; - if (PyUnicodeEncodeError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeEncodeError_GetObject(exc))) - return NULL; - if (end - start > PY_SSIZE_T_MAX / (2+7+1)) - end = start + PY_SSIZE_T_MAX / (2+7+1); - for (i = start, ressize = 0; i < end; ++i) { - /* object is guaranteed to be "ready" */ - ch = PyUnicode_READ_CHAR(object, i); - if (ch<10) - ressize += 2+1+1; - else if (ch<100) - ressize += 2+2+1; - else if (ch<1000) - ressize += 2+3+1; - else if (ch<10000) - ressize += 2+4+1; - else if (ch<100000) - ressize += 2+5+1; - else if (ch<1000000) - ressize += 2+6+1; - else - ressize += 2+7+1; + if (!PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { + wrong_exception_type(exc); + return NULL; + } + + PyObject *obj; + Py_ssize_t objlen, start, end, slen; + if (_PyUnicodeError_GetParams(exc, + &obj, &objlen, + &start, &end, &slen, false) < 0) + { + return NULL; + } + + // The number of characters that each character 'ch' 
contributes + // in the result is 2 + k + 1, where k = min{t >= 1 | 10^t > ch} + // and will be formatted as "&#" + DIGITS + ";". Since the Unicode + // range is below 10^7, each "block" requires at most 2 + 7 + 1 + // characters. + if (slen > PY_SSIZE_T_MAX / (2 + 7 + 1)) { + end = start + PY_SSIZE_T_MAX / (2 + 7 + 1); + end = Py_MIN(end, objlen); + slen = Py_MAX(0, end - start); + } + + Py_ssize_t ressize = 0; + for (Py_ssize_t i = start; i < end; ++i) { + /* object is guaranteed to be "ready" */ + Py_UCS4 ch = PyUnicode_READ_CHAR(obj, i); + if (ch < 10) { + ressize += 2 + 1 + 1; } - /* allocate replacement */ - res = PyUnicode_New(ressize, 127); - if (res == NULL) { - Py_DECREF(object); - return NULL; + else if (ch < 100) { + ressize += 2 + 2 + 1; } - outp = PyUnicode_1BYTE_DATA(res); - /* generate replacement */ - for (i = start; i < end; ++i) { - int digits; - int base; - ch = PyUnicode_READ_CHAR(object, i); - *outp++ = '&'; - *outp++ = '#'; - if (ch<10) { - digits = 1; - base = 1; - } - else if (ch<100) { - digits = 2; - base = 10; - } - else if (ch<1000) { - digits = 3; - base = 100; - } - else if (ch<10000) { - digits = 4; - base = 1000; - } - else if (ch<100000) { - digits = 5; - base = 10000; - } - else if (ch<1000000) { - digits = 6; - base = 100000; - } - else { - digits = 7; - base = 1000000; - } - while (digits-->0) { - *outp++ = '0' + ch/base; - ch %= base; - base /= 10; - } - *outp++ = ';'; + else if (ch < 1000) { + ressize += 2 + 3 + 1; + } + else if (ch < 10000) { + ressize += 2 + 4 + 1; + } + else if (ch < 100000) { + ressize += 2 + 5 + 1; + } + else if (ch < 1000000) { + ressize += 2 + 6 + 1; + } + else { + assert(ch < 10000000); + ressize += 2 + 7 + 1; } - assert(_PyUnicode_CheckConsistency(res, 1)); - restuple = Py_BuildValue("(Nn)", res, end); - Py_DECREF(object); - return restuple; } - else { - wrong_exception_type(exc); + + /* allocate replacement */ + PyObject *res = PyUnicode_New(ressize, 127); + if (res == NULL) { + Py_DECREF(obj); return NULL; } + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + /* generate replacement */ + for (Py_ssize_t i = start; i < end; ++i) { + int digits, base; + Py_UCS4 ch = PyUnicode_READ_CHAR(obj, i); + if (ch < 10) { + digits = 1; + base = 1; + } + else if (ch < 100) { + digits = 2; + base = 10; + } + else if (ch < 1000) { + digits = 3; + base = 100; + } + else if (ch < 10000) { + digits = 4; + base = 1000; + } + else if (ch < 100000) { + digits = 5; + base = 10000; + } + else if (ch < 1000000) { + digits = 6; + base = 100000; + } + else { + assert(ch < 10000000); + digits = 7; + base = 1000000; + } + *outp++ = '&'; + *outp++ = '#'; + while (digits-- > 0) { + assert(base >= 1); + *outp++ = '0' + ch / base; + ch %= base; + base /= 10; + } + *outp++ = ';'; + } + assert(_PyUnicode_CheckConsistency(res, 1)); + PyObject *restuple = Py_BuildValue("(Nn)", res, end); + Py_DECREF(obj); + return restuple; } PyObject *PyCodec_BackslashReplaceErrors(PyObject *exc) { - PyObject *object; - Py_ssize_t i; - Py_ssize_t start; - Py_ssize_t end; - PyObject *res; - Py_UCS1 *outp; - int ressize; - Py_UCS4 c; - + PyObject *obj; + Py_ssize_t objlen, start, end, slen; if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeDecodeError)) { - const unsigned char *p; - if (PyUnicodeDecodeError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeDecodeError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeDecodeError_GetObject(exc))) + if (_PyUnicodeError_GetParams(exc, + &obj, &objlen, + &start, &end, &slen, true) < 0) + { return NULL; - p = (const 
unsigned char*)PyBytes_AS_STRING(object); - res = PyUnicode_New(4 * (end - start), 127); + } + PyObject *res = PyUnicode_New(4 * slen, 127); if (res == NULL) { - Py_DECREF(object); + Py_DECREF(obj); return NULL; } - outp = PyUnicode_1BYTE_DATA(res); - for (i = start; i < end; i++, outp += 4) { - unsigned char c = p[i]; + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + const unsigned char *p = (const unsigned char *)PyBytes_AS_STRING(obj); + for (Py_ssize_t i = start; i < end; i++, outp += 4) { + const unsigned char ch = p[i]; outp[0] = '\\'; outp[1] = 'x'; - outp[2] = Py_hexdigits[(c>>4)&0xf]; - outp[3] = Py_hexdigits[c&0xf]; + outp[2] = Py_hexdigits[(ch >> 4) & 0xf]; + outp[3] = Py_hexdigits[ch & 0xf]; } - assert(_PyUnicode_CheckConsistency(res, 1)); - Py_DECREF(object); + Py_DECREF(obj); return Py_BuildValue("(Nn)", res, end); } - if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError)) { - if (PyUnicodeEncodeError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeEncodeError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeEncodeError_GetObject(exc))) - return NULL; - } - else if (PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError)) { - if (PyUnicodeTranslateError_GetStart(exc, &start)) - return NULL; - if (PyUnicodeTranslateError_GetEnd(exc, &end)) - return NULL; - if (!(object = PyUnicodeTranslateError_GetObject(exc))) + + if ( + PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeEncodeError) + || PyObject_TypeCheck(exc, (PyTypeObject *)PyExc_UnicodeTranslateError) + ) { + if (_PyUnicodeError_GetParams(exc, + &obj, &objlen, + &start, &end, &slen, false) < 0) + { return NULL; + } } else { wrong_exception_type(exc); return NULL; } - if (end - start > PY_SSIZE_T_MAX / (1+1+8)) - end = start + PY_SSIZE_T_MAX / (1+1+8); - for (i = start, ressize = 0; i < end; ++i) { + // The number of characters that each character 'ch' contributes + // in the result is 1 + 1 + k, where k >= min{t >= 1 | 16^t > ch} + // and will be formatted as "\\" + ('U'|'u'|'x') + HEXDIGITS, + // where the number of hexdigits is either 2, 4, or 8 (not 6). + // Since the Unicode range is below 10^7, we choose k = 8 whence + // each "block" requires at most 1 + 1 + 8 characters. 
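/*
 * Illustrative sketch only -- not part of this patch, and escape_cost() is a
 * hypothetical helper, not a CPython API. It just restates the per-character
 * accounting described in the comment above: each replaced code point costs
 * 1 (backslash) + 1 (kind letter) + 2, 4, or 8 hex digits, i.e. at most
 * 1 + 1 + 8 characters, which is why the clamp right below divides
 * PY_SSIZE_T_MAX by (1 + 1 + 8).
 */
#include <Python.h>   /* Py_UCS4, Py_ssize_t */

static Py_ssize_t
escape_cost(Py_UCS4 ch)
{
    if (ch >= 0x10000) {
        return 1 + 1 + 8;   /* "\Uxxxxxxxx" */
    }
    if (ch >= 0x100) {
        return 1 + 1 + 4;   /* "\uxxxx" */
    }
    return 1 + 1 + 2;       /* "\xhh" */
}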
+ if (slen > PY_SSIZE_T_MAX / (1 + 1 + 8)) { + end = start + PY_SSIZE_T_MAX / (1 + 1 + 8); + end = Py_MIN(end, objlen); + slen = Py_MAX(0, end - start); + } + + Py_ssize_t ressize = 0; + for (Py_ssize_t i = start; i < end; ++i) { /* object is guaranteed to be "ready" */ - c = PyUnicode_READ_CHAR(object, i); + Py_UCS4 c = PyUnicode_READ_CHAR(obj, i); if (c >= 0x10000) { - ressize += 1+1+8; + ressize += 1 + 1 + 8; } else if (c >= 0x100) { - ressize += 1+1+4; + ressize += 1 + 1 + 4; + } + else { + ressize += 1 + 1 + 2; } - else - ressize += 1+1+2; } - res = PyUnicode_New(ressize, 127); + PyObject *res = PyUnicode_New(ressize, 127); if (res == NULL) { - Py_DECREF(object); + Py_DECREF(obj); return NULL; } - outp = PyUnicode_1BYTE_DATA(res); - for (i = start; i < end; ++i) { - c = PyUnicode_READ_CHAR(object, i); + Py_UCS1 *outp = PyUnicode_1BYTE_DATA(res); + for (Py_ssize_t i = start; i < end; ++i) { + Py_UCS4 c = PyUnicode_READ_CHAR(obj, i); *outp++ = '\\'; if (c >= 0x00010000) { *outp++ = 'U'; - *outp++ = Py_hexdigits[(c>>28)&0xf]; - *outp++ = Py_hexdigits[(c>>24)&0xf]; - *outp++ = Py_hexdigits[(c>>20)&0xf]; - *outp++ = Py_hexdigits[(c>>16)&0xf]; - *outp++ = Py_hexdigits[(c>>12)&0xf]; - *outp++ = Py_hexdigits[(c>>8)&0xf]; + *outp++ = Py_hexdigits[(c >> 28) & 0xf]; + *outp++ = Py_hexdigits[(c >> 24) & 0xf]; + *outp++ = Py_hexdigits[(c >> 20) & 0xf]; + *outp++ = Py_hexdigits[(c >> 16) & 0xf]; + *outp++ = Py_hexdigits[(c >> 12) & 0xf]; + *outp++ = Py_hexdigits[(c >> 8) & 0xf]; } else if (c >= 0x100) { *outp++ = 'u'; - *outp++ = Py_hexdigits[(c>>12)&0xf]; - *outp++ = Py_hexdigits[(c>>8)&0xf]; + *outp++ = Py_hexdigits[(c >> 12) & 0xf]; + *outp++ = Py_hexdigits[(c >> 8) & 0xf]; } - else + else { *outp++ = 'x'; - *outp++ = Py_hexdigits[(c>>4)&0xf]; - *outp++ = Py_hexdigits[c&0xf]; + } + *outp++ = Py_hexdigits[(c >> 4) & 0xf]; + *outp++ = Py_hexdigits[c & 0xf]; } - assert(_PyUnicode_CheckConsistency(res, 1)); - Py_DECREF(object); + Py_DECREF(obj); return Py_BuildValue("(Nn)", res, end); } @@ -1358,13 +1388,17 @@ PyCodec_SurrogateEscapeErrors(PyObject *exc) } -static PyObject *strict_errors(PyObject *self, PyObject *exc) +// --- Codecs registry handlers ----------------------------------------------- + +static inline PyObject * +strict_errors(PyObject *Py_UNUSED(self), PyObject *exc) { return PyCodec_StrictErrors(exc); } -static PyObject *ignore_errors(PyObject *self, PyObject *exc) +static inline PyObject * +ignore_errors(PyObject *Py_UNUSED(self), PyObject *exc) { return PyCodec_IgnoreErrors(exc); } diff --git a/Python/context.c b/Python/context.c index f30b59b9443bbf..bb1aa42b9c5e4f 100644 --- a/Python/context.c +++ b/Python/context.c @@ -860,7 +860,7 @@ contextvar_generate_hash(void *addr, PyObject *name) return -1; } - Py_hash_t res = _Py_HashPointer(addr) ^ name_hash; + Py_hash_t res = Py_HashPointer(addr) ^ name_hash; return res == -1 ? -2 : res; } diff --git a/Python/emscripten_trampoline.c b/Python/emscripten_trampoline.c index 2f9648a12ef2e4..0293c7ea0f37ad 100644 --- a/Python/emscripten_trampoline.c +++ b/Python/emscripten_trampoline.c @@ -26,11 +26,9 @@ EM_JS(CountArgsFunc, _PyEM_GetCountArgsPtr, (), { // Binary module for the checks. It has to be done in web assembly because // clang/llvm have no support yet for the reference types yet. In fact, the wasm // binary toolkit doesn't yet support the ref.test instruction either. To -// convert the following module to the binary, my approach is to find and -// replace "ref.test $type" -> "drop i32.const n" on the source text. 
This -// results in the bytes "0x1a, 0x41, n" where we need the bytes "0xfb, 0x14, n" -// so doing a find and replace on the output from "0x1a, 0x41" -> "0xfb, 0x14" -// gets us the output we need. +// convert the following textual wasm to a binary, you can build wabt from this +// branch: https://github.com/WebAssembly/wabt/pull/2529 and then use that +// wat2wasm binary. // // (module // (type $type0 (func (param) (result i32))) @@ -154,15 +152,15 @@ addOnPreRun(() => { let ptr = 0; try { const mod = new WebAssembly.Module(code); - const inst = new WebAssembly.Instance(mod, {e: {t: wasmTable}}); + const inst = new WebAssembly.Instance(mod, { e: { t: wasmTable } }); ptr = addFunction(inst.exports.f); - } catch(e) { + } catch (e) { // If something goes wrong, we'll null out _PyEM_CountFuncParams and fall // back to the JS trampoline. } Module._PyEM_CountArgsPtr = ptr; - const offset = HEAP32[__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET/4]; - HEAP32[__PyRuntime/4 + offset] = ptr; + const offset = HEAP32[__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET / 4]; + HEAP32[(__PyRuntime + offset) / 4] = ptr; }); ); diff --git a/Python/errors.c b/Python/errors.c index b6ac2f767a283b..9c7b771133dcf4 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -314,8 +314,8 @@ _PyErr_SetLocaleString(PyObject *exception, const char *string) PyObject* _Py_HOT_FUNCTION PyErr_Occurred(void) { - /* The caller must hold the GIL. */ - assert(PyGILState_Check()); + /* The caller must hold a thread state. */ + _Py_AssertHoldsTstate(); PyThreadState *tstate = _PyThreadState_GET(); return _PyErr_Occurred(tstate); diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index cda01bb768c269..b253850e78ddbb 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -899,6 +899,51 @@ break; } + case _GUARD_BINARY_OP_EXTEND: { + _PyStackRef right; + _PyStackRef left; + right = stack_pointer[-1]; + left = stack_pointer[-2]; + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + assert(d && d->guard); + _PyFrame_SetStackPointer(frame, stack_pointer); + int res = d->guard(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (!res) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } + break; + } + + case _BINARY_OP_EXTEND: { + _PyStackRef right; + _PyStackRef left; + _PyStackRef res; + right = stack_pointer[-1]; + left = stack_pointer[-2]; + PyObject *descr = (PyObject *)CURRENT_OPERAND0(); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + STAT_INC(BINARY_OP, hit); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *res_o = d->action(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + PyStackRef_CLOSE(left); + PyStackRef_CLOSE(right); + res = PyStackRef_FromPyObjectSteal(res_o); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + break; + } + case _BINARY_SUBSCR: { _PyStackRef sub; _PyStackRef container; @@ -1396,9 +1441,7 @@ _PyStackRef retval; _PyStackRef res; retval = stack_pointer[-1]; - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef 
temp = retval; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -1534,9 +1577,7 @@ // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -2210,8 +2251,10 @@ _PyStackRef tup; oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; - PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); - if (tup_o == NULL) JUMP_TO_ERROR(); + PyObject *tup_o = _PyTuple_FromStackRefStealOnSuccess(values, oparg); + if (tup_o == NULL) { + JUMP_TO_ERROR(); + } tup = PyStackRef_FromPyObjectSteal(tup_o); stack_pointer[-oparg] = tup; stack_pointer += 1 - oparg; @@ -2224,8 +2267,10 @@ _PyStackRef list; oparg = CURRENT_OPARG(); values = &stack_pointer[-oparg]; - PyObject *list_o = _PyList_FromStackRefSteal(values, oparg); - if (list_o == NULL) JUMP_TO_ERROR(); + PyObject *list_o = _PyList_FromStackRefStealOnSuccess(values, oparg); + if (list_o == NULL) { + JUMP_TO_ERROR(); + } list = PyStackRef_FromPyObjectSteal(list_o); stack_pointer[-oparg] = list; stack_pointer += 1 - oparg; @@ -3407,7 +3452,7 @@ PyObject *match_o = NULL; PyObject *rest_o = NULL; _PyFrame_SetStackPointer(frame, stack_pointer); - int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + int res = _PyEval_ExceptionGroupMatch(frame, exc_value, match_type, &match_o, &rest_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(exc_value_st); @@ -5963,7 +6008,7 @@ stack_pointer = _PyFrame_GetStackPointer(frame); #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { _PyFrame_SetStackPointer(frame, stack_pointer); printf("SIDE EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); @@ -6164,16 +6209,6 @@ break; } - case _INTERNAL_INCREMENT_OPT_COUNTER: { - _PyStackRef opt; - opt = stack_pointer[-1]; - _PyCounterOptimizerObject *exe = (_PyCounterOptimizerObject *)PyStackRef_AsPyObjectBorrow(opt); - exe->count++; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); - break; - } - case _DYNAMIC_EXIT: { PyObject *exit_p = (PyObject *)CURRENT_OPERAND0(); tstate->previous_executor = (PyObject *)current_executor; @@ -6181,7 +6216,7 @@ _Py_CODEUNIT *target = frame->instr_ptr; #if defined(Py_DEBUG) && !defined(_Py_JIT) OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (lltrace >= 2) { + if (frame->lltrace >= 2) { _PyFrame_SetStackPointer(frame, stack_pointer); printf("DYNAMIC EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); diff --git a/Python/fileutils.c b/Python/fileutils.c index 6bc3a44c3c1313..68d24bc6b93465 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1,6 +1,7 @@ #include "Python.h" #include "pycore_fileutils.h" // fileutils definitions #include "pycore_runtime.h" // _PyRuntime +#include "pycore_pystate.h" // _Py_AssertHoldsTstate() #include "osdefs.h" // SEP #include // mbstowcs() @@ -1311,7 +1312,7 @@ _Py_fstat(int fd, struct _Py_stat_struct *status) { int res; - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); Py_BEGIN_ALLOW_THREADS res = _Py_fstat_noraise(fd, status); @@ -1691,7 +1692,7 @@ int _Py_open(const char *pathname, int flags) { /* _Py_open() must be called with the GIL held. 
*/ - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); return _Py_open_impl(pathname, flags, 1); } @@ -1766,7 +1767,7 @@ _Py_wfopen(const wchar_t *path, const wchar_t *mode) FILE* Py_fopen(PyObject *path, const char *mode) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); if (PySys_Audit("open", "Osi", path, mode, 0) < 0) { return NULL; @@ -1841,14 +1842,6 @@ Py_fopen(PyObject *path, const char *mode) } -// Deprecated alias to Py_fopen() kept for backward compatibility -FILE* -_Py_fopen_obj(PyObject *path, const char *mode) -{ - return Py_fopen(path, mode); -} - - // Call fclose(). // // On Windows, files opened by Py_fopen() in the Python DLL must be closed by @@ -1881,7 +1874,7 @@ _Py_read(int fd, void *buf, size_t count) int err; int async_err = 0; - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); /* _Py_read() must not be called with an exception set, otherwise the * caller may think that read() was interrupted by a signal and the signal @@ -2047,7 +2040,7 @@ _Py_write_impl(int fd, const void *buf, size_t count, int gil_held) Py_ssize_t _Py_write(int fd, const void *buf, size_t count) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); /* _Py_write() must not be called with an exception set, otherwise the * caller may think that write() was interrupted by a signal and the signal @@ -2675,7 +2668,7 @@ _Py_dup(int fd) HANDLE handle; #endif - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); #ifdef MS_WINDOWS handle = _Py_get_osfhandle(fd); diff --git a/Python/frame.c b/Python/frame.c index 6eb32bcce0b799..68ac2acbaee342 100644 --- a/Python/frame.c +++ b/Python/frame.c @@ -48,7 +48,7 @@ _PyFrame_MakeAndSetFrameObject(_PyInterpreterFrame *frame) static void take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) { - assert(frame->owner != FRAME_OWNED_BY_CSTACK); + assert(frame->owner < FRAME_OWNED_BY_INTERPRETER); assert(frame->owner != FRAME_OWNED_BY_FRAME_OBJECT); Py_ssize_t size = ((char*)frame->stackpointer) - (char *)frame; memcpy((_PyInterpreterFrame *)f->_f_frame_data, frame, size); @@ -69,7 +69,7 @@ take_ownership(PyFrameObject *f, _PyInterpreterFrame *frame) _PyInterpreterFrame *prev = _PyFrame_GetFirstComplete(frame->previous); frame->previous = NULL; if (prev) { - assert(prev->owner != FRAME_OWNED_BY_CSTACK); + assert(prev->owner < FRAME_OWNED_BY_INTERPRETER); /* Link PyFrameObjects.f_back and remove link through _PyInterpreterFrame.previous */ PyFrameObject *back = _PyFrame_GetFrameObject(prev); if (back == NULL) { diff --git a/Python/gc.c b/Python/gc.c index 5b9588c8741b97..3fe0b7f814544d 100644 --- a/Python/gc.c +++ b/Python/gc.c @@ -1476,7 +1476,7 @@ mark_stacks(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, b while (ts) { _PyInterpreterFrame *frame = ts->current_frame; while (frame) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { frame = frame->previous; continue; } diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c index f7f44407494e51..d1023d9351086f 100644 --- a/Python/gc_free_threading.c +++ b/Python/gc_free_threading.c @@ -17,6 +17,17 @@ #include "pydtrace.h" #include "pycore_uniqueid.h" // _PyObject_MergeThreadLocalRefcounts() + +// enable the "mark alive" pass of GC +#define GC_ENABLE_MARK_ALIVE 1 + +// include additional roots in "mark alive" pass +#define GC_MARK_ALIVE_EXTRA_ROOTS 1 + +// include Python stacks as set of known roots +#define GC_MARK_ALIVE_STACKS 1 + + #ifdef Py_GIL_DISABLED typedef struct _gc_runtime_state GCState; @@ 
-113,28 +124,66 @@ worklist_remove(struct worklist_iter *iter) iter->next = iter->ptr; } +static inline int +gc_has_bit(PyObject *op, uint8_t bit) +{ + return (op->ob_gc_bits & bit) != 0; +} + +static inline void +gc_set_bit(PyObject *op, uint8_t bit) +{ + op->ob_gc_bits |= bit; +} + +static inline void +gc_clear_bit(PyObject *op, uint8_t bit) +{ + op->ob_gc_bits &= ~bit; +} + static inline int gc_is_frozen(PyObject *op) { - return (op->ob_gc_bits & _PyGC_BITS_FROZEN) != 0; + return gc_has_bit(op, _PyGC_BITS_FROZEN); } static inline int gc_is_unreachable(PyObject *op) { - return (op->ob_gc_bits & _PyGC_BITS_UNREACHABLE) != 0; + return gc_has_bit(op, _PyGC_BITS_UNREACHABLE); } -static void +static inline void gc_set_unreachable(PyObject *op) { - op->ob_gc_bits |= _PyGC_BITS_UNREACHABLE; + gc_set_bit(op, _PyGC_BITS_UNREACHABLE); } -static void +static inline void gc_clear_unreachable(PyObject *op) { - op->ob_gc_bits &= ~_PyGC_BITS_UNREACHABLE; + gc_clear_bit(op, _PyGC_BITS_UNREACHABLE); +} + +static inline int +gc_is_alive(PyObject *op) +{ + return gc_has_bit(op, _PyGC_BITS_ALIVE); +} + +#ifdef GC_ENABLE_MARK_ALIVE +static inline void +gc_set_alive(PyObject *op) +{ + gc_set_bit(op, _PyGC_BITS_ALIVE); +} +#endif + +static inline void +gc_clear_alive(PyObject *op) +{ + gc_clear_bit(op, _PyGC_BITS_ALIVE); } // Initialize the `ob_tid` field to zero if the object is not already @@ -143,6 +192,7 @@ static void gc_maybe_init_refs(PyObject *op) { if (!gc_is_unreachable(op)) { + assert(!gc_is_alive(op)); gc_set_unreachable(op); op->ob_tid = 0; } @@ -264,9 +314,13 @@ static void gc_restore_refs(PyObject *op) { if (gc_is_unreachable(op)) { + assert(!gc_is_alive(op)); gc_restore_tid(op); gc_clear_unreachable(op); } + else { + gc_clear_alive(op); + } } // Given a mimalloc memory block return the PyObject stored in it or NULL if @@ -392,6 +446,119 @@ gc_visit_thread_stacks(PyInterpreterState *interp) _Py_FOR_EACH_TSTATE_END(interp); } +// Untrack objects that can never create reference cycles. +// Return true if the object was untracked. +static bool +gc_maybe_untrack(PyObject *op) +{ + // Currently we only check for tuples containing only non-GC objects. In + // theory we could check other immutable objects that contain references + // to non-GC objects. + if (PyTuple_CheckExact(op)) { + _PyTuple_MaybeUntrack(op); + if (!_PyObject_GC_IS_TRACKED(op)) { + return true; + } + } + return false; +} + +#ifdef GC_ENABLE_MARK_ALIVE +static int +mark_alive_stack_push(PyObject *op, _PyObjectStack *stack) +{ + if (op == NULL) { + return 0; + } + if (!_PyObject_GC_IS_TRACKED(op)) { + return 0; + } + if (gc_is_alive(op)) { + return 0; // already visited this object + } + if (gc_maybe_untrack(op)) { + return 0; // was untracked, don't visit it + } + + // Need to call tp_traverse on this object. Add to stack and mark it + // alive so we don't traverse it a second time. + gc_set_alive(op); + if (_PyObjectStack_Push(stack, op) < 0) { + return -1; + } + return 0; +} + +static bool +gc_clear_alive_bits(const mi_heap_t *heap, const mi_heap_area_t *area, + void *block, size_t block_size, void *args) +{ + PyObject *op = op_from_block(block, args, false); + if (op == NULL) { + return true; + } + if (gc_is_alive(op)) { + gc_clear_alive(op); + } + return true; +} + +static void +gc_abort_mark_alive(PyInterpreterState *interp, + struct collection_state *state, + _PyObjectStack *stack) +{ + // We failed to allocate memory for "stack" while doing the "mark + // alive" phase. 
In that case, free the object stack and make sure + // that no objects have the alive bit set. + _PyObjectStack_Clear(stack); + gc_visit_heaps(interp, &gc_clear_alive_bits, &state->base); +} + +#ifdef GC_MARK_ALIVE_STACKS +static int +gc_visit_stackref_mark_alive(_PyObjectStack *stack, _PyStackRef stackref) +{ + // Note: we MUST check that it is deferred before checking the rest. + // Otherwise we might read into invalid memory due to non-deferred references + // being dead already. + if (PyStackRef_IsDeferred(stackref) && !PyStackRef_IsNull(stackref)) { + PyObject *op = PyStackRef_AsPyObjectBorrow(stackref); + if (mark_alive_stack_push(op, stack) < 0) { + return -1; + } + } + return 0; +} + +static int +gc_visit_thread_stacks_mark_alive(PyInterpreterState *interp, _PyObjectStack *stack) +{ + _Py_FOR_EACH_TSTATE_BEGIN(interp, p) { + for (_PyInterpreterFrame *f = p->current_frame; f != NULL; f = f->previous) { + PyObject *executable = PyStackRef_AsPyObjectBorrow(f->f_executable); + if (executable == NULL || !PyCode_Check(executable)) { + continue; + } + + PyCodeObject *co = (PyCodeObject *)executable; + int max_stack = co->co_nlocalsplus + co->co_stacksize; + if (gc_visit_stackref_mark_alive(stack, f->f_executable) < 0) { + return -1; + } + for (int i = 0; i < max_stack; i++) { + if (gc_visit_stackref_mark_alive(stack, f->localsplus[i]) < 0) { + return -1; + } + } + } + } + _Py_FOR_EACH_TSTATE_END(interp); + return 0; +} +#endif // GC_MARK_ALIVE_STACKS +#endif // GC_ENABLE_MARK_ALIVE + static void queue_untracked_obj_decref(PyObject *op, struct collection_state *state) { @@ -460,7 +627,8 @@ visit_decref(PyObject *op, void *arg) { if (_PyObject_GC_IS_TRACKED(op) && !_Py_IsImmortal(op) - && !gc_is_frozen(op)) + && !gc_is_frozen(op) + && !gc_is_alive(op)) { // If update_refs hasn't reached this object yet, mark it // as (tentatively) unreachable and initialize ob_tid to zero. @@ -482,6 +650,10 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } + if (gc_is_alive(op)) { + return true; + } + // Exclude immortal objects from garbage collection if (_Py_IsImmortal(op)) { op->ob_tid = 0; @@ -497,14 +669,9 @@ update_refs(const mi_heap_t *heap, const mi_heap_area_t *area, _PyObject_ASSERT(op, refcount >= 0); if (refcount > 0 && !_PyObject_HasDeferredRefcount(op)) { - // Untrack tuples and dicts as necessary in this pass, but not objects - // with zero refcount, which we will want to collect. 
- if (PyTuple_CheckExact(op)) { - _PyTuple_MaybeUntrack(op); - if (!_PyObject_GC_IS_TRACKED(op)) { - gc_restore_refs(op); - return true; - } + if (gc_maybe_untrack(op)) { + gc_restore_refs(op); + return true; } } @@ -553,6 +720,21 @@ mark_reachable(PyObject *op) } #ifdef GC_DEBUG +static bool +validate_alive_bits(const mi_heap_t *heap, const mi_heap_area_t *area, + void *block, size_t block_size, void *args) +{ + PyObject *op = op_from_block(block, args, false); + if (op == NULL) { + return true; + } + + _PyObject_ASSERT_WITH_MSG(op, !gc_is_alive(op), + "object should not be marked as alive yet"); + + return true; +} + static bool validate_refcounts(const mi_heap_t *heap, const mi_heap_area_t *area, void *block, size_t block_size, void *args) @@ -586,6 +768,11 @@ validate_gc_objects(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } + if (gc_is_alive(op)) { + _PyObject_ASSERT(op, !gc_is_unreachable(op)); + return true; + } + _PyObject_ASSERT(op, gc_is_unreachable(op)); _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0, "refcount is too small"); @@ -602,6 +789,10 @@ mark_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, return true; } + if (gc_is_alive(op)) { + return true; + } + _PyObject_ASSERT_WITH_MSG(op, gc_get_refs(op) >= 0, "refcount is too small"); @@ -630,6 +821,7 @@ restore_refs(const mi_heap_t *heap, const mi_heap_area_t *area, } gc_restore_tid(op); gc_clear_unreachable(op); + gc_clear_alive(op); return true; } @@ -679,6 +871,7 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, // object is reachable, restore `ob_tid`; we're done with these objects gc_restore_tid(op); + gc_clear_alive(op); state->long_lived_total++; return true; } @@ -686,6 +879,89 @@ scan_heap_visitor(const mi_heap_t *heap, const mi_heap_area_t *area, static int move_legacy_finalizer_reachable(struct collection_state *state); +#ifdef GC_ENABLE_MARK_ALIVE +static int +propagate_alive_bits(_PyObjectStack *stack) +{ + for (;;) { + PyObject *op = _PyObjectStack_Pop(stack); + if (op == NULL) { + break; + } + assert(_PyObject_GC_IS_TRACKED(op)); + assert(gc_is_alive(op)); + traverseproc traverse = Py_TYPE(op)->tp_traverse; + if (traverse(op, (visitproc)&mark_alive_stack_push, stack) < 0) { + return -1; + } + } + return 0; +} + +// Using tp_traverse, mark everything reachable from known root objects +// (which must be non-garbage) as alive (_PyGC_BITS_ALIVE is set). In +// most programs, this marks nearly all objects that are not actually +// unreachable. +// +// Actually alive objects can be missed in this pass if they are alive +// due to being referenced from an unknown root (e.g. an extension +// module global), some tp_traverse methods are either missing or not +// accurate, or objects that have been untracked. Objects that are only +// reachable from the aforementioned are also missed. +// +// If gc.freeze() is used, this pass is disabled since it is unlikely to +// help much. The next stages of cyclic GC will ignore objects with the +// alive bit set. +// +// Returns -1 on failure (out of memory). 
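The comment block ending just above describes the mark-alive pass: everything reachable from a few known roots is flagged up front and then skipped by the cycle detector. The following is a minimal, self-contained sketch of that idea, iterative marking over a toy object graph with an explicit stack; it uses none of the CPython machinery (no tp_traverse, no _PyObjectStack), only the shape of the algorithm.

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct ToyNode {
    bool alive;                     /* plays the role of _PyGC_BITS_ALIVE */
    struct ToyNode *refs[2];        /* outgoing references (may be NULL) */
} ToyNode;

/* Push a node unless it is NULL or already marked; returns -1 on OOM. */
static int push_if_new(ToyNode *n, ToyNode ***stack, size_t *len, size_t *cap)
{
    if (n == NULL || n->alive) {
        return 0;
    }
    n->alive = true;                /* mark before pushing, so it is visited once */
    if (*len == *cap) {
        size_t new_cap = *cap ? *cap * 2 : 8;
        ToyNode **p = realloc(*stack, new_cap * sizeof(*p));
        if (p == NULL) {
            return -1;
        }
        *stack = p;
        *cap = new_cap;
    }
    (*stack)[(*len)++] = n;
    return 0;
}

/* Mark everything reachable from `root`; returns -1 if allocation fails. */
static int mark_alive_from_root(ToyNode *root)
{
    ToyNode **stack = NULL;
    size_t len = 0, cap = 0;
    if (push_if_new(root, &stack, &len, &cap) < 0) {
        goto oom;
    }
    while (len > 0) {
        ToyNode *n = stack[--len];
        for (int i = 0; i < 2; i++) {
            if (push_if_new(n->refs[i], &stack, &len, &cap) < 0) {
                goto oom;
            }
        }
    }
    free(stack);
    return 0;
oom:
    free(stack);
    return -1;
}

int main(void)
{
    ToyNode c = { false, { NULL, NULL } };
    ToyNode b = { false, { &c, NULL } };
    ToyNode a = { false, { &b, &c } };
    ToyNode unreachable = { false, { NULL, NULL } };
    if (mark_alive_from_root(&a) < 0) {
        return 1;
    }
    printf("a=%d b=%d c=%d unreachable=%d\n",
           a.alive, b.alive, c.alive, unreachable.alive);
    return 0;
}

Marking an object before pushing it is what keeps each object on the stack at most once, the same invariant mark_alive_stack_push enforces with the alive bit.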
+static int +mark_alive_from_roots(PyInterpreterState *interp, + struct collection_state *state) +{ +#ifdef GC_DEBUG + // Check that all objects don't have alive bit set + gc_visit_heaps(interp, &validate_alive_bits, &state->base); +#endif + _PyObjectStack stack = { NULL }; + + #define STACK_PUSH(op) \ + if (mark_alive_stack_push(op, &stack) < 0) { \ + gc_abort_mark_alive(interp, state, &stack); \ + return -1; \ + } + STACK_PUSH(interp->sysdict); +#ifdef GC_MARK_ALIVE_EXTRA_ROOTS + STACK_PUSH(interp->builtins); + STACK_PUSH(interp->dict); + struct types_state *types = &interp->types; + for (int i = 0; i < _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES; i++) { + STACK_PUSH(types->builtins.initialized[i].tp_dict); + STACK_PUSH(types->builtins.initialized[i].tp_subclasses); + } + for (int i = 0; i < _Py_MAX_MANAGED_STATIC_EXT_TYPES; i++) { + STACK_PUSH(types->for_extensions.initialized[i].tp_dict); + STACK_PUSH(types->for_extensions.initialized[i].tp_subclasses); + } +#endif +#ifdef GC_MARK_ALIVE_STACKS + if (gc_visit_thread_stacks_mark_alive(interp, &stack) < 0) { + gc_abort_mark_alive(interp, state, &stack); + return -1; + } +#endif + #undef STACK_PUSH + + // Use tp_traverse to find everything reachable from roots. + if (propagate_alive_bits(&stack) < 0) { + gc_abort_mark_alive(interp, state, &stack); + return -1; + } + + return 0; +} +#endif // GC_ENABLE_MARK_ALIVE + + static int deduce_unreachable_heap(PyInterpreterState *interp, struct collection_state *state) @@ -1245,6 +1521,25 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, process_delayed_frees(interp, state); + #ifdef GC_ENABLE_MARK_ALIVE + // If gc.freeze() was used, it seems likely that doing this "mark alive" + // pass will not be a performance win. Typically the majority of alive + // objects will be marked as frozen and will be skipped anyhow, without + // doing this extra work. Doing this pass also defeats one of the + // purposes of using freeze: avoiding writes to objects that are frozen. + // So, we just skip this if gc.freeze() was used. + if (!state->gcstate->freeze_active) { + // Mark objects reachable from known roots as "alive". These will + // be ignored for rest of the GC pass. + int err = mark_alive_from_roots(interp, state); + if (err < 0) { + _PyEval_StartTheWorld(interp); + PyErr_NoMemory(); + return; + } + } + #endif + // Find unreachable objects int err = deduce_unreachable_heap(interp, state); if (err < 0) { @@ -1253,6 +1548,11 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state, return; } +#ifdef GC_DEBUG + // At this point, no object should have the alive bit set + gc_visit_heaps(interp, &validate_alive_bits, &state->base); +#endif + // Print debugging information. 
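mark_alive_from_roots above seeds the work stack through a local STACK_PUSH macro that, on the first failed push, undoes the partial work and returns. Below is a stripped-down sketch of that error-propagation pattern, with a toy bounded stack standing in for _PyObjectStack and a full stack standing in for an allocation failure.

#include <stdio.h>

#define MAX_ROOTS 4

typedef struct {
    void *items[MAX_ROOTS];
    int len;
} RootStack;

static int root_push(RootStack *st, void *op)
{
    if (op == NULL) {
        return 0;                  /* missing roots are simply skipped */
    }
    if (st->len == MAX_ROOTS) {
        return -1;                 /* stand-in for an allocation failure */
    }
    st->items[st->len++] = op;
    return 0;
}

/* Mirror of the STACK_PUSH pattern: on the first failure, undo and bail out. */
static int seed_roots(RootStack *st, void **roots, int n)
{
#define STACK_PUSH(op)                  \
    do {                                \
        if (root_push(st, (op)) < 0) {  \
            st->len = 0;                \
            return -1;                  \
        }                               \
    } while (0)
    for (int i = 0; i < n; i++) {
        STACK_PUSH(roots[i]);
    }
#undef STACK_PUSH
    return 0;
}

int main(void)
{
    int a, b, c;
    void *roots[] = { &a, NULL, &b, &c };
    RootStack st = { {0}, 0 };
    printf("seed ok=%d, roots=%d\n", seed_roots(&st, roots, 4) == 0, st.len);
    return 0;
}

Wrapping the push-or-bail sequence in a macro keeps the list of roots readable while still unwinding cleanly on failure, which is the trade-off the code above makes.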
if (interp->gc.debug & _PyGC_DEBUG_COLLECTABLE) { PyObject *op; @@ -1564,6 +1864,8 @@ _PyGC_Freeze(PyInterpreterState *interp) { struct visitor_args args; _PyEval_StopTheWorld(interp); + GCState *gcstate = get_gc_state(); + gcstate->freeze_active = true; gc_visit_heaps(interp, &visit_freeze, &args); _PyEval_StartTheWorld(interp); } @@ -1574,7 +1876,7 @@ visit_unfreeze(const mi_heap_t *heap, const mi_heap_area_t *area, { PyObject *op = op_from_block(block, args, true); if (op != NULL) { - op->ob_gc_bits &= ~_PyGC_BITS_FROZEN; + gc_clear_bit(op, _PyGC_BITS_FROZEN); } return true; } @@ -1584,6 +1886,8 @@ _PyGC_Unfreeze(PyInterpreterState *interp) { struct visitor_args args; _PyEval_StopTheWorld(interp); + GCState *gcstate = get_gc_state(); + gcstate->freeze_active = false; gc_visit_heaps(interp, &visit_unfreeze, &args); _PyEval_StartTheWorld(interp); } diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 81408380d6b2b8..cbb07a89d7db83 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -8,13 +8,20 @@ #endif #define TIER_ONE 1 +#if !USE_COMPUTED_GOTOS + dispatch_opcode: + switch (opcode) +#endif + { + /* BEGIN INSTRUCTIONS */ + TARGET(BINARY_OP) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP); - PREDICTED(BINARY_OP); - _Py_CODEUNIT* const this_instr = next_instr - 2; + PREDICTED_BINARY_OP:; + _Py_CODEUNIT* const this_instr = next_instr - 6; (void)this_instr; _PyStackRef lhs; _PyStackRef rhs; @@ -39,6 +46,7 @@ assert(NB_ADD <= oparg); assert(oparg <= NB_INPLACE_XOR); } + /* Skip 4 cache entries */ // _BINARY_OP { PyObject *lhs_o = PyStackRef_AsPyObjectBorrow(lhs); @@ -60,9 +68,9 @@ TARGET(BINARY_OP_ADD_FLOAT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_ADD_FLOAT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -75,7 +83,7 @@ DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_ADD_FLOAT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -98,9 +106,9 @@ TARGET(BINARY_OP_ADD_INT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_ADD_INT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -113,7 +121,7 @@ DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_ADD_INT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -135,9 +143,9 @@ TARGET(BINARY_OP_ADD_UNICODE) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_ADD_UNICODE); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -150,7 +158,7 @@ DEOPT_IF(!PyUnicode_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_ADD_UNICODE { PyObject *left_o = 
PyStackRef_AsPyObjectBorrow(left); @@ -170,11 +178,57 @@ DISPATCH(); } + TARGET(BINARY_OP_EXTEND) { + _Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr; + next_instr += 6; + INSTRUCTION_STATS(BINARY_OP_EXTEND); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); + _PyStackRef left; + _PyStackRef right; + _PyStackRef res; + /* Skip 1 cache entry */ + // _GUARD_BINARY_OP_EXTEND + { + right = stack_pointer[-1]; + left = stack_pointer[-2]; + PyObject *descr = read_obj(&this_instr[2].cache); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + assert(d && d->guard); + _PyFrame_SetStackPointer(frame, stack_pointer); + int res = d->guard(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + DEOPT_IF(!res, BINARY_OP); + } + /* Skip -4 cache entry */ + // _BINARY_OP_EXTEND + { + PyObject *descr = read_obj(&this_instr[2].cache); + PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); + PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); + assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5); + _PyBinaryOpSpecializationDescr *d = (_PyBinaryOpSpecializationDescr*)descr; + STAT_INC(BINARY_OP, hit); + _PyFrame_SetStackPointer(frame, stack_pointer); + PyObject *res_o = d->action(left_o, right_o); + stack_pointer = _PyFrame_GetStackPointer(frame); + PyStackRef_CLOSE(left); + PyStackRef_CLOSE(right); + res = PyStackRef_FromPyObjectSteal(res_o); + } + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + DISPATCH(); + } + TARGET(BINARY_OP_INPLACE_ADD_UNICODE) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_INPLACE_ADD_UNICODE); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; // _GUARD_BOTH_UNICODE @@ -186,7 +240,7 @@ DEOPT_IF(!PyUnicode_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyUnicode_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_INPLACE_ADD_UNICODE { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -235,9 +289,9 @@ TARGET(BINARY_OP_MULTIPLY_FLOAT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_MULTIPLY_FLOAT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -250,7 +304,7 @@ DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_MULTIPLY_FLOAT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -273,9 +327,9 @@ TARGET(BINARY_OP_MULTIPLY_INT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_MULTIPLY_INT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -288,7 +342,7 @@ DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // 
_BINARY_OP_MULTIPLY_INT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -310,9 +364,9 @@ TARGET(BINARY_OP_SUBTRACT_FLOAT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_SUBTRACT_FLOAT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -325,7 +379,7 @@ DEOPT_IF(!PyFloat_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyFloat_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_SUBTRACT_FLOAT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -348,9 +402,9 @@ TARGET(BINARY_OP_SUBTRACT_INT) { frame->instr_ptr = next_instr; - next_instr += 2; + next_instr += 6; INSTRUCTION_STATS(BINARY_OP_SUBTRACT_INT); - static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 1, "incorrect cache size"); + static_assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5, "incorrect cache size"); _PyStackRef left; _PyStackRef right; _PyStackRef res; @@ -363,7 +417,7 @@ DEOPT_IF(!PyLong_CheckExact(left_o), BINARY_OP); DEOPT_IF(!PyLong_CheckExact(right_o), BINARY_OP); } - /* Skip 1 cache entry */ + /* Skip 5 cache entries */ // _BINARY_OP_SUBTRACT_INT { PyObject *left_o = PyStackRef_AsPyObjectBorrow(left); @@ -437,7 +491,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(BINARY_SUBSCR); - PREDICTED(BINARY_SUBSCR); + PREDICTED_BINARY_SUBSCR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef container; @@ -685,10 +739,8 @@ _PyStackRef *values; _PyStackRef list; values = &stack_pointer[-oparg]; - PyObject *list_o = _PyList_FromStackRefSteal(values, oparg); + PyObject *list_o = _PyList_FromStackRefStealOnSuccess(values, oparg); if (list_o == NULL) { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } list = PyStackRef_FromPyObjectSteal(list_o); @@ -856,10 +908,8 @@ _PyStackRef *values; _PyStackRef tup; values = &stack_pointer[-oparg]; - PyObject *tup_o = _PyTuple_FromStackRefSteal(values, oparg); + PyObject *tup_o = _PyTuple_FromStackRefStealOnSuccess(values, oparg); if (tup_o == NULL) { - stack_pointer += -oparg; - assert(WITHIN_STACK_BOUNDS()); goto error; } tup = PyStackRef_FromPyObjectSteal(tup_o); @@ -882,7 +932,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(CALL); - PREDICTED(CALL); + PREDICTED_CALL:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; _PyStackRef *callable; @@ -1658,7 +1708,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(CALL_FUNCTION_EX); - PREDICTED(CALL_FUNCTION_EX); + PREDICTED_CALL_FUNCTION_EX:; _Py_CODEUNIT* const this_instr = next_instr - 1; (void)this_instr; _PyStackRef func; @@ -1915,7 +1965,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(CALL_KW); - PREDICTED(CALL_KW); + PREDICTED_CALL_KW:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; _PyStackRef *callable; @@ -3158,7 +3208,7 @@ PyObject *match_o = NULL; PyObject *rest_o = NULL; _PyFrame_SetStackPointer(frame, stack_pointer); - int res = _PyEval_ExceptionGroupMatch(exc_value, match_type, + int res = _PyEval_ExceptionGroupMatch(frame, exc_value, match_type, &match_o, &rest_o); stack_pointer = _PyFrame_GetStackPointer(frame); PyStackRef_CLOSE(exc_value_st); @@ -3254,7 +3304,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(COMPARE_OP); - PREDICTED(COMPARE_OP); + PREDICTED_COMPARE_OP:; 
_Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef left; @@ -3434,7 +3484,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(CONTAINS_OP); - PREDICTED(CONTAINS_OP); + PREDICTED_CONTAINS_OP:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef left; @@ -3955,7 +4005,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(FOR_ITER); - PREDICTED(FOR_ITER); + PREDICTED_FOR_ITER:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef iter; @@ -4586,7 +4636,8 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(INSTRUMENTED_CALL_FUNCTION_EX); - GO_TO_INSTRUCTION(CALL_FUNCTION_EX); + + goto PREDICTED_CALL_FUNCTION_EX; } TARGET(INSTRUMENTED_CALL_KW) { @@ -4610,7 +4661,7 @@ stack_pointer = _PyFrame_GetStackPointer(frame); if (err) goto error; PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - GO_TO_INSTRUCTION(CALL_KW); + goto PREDICTED_CALL_KW; } TARGET(INSTRUMENTED_END_FOR) { @@ -4764,8 +4815,9 @@ if (tstate->tracing) { PyCodeObject *code = _PyFrame_GetCode(frame); _PyFrame_SetStackPointer(frame, stack_pointer); - original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyFrame_GetBytecode(frame))].original_opcode; + int index = (int)(this_instr - _PyFrame_GetBytecode(frame)); stack_pointer = _PyFrame_GetStackPointer(frame); + original_opcode = code->_co_monitoring->lines->data[index*code->_co_monitoring->lines->bytes_per_entry]; next_instr = this_instr; } else { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -4800,7 +4852,7 @@ // cancel out the decrement that will happen in LOAD_SUPER_ATTR; we // don't want to specialize instrumented instructions PAUSE_ADAPTIVE_COUNTER(this_instr[1].counter); - GO_TO_INSTRUCTION(LOAD_SUPER_ATTR); + goto PREDICTED_LOAD_SUPER_ATTR; } TARGET(INSTRUMENTED_NOT_TAKEN) { @@ -4988,9 +5040,7 @@ // _RETURN_VALUE { retval = val; - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -5042,9 +5092,7 @@ // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -5087,7 +5135,7 @@ INSTRUCTION_STATS(INTERPRETER_EXIT); _PyStackRef retval; retval = stack_pointer[-1]; - assert(frame == &entry_frame); + assert(frame->owner == FRAME_OWNED_BY_INTERPRETER); assert(_PyFrame_IsIncomplete(frame)); /* Restore previous frame and return. 
*/ tstate->current_frame = frame->previous; @@ -5255,7 +5303,7 @@ frame->instr_ptr = next_instr; next_instr += 10; INSTRUCTION_STATS(LOAD_ATTR); - PREDICTED(LOAD_ATTR); + PREDICTED_LOAD_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 10; (void)this_instr; _PyStackRef owner; @@ -5971,7 +6019,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(LOAD_CONST); - PREDICTED(LOAD_CONST); + PREDICTED_LOAD_CONST:; _Py_CODEUNIT* const this_instr = next_instr - 1; (void)this_instr; _PyStackRef value; @@ -6214,7 +6262,7 @@ frame->instr_ptr = next_instr; next_instr += 5; INSTRUCTION_STATS(LOAD_GLOBAL); - PREDICTED(LOAD_GLOBAL); + PREDICTED_LOAD_GLOBAL:; _Py_CODEUNIT* const this_instr = next_instr - 5; (void)this_instr; _PyStackRef *res; @@ -6441,7 +6489,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(LOAD_SUPER_ATTR); - PREDICTED(LOAD_SUPER_ATTR); + PREDICTED_LOAD_SUPER_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef global_super_st; @@ -7044,7 +7092,7 @@ frame->instr_ptr = next_instr; next_instr += 1; INSTRUCTION_STATS(RESUME); - PREDICTED(RESUME); + PREDICTED_RESUME:; _Py_CODEUNIT* const this_instr = next_instr - 1; (void)this_instr; // _LOAD_BYTECODE @@ -7170,9 +7218,7 @@ _PyStackRef retval; _PyStackRef res; retval = stack_pointer[-1]; - #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); _PyStackRef temp = retval; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -7197,7 +7243,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(SEND); - PREDICTED(SEND); + PREDICTED_SEND:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef receiver; @@ -7225,7 +7271,7 @@ v = stack_pointer[-1]; PyObject *receiver_o = PyStackRef_AsPyObjectBorrow(receiver); PyObject *retval_o; - assert(frame != &entry_frame); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); if ((tstate->interp->eval_frame == NULL) && (Py_TYPE(receiver_o) == &PyGen_Type || Py_TYPE(receiver_o) == &PyCoro_Type) && ((PyGenObject *)receiver_o)->gi_frame_state < FRAME_EXECUTING) @@ -7438,7 +7484,7 @@ frame->instr_ptr = next_instr; next_instr += 5; INSTRUCTION_STATS(STORE_ATTR); - PREDICTED(STORE_ATTR); + PREDICTED_STORE_ATTR:; _Py_CODEUNIT* const this_instr = next_instr - 5; (void)this_instr; _PyStackRef owner; @@ -7802,7 +7848,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(STORE_SUBSCR); - PREDICTED(STORE_SUBSCR); + PREDICTED_STORE_SUBSCR:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef container; @@ -7932,7 +7978,7 @@ frame->instr_ptr = next_instr; next_instr += 4; INSTRUCTION_STATS(TO_BOOL); - PREDICTED(TO_BOOL); + PREDICTED_TO_BOOL:; _Py_CODEUNIT* const this_instr = next_instr - 4; (void)this_instr; _PyStackRef value; @@ -8168,7 +8214,7 @@ frame->instr_ptr = next_instr; next_instr += 2; INSTRUCTION_STATS(UNPACK_SEQUENCE); - PREDICTED(UNPACK_SEQUENCE); + PREDICTED_UNPACK_SEQUENCE:; _Py_CODEUNIT* const this_instr = next_instr - 2; (void)this_instr; _PyStackRef seq; @@ -8347,9 +8393,7 @@ // NOTE: It's important that YIELD_VALUE never raises an exception! // The compiler treats any exception raised here as a failed close() // or throw() call. 
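Several hunks above replace assert(frame != &entry_frame) with assert(frame->owner != FRAME_OWNED_BY_INTERPRETER), and the == form in INTERPRETER_EXIT, so the interpreter's entry frame is recognised by an ownership tag rather than by pointer identity. A toy model of that design choice follows; the enum values and Frame struct are illustrative, not the CPython frame layout.

#include <assert.h>
#include <stdio.h>

/* Toy frame ownership tags; only the idea matters, not the exact values. */
typedef enum {
    OWNED_BY_THREAD,       /* ordinary Python frame */
    OWNED_BY_GENERATOR,    /* frame embedded in a generator object */
    OWNED_BY_INTERPRETER,  /* the interpreter's entry frame */
} FrameOwner;

typedef struct Frame {
    FrameOwner owner;
    struct Frame *previous;
} Frame;

/* With a tag there is no need to compare against the address of a special
 * entry-frame object: any frame can answer the question by itself. */
static int is_entry_frame(const Frame *f)
{
    return f->owner == OWNED_BY_INTERPRETER;
}

int main(void)
{
    Frame entry = { OWNED_BY_INTERPRETER, NULL };
    Frame py    = { OWNED_BY_THREAD, &entry };
    assert(!is_entry_frame(&py));
    assert(is_entry_frame(py.previous));
    printf("entry frame recognised by tag, not by pointer identity\n");
    return 0;
}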
- #if TIER_ONE - assert(frame != &entry_frame); - #endif + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); frame->instr_ptr++; PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame); assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1); @@ -8384,4 +8428,153 @@ assert(WITHIN_STACK_BOUNDS()); DISPATCH(); } + + /* END INSTRUCTIONS */ +#if USE_COMPUTED_GOTOS + _unknown_opcode: +#else + EXTRA_CASES // From pycore_opcode_metadata.h, a 'case' for each unused opcode +#endif + /* Tell C compilers not to hold the opcode variable in the loop. + next_instr points the current instruction without TARGET(). */ + opcode = next_instr->op.code; + _PyErr_Format(tstate, PyExc_SystemError, + "%U:%d: unknown opcode %d", + _PyFrame_GetCode(frame)->co_filename, + PyUnstable_InterpreterFrame_GetLine(frame), + opcode); + goto error; + + } + + /* This should never be reached. Every opcode should end with DISPATCH() + or goto error. */ + Py_UNREACHABLE(); + /* BEGIN LABELS */ + + pop_4_error: + { + STACK_SHRINK(1); + goto pop_3_error; + } + + pop_3_error: + { + STACK_SHRINK(1); + goto pop_2_error; + } + + pop_2_error: + { + STACK_SHRINK(1); + goto pop_1_error; + } + + pop_1_error: + { + STACK_SHRINK(1); + goto error; + } + + error: + { + /* Double-check exception status. */ + #ifdef NDEBUG + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_SystemError, + "error return without exception set"); + } + #else + assert(_PyErr_Occurred(tstate)); + #endif + + /* Log traceback info. */ + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + if (!_PyFrame_IsIncomplete(frame)) { + PyFrameObject *f = _PyFrame_GetFrameObject(frame); + if (f != NULL) { + PyTraceBack_Here(f); + } + } + _PyEval_MonitorRaise(tstate, frame, next_instr-1); + goto exception_unwind; + } + + exception_unwind: + { + /* We can't use frame->instr_ptr here, as RERAISE may have set it */ + int offset = INSTR_OFFSET()-1; + int level, handler, lasti; + if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) { + // No handlers, so exit. + assert(_PyErr_Occurred(tstate)); + /* Pop remaining stack entries. */ + _PyStackRef *stackbase = _PyFrame_Stackbase(frame); + while (stack_pointer > stackbase) { + PyStackRef_XCLOSE(POP()); + } + assert(STACK_LEVEL() == 0); + _PyFrame_SetStackPointer(frame, stack_pointer); + monitor_unwind(tstate, frame, next_instr-1); + goto exit_unwind; + } + assert(STACK_LEVEL() >= level); + _PyStackRef *new_top = _PyFrame_Stackbase(frame) + level; + while (stack_pointer > new_top) { + PyStackRef_XCLOSE(POP()); + } + if (lasti) { + int frame_lasti = _PyInterpreterFrame_LASTI(frame); + PyObject *lasti = PyLong_FromLong(frame_lasti); + if (lasti == NULL) { + goto exception_unwind; + } + PUSH(PyStackRef_FromPyObjectSteal(lasti)); + } + /* Make the raw exception data + available to the handler, + so a program can emulate the + Python main loop. 
*/ + PyObject *exc = _PyErr_GetRaisedException(tstate); + PUSH(PyStackRef_FromPyObjectSteal(exc)); + next_instr = _PyFrame_GetBytecode(frame) + handler; + if (monitor_handled(tstate, frame, next_instr, exc) < 0) { + goto exception_unwind; + } + /* Resume normal execution */ + #ifdef LLTRACE + if (frame->lltrace >= 5) { + lltrace_resume_frame(frame); + } + #endif + DISPATCH(); + } + + exit_unwind: + { + assert(_PyErr_Occurred(tstate)); + _Py_LeaveRecursiveCallPy(tstate); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); + // GH-99729: We need to unlink the frame *before* clearing it: + _PyInterpreterFrame *dying = frame; + frame = tstate->current_frame = dying->previous; + _PyEval_FrameClearAndPop(tstate, dying); + frame->return_offset = 0; + if (frame->owner == FRAME_OWNED_BY_INTERPRETER) { + /* Restore previous frame and exit */ + tstate->current_frame = frame->previous; + tstate->c_recursion_remaining += PY_EVAL_C_STACK_UNITS; + return NULL; + } + goto resume_with_error; + } + + resume_with_error: + { + next_instr = frame->instr_ptr; + stack_pointer = _PyFrame_GetStackPointer(frame); + goto error; + } + +/* END LABELS */ #undef TIER_ONE diff --git a/Python/initconfig.c b/Python/initconfig.c index 7851b86db1f6d0..4db77ef47d2362 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -169,7 +169,7 @@ static const PyConfigSpec PYCONFIG_SPEC[] = { SPEC(use_frozen_modules, BOOL, READ_ONLY, NO_SYS), SPEC(use_hash_seed, BOOL, READ_ONLY, NO_SYS), #ifdef __APPLE__ - SPEC(use_system_logger, BOOL, PUBLIC, NO_SYS), + SPEC(use_system_logger, BOOL, READ_ONLY, NO_SYS), #endif SPEC(user_site_directory, BOOL, READ_ONLY, NO_SYS), // sys.flags.no_user_site SPEC(warn_default_encoding, BOOL, READ_ONLY, NO_SYS), diff --git a/Python/instrumentation.c b/Python/instrumentation.c index 17e5346be5ed3d..0e7b4810726434 100644 --- a/Python/instrumentation.c +++ b/Python/instrumentation.c @@ -53,7 +53,7 @@ if (bc == NULL) { \ continue; \ } \ - (func)((_Py_CODEUNIT *)bc, __VA_ARGS__); \ + (func)(code, (_Py_CODEUNIT *)bc, __VA_ARGS__); \ } \ } while (0) @@ -62,7 +62,7 @@ #define LOCK_CODE(code) #define UNLOCK_CODE() #define MODIFY_BYTECODE(code, func, ...) \ - (func)(_PyCode_CODE(code), __VA_ARGS__) + (func)(code, _PyCode_CODE(code), __VA_ARGS__) #endif @@ -285,48 +285,36 @@ get_events(_Py_GlobalMonitors *m, int tool_id) return result; } -/* Line delta. - * 8 bit value. - * if line_delta == -128: - * line = None # represented as -1 - * elif line_delta == -127 or line_delta == -126: - * line = PyCode_Addr2Line(code, offset * sizeof(_Py_CODEUNIT)); +/* Module code can have line 0, even though modules start at line 1, + * so -1 is a legal delta. */ +#define NO_LINE (-2) + +/* Returns the line delta. 
Defined as: + * if line is None: + * line_delta = NO_LINE * else: - * line = first_line + (offset >> OFFSET_SHIFT) + line_delta; + * line_delta = line - first_line */ - -#define NO_LINE -128 -#define COMPUTED_LINE_LINENO_CHANGE -127 -#define COMPUTED_LINE -126 - -#define OFFSET_SHIFT 4 - -static int8_t -compute_line_delta(PyCodeObject *code, int offset, int line) +static int +compute_line_delta(PyCodeObject *code, int line) { if (line < 0) { + assert(line == -1); return NO_LINE; } - int delta = line - code->co_firstlineno - (offset >> OFFSET_SHIFT); - if (delta <= INT8_MAX && delta > COMPUTED_LINE) { - return delta; - } - return COMPUTED_LINE; + int delta = line - code->co_firstlineno; + assert(delta > NO_LINE); + return delta; } static int -compute_line(PyCodeObject *code, int offset, int8_t line_delta) +compute_line(PyCodeObject *code, int line_delta) { - if (line_delta > COMPUTED_LINE) { - return code->co_firstlineno + (offset >> OFFSET_SHIFT) + line_delta; - } if (line_delta == NO_LINE) { - return -1; } - assert(line_delta == COMPUTED_LINE || line_delta == COMPUTED_LINE_LINENO_CHANGE); - /* Look it up */ - return PyCode_Addr2Line(code, offset * sizeof(_Py_CODEUNIT)); + assert(line_delta > NO_LINE); + return code->co_firstlineno + line_delta; } int @@ -338,6 +326,57 @@ _PyInstruction_GetLength(PyCodeObject *code, int offset) return 1 + _PyOpcode_Caches[inst.op.code]; } +static inline uint8_t +get_original_opcode(_PyCoLineInstrumentationData *line_data, int index) +{ + return line_data->data[index*line_data->bytes_per_entry]; +} + +static inline uint8_t * +get_original_opcode_ptr(_PyCoLineInstrumentationData *line_data, int index) +{ + return &line_data->data[index*line_data->bytes_per_entry]; +} + +static inline void +set_original_opcode(_PyCoLineInstrumentationData *line_data, int index, uint8_t opcode) +{ + line_data->data[index*line_data->bytes_per_entry] = opcode; +} + +static inline int +get_line_delta(_PyCoLineInstrumentationData *line_data, int index) +{ + uint8_t *ptr = &line_data->data[index*line_data->bytes_per_entry+1]; + assert(line_data->bytes_per_entry >= 2); + uint32_t value = *ptr; + for (int idx = 2; idx < line_data->bytes_per_entry; idx++) { + ptr++; + int shift = (idx-1)*8; + value |= ((uint32_t)(*ptr)) << shift; + } + assert(value < INT_MAX); + /* NO_LINE is stored as zero. 
*/ + return ((int)value) + NO_LINE; +} + +static inline void +set_line_delta(_PyCoLineInstrumentationData *line_data, int index, int line_delta) +{ + /* Store line_delta + 2 as we need -2 to represent no line number */ + assert(line_delta >= NO_LINE); + uint32_t adjusted = line_delta - NO_LINE; + uint8_t *ptr = &line_data->data[index*line_data->bytes_per_entry+1]; + assert(adjusted < (1ULL << ((line_data->bytes_per_entry-1)*8))); + assert(line_data->bytes_per_entry >= 2); + *ptr = adjusted & 0xff; + for (int idx = 2; idx < line_data->bytes_per_entry; idx++) { + ptr++; + adjusted >>= 8; + *ptr = adjusted & 0xff; + } +} + #ifdef INSTRUMENT_DEBUG static void @@ -357,11 +396,15 @@ dump_instrumentation_data_lines(PyCodeObject *code, _PyCoLineInstrumentationData if (lines == NULL) { fprintf(out, ", lines = NULL"); } - else if (lines[i].original_opcode == 0) { - fprintf(out, ", lines = {original_opcode = No LINE (0), line_delta = %d)", lines[i].line_delta); - } else { - fprintf(out, ", lines = {original_opcode = %s, line_delta = %d)", _PyOpcode_OpName[lines[i].original_opcode], lines[i].line_delta); + int opcode = get_original_opcode(lines, i); + int line_delta = get_line_delta(lines, i); + if (opcode == 0) { + fprintf(out, ", lines = {original_opcode = No LINE (0), line_delta = %d)", line_delta); + } + else { + fprintf(out, ", lines = {original_opcode = %s, line_delta = %d)", _PyOpcode_OpName[opcode], line_delta); + } } } @@ -411,6 +454,12 @@ dump_local_monitors(const char *prefix, _Py_LocalMonitors monitors, FILE*out) } } +/** NOTE: + * Do not use PyCode_Addr2Line to determine the line number in instrumentation, + * as `PyCode_Addr2Line` uses the monitoring data if it is available. + */ + + /* No error checking -- Don't use this for anything but experimental debugging */ static void dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) @@ -428,6 +477,8 @@ dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) dump_local_monitors("Active", data->active_monitors, out); int code_len = (int)Py_SIZE(code); bool starred = false; + PyCodeAddressRange range; + _PyCode_InitAddressRange(code, &range); for (int i = 0; i < code_len; i += _PyInstruction_GetLength(code, i)) { _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; int opcode = instr->op.code; @@ -435,7 +486,7 @@ dump_instrumentation_data(PyCodeObject *code, int star, FILE*out) fprintf(out, "** "); starred = true; } - fprintf(out, "Offset: %d, line: %d %s: ", i, PyCode_Addr2Line(code, i*2), _PyOpcode_OpName[opcode]); + fprintf(out, "Offset: %d, line: %d %s: ", i, _PyCode_CheckLineNumber(i*2, &range), _PyOpcode_OpName[opcode]); dump_instrumentation_data_tools(code, data->tools, i, out); dump_instrumentation_data_lines(code, data->lines, i, out); dump_instrumentation_data_line_tools(code, data->line_tools, i, out); @@ -500,10 +551,12 @@ sanity_check_instrumentation(PyCodeObject *code) code->_co_monitoring->active_monitors, active_monitors)); int code_len = (int)Py_SIZE(code); + PyCodeAddressRange range; + _PyCode_InitAddressRange(co, &range); for (int i = 0; i < code_len;) { _Py_CODEUNIT *instr = &_PyCode_CODE(code)[i]; int opcode = instr->op.code; - int base_opcode = _Py_GetBaseCodeUnit(code, offset).op.code; + int base_opcode = _Py_GetBaseCodeUnit(code, i).op.code; CHECK(valid_opcode(opcode)); CHECK(valid_opcode(base_opcode)); if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -514,8 +567,8 @@ sanity_check_instrumentation(PyCodeObject *code) } if (opcode == INSTRUMENTED_LINE) { CHECK(data->lines); - 
CHECK(valid_opcode(data->lines[i].original_opcode)); - opcode = data->lines[i].original_opcode; + opcode = get_original_opcode(data->lines, i); + CHECK(valid_opcode(opcode)); CHECK(opcode != END_FOR); CHECK(opcode != RESUME); CHECK(opcode != RESUME_CHECK); @@ -530,7 +583,7 @@ sanity_check_instrumentation(PyCodeObject *code) * *and* we are executing a INSTRUMENTED_LINE instruction * that has de-instrumented itself, then we will execute * an invalid INSTRUMENTED_INSTRUCTION */ - CHECK(data->lines[i].original_opcode != INSTRUMENTED_INSTRUCTION); + CHECK(get_original_opcode(data->lines, i) != INSTRUMENTED_INSTRUCTION); } if (opcode == INSTRUMENTED_INSTRUCTION) { CHECK(data->per_instruction_opcodes[i] != 0); @@ -545,9 +598,9 @@ sanity_check_instrumentation(PyCodeObject *code) } CHECK(active_monitors.tools[event] != 0); } - if (data->lines && base_opcode != END_FOR) { - int line1 = compute_line(code, i, data->lines[i].line_delta); - int line2 = PyCode_Addr2Line(code, i*sizeof(_Py_CODEUNIT)); + if (data->lines && get_original_opcode(data->lines, i)) { + int line1 = compute_line(code, get_line_delta(data->lines, i)); + int line2 = _PyCode_CheckLineNumber(i*sizeof(_Py_CODEUNIT), &range); CHECK(line1 == line2); } CHECK(valid_opcode(opcode)); @@ -597,7 +650,7 @@ _Py_GetBaseCodeUnit(PyCodeObject *code, int i) return inst; } if (opcode == INSTRUMENTED_LINE) { - opcode = code->_co_monitoring->lines[i].original_opcode; + opcode = get_original_opcode(code->_co_monitoring->lines, i); } if (opcode == INSTRUMENTED_INSTRUCTION) { opcode = code->_co_monitoring->per_instruction_opcodes[i]; @@ -616,7 +669,7 @@ _Py_GetBaseCodeUnit(PyCodeObject *code, int i) } static void -de_instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, +de_instrument(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, int event) { assert(event != PY_MONITORING_EVENT_INSTRUCTION); @@ -627,7 +680,7 @@ de_instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, int opcode = *opcode_ptr; assert(opcode != ENTER_EXECUTOR); if (opcode == INSTRUMENTED_LINE) { - opcode_ptr = &monitoring->lines[i].original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -647,7 +700,7 @@ de_instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i, } static void -de_instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, +de_instrument_line(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { _Py_CODEUNIT *instr = &bytecode[i]; @@ -655,10 +708,10 @@ de_instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, if (opcode != INSTRUMENTED_LINE) { return; } - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - int original_opcode = lines->original_opcode; + _PyCoLineInstrumentationData *lines = monitoring->lines; + int original_opcode = get_original_opcode(lines, i); if (original_opcode == INSTRUMENTED_INSTRUCTION) { - lines->original_opcode = monitoring->per_instruction_opcodes[i]; + set_original_opcode(lines, i, monitoring->per_instruction_opcodes[i]); } CHECK(original_opcode != 0); CHECK(original_opcode == _PyOpcode_Deopt[original_opcode]); @@ -671,14 +724,14 @@ de_instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, } static void -de_instrument_per_instruction(_Py_CODEUNIT *bytecode, +de_instrument_per_instruction(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) 
{ _Py_CODEUNIT *instr = &bytecode[i]; uint8_t *opcode_ptr = &instr->op.code; int opcode = *opcode_ptr; if (opcode == INSTRUMENTED_LINE) { - opcode_ptr = &monitoring->lines[i].original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode != INSTRUMENTED_INSTRUCTION) { @@ -697,14 +750,13 @@ de_instrument_per_instruction(_Py_CODEUNIT *bytecode, } static void -instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) +instrument(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { _Py_CODEUNIT *instr = &bytecode[i]; uint8_t *opcode_ptr = &instr->op.code; int opcode =*opcode_ptr; if (opcode == INSTRUMENTED_LINE) { - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - opcode_ptr = &lines->original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -727,29 +779,27 @@ instrument(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) } static void -instrument_line(_Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) +instrument_line(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { uint8_t *opcode_ptr = &bytecode[i].op.code; int opcode = *opcode_ptr; if (opcode == INSTRUMENTED_LINE) { return; } - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - lines->original_opcode = _PyOpcode_Deopt[opcode]; - CHECK(lines->original_opcode > 0); + set_original_opcode(monitoring->lines, i, _PyOpcode_Deopt[opcode]); + CHECK(get_line_delta(monitoring->lines, i) > NO_LINE); FT_ATOMIC_STORE_UINT8_RELAXED(*opcode_ptr, INSTRUMENTED_LINE); } static void -instrument_per_instruction(_Py_CODEUNIT *bytecode, +instrument_per_instruction(PyCodeObject *code, _Py_CODEUNIT *bytecode, _PyCoMonitoringData *monitoring, int i) { _Py_CODEUNIT *instr = &bytecode[i]; uint8_t *opcode_ptr = &instr->op.code; int opcode = *opcode_ptr; if (opcode == INSTRUMENTED_LINE) { - _PyCoLineInstrumentationData *lines = &monitoring->lines[i]; - opcode_ptr = &lines->original_opcode; + opcode_ptr = get_original_opcode_ptr(monitoring->lines, i); opcode = *opcode_ptr; } if (opcode == INSTRUMENTED_INSTRUCTION) { @@ -1226,18 +1276,16 @@ _Py_call_instrumentation_exc2( call_instrumentation_vector_protected(tstate, event, frame, instr, 4, args); } - int _Py_Instrumentation_GetLine(PyCodeObject *code, int index) { _PyCoMonitoringData *monitoring = code->_co_monitoring; assert(monitoring != NULL); assert(monitoring->lines != NULL); - assert(index >= code->_co_firsttraceable); assert(index < Py_SIZE(code)); - _PyCoLineInstrumentationData *line_data = &monitoring->lines[index]; - int8_t line_delta = line_data->line_delta; - int line = compute_line(code, index, line_delta); + _PyCoLineInstrumentationData *line_data = monitoring->lines; + int line_delta = get_line_delta(line_data, index); + int line = compute_line(code, line_delta); return line; } @@ -1251,29 +1299,20 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, int i = (int)(instr - bytecode); _PyCoMonitoringData *monitoring = code->_co_monitoring; - _PyCoLineInstrumentationData *line_data = &monitoring->lines[i]; + _PyCoLineInstrumentationData *line_data = monitoring->lines; PyInterpreterState *interp = tstate->interp; - int8_t line_delta = line_data->line_delta; - int line = 0; - - if (line_delta == COMPUTED_LINE_LINENO_CHANGE) { - // We know the line number must have changed, don't need to calculate - // the 
line number for now because we might not need it. - line = -1; - } else { - line = compute_line(code, i, line_delta); - assert(line >= 0); - assert(prev != NULL); - int prev_index = (int)(prev - bytecode); - int prev_line = _Py_Instrumentation_GetLine(code, prev_index); - if (prev_line == line) { - int prev_opcode = bytecode[prev_index].op.code; - /* RESUME and INSTRUMENTED_RESUME are needed for the operation of - * instrumentation, so must never be hidden by an INSTRUMENTED_LINE. - */ - if (prev_opcode != RESUME && prev_opcode != INSTRUMENTED_RESUME) { - goto done; - } + int line = _Py_Instrumentation_GetLine(code, i); + assert(line >= 0); + assert(prev != NULL); + int prev_index = (int)(prev - bytecode); + int prev_line = _Py_Instrumentation_GetLine(code, prev_index); + if (prev_line == line) { + int prev_opcode = bytecode[prev_index].op.code; + /* RESUME and INSTRUMENTED_RESUME are needed for the operation of + * instrumentation, so must never be hidden by an INSTRUMENTED_LINE. + */ + if (prev_opcode != RESUME && prev_opcode != INSTRUMENTED_RESUME) { + goto done; } } @@ -1298,12 +1337,6 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, tstate->tracing++; /* Call c_tracefunc directly, having set the line number. */ Py_INCREF(frame_obj); - if (line == -1 && line_delta > COMPUTED_LINE) { - /* Only assign f_lineno if it's easy to calculate, otherwise - * do lazy calculation by setting the f_lineno to 0. - */ - line = compute_line(code, i, line_delta); - } frame_obj->f_lineno = line; int err = tstate->c_tracefunc(tstate->c_traceobj, frame_obj, PyTrace_LINE, Py_None); frame_obj->f_lineno = 0; @@ -1320,11 +1353,6 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, if (tools == 0) { goto done; } - - if (line == -1) { - /* Need to calculate the line number now for monitoring events */ - line = compute_line(code, i, line_delta); - } PyObject *line_obj = PyLong_FromLong(line); if (line_obj == NULL) { return -1; @@ -1356,7 +1384,7 @@ _Py_call_instrumentation_line(PyThreadState *tstate, _PyInterpreterFrame* frame, Py_DECREF(line_obj); uint8_t original_opcode; done: - original_opcode = line_data->original_opcode; + original_opcode = get_original_opcode(line_data, i); assert(original_opcode != 0); assert(original_opcode != INSTRUMENTED_LINE); assert(_PyOpcode_Deopt[original_opcode] == original_opcode); @@ -1428,7 +1456,7 @@ initialize_tools(PyCodeObject *code) int opcode = instr->op.code; assert(opcode != ENTER_EXECUTOR); if (opcode == INSTRUMENTED_LINE) { - opcode = code->_co_monitoring->lines[i].original_opcode; + opcode = get_original_opcode(code->_co_monitoring->lines, i); } if (opcode == INSTRUMENTED_INSTRUCTION) { opcode = code->_co_monitoring->per_instruction_opcodes[i]; @@ -1471,64 +1499,58 @@ initialize_tools(PyCodeObject *code) } } -#define NO_LINE -128 - static void -initialize_lines(PyCodeObject *code) +initialize_lines(PyCodeObject *code, int bytes_per_entry) { ASSERT_WORLD_STOPPED_OR_LOCKED(code); _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; assert(line_data != NULL); + line_data->bytes_per_entry = bytes_per_entry; int code_len = (int)Py_SIZE(code); PyCodeAddressRange range; _PyCode_InitAddressRange(code, &range); - for (int i = 0; i < code->_co_firsttraceable && i < code_len; i++) { - line_data[i].original_opcode = 0; - line_data[i].line_delta = -127; - } int current_line = -1; - for (int i = code->_co_firsttraceable; i < code_len; ) { + for (int i = 0; i < code_len; ) { int opcode = 
_Py_GetBaseCodeUnit(code, i).op.code; int line = _PyCode_CheckLineNumber(i*(int)sizeof(_Py_CODEUNIT), &range); - line_data[i].line_delta = compute_line_delta(code, i, line); + set_line_delta(line_data, i, compute_line_delta(code, line)); int length = _PyInstruction_GetLength(code, i); - switch (opcode) { - case END_ASYNC_FOR: - case END_FOR: - case END_SEND: - case RESUME: + if (i < code->_co_firsttraceable) { + set_original_opcode(line_data, i, 0); + } + else { + switch (opcode) { + case END_ASYNC_FOR: + case END_FOR: + case END_SEND: + case RESUME: case POP_ITER: - /* END_FOR cannot start a line, as it is skipped by FOR_ITER - * END_SEND cannot start a line, as it is skipped by SEND - * RESUME and POP_ITER must not be instrumented with INSTRUMENT_LINE */ - line_data[i].original_opcode = 0; - break; - default: - /* Set original_opcode to the opcode iff the instruction - * starts a line, and thus should be instrumented. - * This saves having to perform this check every time the - * we turn instrumentation on or off, and serves as a sanity - * check when debugging. - */ - if (line != current_line && line >= 0) { - line_data[i].original_opcode = opcode; - if (line_data[i].line_delta == COMPUTED_LINE) { - /* Label this line as a line with a line number change - * which could help the monitoring callback to quickly - * identify the line number change. - */ - line_data[i].line_delta = COMPUTED_LINE_LINENO_CHANGE; + /* END_FOR cannot start a line, as it is skipped by FOR_ITER + * END_SEND cannot start a line, as it is skipped by SEND + * RESUME and POP_ITER must not be instrumented with INSTRUMENTED_LINE */ + set_original_opcode(line_data, i, 0); + break; + default: + /* Set original_opcode to the opcode iff the instruction + * starts a line, and thus should be instrumented. + * This saves having to perform this check every time the + * we turn instrumentation on or off, and serves as a sanity + * check when debugging. + */ + if (line != current_line && line >= 0) { + set_original_opcode(line_data, i, opcode); + CHECK(get_line_delta(line_data, i) != NO_LINE); } - } - else { - line_data[i].original_opcode = 0; - } - current_line = line; + else { + set_original_opcode(line_data, i, 0); + } + current_line = line; + } } for (int j = 1; j < length; j++) { - line_data[i+j].original_opcode = 0; - line_data[i+j].line_delta = NO_LINE; + set_original_opcode(line_data, i+j, 0); + set_line_delta(line_data, i+j, NO_LINE); } i += length; } @@ -1572,15 +1594,10 @@ initialize_lines(PyCodeObject *code) continue; } assert(target >= 0); - if (line_data[target].line_delta != NO_LINE) { + if (get_line_delta(line_data, target) != NO_LINE) { int opcode = _Py_GetBaseCodeUnit(code, target).op.code; if (opcode != POP_ITER) { - line_data[target].original_opcode = opcode; - if (line_data[target].line_delta == COMPUTED_LINE_LINENO_CHANGE) { - // If the line is a jump target, we are not sure if the line - // number changes, so we set it to COMPUTED_LINE. - line_data[target].line_delta = COMPUTED_LINE; - } + set_original_opcode(line_data, target, opcode); } } } @@ -1603,9 +1620,8 @@ initialize_lines(PyCodeObject *code) * END_ASYNC_FOR is a bit special as it marks the end of * an `async for` loop, which should not generate its own * line event. 
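initialize_lines and the get_line_delta/set_line_delta helpers above store, for every code unit, one original-opcode byte followed by a line delta encoded in bytes_per_entry - 1 little-endian bytes, biased by -NO_LINE so that "no line" is stored as zero. A self-contained sketch of that encoding follows; the table layout is a toy, not _PyCoLineInstrumentationData, but the sentinel matches the NO_LINE (-2) defined above.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define NO_LINE (-2)   /* sentinel for "this code unit has no line number" */

typedef struct {
    uint8_t bytes_per_entry;   /* 1 opcode byte + (bytes_per_entry - 1) delta bytes */
    uint8_t *data;             /* one entry per code unit */
} ToyLineTable;

static void toy_set_line_delta(ToyLineTable *t, int index, int delta)
{
    assert(delta >= NO_LINE);
    uint32_t adjusted = (uint32_t)(delta - NO_LINE);   /* NO_LINE encodes as 0 */
    uint8_t *p = &t->data[index * t->bytes_per_entry + 1];
    for (int i = 1; i < t->bytes_per_entry; i++) {
        *p++ = adjusted & 0xff;                        /* little-endian bytes */
        adjusted >>= 8;
    }
    assert(adjusted == 0);                             /* delta fit in the entry */
}

static int toy_get_line_delta(const ToyLineTable *t, int index)
{
    const uint8_t *p = &t->data[index * t->bytes_per_entry + 1];
    uint32_t value = 0;
    for (int i = 1; i < t->bytes_per_entry; i++) {
        value |= (uint32_t)p[i - 1] << ((i - 1) * 8);
    }
    return (int)value + NO_LINE;
}

int main(void)
{
    int code_len = 4;
    ToyLineTable t = { 3, calloc((size_t)code_len * 3, 1) };  /* 2 delta bytes/entry */
    if (t.data == NULL) {
        return 1;
    }
    toy_set_line_delta(&t, 0, NO_LINE);
    toy_set_line_delta(&t, 1, 0);
    toy_set_line_delta(&t, 2, 40000);          /* needs both delta bytes */
    printf("%d %d %d\n", toy_get_line_delta(&t, 0), toy_get_line_delta(&t, 1),
           toy_get_line_delta(&t, 2));
    free(t.data);
    return 0;
}

With this scheme a function whose largest line delta fits in one byte pays two bytes per instruction, and larger functions only grow the table by whole bytes per entry.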
*/ - if (line_data[handler].line_delta != NO_LINE && - original_opcode != END_ASYNC_FOR) { - line_data[handler].original_opcode = original_opcode; + if (get_line_delta(line_data, handler) != NO_LINE && original_opcode != END_ASYNC_FOR) { + set_original_opcode(line_data, handler, original_opcode); } } } @@ -1678,12 +1694,39 @@ update_instrumentation_data(PyCodeObject *code, PyInterpreterState *interp) } if (all_events.tools[PY_MONITORING_EVENT_LINE]) { if (code->_co_monitoring->lines == NULL) { - code->_co_monitoring->lines = PyMem_Malloc(code_len * sizeof(_PyCoLineInstrumentationData)); + PyCodeAddressRange range; + _PyCode_InitAddressRange(code, &range); + int max_line = code->co_firstlineno + 1; + _PyCode_InitAddressRange(code, &range); + for (int i = code->_co_firsttraceable; i < code_len; ) { + int line = _PyCode_CheckLineNumber(i*(int)sizeof(_Py_CODEUNIT), &range); + if (line > max_line) { + max_line = line; + } + int length = _PyInstruction_GetLength(code, i); + i += length; + } + int bytes_per_entry; + int max_delta = max_line - code->co_firstlineno; + /* We store delta+2 in the table, so 253 is max for one byte */ + if (max_delta < 256+NO_LINE) { + bytes_per_entry = 2; + } + else if (max_delta < (1 << 16)+NO_LINE) { + bytes_per_entry = 3; + } + else if (max_delta < (1 << 24)+NO_LINE) { + bytes_per_entry = 4; + } + else { + bytes_per_entry = 5; + } + code->_co_monitoring->lines = PyMem_Malloc(1 + code_len * bytes_per_entry); if (code->_co_monitoring->lines == NULL) { PyErr_NoMemory(); return -1; } - initialize_lines(code); + initialize_lines(code, bytes_per_entry); } if (multitools && code->_co_monitoring->line_tools == NULL) { code->_co_monitoring->line_tools = PyMem_Malloc(code_len); @@ -1798,7 +1841,7 @@ force_instrument_lock_held(PyCodeObject *code, PyInterpreterState *interp) if (removed_line_tools) { _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; for (int i = code->_co_firsttraceable; i < code_len;) { - if (line_data[i].original_opcode) { + if (get_original_opcode(line_data, i)) { remove_line_tools(code, i, removed_line_tools); } i += _PyInstruction_GetLength(code, i); @@ -1825,7 +1868,7 @@ force_instrument_lock_held(PyCodeObject *code, PyInterpreterState *interp) if (new_line_tools) { _PyCoLineInstrumentationData *line_data = code->_co_monitoring->lines; for (int i = code->_co_firsttraceable; i < code_len;) { - if (line_data[i].original_opcode) { + if (get_original_opcode(line_data, i)) { add_line_tools(code, i, new_line_tools); } i += _PyInstruction_GetLength(code, i); @@ -1898,7 +1941,7 @@ instrument_all_executing_code_objects(PyInterpreterState *interp) { while (ts) { _PyInterpreterFrame *frame = ts->current_frame; while (frame) { - if (frame->owner != FRAME_OWNED_BY_CSTACK) { + if (frame->owner < FRAME_OWNED_BY_INTERPRETER) { if (instrument_lock_held(_PyFrame_GetCode(frame), interp)) { return -1; } diff --git a/Python/legacy_tracing.c b/Python/legacy_tracing.c index 45af275f1f6dce..97634f9183c7d5 100644 --- a/Python/legacy_tracing.c +++ b/Python/legacy_tracing.c @@ -491,8 +491,8 @@ int _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) { assert(is_tstate_valid(tstate)); - /* The caller must hold the GIL */ - assert(PyGILState_Check()); + /* The caller must hold a thread state */ + _Py_AssertHoldsTstate(); /* Call _PySys_Audit() in the context of the current thread state, even if tstate is not the current thread state. 
*/ @@ -586,8 +586,8 @@ int _PyEval_SetTrace(PyThreadState *tstate, Py_tracefunc func, PyObject *arg) { assert(is_tstate_valid(tstate)); - /* The caller must hold the GIL */ - assert(PyGILState_Check()); + /* The caller must hold a thread state */ + _Py_AssertHoldsTstate(); /* Call _PySys_Audit() in the context of the current thread state, even if tstate is not the current thread state. */ diff --git a/Python/marshal.c b/Python/marshal.c index 72afa4ff89432c..cf7011652513ae 100644 --- a/Python/marshal.c +++ b/Python/marshal.c @@ -240,10 +240,6 @@ w_short_pstring(const void *s, Py_ssize_t n, WFILE *p) #define PyLong_MARSHAL_SHIFT 15 #define PyLong_MARSHAL_BASE ((short)1 << PyLong_MARSHAL_SHIFT) #define PyLong_MARSHAL_MASK (PyLong_MARSHAL_BASE - 1) -#if PyLong_SHIFT % PyLong_MARSHAL_SHIFT != 0 -#error "PyLong_SHIFT must be a multiple of PyLong_MARSHAL_SHIFT" -#endif -#define PyLong_MARSHAL_RATIO (PyLong_SHIFT / PyLong_MARSHAL_SHIFT) #define W_TYPE(t, p) do { \ w_byte((t) | flag, (p)); \ @@ -252,47 +248,106 @@ w_short_pstring(const void *s, Py_ssize_t n, WFILE *p) static PyObject * _PyMarshal_WriteObjectToString(PyObject *x, int version, int allow_code); +#define _r_digits(bitsize) \ +static void \ +_r_digits##bitsize(const uint ## bitsize ## _t *digits, Py_ssize_t n, \ + uint8_t negative, Py_ssize_t marshal_ratio, WFILE *p) \ +{ \ + /* set l to number of base PyLong_MARSHAL_BASE digits */ \ + Py_ssize_t l = (n - 1)*marshal_ratio; \ + uint ## bitsize ## _t d = digits[n - 1]; \ + \ + assert(marshal_ratio > 0); \ + assert(n >= 1); \ + assert(d != 0); /* a PyLong is always normalized */ \ + do { \ + d >>= PyLong_MARSHAL_SHIFT; \ + l++; \ + } while (d != 0); \ + if (l > SIZE32_MAX) { \ + p->depth--; \ + p->error = WFERR_UNMARSHALLABLE; \ + return; \ + } \ + w_long((long)(negative ? -l : l), p); \ + \ + for (Py_ssize_t i = 0; i < n - 1; i++) { \ + d = digits[i]; \ + for (Py_ssize_t j = 0; j < marshal_ratio; j++) { \ + w_short(d & PyLong_MARSHAL_MASK, p); \ + d >>= PyLong_MARSHAL_SHIFT; \ + } \ + assert(d == 0); \ + } \ + d = digits[n - 1]; \ + do { \ + w_short(d & PyLong_MARSHAL_MASK, p); \ + d >>= PyLong_MARSHAL_SHIFT; \ + } while (d != 0); \ +} +_r_digits(16) +_r_digits(32) +#undef _r_digits + static void w_PyLong(const PyLongObject *ob, char flag, WFILE *p) { - Py_ssize_t i, j, n, l; - digit d; - W_TYPE(TYPE_LONG, p); if (_PyLong_IsZero(ob)) { w_long((long)0, p); return; } - /* set l to number of base PyLong_MARSHAL_BASE digits */ - n = _PyLong_DigitCount(ob); - l = (n-1) * PyLong_MARSHAL_RATIO; - d = ob->long_value.ob_digit[n-1]; - assert(d != 0); /* a PyLong is always normalized */ - do { - d >>= PyLong_MARSHAL_SHIFT; - l++; - } while (d != 0); - if (l > SIZE32_MAX) { + PyLongExport long_export; + + if (PyLong_Export((PyObject *)ob, &long_export) < 0) { p->depth--; p->error = WFERR_UNMARSHALLABLE; return; } - w_long((long)(_PyLong_IsNegative(ob) ? -l : l), p); + if (!long_export.digits) { + int8_t sign = long_export.value < 0 ? 
-1 : 1; + uint64_t abs_value = Py_ABS(long_export.value); + uint64_t d = abs_value; + long l = 0; - for (i=0; i < n-1; i++) { - d = ob->long_value.ob_digit[i]; - for (j=0; j < PyLong_MARSHAL_RATIO; j++) { + /* set l to number of base PyLong_MARSHAL_BASE digits */ + do { + d >>= PyLong_MARSHAL_SHIFT; + l += sign; + } while (d); + w_long(l, p); + + d = abs_value; + do { w_short(d & PyLong_MARSHAL_MASK, p); d >>= PyLong_MARSHAL_SHIFT; - } - assert (d == 0); + } while (d); + return; } - d = ob->long_value.ob_digit[n-1]; - do { - w_short(d & PyLong_MARSHAL_MASK, p); - d >>= PyLong_MARSHAL_SHIFT; - } while (d != 0); + + const PyLongLayout *layout = PyLong_GetNativeLayout(); + Py_ssize_t marshal_ratio = layout->bits_per_digit/PyLong_MARSHAL_SHIFT; + + /* must be a multiple of PyLong_MARSHAL_SHIFT */ + assert(layout->bits_per_digit % PyLong_MARSHAL_SHIFT == 0); + assert(layout->bits_per_digit >= PyLong_MARSHAL_SHIFT); + + /* other assumptions on PyLongObject internals */ + assert(layout->bits_per_digit <= 32); + assert(layout->digits_order == -1); + assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); + assert(layout->digit_size == 2 || layout->digit_size == 4); + + if (layout->digit_size == 4) { + _r_digits32(long_export.digits, long_export.ndigits, + long_export.negative, marshal_ratio, p); + } + else { + _r_digits16(long_export.digits, long_export.ndigits, + long_export.negative, marshal_ratio, p); + } + PyLong_FreeExport(&long_export); } static void @@ -875,17 +930,62 @@ r_long64(RFILE *p) 1 /* signed */); } +#define _w_digits(bitsize) \ +static int \ +_w_digits##bitsize(uint ## bitsize ## _t *digits, Py_ssize_t size, \ + Py_ssize_t marshal_ratio, \ + int shorts_in_top_digit, RFILE *p) \ +{ \ + uint ## bitsize ## _t d; \ + \ + assert(size >= 1); \ + for (Py_ssize_t i = 0; i < size - 1; i++) { \ + d = 0; \ + for (Py_ssize_t j = 0; j < marshal_ratio; j++) { \ + int md = r_short(p); \ + if (md < 0 || md > PyLong_MARSHAL_BASE) { \ + goto bad_digit; \ + } \ + d += (uint ## bitsize ## _t)md << j*PyLong_MARSHAL_SHIFT; \ + } \ + digits[i] = d; \ + } \ + \ + d = 0; \ + for (Py_ssize_t j = 0; j < shorts_in_top_digit; j++) { \ + int md = r_short(p); \ + if (md < 0 || md > PyLong_MARSHAL_BASE) { \ + goto bad_digit; \ + } \ + /* topmost marshal digit should be nonzero */ \ + if (md == 0 && j == shorts_in_top_digit - 1) { \ + PyErr_SetString(PyExc_ValueError, \ + "bad marshal data (unnormalized long data)"); \ + return -1; \ + } \ + d += (uint ## bitsize ## _t)md << j*PyLong_MARSHAL_SHIFT; \ + } \ + assert(!PyErr_Occurred()); \ + /* top digit should be nonzero, else the resulting PyLong won't be \ + normalized */ \ + digits[size - 1] = d; \ + return 0; \ + \ +bad_digit: \ + if (!PyErr_Occurred()) { \ + PyErr_SetString(PyExc_ValueError, \ + "bad marshal data (digit out of range in long)"); \ + } \ + return -1; \ +} +_w_digits(32) +_w_digits(16) +#undef _w_digits + static PyObject * r_PyLong(RFILE *p) { - PyLongObject *ob; - long n, size, i; - int j, md, shorts_in_top_digit; - digit d; - - n = r_long(p); - if (n == 0) - return (PyObject *)_PyLong_New(0); + long n = r_long(p); if (n == -1 && PyErr_Occurred()) { return NULL; } @@ -895,51 +995,44 @@ r_PyLong(RFILE *p) return NULL; } - size = 1 + (Py_ABS(n) - 1) / PyLong_MARSHAL_RATIO; - shorts_in_top_digit = 1 + (Py_ABS(n) - 1) % PyLong_MARSHAL_RATIO; - ob = _PyLong_New(size); - if (ob == NULL) - return NULL; + const PyLongLayout *layout = PyLong_GetNativeLayout(); + Py_ssize_t marshal_ratio = layout->bits_per_digit/PyLong_MARSHAL_SHIFT; - 
_PyLong_SetSignAndDigitCount(ob, n < 0 ? -1 : 1, size); + /* must be a multiple of PyLong_MARSHAL_SHIFT */ + assert(layout->bits_per_digit % PyLong_MARSHAL_SHIFT == 0); + assert(layout->bits_per_digit >= PyLong_MARSHAL_SHIFT); - for (i = 0; i < size-1; i++) { - d = 0; - for (j=0; j < PyLong_MARSHAL_RATIO; j++) { - md = r_short(p); - if (md < 0 || md > PyLong_MARSHAL_BASE) - goto bad_digit; - d += (digit)md << j*PyLong_MARSHAL_SHIFT; - } - ob->long_value.ob_digit[i] = d; + /* other assumptions on PyLongObject internals */ + assert(layout->bits_per_digit <= 32); + assert(layout->digits_order == -1); + assert(layout->digit_endianness == (PY_LITTLE_ENDIAN ? -1 : 1)); + assert(layout->digit_size == 2 || layout->digit_size == 4); + + Py_ssize_t size = 1 + (Py_ABS(n) - 1) / marshal_ratio; + + assert(size >= 1); + + int shorts_in_top_digit = 1 + (Py_ABS(n) - 1) % marshal_ratio; + void *digits; + PyLongWriter *writer = PyLongWriter_Create(n < 0, size, &digits); + + if (writer == NULL) { + return NULL; } - d = 0; - for (j=0; j < shorts_in_top_digit; j++) { - md = r_short(p); - if (md < 0 || md > PyLong_MARSHAL_BASE) - goto bad_digit; - /* topmost marshal digit should be nonzero */ - if (md == 0 && j == shorts_in_top_digit - 1) { - Py_DECREF(ob); - PyErr_SetString(PyExc_ValueError, - "bad marshal data (unnormalized long data)"); - return NULL; - } - d += (digit)md << j*PyLong_MARSHAL_SHIFT; + int ret; + + if (layout->digit_size == 4) { + ret = _w_digits32(digits, size, marshal_ratio, shorts_in_top_digit, p); } - assert(!PyErr_Occurred()); - /* top digit should be nonzero, else the resulting PyLong won't be - normalized */ - ob->long_value.ob_digit[size-1] = d; - return (PyObject *)ob; - bad_digit: - Py_DECREF(ob); - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ValueError, - "bad marshal data (digit out of range in long)"); + else { + ret = _w_digits16(digits, size, marshal_ratio, shorts_in_top_digit, p); + } + if (ret < 0) { + PyLongWriter_Discard(writer); + return NULL; } - return NULL; + return PyLongWriter_Finish(writer); } static double diff --git a/Python/opcode_targets.h b/Python/opcode_targets.h index c5c008fcbe574e..cb6c33f01d3598 100644 --- a/Python/opcode_targets.h +++ b/Python/opcode_targets.h @@ -152,6 +152,7 @@ static void *opcode_targets[256] = { &&TARGET_BINARY_OP_ADD_FLOAT, &&TARGET_BINARY_OP_ADD_INT, &&TARGET_BINARY_OP_ADD_UNICODE, + &&TARGET_BINARY_OP_EXTEND, &&TARGET_BINARY_OP_MULTIPLY_FLOAT, &&TARGET_BINARY_OP_MULTIPLY_INT, &&TARGET_BINARY_OP_SUBTRACT_FLOAT, @@ -233,7 +234,6 @@ static void *opcode_targets[256] = { &&_unknown_opcode, &&_unknown_opcode, &&_unknown_opcode, - &&_unknown_opcode, &&TARGET_INSTRUMENTED_END_FOR, &&TARGET_INSTRUMENTED_POP_ITER, &&TARGET_INSTRUMENTED_END_SEND, diff --git a/Python/optimizer.c b/Python/optimizer.c index 52b3f0a84afedf..9beb47246eb3d6 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -132,8 +132,6 @@ _Py_GetOptimizer(void) static _PyExecutorObject * make_executor_from_uops(_PyUOpInstruction *buffer, int length, const _PyBloomFilter *dependencies); -static const _PyBloomFilter EMPTY_FILTER = { 0 }; - _PyOptimizerObject * _Py_SetOptimizer(PyInterpreterState *interp, _PyOptimizerObject *optimizer) { @@ -251,13 +249,6 @@ get_oparg(PyObject *self, PyObject *Py_UNUSED(ignored)) return PyLong_FromUnsignedLong(((_PyExecutorObject *)self)->vm_data.oparg); } -static PyMethodDef executor_methods[] = { - { "is_valid", is_valid, METH_NOARGS, NULL }, - { "get_opcode", get_opcode, METH_NOARGS, NULL }, - { "get_oparg", get_oparg, METH_NOARGS, 
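/*
 * [Editor's note -- illustrative sketch, not part of the patch.]
 * On the reading side, the r_PyLong() rewrite a few hunks above allocates
 * its result through PyLongWriter_Create()/PyLongWriter_Finish() rather
 * than _PyLong_New() plus direct ob_digit stores. A minimal, hypothetical
 * use of that pattern (digit values chosen arbitrarily):
 */
#include "Python.h"
#include <stdint.h>

static PyObject *
make_two_digit_int(int negative)
{
    void *digits;
    PyLongWriter *writer = PyLongWriter_Create(negative, 2, &digits);
    if (writer == NULL) {
        return NULL;                        /* exception already set */
    }
    const PyLongLayout *layout = PyLong_GetNativeLayout();
    if (layout->digit_size == 4) {          /* 30-bit digits */
        ((uint32_t *)digits)[0] = 1;
        ((uint32_t *)digits)[1] = 2;
    }
    else {                                  /* 15-bit digits, 2 bytes each */
        ((uint16_t *)digits)[0] = 1;
        ((uint16_t *)digits)[1] = 2;
    }
    /* On a decoding error between Create and Finish, r_PyLong() calls
     * PyLongWriter_Discard(writer) instead; Finish consumes the writer. */
    return PyLongWriter_Finish(writer);
}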
NULL }, - { NULL, NULL }, -}; - ///////////////////// Experimental UOp Optimizer ///////////////////// static int executor_clear(_PyExecutorObject *executor); @@ -1332,96 +1323,6 @@ _PyOptimizer_NewUOpOptimizer(void) return (PyObject *)opt; } -static void -counter_dealloc(_PyExecutorObject *self) { - /* The optimizer is the operand of the second uop. */ - PyObject *opt = (PyObject *)self->trace[1].operand0; - Py_DECREF(opt); - uop_dealloc(self); -} - -PyTypeObject _PyCounterExecutor_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "counting_executor", - .tp_basicsize = offsetof(_PyExecutorObject, exits), - .tp_itemsize = 1, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_HAVE_GC, - .tp_dealloc = (destructor)counter_dealloc, - .tp_methods = executor_methods, - .tp_traverse = executor_traverse, - .tp_clear = (inquiry)executor_clear, -}; - -static int -counter_optimize( - _PyOptimizerObject* self, - _PyInterpreterFrame *frame, - _Py_CODEUNIT *instr, - _PyExecutorObject **exec_ptr, - int Py_UNUSED(curr_stackentries), - bool Py_UNUSED(progress_needed) -) -{ - PyCodeObject *code = _PyFrame_GetCode(frame); - int oparg = instr->op.arg; - while (instr->op.code == EXTENDED_ARG) { - instr++; - oparg = (oparg << 8) | instr->op.arg; - } - if (instr->op.code != JUMP_BACKWARD) { - /* Counter optimizer can only handle backward edges */ - return 0; - } - _Py_CODEUNIT *target = instr + 1 + _PyOpcode_Caches[JUMP_BACKWARD] - oparg; - _PyUOpInstruction buffer[4] = { - { .opcode = _START_EXECUTOR, .jump_target = 3, .format=UOP_FORMAT_JUMP }, - { .opcode = _LOAD_CONST_INLINE, .operand0 = (uintptr_t)self }, - { .opcode = _INTERNAL_INCREMENT_OPT_COUNTER }, - { .opcode = _EXIT_TRACE, .target = (uint32_t)(target - _PyCode_CODE(code)), .format=UOP_FORMAT_TARGET } - }; - _PyExecutorObject *executor = make_executor_from_uops(buffer, 4, &EMPTY_FILTER); - if (executor == NULL) { - return -1; - } - Py_INCREF(self); - Py_SET_TYPE(executor, &_PyCounterExecutor_Type); - *exec_ptr = executor; - return 1; -} - -static PyObject * -counter_get_counter(PyObject *self, PyObject *args) -{ - return PyLong_FromLongLong(((_PyCounterOptimizerObject *)self)->count); -} - -static PyMethodDef counter_optimizer_methods[] = { - { "get_count", counter_get_counter, METH_NOARGS, NULL }, - { NULL, NULL }, -}; - -PyTypeObject _PyCounterOptimizer_Type = { - PyVarObject_HEAD_INIT(&PyType_Type, 0) - .tp_name = "Counter optimizer", - .tp_basicsize = sizeof(_PyCounterOptimizerObject), - .tp_itemsize = 0, - .tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_DISALLOW_INSTANTIATION, - .tp_methods = counter_optimizer_methods, - .tp_dealloc = (destructor)PyObject_Free, -}; - -PyObject * -_PyOptimizer_NewCounter(void) -{ - _PyCounterOptimizerObject *opt = (_PyCounterOptimizerObject *)_PyObject_New(&_PyCounterOptimizer_Type); - if (opt == NULL) { - return NULL; - } - opt->base.optimize = counter_optimize; - opt->count = 0; - return (PyObject *)opt; -} - /***************************************** * Executor management diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index 0ef15c630e91db..b9ac30ea04e4e8 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -368,13 +368,17 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer, #define sym_truthiness _Py_uop_sym_truthiness #define frame_new _Py_uop_frame_new #define frame_pop _Py_uop_frame_pop +#define sym_new_tuple _Py_uop_sym_new_tuple +#define sym_tuple_getitem _Py_uop_sym_tuple_getitem +#define 
sym_tuple_length _Py_uop_sym_tuple_length +#define sym_is_immortal _Py_uop_sym_is_immortal static int optimize_to_bool( _PyUOpInstruction *this_instr, - _Py_UOpsContext *ctx, - _Py_UopsSymbol *value, - _Py_UopsSymbol **result_ptr) + JitOptContext *ctx, + JitOptSymbol *value, + JitOptSymbol **result_ptr) { if (sym_matches_type(value, &PyBool_Type)) { REPLACE_OP(this_instr, _NOP, 0, 0); @@ -460,8 +464,8 @@ optimize_uops( ) { - _Py_UOpsContext context; - _Py_UOpsContext *ctx = &context; + JitOptContext context; + JitOptContext *ctx = &context; uint32_t opcode = UINT16_MAX; int curr_space = 0; int max_space = 0; @@ -486,7 +490,7 @@ optimize_uops( int oparg = this_instr->oparg; opcode = this_instr->opcode; - _Py_UopsSymbol **stack_pointer = ctx->frame->stack_pointer; + JitOptSymbol **stack_pointer = ctx->frame->stack_pointer; #ifdef Py_DEBUG if (get_lltrace() >= 3) { diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index 4d96ada5acf00f..881a607ca2aa29 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -6,8 +6,6 @@ #define op(name, ...) /* NAME is ignored */ -typedef struct _Py_UopsSymbol _Py_UopsSymbol; -typedef struct _Py_UOpsContext _Py_UOpsContext; typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; /* Shortened forms for convenience */ @@ -32,13 +30,17 @@ typedef struct _Py_UOpsAbstractFrame _Py_UOpsAbstractFrame; #define sym_is_bottom _Py_uop_sym_is_bottom #define frame_new _Py_uop_frame_new #define frame_pop _Py_uop_frame_pop +#define sym_new_tuple _Py_uop_sym_new_tuple +#define sym_tuple_getitem _Py_uop_sym_tuple_getitem +#define sym_tuple_length _Py_uop_sym_tuple_length +#define sym_is_immortal _Py_uop_sym_is_immortal extern int optimize_to_bool( _PyUOpInstruction *this_instr, - _Py_UOpsContext *ctx, - _Py_UopsSymbol *value, - _Py_UopsSymbol **result_ptr); + JitOptContext *ctx, + JitOptSymbol *value, + JitOptSymbol **result_ptr); extern void eliminate_pop_guard(_PyUOpInstruction *this_instr, bool exit); @@ -50,17 +52,17 @@ dummy_func(void) { PyCodeObject *co; int oparg; - _Py_UopsSymbol *flag; - _Py_UopsSymbol *left; - _Py_UopsSymbol *right; - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; - _Py_UopsSymbol *iter; - _Py_UopsSymbol *top; - _Py_UopsSymbol *bottom; + JitOptSymbol *flag; + JitOptSymbol *left; + JitOptSymbol *right; + JitOptSymbol *value; + JitOptSymbol *res; + JitOptSymbol *iter; + JitOptSymbol *top; + JitOptSymbol *bottom; _Py_UOpsAbstractFrame *frame; _Py_UOpsAbstractFrame *new_frame; - _Py_UOpsContext *ctx; + JitOptContext *ctx; _PyUOpInstruction *this_instr; _PyBloomFilter *dependencies; int modified; @@ -85,7 +87,7 @@ dummy_func(void) { op(_LOAD_FAST_AND_CLEAR, (-- value)) { value = GETLOCAL(oparg); - _Py_UopsSymbol *temp = sym_new_null(ctx); + JitOptSymbol *temp = sym_new_null(ctx); GETLOCAL(oparg) = temp; } @@ -365,7 +367,7 @@ dummy_func(void) { } op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right -- )) { - _Py_UopsSymbol *res; + JitOptSymbol *res; if (sym_is_const(left) && sym_is_const(right) && sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) { PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right)); @@ -949,6 +951,22 @@ dummy_func(void) { res = sym_new_const(ctx, Py_True); } + op(_BUILD_TUPLE, (values[oparg] -- tup)) { + tup = sym_new_tuple(ctx, oparg, values); + } + + op(_UNPACK_SEQUENCE_TWO_TUPLE, (seq -- val1, val0)) { + val0 = sym_tuple_getitem(ctx, seq, 0); + val1 = sym_tuple_getitem(ctx, seq, 1); + } + + op(_UNPACK_SEQUENCE_TUPLE, (seq -- 
values[oparg])) { + for (int i = 0; i < oparg; i++) { + values[i] = sym_tuple_getitem(ctx, seq, i); + } + } + + // END BYTECODES // } diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index aff4493fdc4dd7..fa0b4ed4345320 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -26,7 +26,7 @@ /* _MONITOR_RESUME is not a viable micro-op for tier 2 */ case _LOAD_FAST_CHECK: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = GETLOCAL(oparg); // We guarantee this will error - just bail and don't optimize it. if (sym_is_null(value)) { @@ -39,7 +39,7 @@ } case _LOAD_FAST: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = GETLOCAL(oparg); stack_pointer[0] = value; stack_pointer += 1; @@ -48,9 +48,9 @@ } case _LOAD_FAST_AND_CLEAR: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = GETLOCAL(oparg); - _Py_UopsSymbol *temp = sym_new_null(ctx); + JitOptSymbol *temp = sym_new_null(ctx); GETLOCAL(oparg) = temp; stack_pointer[0] = value; stack_pointer += 1; @@ -61,7 +61,7 @@ /* _LOAD_CONST is not a viable micro-op for tier 2 */ case _LOAD_CONST_MORTAL: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE; REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val); @@ -73,7 +73,7 @@ } case _LOAD_CONST_IMMORTAL: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg); REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val); value = sym_new_const(ctx, val); @@ -84,7 +84,7 @@ } case _LOAD_SMALL_INT: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *val = PyLong_FromLong(this_instr->oparg); value = sym_new_const(ctx, val); stack_pointer[0] = value; @@ -94,7 +94,7 @@ } case _STORE_FAST: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = stack_pointer[-1]; GETLOCAL(oparg) = value; stack_pointer += -1; @@ -109,7 +109,7 @@ } case _PUSH_NULL: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -124,7 +124,7 @@ } case _END_SEND: { - _Py_UopsSymbol *val; + JitOptSymbol *val; val = sym_new_not_null(ctx); stack_pointer[-2] = val; stack_pointer += -1; @@ -133,22 +133,22 @@ } case _UNARY_NEGATIVE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _UNARY_NOT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _TO_BOOL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { res = sym_new_type(ctx, &PyBool_Type); @@ -158,8 +158,8 @@ } case _TO_BOOL_BOOL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_type(value, &PyBool_Type); @@ -170,8 +170,8 @@ } case _TO_BOOL_INT: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_type(value, &PyLong_Type); @@ -182,8 +182,8 @@ } case _TO_BOOL_LIST: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { 
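/*
 * [Editor's note -- simplified standalone model, not part of the patch and
 * not the real optimizer API.]
 * The new op(_BUILD_TUPLE, ...) and op(_UNPACK_SEQUENCE_*, ...) definitions
 * above let the tier-2 abstract interpreter hand back the very symbols that
 * went into a tuple when that tuple is unpacked, so facts such as "this is
 * a PyLong" survive the build/unpack round trip instead of collapsing to
 * "some non-NULL value". Toy version of that bookkeeping (made-up names):
 */
#include <assert.h>

#define TOY_MAX_TUPLE 7

typedef struct ToySym {
    int known_type;                       /* 0 = unknown */
    int length;                           /* element count for tuple symbols */
    struct ToySym *items[TOY_MAX_TUPLE];  /* the input symbols, remembered */
} ToySym;

static void
toy_build_tuple(ToySym *out, ToySym **values, int n)
{
    assert(n <= TOY_MAX_TUPLE);
    out->known_type = 't';                /* "tuple" */
    out->length = n;
    for (int i = 0; i < n; i++) {
        out->items[i] = values[i];
    }
}

static ToySym *
toy_unpack(ToySym *tup, int index)
{
    assert(index < tup->length);
    return tup->items[index];             /* same symbol object comes back */
}
/* If values[0] was known to be an int before the build, toy_unpack(tup, 0)
 * still carries that knowledge afterwards -- which is what the generated
 * _UNPACK_SEQUENCE_TWO_TUPLE / _UNPACK_SEQUENCE_TUPLE cases further down
 * rely on. */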
sym_set_type(value, &PyList_Type); @@ -194,8 +194,8 @@ } case _TO_BOOL_NONE: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { sym_set_const(value, Py_None); @@ -206,8 +206,8 @@ } case _TO_BOOL_STR: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *res; + JitOptSymbol *value; + JitOptSymbol *res; value = stack_pointer[-1]; if (!optimize_to_bool(this_instr, ctx, value, &res)) { res = sym_new_type(ctx, &PyBool_Type); @@ -218,22 +218,22 @@ } case _REPLACE_WITH_TRUE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_const(ctx, Py_True); stack_pointer[-1] = res; break; } case _UNARY_INVERT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _GUARD_BOTH_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_matches_type(left, &PyLong_Type)) { @@ -263,9 +263,9 @@ } case _BINARY_OP_MULTIPLY_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -296,9 +296,9 @@ } case _BINARY_OP_ADD_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -329,9 +329,9 @@ } case _BINARY_OP_SUBTRACT_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -362,8 +362,8 @@ } case _GUARD_BOTH_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_matches_type(left, &PyFloat_Type)) { @@ -393,9 +393,9 @@ } case _BINARY_OP_MULTIPLY_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -427,9 +427,9 @@ } case _BINARY_OP_ADD_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -461,9 +461,9 @@ } case _BINARY_OP_SUBTRACT_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_is_const(left) && sym_is_const(right) && @@ -495,8 +495,8 @@ } case _GUARD_BOTH_UNICODE: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; if (sym_matches_type(left, &PyUnicode_Type) && @@ -509,9 +509,9 @@ } case _BINARY_OP_ADD_UNICODE: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; if 
(sym_is_const(left) && sym_is_const(right) && @@ -536,11 +536,11 @@ } case _BINARY_OP_INPLACE_ADD_UNICODE: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; + JitOptSymbol *right; + JitOptSymbol *left; right = stack_pointer[-1]; left = stack_pointer[-2]; - _Py_UopsSymbol *res; + JitOptSymbol *res; if (sym_is_const(left) && sym_is_const(right) && sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) { PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right)); @@ -562,8 +562,21 @@ break; } + case _GUARD_BINARY_OP_EXTEND: { + break; + } + + case _BINARY_OP_EXTEND: { + JitOptSymbol *res; + res = sym_new_not_null(ctx); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); + break; + } + case _BINARY_SUBSCR: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -572,7 +585,7 @@ } case _BINARY_SLICE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -587,7 +600,7 @@ } case _BINARY_SUBSCR_LIST_INT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -596,7 +609,7 @@ } case _BINARY_SUBSCR_STR_INT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -605,7 +618,7 @@ } case _BINARY_SUBSCR_TUPLE_INT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -614,7 +627,7 @@ } case _BINARY_SUBSCR_DICT: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -623,7 +636,7 @@ } case _BINARY_SUBSCR_CHECK_FUNC: { - _Py_UopsSymbol *getitem; + JitOptSymbol *getitem; getitem = sym_new_not_null(ctx); stack_pointer[0] = getitem; stack_pointer += 1; @@ -632,9 +645,9 @@ } case _BINARY_SUBSCR_INIT_CALL: { - _Py_UopsSymbol *getitem; - _Py_UopsSymbol *sub; - _Py_UopsSymbol *container; + JitOptSymbol *getitem; + JitOptSymbol *sub; + JitOptSymbol *container; _Py_UOpsAbstractFrame *new_frame; getitem = stack_pointer[-1]; sub = stack_pointer[-2]; @@ -644,7 +657,7 @@ (void)getitem; new_frame = NULL; ctx->done = true; - stack_pointer[-3] = (_Py_UopsSymbol *)new_frame; + stack_pointer[-3] = (JitOptSymbol *)new_frame; stack_pointer += -2; assert(WITHIN_STACK_BOUNDS()); break; @@ -687,14 +700,14 @@ } case _CALL_INTRINSIC_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _CALL_INTRINSIC_2: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -703,8 +716,8 @@ } case _RETURN_VALUE: { - _Py_UopsSymbol *retval; - _Py_UopsSymbol *res; + JitOptSymbol *retval; + JitOptSymbol *res; retval = stack_pointer[-1]; stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); @@ -731,14 +744,14 @@ } case _GET_AITER: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; } case _GET_ANEXT: { - _Py_UopsSymbol *awaitable; + JitOptSymbol *awaitable; awaitable = sym_new_not_null(ctx); stack_pointer[0] = awaitable; stack_pointer += 1; @@ -747,7 +760,7 @@ } case _GET_AWAITABLE: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; @@ -762,7 +775,7 @@ } case _YIELD_VALUE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; 
res = sym_new_unknown(ctx); stack_pointer[-1] = res; break; @@ -775,7 +788,7 @@ } case _LOAD_COMMON_CONSTANT: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = sym_new_not_null(ctx); stack_pointer[0] = value; stack_pointer += 1; @@ -784,7 +797,7 @@ } case _LOAD_BUILD_CLASS: { - _Py_UopsSymbol *bc; + JitOptSymbol *bc; bc = sym_new_not_null(ctx); stack_pointer[0] = bc; stack_pointer += 1; @@ -803,8 +816,8 @@ } case _UNPACK_SEQUENCE: { - _Py_UopsSymbol *seq; - _Py_UopsSymbol **values; + JitOptSymbol *seq; + JitOptSymbol **values; seq = stack_pointer[-1]; values = &stack_pointer[-1]; /* This has to be done manually */ @@ -818,10 +831,12 @@ } case _UNPACK_SEQUENCE_TWO_TUPLE: { - _Py_UopsSymbol *val1; - _Py_UopsSymbol *val0; - val1 = sym_new_not_null(ctx); - val0 = sym_new_not_null(ctx); + JitOptSymbol *seq; + JitOptSymbol *val1; + JitOptSymbol *val0; + seq = stack_pointer[-1]; + val0 = sym_tuple_getitem(ctx, seq, 0); + val1 = sym_tuple_getitem(ctx, seq, 1); stack_pointer[-1] = val1; stack_pointer[0] = val0; stack_pointer += 1; @@ -830,10 +845,12 @@ } case _UNPACK_SEQUENCE_TUPLE: { - _Py_UopsSymbol **values; + JitOptSymbol *seq; + JitOptSymbol **values; + seq = stack_pointer[-1]; values = &stack_pointer[-1]; - for (int _i = oparg; --_i >= 0;) { - values[_i] = sym_new_not_null(ctx); + for (int i = 0; i < oparg; i++) { + values[i] = sym_tuple_getitem(ctx, seq, i); } stack_pointer += -1 + oparg; assert(WITHIN_STACK_BOUNDS()); @@ -841,7 +858,7 @@ } case _UNPACK_SEQUENCE_LIST: { - _Py_UopsSymbol **values; + JitOptSymbol **values; values = &stack_pointer[-1]; for (int _i = oparg; --_i >= 0;) { values[_i] = sym_new_not_null(ctx); @@ -852,8 +869,8 @@ } case _UNPACK_EX: { - _Py_UopsSymbol *seq; - _Py_UopsSymbol **values; + JitOptSymbol *seq; + JitOptSymbol **values; seq = stack_pointer[-1]; values = &stack_pointer[-1]; /* This has to be done manually */ @@ -890,7 +907,7 @@ } case _LOAD_LOCALS: { - _Py_UopsSymbol *locals; + JitOptSymbol *locals; locals = sym_new_not_null(ctx); stack_pointer[0] = locals; stack_pointer += 1; @@ -901,7 +918,7 @@ /* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 */ case _LOAD_NAME: { - _Py_UopsSymbol *v; + JitOptSymbol *v; v = sym_new_not_null(ctx); stack_pointer[0] = v; stack_pointer += 1; @@ -910,8 +927,8 @@ } case _LOAD_GLOBAL: { - _Py_UopsSymbol **res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol **res; + JitOptSymbol *null = NULL; res = &stack_pointer[0]; res[0] = sym_new_not_null(ctx); null = sym_new_null(ctx); @@ -926,7 +943,7 @@ } case _GUARD_GLOBALS_VERSION_PUSH_KEYS: { - _Py_UopsSymbol *globals_keys; + JitOptSymbol *globals_keys; uint16_t version = (uint16_t)this_instr->operand0; globals_keys = sym_new_unknown(ctx); (void)version; @@ -937,7 +954,7 @@ } case _GUARD_BUILTINS_VERSION_PUSH_KEYS: { - _Py_UopsSymbol *builtins_keys; + JitOptSymbol *builtins_keys; uint16_t version = (uint16_t)this_instr->operand0; builtins_keys = sym_new_unknown(ctx); (void)version; @@ -948,8 +965,8 @@ } case _LOAD_GLOBAL_MODULE_FROM_KEYS: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; + JitOptSymbol *null = NULL; res = sym_new_not_null(ctx); null = sym_new_null(ctx); stack_pointer[-1] = res; @@ -960,8 +977,8 @@ } case _LOAD_GLOBAL_BUILTINS_FROM_KEYS: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; + JitOptSymbol *null = NULL; res = sym_new_not_null(ctx); null = sym_new_null(ctx); stack_pointer[-1] = res; @@ -984,14 +1001,14 @@ } case _LOAD_FROM_DICT_OR_DEREF: { - _Py_UopsSymbol *value; + JitOptSymbol 
*value; value = sym_new_not_null(ctx); stack_pointer[-1] = value; break; } case _LOAD_DEREF: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = sym_new_not_null(ctx); stack_pointer[0] = value; stack_pointer += 1; @@ -1010,7 +1027,7 @@ } case _BUILD_STRING: { - _Py_UopsSymbol *str; + JitOptSymbol *str; str = sym_new_not_null(ctx); stack_pointer[-oparg] = str; stack_pointer += 1 - oparg; @@ -1019,8 +1036,10 @@ } case _BUILD_TUPLE: { - _Py_UopsSymbol *tup; - tup = sym_new_not_null(ctx); + JitOptSymbol **values; + JitOptSymbol *tup; + values = &stack_pointer[-oparg]; + tup = sym_new_tuple(ctx, oparg, values); stack_pointer[-oparg] = tup; stack_pointer += 1 - oparg; assert(WITHIN_STACK_BOUNDS()); @@ -1028,7 +1047,7 @@ } case _BUILD_LIST: { - _Py_UopsSymbol *list; + JitOptSymbol *list; list = sym_new_not_null(ctx); stack_pointer[-oparg] = list; stack_pointer += 1 - oparg; @@ -1049,7 +1068,7 @@ } case _BUILD_SET: { - _Py_UopsSymbol *set; + JitOptSymbol *set; set = sym_new_not_null(ctx); stack_pointer[-oparg] = set; stack_pointer += 1 - oparg; @@ -1058,7 +1077,7 @@ } case _BUILD_MAP: { - _Py_UopsSymbol *map; + JitOptSymbol *map; map = sym_new_not_null(ctx); stack_pointer[-oparg*2] = map; stack_pointer += 1 - oparg*2; @@ -1091,7 +1110,7 @@ /* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */ case _LOAD_SUPER_ATTR_ATTR: { - _Py_UopsSymbol *attr_st; + JitOptSymbol *attr_st; attr_st = sym_new_not_null(ctx); stack_pointer[-3] = attr_st; stack_pointer += -2; @@ -1100,8 +1119,8 @@ } case _LOAD_SUPER_ATTR_METHOD: { - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self_or_null; + JitOptSymbol *attr; + JitOptSymbol *self_or_null; attr = sym_new_not_null(ctx); self_or_null = sym_new_not_null(ctx); stack_pointer[-3] = attr; @@ -1112,9 +1131,9 @@ } case _LOAD_ATTR: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self_or_null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self_or_null = NULL; owner = stack_pointer[-1]; (void)owner; attr = sym_new_not_null(ctx); @@ -1127,7 +1146,7 @@ } case _GUARD_TYPE_VERSION: { - _Py_UopsSymbol *owner; + JitOptSymbol *owner; owner = stack_pointer[-1]; uint32_t type_version = (uint32_t)this_instr->operand0; assert(type_version); @@ -1161,9 +1180,9 @@ } case _LOAD_ATTR_INSTANCE_VALUE: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *null = NULL; owner = stack_pointer[-1]; uint16_t offset = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); @@ -1178,8 +1197,8 @@ } case _CHECK_ATTR_MODULE_PUSH_KEYS: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *mod_keys; + JitOptSymbol *owner; + JitOptSymbol *mod_keys; owner = stack_pointer[-1]; uint32_t dict_version = (uint32_t)this_instr->operand0; (void)dict_version; @@ -1209,9 +1228,9 @@ } case _LOAD_ATTR_MODULE_FROM_KEYS: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *null = NULL; owner = stack_pointer[-2]; uint16_t index = (uint16_t)this_instr->operand0; (void)index; @@ -1250,8 +1269,8 @@ } case _CHECK_ATTR_WITH_HINT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *dict; + JitOptSymbol *owner; + JitOptSymbol *dict; owner = stack_pointer[-1]; dict = sym_new_not_null(ctx); (void)owner; @@ -1262,10 +1281,10 @@ } case _LOAD_ATTR_WITH_HINT: { - _Py_UopsSymbol *dict; - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *dict; + JitOptSymbol *owner; + 
JitOptSymbol *attr; + JitOptSymbol *null = NULL; dict = stack_pointer[-1]; owner = stack_pointer[-2]; uint16_t hint = (uint16_t)this_instr->operand0; @@ -1282,9 +1301,9 @@ } case _LOAD_ATTR_SLOT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *null = NULL; owner = stack_pointer[-1]; uint16_t index = (uint16_t)this_instr->operand0; attr = sym_new_not_null(ctx); @@ -1303,9 +1322,9 @@ } case _LOAD_ATTR_CLASS: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *null = NULL; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; attr = sym_new_not_null(ctx); @@ -1320,7 +1339,7 @@ } case _LOAD_ATTR_PROPERTY_FRAME: { - _Py_UopsSymbol *owner; + JitOptSymbol *owner; _Py_UOpsAbstractFrame *new_frame; owner = stack_pointer[-1]; PyObject *fget = (PyObject *)this_instr->operand0; @@ -1328,7 +1347,7 @@ (void)owner; new_frame = NULL; ctx->done = true; - stack_pointer[-1] = (_Py_UopsSymbol *)new_frame; + stack_pointer[-1] = (JitOptSymbol *)new_frame; break; } @@ -1357,9 +1376,9 @@ } case _COMPARE_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1381,9 +1400,9 @@ } case _COMPARE_OP_FLOAT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1396,9 +1415,9 @@ } case _COMPARE_OP_INT: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1411,9 +1430,9 @@ } case _COMPARE_OP_STR: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1426,9 +1445,9 @@ } case _IS_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1441,9 +1460,9 @@ } case _CONTAINS_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; (void)left; @@ -1456,7 +1475,7 @@ } case _CONTAINS_OP_SET: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-2] = b; stack_pointer += -1; @@ -1465,7 +1484,7 @@ } case _CONTAINS_OP_DICT: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-2] = b; stack_pointer += -1; @@ -1474,8 +1493,8 @@ } case _CHECK_EG_MATCH: { - _Py_UopsSymbol *rest; - _Py_UopsSymbol *match; + JitOptSymbol *rest; + JitOptSymbol *match; rest = sym_new_not_null(ctx); match = sym_new_not_null(ctx); stack_pointer[-2] = rest; @@ -1484,14 +1503,14 @@ } case _CHECK_EXC_MATCH: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-1] = b; break; } case _IMPORT_NAME: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -1500,7 +1519,7 @@ } case _IMPORT_FROM: { - 
_Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1513,14 +1532,14 @@ /* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */ case _IS_NONE: { - _Py_UopsSymbol *b; + JitOptSymbol *b; b = sym_new_not_null(ctx); stack_pointer[-1] = b; break; } case _GET_LEN: { - _Py_UopsSymbol *len; + JitOptSymbol *len; len = sym_new_not_null(ctx); stack_pointer[0] = len; stack_pointer += 1; @@ -1529,7 +1548,7 @@ } case _MATCH_CLASS: { - _Py_UopsSymbol *attrs; + JitOptSymbol *attrs; attrs = sym_new_not_null(ctx); stack_pointer[-3] = attrs; stack_pointer += -2; @@ -1538,7 +1557,7 @@ } case _MATCH_MAPPING: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1547,7 +1566,7 @@ } case _MATCH_SEQUENCE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1556,7 +1575,7 @@ } case _MATCH_KEYS: { - _Py_UopsSymbol *values_or_none; + JitOptSymbol *values_or_none; values_or_none = sym_new_not_null(ctx); stack_pointer[0] = values_or_none; stack_pointer += 1; @@ -1565,14 +1584,14 @@ } case _GET_ITER: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; } case _GET_YIELD_FROM_ITER: { - _Py_UopsSymbol *iter; + JitOptSymbol *iter; iter = sym_new_not_null(ctx); stack_pointer[-1] = iter; break; @@ -1581,7 +1600,7 @@ /* _FOR_ITER is not a viable micro-op for tier 2 */ case _FOR_ITER_TIER_TWO: { - _Py_UopsSymbol *next; + JitOptSymbol *next; next = sym_new_not_null(ctx); stack_pointer[0] = next; stack_pointer += 1; @@ -1602,7 +1621,7 @@ } case _ITER_NEXT_LIST: { - _Py_UopsSymbol *next; + JitOptSymbol *next; next = sym_new_not_null(ctx); stack_pointer[0] = next; stack_pointer += 1; @@ -1621,7 +1640,7 @@ } case _ITER_NEXT_TUPLE: { - _Py_UopsSymbol *next; + JitOptSymbol *next; next = sym_new_not_null(ctx); stack_pointer[0] = next; stack_pointer += 1; @@ -1640,8 +1659,8 @@ } case _ITER_NEXT_RANGE: { - _Py_UopsSymbol *iter; - _Py_UopsSymbol *next; + JitOptSymbol *iter; + JitOptSymbol *next; iter = stack_pointer[-1]; next = sym_new_type(ctx, &PyLong_Type); (void)iter; @@ -1658,9 +1677,9 @@ } case _LOAD_SPECIAL: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self_or_null; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self_or_null; owner = stack_pointer[-1]; (void)owner; attr = sym_new_not_null(ctx); @@ -1673,7 +1692,7 @@ } case _WITH_EXCEPT_START: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[0] = res; stack_pointer += 1; @@ -1682,8 +1701,8 @@ } case _PUSH_EXC_INFO: { - _Py_UopsSymbol *prev_exc; - _Py_UopsSymbol *new_exc; + JitOptSymbol *prev_exc; + JitOptSymbol *new_exc; prev_exc = sym_new_not_null(ctx); new_exc = sym_new_not_null(ctx); stack_pointer[-1] = prev_exc; @@ -1702,9 +1721,9 @@ } case _LOAD_ATTR_METHOD_WITH_VALUES: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self = NULL; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; @@ -1718,9 +1737,9 @@ } case _LOAD_ATTR_METHOD_NO_DICT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self = NULL; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; @@ -1734,14 +1753,14 @@ 
} case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: { - _Py_UopsSymbol *attr; + JitOptSymbol *attr; attr = sym_new_not_null(ctx); stack_pointer[-1] = attr; break; } case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: { - _Py_UopsSymbol *attr; + JitOptSymbol *attr; attr = sym_new_not_null(ctx); stack_pointer[-1] = attr; break; @@ -1752,9 +1771,9 @@ } case _LOAD_ATTR_METHOD_LAZY_DICT: { - _Py_UopsSymbol *owner; - _Py_UopsSymbol *attr; - _Py_UopsSymbol *self = NULL; + JitOptSymbol *owner; + JitOptSymbol *attr; + JitOptSymbol *self = NULL; owner = stack_pointer[-1]; PyObject *descr = (PyObject *)this_instr->operand0; (void)descr; @@ -1768,11 +1787,11 @@ } case _MAYBE_EXPAND_METHOD: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; - _Py_UopsSymbol *func; - _Py_UopsSymbol *maybe_self; + JitOptSymbol **args; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; + JitOptSymbol *func; + JitOptSymbol *maybe_self; args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1792,8 +1811,8 @@ /* _MONITOR_CALL is not a viable micro-op for tier 2 */ case _PY_FRAME_GENERAL: { - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; _Py_UOpsAbstractFrame *new_frame; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -1809,15 +1828,15 @@ break; } new_frame = frame_new(ctx, co, 0, NULL, 0); - stack_pointer[0] = (_Py_UopsSymbol *)new_frame; + stack_pointer[0] = (JitOptSymbol *)new_frame; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; } case _CHECK_FUNCTION_VERSION: { - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; uint32_t func_version = (uint32_t)this_instr->operand0; @@ -1840,8 +1859,8 @@ } case _EXPAND_METHOD: { - _Py_UopsSymbol **method; - _Py_UopsSymbol **self; + JitOptSymbol **method; + JitOptSymbol **self; method = &stack_pointer[-2 - oparg]; self = &stack_pointer[-1 - oparg]; method[0] = sym_new_not_null(ctx); @@ -1854,7 +1873,7 @@ } case _CALL_NON_PY_GENERAL: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -1863,8 +1882,8 @@ } case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: { - _Py_UopsSymbol *null; - _Py_UopsSymbol *callable; + JitOptSymbol *null; + JitOptSymbol *callable; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; sym_set_null(null); @@ -1873,9 +1892,9 @@ } case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: { - _Py_UopsSymbol *callable; - _Py_UopsSymbol *func; - _Py_UopsSymbol *self; + JitOptSymbol *callable; + JitOptSymbol *func; + JitOptSymbol *self; callable = stack_pointer[-2 - oparg]; (void)callable; func = sym_new_not_null(ctx); @@ -1895,8 +1914,8 @@ } case _CHECK_FUNCTION_EXACT_ARGS: { - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; self_or_null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; assert(sym_matches_type(callable, &PyFunction_Type)); @@ -1920,9 +1939,9 @@ } case _INIT_CALL_PY_EXACT_ARGS: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol **args; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; _Py_UOpsAbstractFrame *new_frame; args = &stack_pointer[-oparg]; self_or_null = stack_pointer[-1 - oparg]; @@ -1950,7 
+1969,7 @@ } else { new_frame = frame_new(ctx, co, 0, NULL, 0); } - stack_pointer[0] = (_Py_UopsSymbol *)new_frame; + stack_pointer[0] = (JitOptSymbol *)new_frame; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); break; @@ -1995,7 +2014,7 @@ } case _CALL_TYPE_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -2004,7 +2023,7 @@ } case _CALL_STR_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -2013,7 +2032,7 @@ } case _CALL_TUPLE_1: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3] = res; stack_pointer += -2; @@ -2022,11 +2041,11 @@ } case _CHECK_AND_ALLOCATE_OBJECT: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *null; - _Py_UopsSymbol *callable; - _Py_UopsSymbol *self; - _Py_UopsSymbol *init; + JitOptSymbol **args; + JitOptSymbol *null; + JitOptSymbol *callable; + JitOptSymbol *self; + JitOptSymbol *init; args = &stack_pointer[-oparg]; null = stack_pointer[-1 - oparg]; callable = stack_pointer[-2 - oparg]; @@ -2044,9 +2063,9 @@ } case _CREATE_INIT_FRAME: { - _Py_UopsSymbol **args; - _Py_UopsSymbol *init; - _Py_UopsSymbol *self; + JitOptSymbol **args; + JitOptSymbol *init; + JitOptSymbol *self; _Py_UOpsAbstractFrame *init_frame; args = &stack_pointer[-oparg]; init = stack_pointer[-1 - oparg]; @@ -2056,7 +2075,7 @@ (void)args; init_frame = NULL; ctx->done = true; - stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)init_frame; + stack_pointer[-2 - oparg] = (JitOptSymbol *)init_frame; stack_pointer += -1 - oparg; assert(WITHIN_STACK_BOUNDS()); break; @@ -2069,7 +2088,7 @@ } case _CALL_BUILTIN_CLASS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2078,7 +2097,7 @@ } case _CALL_BUILTIN_O: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2087,7 +2106,7 @@ } case _CALL_BUILTIN_FAST: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2096,7 +2115,7 @@ } case _CALL_BUILTIN_FAST_WITH_KEYWORDS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2105,7 +2124,7 @@ } case _CALL_LEN: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2114,7 +2133,7 @@ } case _CALL_ISINSTANCE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2129,7 +2148,7 @@ } case _CALL_METHOD_DESCRIPTOR_O: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2138,7 +2157,7 @@ } case _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2147,7 +2166,7 @@ } case _CALL_METHOD_DESCRIPTOR_NOARGS: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; stack_pointer += -1 - oparg; @@ -2156,7 +2175,7 @@ } case _CALL_METHOD_DESCRIPTOR_FAST: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2 - oparg] = res; 
stack_pointer += -1 - oparg; @@ -2167,10 +2186,10 @@ /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */ case _MAYBE_EXPAND_METHOD_KW: { - _Py_UopsSymbol **func; - _Py_UopsSymbol **maybe_self; - _Py_UopsSymbol **args; - _Py_UopsSymbol *kwnames_out; + JitOptSymbol **func; + JitOptSymbol **maybe_self; + JitOptSymbol **args; + JitOptSymbol *kwnames_out; func = &stack_pointer[-3 - oparg]; maybe_self = &stack_pointer[-2 - oparg]; args = &stack_pointer[-1 - oparg]; @@ -2187,10 +2206,10 @@ /* _DO_CALL_KW is not a viable micro-op for tier 2 */ case _PY_FRAME_KW: { - _Py_UopsSymbol *kwnames; - _Py_UopsSymbol **args; - _Py_UopsSymbol *self_or_null; - _Py_UopsSymbol *callable; + JitOptSymbol *kwnames; + JitOptSymbol **args; + JitOptSymbol *self_or_null; + JitOptSymbol *callable; _Py_UOpsAbstractFrame *new_frame; kwnames = stack_pointer[-1]; args = &stack_pointer[-1 - oparg]; @@ -2202,7 +2221,7 @@ (void)kwnames; new_frame = NULL; ctx->done = true; - stack_pointer[-3 - oparg] = (_Py_UopsSymbol *)new_frame; + stack_pointer[-3 - oparg] = (JitOptSymbol *)new_frame; stack_pointer += -2 - oparg; assert(WITHIN_STACK_BOUNDS()); break; @@ -2217,8 +2236,8 @@ } case _EXPAND_METHOD_KW: { - _Py_UopsSymbol **method; - _Py_UopsSymbol **self; + JitOptSymbol **method; + JitOptSymbol **self; method = &stack_pointer[-3 - oparg]; self = &stack_pointer[-2 - oparg]; method[0] = sym_new_not_null(ctx); @@ -2231,7 +2250,7 @@ } case _CALL_KW_NON_PY: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-3 - oparg] = res; stack_pointer += -2 - oparg; @@ -2242,8 +2261,8 @@ /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ case _MAKE_CALLARGS_A_TUPLE: { - _Py_UopsSymbol *tuple; - _Py_UopsSymbol *kwargs_out = NULL; + JitOptSymbol *tuple; + JitOptSymbol *kwargs_out = NULL; tuple = sym_new_not_null(ctx); kwargs_out = sym_new_not_null(ctx); stack_pointer[-1 - (oparg & 1)] = tuple; @@ -2254,14 +2273,14 @@ /* _DO_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */ case _MAKE_FUNCTION: { - _Py_UopsSymbol *func; + JitOptSymbol *func; func = sym_new_not_null(ctx); stack_pointer[-1] = func; break; } case _SET_FUNCTION_ATTRIBUTE: { - _Py_UopsSymbol *func_out; + JitOptSymbol *func_out; func_out = sym_new_not_null(ctx); stack_pointer[-2] = func_out; stack_pointer += -1; @@ -2270,7 +2289,7 @@ } case _RETURN_GENERATOR: { - _Py_UopsSymbol *res; + JitOptSymbol *res; ctx->frame->stack_pointer = stack_pointer; frame_pop(ctx); stack_pointer = ctx->frame->stack_pointer; @@ -2294,7 +2313,7 @@ } case _BUILD_SLICE: { - _Py_UopsSymbol *slice; + JitOptSymbol *slice; slice = sym_new_not_null(ctx); stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice; stack_pointer += -1 - ((oparg == 3) ? 
1 : 0); @@ -2303,21 +2322,21 @@ } case _CONVERT_VALUE: { - _Py_UopsSymbol *result; + JitOptSymbol *result; result = sym_new_not_null(ctx); stack_pointer[-1] = result; break; } case _FORMAT_SIMPLE: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-1] = res; break; } case _FORMAT_WITH_SPEC: { - _Py_UopsSymbol *res; + JitOptSymbol *res; res = sym_new_not_null(ctx); stack_pointer[-2] = res; stack_pointer += -1; @@ -2326,8 +2345,8 @@ } case _COPY: { - _Py_UopsSymbol *bottom; - _Py_UopsSymbol *top; + JitOptSymbol *bottom; + JitOptSymbol *top; bottom = stack_pointer[-1 - (oparg-1)]; assert(oparg > 0); top = bottom; @@ -2338,9 +2357,9 @@ } case _BINARY_OP: { - _Py_UopsSymbol *right; - _Py_UopsSymbol *left; - _Py_UopsSymbol *res; + JitOptSymbol *right; + JitOptSymbol *left; + JitOptSymbol *res; right = stack_pointer[-1]; left = stack_pointer[-2]; bool lhs_int = sym_matches_type(left, &PyLong_Type); @@ -2413,10 +2432,10 @@ } case _SWAP: { - _Py_UopsSymbol *top_in; - _Py_UopsSymbol *bottom_in; - _Py_UopsSymbol *top_out; - _Py_UopsSymbol *bottom_out; + JitOptSymbol *top_in; + JitOptSymbol *bottom_in; + JitOptSymbol *top_out; + JitOptSymbol *bottom_out; top_in = stack_pointer[-1]; bottom_in = stack_pointer[-2 - (oparg-2)]; bottom_out = bottom_in; @@ -2445,7 +2464,7 @@ /* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 */ case _GUARD_IS_TRUE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2462,7 +2481,7 @@ } case _GUARD_IS_FALSE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2479,7 +2498,7 @@ } case _GUARD_IS_NONE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2504,7 +2523,7 @@ } case _GUARD_IS_NOT_NONE_POP: { - _Py_UopsSymbol *flag; + JitOptSymbol *flag; flag = stack_pointer[-1]; if (sym_is_const(flag)) { PyObject *value = sym_get_const(flag); @@ -2562,7 +2581,7 @@ } case _LOAD_CONST_INLINE: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); stack_pointer[0] = value; @@ -2572,7 +2591,7 @@ } case _LOAD_CONST_INLINE_BORROW: { - _Py_UopsSymbol *value; + JitOptSymbol *value; PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); stack_pointer[0] = value; @@ -2582,15 +2601,15 @@ } case _POP_TOP_LOAD_CONST_INLINE_BORROW: { - _Py_UopsSymbol *value; + JitOptSymbol *value; value = sym_new_not_null(ctx); stack_pointer[-1] = value; break; } case _LOAD_CONST_INLINE_WITH_NULL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *null; + JitOptSymbol *value; + JitOptSymbol *null; PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); null = sym_new_null(ctx); @@ -2602,8 +2621,8 @@ } case _LOAD_CONST_INLINE_BORROW_WITH_NULL: { - _Py_UopsSymbol *value; - _Py_UopsSymbol *null; + JitOptSymbol *value; + JitOptSymbol *null; PyObject *ptr = (PyObject *)this_instr->operand0; value = sym_new_const(ctx, ptr); null = sym_new_null(ctx); @@ -2619,8 +2638,8 @@ } case _LOAD_GLOBAL_MODULE: { - _Py_UopsSymbol *res; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; + JitOptSymbol *null = NULL; res = sym_new_not_null(ctx); null = sym_new_null(ctx); stack_pointer[0] = res; @@ -2631,8 +2650,8 @@ } case _LOAD_GLOBAL_BUILTINS: { - _Py_UopsSymbol *res; 
- _Py_UopsSymbol *null = NULL; + JitOptSymbol *res; + JitOptSymbol *null = NULL; res = sym_new_not_null(ctx); null = sym_new_null(ctx); stack_pointer[0] = res; @@ -2643,8 +2662,8 @@ } case _LOAD_ATTR_MODULE: { - _Py_UopsSymbol *attr; - _Py_UopsSymbol *null = NULL; + JitOptSymbol *attr; + JitOptSymbol *null = NULL; attr = sym_new_not_null(ctx); null = sym_new_null(ctx); stack_pointer[-1] = attr; @@ -2654,12 +2673,6 @@ break; } - case _INTERNAL_INCREMENT_OPT_COUNTER: { - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); - break; - } - case _DYNAMIC_EXIT: { break; } diff --git a/Python/optimizer_symbols.c b/Python/optimizer_symbols.c index 40cbf95e3d6d39..dcde8e7ce81577 100644 --- a/Python/optimizer_symbols.c +++ b/Python/optimizer_symbols.c @@ -28,11 +28,6 @@ - Bottom: IS_NULL and NOT_NULL flags set, type and const_val NULL. */ -// Flags for below. -#define IS_NULL 1 << 0 -#define NOT_NULL 1 << 1 -#define NO_SPACE 1 << 2 - #ifdef Py_DEBUG static inline int get_lltrace(void) { char *uop_debug = Py_GETENV("PYTHON_OPT_DEBUG"); @@ -48,187 +43,254 @@ static inline int get_lltrace(void) { #define DPRINTF(level, ...) #endif -static _Py_UopsSymbol NO_SPACE_SYMBOL = { - .flags = IS_NULL | NOT_NULL | NO_SPACE, - .typ = NULL, - .const_val = NULL, - .type_version = 0, + +static JitOptSymbol NO_SPACE_SYMBOL = { + .tag = JIT_SYM_BOTTOM_TAG }; -_Py_UopsSymbol * -out_of_space(_Py_UOpsContext *ctx) +JitOptSymbol * +out_of_space(JitOptContext *ctx) { ctx->done = true; ctx->out_of_space = true; return &NO_SPACE_SYMBOL; } -static _Py_UopsSymbol * -sym_new(_Py_UOpsContext *ctx) +static JitOptSymbol * +sym_new(JitOptContext *ctx) { - _Py_UopsSymbol *self = &ctx->t_arena.arena[ctx->t_arena.ty_curr_number]; + JitOptSymbol *self = &ctx->t_arena.arena[ctx->t_arena.ty_curr_number]; if (ctx->t_arena.ty_curr_number >= ctx->t_arena.ty_max_number) { OPT_STAT_INC(optimizer_failure_reason_no_memory); DPRINTF(1, "out of space for symbolic expression type\n"); return NULL; } ctx->t_arena.ty_curr_number++; - self->flags = 0; - self->typ = NULL; - self->const_val = NULL; - self->type_version = 0; - + self->tag = JIT_SYM_UNKNOWN_TAG; return self; } static inline void -sym_set_flag(_Py_UopsSymbol *sym, int flag) -{ - sym->flags |= flag; -} - -static inline void -sym_set_bottom(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym) +sym_set_bottom(JitOptContext *ctx, JitOptSymbol *sym) { - sym_set_flag(sym, IS_NULL | NOT_NULL); - sym->typ = NULL; - Py_CLEAR(sym->const_val); + sym->tag = JIT_SYM_BOTTOM_TAG; ctx->done = true; ctx->contradiction = true; } bool -_Py_uop_sym_is_bottom(_Py_UopsSymbol *sym) +_Py_uop_sym_is_bottom(JitOptSymbol *sym) { - if ((sym->flags & IS_NULL) && (sym->flags & NOT_NULL)) { - assert(sym->flags == (IS_NULL | NOT_NULL)); - assert(sym->typ == NULL); - assert(sym->const_val == NULL); - return true; - } - return false; + return sym->tag == JIT_SYM_BOTTOM_TAG; } bool -_Py_uop_sym_is_not_null(_Py_UopsSymbol *sym) -{ - return sym->flags == NOT_NULL; +_Py_uop_sym_is_not_null(JitOptSymbol *sym) { + return sym->tag == JIT_SYM_NON_NULL_TAG || sym->tag > JIT_SYM_BOTTOM_TAG; } bool -_Py_uop_sym_is_null(_Py_UopsSymbol *sym) +_Py_uop_sym_is_const(JitOptSymbol *sym) { - return sym->flags == IS_NULL; + return sym->tag == JIT_SYM_KNOWN_VALUE_TAG; } bool -_Py_uop_sym_is_const(_Py_UopsSymbol *sym) +_Py_uop_sym_is_null(JitOptSymbol *sym) { - return sym->const_val != NULL; + return sym->tag == JIT_SYM_NULL_TAG; } + PyObject * -_Py_uop_sym_get_const(_Py_UopsSymbol *sym) +_Py_uop_sym_get_const(JitOptSymbol *sym) { - return 
sym->const_val; + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + return sym->value.value; + } + return NULL; } void -_Py_uop_sym_set_type(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyTypeObject *typ) +_Py_uop_sym_set_type(JitOptContext *ctx, JitOptSymbol *sym, PyTypeObject *typ) { - assert(typ != NULL && PyType_Check(typ)); - if (sym->flags & IS_NULL) { - sym_set_bottom(ctx, sym); - return; - } - if (sym->typ != NULL) { - if (sym->typ != typ) { + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: sym_set_bottom(ctx, sym); return; - } - } - else { - sym_set_flag(sym, NOT_NULL); - sym->typ = typ; + case JIT_SYM_KNOWN_CLASS_TAG: + if (sym->cls.type != typ) { + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_TYPE_VERSION_TAG: + if (sym->version.version == typ->tp_version_tag) { + sym->tag = JIT_SYM_KNOWN_CLASS_TAG; + sym->cls.type = typ; + sym->cls.version = typ->tp_version_tag; + } + else { + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_KNOWN_VALUE_TAG: + if (Py_TYPE(sym->value.value) != typ) { + Py_CLEAR(sym->value.value); + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_TUPLE_TAG: + if (typ != &PyTuple_Type) { + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_BOTTOM_TAG: + return; + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + sym->tag = JIT_SYM_KNOWN_CLASS_TAG; + sym->cls.version = 0; + sym->cls.type = typ; + return; } } bool -_Py_uop_sym_set_type_version(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, unsigned int version) +_Py_uop_sym_set_type_version(JitOptContext *ctx, JitOptSymbol *sym, unsigned int version) { - // if the type version was already set, then it must be different and we should set it to bottom - if (sym->type_version) { - sym_set_bottom(ctx, sym); - return false; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_KNOWN_CLASS_TAG: + if (sym->cls.type->tp_version_tag != version) { + sym_set_bottom(ctx, sym); + return false; + } + else { + sym->cls.version = version; + return true; + } + case JIT_SYM_KNOWN_VALUE_TAG: + Py_CLEAR(sym->value.value); + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_TUPLE_TAG: + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_TYPE_VERSION_TAG: + if (sym->version.version == version) { + return true; + } + sym_set_bottom(ctx, sym); + return false; + case JIT_SYM_BOTTOM_TAG: + return false; + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + sym->tag = JIT_SYM_TYPE_VERSION_TAG; + sym->version.version = version; + return true; } - sym->type_version = version; - return true; + Py_UNREACHABLE(); +} + +static void make_const(JitOptSymbol *sym, PyObject *val) +{ + sym->tag = JIT_SYM_KNOWN_VALUE_TAG; + sym->value.value = Py_NewRef(val); } void -_Py_uop_sym_set_const(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym, PyObject *const_val) +_Py_uop_sym_set_const(JitOptContext *ctx, JitOptSymbol *sym, PyObject *const_val) { - assert(const_val != NULL); - if (sym->flags & IS_NULL) { - sym_set_bottom(ctx, sym); - } - PyTypeObject *typ = Py_TYPE(const_val); - if (sym->typ != NULL && sym->typ != typ) { - sym_set_bottom(ctx, sym); - } - if (sym->const_val != NULL) { - if (sym->const_val != const_val) { - // TODO: What if they're equal? 
+ JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: sym_set_bottom(ctx, sym); - } - } - else { - sym_set_flag(sym, NOT_NULL); - sym->typ = typ; - sym->const_val = Py_NewRef(const_val); + return; + case JIT_SYM_KNOWN_CLASS_TAG: + if (sym->cls.type != Py_TYPE(const_val)) { + sym_set_bottom(ctx, sym); + return; + } + make_const(sym, const_val); + return; + case JIT_SYM_KNOWN_VALUE_TAG: + if (sym->value.value != const_val) { + Py_CLEAR(sym->value.value); + sym_set_bottom(ctx, sym); + } + return; + case JIT_SYM_TUPLE_TAG: + sym_set_bottom(ctx, sym); + return; + case JIT_SYM_TYPE_VERSION_TAG: + if (sym->version.version != Py_TYPE(const_val)->tp_version_tag) { + sym_set_bottom(ctx, sym); + return; + } + make_const(sym, const_val); + return; + case JIT_SYM_BOTTOM_TAG: + return; + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + make_const(sym, const_val); + return; } } void -_Py_uop_sym_set_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym) +_Py_uop_sym_set_null(JitOptContext *ctx, JitOptSymbol *sym) { - if (_Py_uop_sym_is_not_null(sym)) { + if (sym->tag == JIT_SYM_UNKNOWN_TAG) { + sym->tag = JIT_SYM_NULL_TAG; + } + else if (sym->tag > JIT_SYM_NULL_TAG) { sym_set_bottom(ctx, sym); } - sym_set_flag(sym, IS_NULL); } void -_Py_uop_sym_set_non_null(_Py_UOpsContext *ctx, _Py_UopsSymbol *sym) +_Py_uop_sym_set_non_null(JitOptContext *ctx, JitOptSymbol *sym) { - if (_Py_uop_sym_is_null(sym)) { + if (sym->tag == JIT_SYM_UNKNOWN_TAG) { + sym->tag = JIT_SYM_NON_NULL_TAG; + } + else if (sym->tag == JIT_SYM_NULL_TAG) { sym_set_bottom(ctx, sym); } - sym_set_flag(sym, NOT_NULL); } -_Py_UopsSymbol * -_Py_uop_sym_new_unknown(_Py_UOpsContext *ctx) +JitOptSymbol * +_Py_uop_sym_new_unknown(JitOptContext *ctx) { - return sym_new(ctx); + JitOptSymbol *res = sym_new(ctx); + if (res == NULL) { + return out_of_space(ctx); + } + return res; } -_Py_UopsSymbol * -_Py_uop_sym_new_not_null(_Py_UOpsContext *ctx) +JitOptSymbol * +_Py_uop_sym_new_not_null(JitOptContext *ctx) { - _Py_UopsSymbol *res = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *res = sym_new(ctx); if (res == NULL) { return out_of_space(ctx); } - sym_set_flag(res, NOT_NULL); + res->tag = JIT_SYM_NON_NULL_TAG; return res; } -_Py_UopsSymbol * -_Py_uop_sym_new_type(_Py_UOpsContext *ctx, PyTypeObject *typ) +JitOptSymbol * +_Py_uop_sym_new_type(JitOptContext *ctx, PyTypeObject *typ) { - _Py_UopsSymbol *res = sym_new(ctx); + JitOptSymbol *res = sym_new(ctx); if (res == NULL) { return out_of_space(ctx); } @@ -237,11 +299,11 @@ _Py_uop_sym_new_type(_Py_UOpsContext *ctx, PyTypeObject *typ) } // Adds a new reference to const_val, owned by the symbol. 
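/*
 * [Editor's note -- simplified standalone model, not part of the patch.]
 * The switch-based rewrites above replace the old IS_NULL/NOT_NULL flag
 * bits with a tagged union: each new fact either refines the tag (e.g.
 * UNKNOWN -> KNOWN_CLASS) or, on a contradiction, drops the symbol to
 * BOTTOM so the trace is abandoned. A toy version of the set_type
 * transition, with made-up names and only a subset of the real tags:
 */
typedef enum {
    TOY_UNKNOWN,
    TOY_NULL,
    TOY_NON_NULL,
    TOY_KNOWN_CLASS,
    TOY_BOTTOM,
} ToyTag;

typedef struct {
    ToyTag tag;
    const char *cls;   /* stands in for PyTypeObject * */
} ToyJitSym;

static void
toy_set_type(ToyJitSym *sym, const char *cls)
{
    switch (sym->tag) {
        case TOY_UNKNOWN:
        case TOY_NON_NULL:
            sym->tag = TOY_KNOWN_CLASS;   /* refine: we learned the class */
            sym->cls = cls;
            break;
        case TOY_KNOWN_CLASS:
            if (sym->cls != cls) {
                sym->tag = TOY_BOTTOM;    /* two different classes: contradiction */
            }
            break;
        case TOY_NULL:
            sym->tag = TOY_BOTTOM;        /* a NULL value cannot have a class */
            break;
        case TOY_BOTTOM:
            break;                        /* bottom stays bottom */
    }
}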
-_Py_UopsSymbol * -_Py_uop_sym_new_const(_Py_UOpsContext *ctx, PyObject *const_val) +JitOptSymbol * +_Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val) { assert(const_val != NULL); - _Py_UopsSymbol *res = sym_new(ctx); + JitOptSymbol *res = sym_new(ctx); if (res == NULL) { return out_of_space(ctx); } @@ -249,10 +311,10 @@ _Py_uop_sym_new_const(_Py_UOpsContext *ctx, PyObject *const_val) return res; } -_Py_UopsSymbol * -_Py_uop_sym_new_null(_Py_UOpsContext *ctx) +JitOptSymbol * +_Py_uop_sym_new_null(JitOptContext *ctx) { - _Py_UopsSymbol *null_sym = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *null_sym = sym_new(ctx); if (null_sym == NULL) { return out_of_space(ctx); } @@ -261,64 +323,105 @@ _Py_uop_sym_new_null(_Py_UOpsContext *ctx) } PyTypeObject * -_Py_uop_sym_get_type(_Py_UopsSymbol *sym) +_Py_uop_sym_get_type(JitOptSymbol *sym) { - if (_Py_uop_sym_is_bottom(sym)) { - return NULL; - } - return sym->typ; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_TYPE_VERSION_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return NULL; + case JIT_SYM_KNOWN_CLASS_TAG: + return sym->cls.type; + case JIT_SYM_KNOWN_VALUE_TAG: + return Py_TYPE(sym->value.value); + case JIT_SYM_TUPLE_TAG: + return &PyTuple_Type; + } + Py_UNREACHABLE(); } unsigned int -_Py_uop_sym_get_type_version(_Py_UopsSymbol *sym) +_Py_uop_sym_get_type_version(JitOptSymbol *sym) { - return sym->type_version; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return 0; + case JIT_SYM_TYPE_VERSION_TAG: + return sym->version.version; + case JIT_SYM_KNOWN_CLASS_TAG: + return sym->cls.version; + case JIT_SYM_KNOWN_VALUE_TAG: + return Py_TYPE(sym->value.value)->tp_version_tag; + case JIT_SYM_TUPLE_TAG: + return PyTuple_Type.tp_version_tag; + } + Py_UNREACHABLE(); } bool -_Py_uop_sym_has_type(_Py_UopsSymbol *sym) +_Py_uop_sym_has_type(JitOptSymbol *sym) { - if (_Py_uop_sym_is_bottom(sym)) { - return false; - } - return sym->typ != NULL; + JitSymType tag = sym->tag; + switch(tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_TYPE_VERSION_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return false; + case JIT_SYM_KNOWN_CLASS_TAG: + case JIT_SYM_KNOWN_VALUE_TAG: + case JIT_SYM_TUPLE_TAG: + return true; + } + Py_UNREACHABLE(); } bool -_Py_uop_sym_matches_type(_Py_UopsSymbol *sym, PyTypeObject *typ) +_Py_uop_sym_matches_type(JitOptSymbol *sym, PyTypeObject *typ) { assert(typ != NULL && PyType_Check(typ)); return _Py_uop_sym_get_type(sym) == typ; } bool -_Py_uop_sym_matches_type_version(_Py_UopsSymbol *sym, unsigned int version) +_Py_uop_sym_matches_type_version(JitOptSymbol *sym, unsigned int version) { return _Py_uop_sym_get_type_version(sym) == version; } - int -_Py_uop_sym_truthiness(_Py_UopsSymbol *sym) -{ - /* There are some non-constant values for - * which `bool(val)` always evaluates to - * True or False, such as tuples with known - * length, but unknown contents, or bound-methods. - * This function will need updating - * should we support those values. 
- */ - if (_Py_uop_sym_is_bottom(sym)) { - return -1; - } - if (!_Py_uop_sym_is_const(sym)) { - return -1; - } - PyObject *value = _Py_uop_sym_get_const(sym); +_Py_uop_sym_truthiness(JitOptSymbol *sym) +{ + switch(sym->tag) { + case JIT_SYM_NULL_TAG: + case JIT_SYM_TYPE_VERSION_TAG: + case JIT_SYM_BOTTOM_TAG: + case JIT_SYM_NON_NULL_TAG: + case JIT_SYM_UNKNOWN_TAG: + return -1; + case JIT_SYM_KNOWN_CLASS_TAG: + /* TODO : + * Instances of some classes are always + * true. We should return 1 in those cases */ + return -1; + case JIT_SYM_KNOWN_VALUE_TAG: + break; + case JIT_SYM_TUPLE_TAG: + return sym->tuple.length != 0; + } + PyObject *value = sym->value.value; + /* Only handle a few known safe types */ if (value == Py_None) { return 0; } - /* Only handle a few known safe types */ PyTypeObject *tp = Py_TYPE(value); if (tp == &PyLong_Type) { return !_PyLong_IsZero((PyLongObject *)value); @@ -332,13 +435,84 @@ _Py_uop_sym_truthiness(_Py_UopsSymbol *sym) return -1; } +static JitOptSymbol * +allocation_base(JitOptContext *ctx) +{ + return ctx->t_arena.arena; +} + +JitOptSymbol * +_Py_uop_sym_new_tuple(JitOptContext *ctx, int size, JitOptSymbol **args) +{ + JitOptSymbol *res = sym_new(ctx); + if (res == NULL) { + return out_of_space(ctx); + } + if (size > MAX_SYMBOLIC_TUPLE_SIZE) { + res->tag = JIT_SYM_KNOWN_CLASS_TAG; + res->cls.type = &PyTuple_Type; + } + else { + res->tag = JIT_SYM_TUPLE_TAG; + res->tuple.length = size; + for (int i = 0; i < size; i++) { + res->tuple.items[i] = (uint16_t)(args[i] - allocation_base(ctx)); + } + } + return res; +} + +JitOptSymbol * +_Py_uop_sym_tuple_getitem(JitOptContext *ctx, JitOptSymbol *sym, int item) +{ + assert(item >= 0); + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + PyObject *tuple = sym->value.value; + if (PyTuple_CheckExact(tuple) && item < PyTuple_GET_SIZE(tuple)) { + return _Py_uop_sym_new_const(ctx, PyTuple_GET_ITEM(tuple, item)); + } + } + else if (sym->tag == JIT_SYM_TUPLE_TAG && item < sym->tuple.length) { + return allocation_base(ctx) + sym->tuple.items[item]; + } + return _Py_uop_sym_new_unknown(ctx); +} + +int +_Py_uop_sym_tuple_length(JitOptSymbol *sym) +{ + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + PyObject *tuple = sym->value.value; + if (PyTuple_CheckExact(tuple)) { + return PyTuple_GET_SIZE(tuple); + } + } + else if (sym->tag == JIT_SYM_TUPLE_TAG) { + return sym->tuple.length; + } + return -1; +} + +// Return true if known to be immortal. +bool +_Py_uop_sym_is_immortal(JitOptSymbol *sym) +{ + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + return _Py_IsImmortal(sym->value.value); + } + if (sym->tag == JIT_SYM_KNOWN_CLASS_TAG) { + return sym->cls.type == &PyBool_Type; + } + return false; +} + // 0 on success, -1 on error. 
_Py_UOpsAbstractFrame * _Py_uop_frame_new( - _Py_UOpsContext *ctx, + JitOptContext *ctx, PyCodeObject *co, int curr_stackentries, - _Py_UopsSymbol **args, + JitOptSymbol **args, int arg_len) { assert(ctx->curr_frame_depth < MAX_ABSTRACT_FRAME_DEPTH); @@ -363,14 +537,14 @@ _Py_uop_frame_new( } for (int i = arg_len; i < co->co_nlocalsplus; i++) { - _Py_UopsSymbol *local = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *local = _Py_uop_sym_new_unknown(ctx); frame->locals[i] = local; } // Initialize the stack as well for (int i = 0; i < curr_stackentries; i++) { - _Py_UopsSymbol *stackvar = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *stackvar = _Py_uop_sym_new_unknown(ctx); frame->stack[i] = stackvar; } @@ -378,7 +552,7 @@ _Py_uop_frame_new( } void -_Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx) +_Py_uop_abstractcontext_fini(JitOptContext *ctx) { if (ctx == NULL) { return; @@ -386,13 +560,17 @@ _Py_uop_abstractcontext_fini(_Py_UOpsContext *ctx) ctx->curr_frame_depth = 0; int tys = ctx->t_arena.ty_curr_number; for (int i = 0; i < tys; i++) { - Py_CLEAR(ctx->t_arena.arena[i].const_val); + JitOptSymbol *sym = &ctx->t_arena.arena[i]; + if (sym->tag == JIT_SYM_KNOWN_VALUE_TAG) { + Py_CLEAR(sym->value.value); + } } } void -_Py_uop_abstractcontext_init(_Py_UOpsContext *ctx) +_Py_uop_abstractcontext_init(JitOptContext *ctx) { + static_assert(sizeof(JitOptSymbol) <= 2*sizeof(uint64_t)); ctx->limit = ctx->locals_and_stack + MAX_ABSTRACT_INTERP_SIZE; ctx->n_consumed = ctx->locals_and_stack; #ifdef Py_DEBUG // Aids debugging a little. There should never be NULL in the abstract interpreter. @@ -410,7 +588,7 @@ _Py_uop_abstractcontext_init(_Py_UOpsContext *ctx) } int -_Py_uop_frame_pop(_Py_UOpsContext *ctx) +_Py_uop_frame_pop(JitOptContext *ctx) { _Py_UOpsAbstractFrame *frame = ctx->frame; ctx->n_consumed = frame->locals; @@ -431,26 +609,25 @@ do { \ } \ } while (0) -static _Py_UopsSymbol * -make_bottom(_Py_UOpsContext *ctx) +static JitOptSymbol * +make_bottom(JitOptContext *ctx) { - _Py_UopsSymbol *sym = _Py_uop_sym_new_unknown(ctx); - _Py_uop_sym_set_null(ctx, sym); - _Py_uop_sym_set_non_null(ctx, sym); + JitOptSymbol *sym = sym_new(ctx); + sym->tag = JIT_SYM_BOTTOM_TAG; return sym; } PyObject * _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) { - _Py_UOpsContext context; - _Py_UOpsContext *ctx = &context; + JitOptContext context; + JitOptContext *ctx = &context; _Py_uop_abstractcontext_init(ctx); PyObject *val_42 = NULL; PyObject *val_43 = NULL; // Use a single 'sym' variable so copy-pasting tests is easier. 
- _Py_UopsSymbol *sym = _Py_uop_sym_new_unknown(ctx); + JitOptSymbol *sym = _Py_uop_sym_new_unknown(ctx); if (sym == NULL) { goto fail; } @@ -510,6 +687,7 @@ _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) TEST_PREDICATE(_Py_uop_sym_is_const(sym), "42 is not a constant"); TEST_PREDICATE(_Py_uop_sym_get_const(sym) != NULL, "42 as constant is NULL"); TEST_PREDICATE(_Py_uop_sym_get_const(sym) == val_42, "42 as constant isn't 42"); + TEST_PREDICATE(_Py_uop_sym_is_immortal(sym), "42 is not immortal"); _Py_uop_sym_set_type(ctx, sym, &PyLong_Type); // Should be a no-op TEST_PREDICATE(_Py_uop_sym_matches_type(sym, &PyLong_Type), "(42 and 42) isn't an int"); @@ -518,6 +696,9 @@ _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) _Py_uop_sym_set_type(ctx, sym, &PyFloat_Type); // Should make it bottom TEST_PREDICATE(_Py_uop_sym_is_bottom(sym), "(42 and float) isn't bottom"); + sym = _Py_uop_sym_new_type(ctx, &PyBool_Type); + TEST_PREDICATE(_Py_uop_sym_is_immortal(sym), "a bool is not immortal"); + sym = _Py_uop_sym_new_type(ctx, &PyLong_Type); if (sym == NULL) { goto fail; @@ -534,15 +715,37 @@ _Py_uop_symbols_test(PyObject *Py_UNUSED(self), PyObject *Py_UNUSED(ignored)) sym = _Py_uop_sym_new_const(ctx, PyLong_FromLong(0)); TEST_PREDICATE(_Py_uop_sym_truthiness(sym) == 0, "bool(0) is not False"); + JitOptSymbol *i1 = _Py_uop_sym_new_type(ctx, &PyFloat_Type); + JitOptSymbol *i2 = _Py_uop_sym_new_const(ctx, val_43); + JitOptSymbol *array[2] = { i1, i2 }; + sym = _Py_uop_sym_new_tuple(ctx, 2, array); + TEST_PREDICATE( + _Py_uop_sym_matches_type(_Py_uop_sym_tuple_getitem(ctx, sym, 0), &PyFloat_Type), + "tuple item does not match value used to create tuple" + ); + TEST_PREDICATE( + _Py_uop_sym_get_const(_Py_uop_sym_tuple_getitem(ctx, sym, 1)) == val_43, + "tuple item does not match value used to create tuple" + ); + PyObject *pair[2] = { val_42, val_43 }; + PyObject *tuple = _PyTuple_FromArray(pair, 2); + sym = _Py_uop_sym_new_const(ctx, tuple); + TEST_PREDICATE( + _Py_uop_sym_get_const(_Py_uop_sym_tuple_getitem(ctx, sym, 1)) == val_43, + "tuple item does not match value used to create tuple" + ); + _Py_uop_abstractcontext_fini(ctx); Py_DECREF(val_42); Py_DECREF(val_43); + Py_DECREF(tuple); Py_RETURN_NONE; fail: _Py_uop_abstractcontext_fini(ctx); Py_XDECREF(val_42); Py_XDECREF(val_43); + Py_DECREF(tuple); return NULL; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 9a8a92aaceb0be..f357ddfbcfb033 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -111,23 +111,7 @@ static void call_ll_exitfuncs(_PyRuntimeState *runtime); _Py_COMP_DIAG_PUSH _Py_COMP_DIAG_IGNORE_DEPR_DECLS -#if defined(MS_WINDOWS) - -#pragma section("PyRuntime", read, write) -__declspec(allocate("PyRuntime")) - -#elif defined(__APPLE__) - -__attribute__(( - section(SEG_DATA ",PyRuntime") -)) - -#endif - -_PyRuntimeState _PyRuntime -#if defined(__linux__) && (defined(__GNUC__) || defined(__clang__)) -__attribute__ ((section (".PyRuntime"))) -#endif +GENERATE_DEBUG_SECTION(PyRuntime, _PyRuntimeState _PyRuntime) = _PyRuntimeState_INIT(_PyRuntime, _Py_Debug_Cookie); _Py_COMP_DIAG_POP @@ -444,40 +428,6 @@ interpreter_update_config(PyThreadState *tstate, int only_update_path_config) } -int -_PyInterpreterState_SetConfig(const PyConfig *src_config) -{ - PyThreadState *tstate = _PyThreadState_GET(); - int res = -1; - - PyConfig config; - PyConfig_InitPythonConfig(&config); - PyStatus status = _PyConfig_Copy(&config, src_config); - if (_PyStatus_EXCEPTION(status)) { 
- _PyErr_SetFromPyStatus(status); - goto done; - } - - status = _PyConfig_Read(&config, 1); - if (_PyStatus_EXCEPTION(status)) { - _PyErr_SetFromPyStatus(status); - goto done; - } - - status = _PyConfig_Copy(&tstate->interp->config, &config); - if (_PyStatus_EXCEPTION(status)) { - _PyErr_SetFromPyStatus(status); - goto done; - } - - res = interpreter_update_config(tstate, 0); - -done: - PyConfig_Clear(&config); - return res; -} - - /* Global initializations. Can be undone by Py_Finalize(). Don't call this twice without an intervening Py_Finalize() call. @@ -707,7 +657,12 @@ pycore_create_interpreter(_PyRuntimeState *runtime, // the settings are loaded (so that feature_flags are set) but before // any calls are made to obmalloc functions. if (_PyMem_init_obmalloc(interp) < 0) { - return _PyStatus_NO_MEMORY(); + return _PyStatus_NO_MEMORY(); + } + + status = _PyTraceMalloc_Init(); + if (_PyStatus_EXCEPTION(status)) { + return status; } PyThreadState *tstate = _PyThreadState_New(interp, @@ -1500,18 +1455,6 @@ Py_Initialize(void) } -PyStatus -_Py_InitializeMain(void) -{ - PyStatus status = _PyRuntime_Initialize(); - if (_PyStatus_EXCEPTION(status)) { - return status; - } - PyThreadState *tstate = _PyThreadState_GET(); - return pyinit_main(tstate); -} - - static void finalize_modules_delete_special(PyThreadState *tstate, int verbose) { @@ -2223,6 +2166,7 @@ _Py_Finalize(_PyRuntimeState *runtime) finalize_interp_clear(tstate); + #ifdef Py_TRACE_REFS /* Display addresses (& refcnts) of all objects still alive. * An address can be used to find the repr of the object, printed diff --git a/Python/pystate.c b/Python/pystate.c index c546b7c3a9f10e..26047edb459480 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1515,6 +1515,7 @@ init_threadstate(_PyThreadStateImpl *_tstate, tstate->dict_global_version = 0; _tstate->asyncio_running_loop = NULL; + _tstate->asyncio_running_task = NULL; tstate->delete_later = NULL; @@ -1697,6 +1698,7 @@ PyThreadState_Clear(PyThreadState *tstate) Py_CLEAR(tstate->threading_local_sentinel); Py_CLEAR(((_PyThreadStateImpl *)tstate)->asyncio_running_loop); + Py_CLEAR(((_PyThreadStateImpl *)tstate)->asyncio_running_task); Py_CLEAR(tstate->dict); Py_CLEAR(tstate->async_exc); @@ -2880,24 +2882,9 @@ _PyInterpreterState_GetConfig(PyInterpreterState *interp) } -int -_PyInterpreterState_GetConfigCopy(PyConfig *config) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - - PyStatus status = _PyConfig_Copy(config, &interp->config); - if (PyStatus_Exception(status)) { - _PyErr_SetFromPyStatus(status); - return -1; - } - return 0; -} - - const PyConfig* _Py_GetConfig(void) { - assert(PyGILState_Check()); PyThreadState *tstate = current_fast_get(); _Py_EnsureTstateNotNULL(tstate); return _PyInterpreterState_GetConfig(tstate->interp); diff --git a/Python/pytime.c b/Python/pytime.c index 2b37cd991ef4e4..c039fc98ce4bde 100644 --- a/Python/pytime.c +++ b/Python/pytime.c @@ -1,5 +1,6 @@ #include "Python.h" #include "pycore_time.h" // PyTime_t +#include "pycore_pystate.h" // _Py_AssertHoldsTstate() #include // gmtime_r() #ifdef HAVE_SYS_TIME_H @@ -897,14 +898,14 @@ _PyTime_AsTimespec(PyTime_t t, struct timespec *ts) #endif -// N.B. If raise_exc=0, this may be called without the GIL. +// N.B. If raise_exc=0, this may be called without a thread state. 
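+// When raise_exc is non-zero, the caller must have an attached thread state
+// so that failures can be reported as Python exceptions.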
static int py_get_system_clock(PyTime_t *tp, _Py_clock_info_t *info, int raise_exc) { assert(info == NULL || raise_exc); if (raise_exc) { - // raise_exc requires to hold the GIL - assert(PyGILState_Check()); + // raise_exc requires to hold a thread state + _Py_AssertHoldsTstate(); } #ifdef MS_WINDOWS @@ -1142,14 +1143,14 @@ py_mach_timebase_info(_PyTimeFraction *base, int raise_exc) #endif -// N.B. If raise_exc=0, this may be called without the GIL. +// N.B. If raise_exc=0, this may be called without a thread state. static int py_get_monotonic_clock(PyTime_t *tp, _Py_clock_info_t *info, int raise_exc) { assert(info == NULL || raise_exc); if (raise_exc) { - // raise_exc requires to hold the GIL - assert(PyGILState_Check()); + // raise_exc requires to hold a thread state + _Py_AssertHoldsTstate(); } #if defined(MS_WINDOWS) diff --git a/Python/specialize.c b/Python/specialize.c index 8d9f19c8895187..fa022346bdea6a 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -1174,7 +1174,7 @@ do_specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* assert(tp_version != 0); write_u32(lm_cache->type_version, tp_version); /* borrowed */ - write_obj(lm_cache->descr, fget); + write_ptr(lm_cache->descr, fget); specialize(instr, LOAD_ATTR_PROPERTY); return 0; } @@ -1254,7 +1254,7 @@ do_specialize_instance_load_attr(PyObject* owner, _Py_CODEUNIT* instr, PyObject* #endif write_u32(lm_cache->keys_version, version); /* borrowed */ - write_obj(lm_cache->descr, descr); + write_ptr(lm_cache->descr, descr); write_u32(lm_cache->type_version, tp_version); specialize(instr, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN); return 0; @@ -1534,7 +1534,7 @@ specialize_class_load_attr(PyObject *owner, _Py_CODEUNIT *instr, } #endif write_u32(cache->type_version, tp_version); - write_obj(cache->descr, descr); + write_ptr(cache->descr, descr); if (metaclass_check) { write_u32(cache->keys_version, meta_version); specialize(instr, LOAD_ATTR_CLASS_WITH_METACLASS_CHECK); @@ -1642,7 +1642,7 @@ specialize_attr_loadclassattr(PyObject *owner, _Py_CODEUNIT *instr, * working since Python 2.6 and it's battle-tested. 
*/ write_u32(cache->type_version, tp_version); - write_obj(cache->descr, descr); + write_ptr(cache->descr, descr); return 1; } @@ -2412,6 +2412,110 @@ binary_op_fail_kind(int oparg, PyObject *lhs, PyObject *rhs) } #endif +/** Binary Op Specialization Extensions */ + +/* float-long */ + +static inline int +float_compactlong_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + PyFloat_CheckExact(lhs) && + !isnan(PyFloat_AsDouble(lhs)) && + PyLong_CheckExact(rhs) && + _PyLong_IsCompact((PyLongObject *)rhs) + ); +} + +static inline int +nonzero_float_compactlong_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + float_compactlong_guard(lhs, rhs) && !PyLong_IsZero(rhs) + ); +} + +#define FLOAT_LONG_ACTION(NAME, OP) \ + static PyObject * \ + (NAME)(PyObject *lhs, PyObject *rhs) \ + { \ + double lhs_val = PyFloat_AsDouble(lhs); \ + Py_ssize_t rhs_val = _PyLong_CompactValue((PyLongObject *)rhs); \ + return PyFloat_FromDouble(lhs_val OP rhs_val); \ + } +FLOAT_LONG_ACTION(float_compactlong_add, +) +FLOAT_LONG_ACTION(float_compactlong_subtract, -) +FLOAT_LONG_ACTION(float_compactlong_multiply, *) +FLOAT_LONG_ACTION(float_compactlong_true_div, /) +#undef FLOAT_LONG_ACTION + +/* long-float */ + +static inline int +compactlong_float_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + PyLong_CheckExact(lhs) && + _PyLong_IsCompact((PyLongObject *)lhs) && + PyFloat_CheckExact(rhs) && + !isnan(PyFloat_AsDouble(rhs)) + ); +} + +static inline int +nonzero_compactlong_float_guard(PyObject *lhs, PyObject *rhs) +{ + return ( + compactlong_float_guard(lhs, rhs) && PyFloat_AsDouble(rhs) != 0.0 + ); +} + +#define LONG_FLOAT_ACTION(NAME, OP) \ + static PyObject * \ + (NAME)(PyObject *lhs, PyObject *rhs) \ + { \ + double rhs_val = PyFloat_AsDouble(rhs); \ + Py_ssize_t lhs_val = _PyLong_CompactValue((PyLongObject *)lhs); \ + return PyFloat_FromDouble(lhs_val OP rhs_val); \ + } +LONG_FLOAT_ACTION(compactlong_float_add, +) +LONG_FLOAT_ACTION(compactlong_float_subtract, -) +LONG_FLOAT_ACTION(compactlong_float_multiply, *) +LONG_FLOAT_ACTION(compactlong_float_true_div, /) +#undef LONG_FLOAT_ACTION + +static _PyBinaryOpSpecializationDescr float_compactlong_specs[NB_OPARG_LAST+1] = { + [NB_ADD] = {float_compactlong_guard, float_compactlong_add}, + [NB_SUBTRACT] = {float_compactlong_guard, float_compactlong_subtract}, + [NB_TRUE_DIVIDE] = {nonzero_float_compactlong_guard, float_compactlong_true_div}, + [NB_MULTIPLY] = {float_compactlong_guard, float_compactlong_multiply}, +}; + +static _PyBinaryOpSpecializationDescr compactlong_float_specs[NB_OPARG_LAST+1] = { + [NB_ADD] = {compactlong_float_guard, compactlong_float_add}, + [NB_SUBTRACT] = {compactlong_float_guard, compactlong_float_subtract}, + [NB_TRUE_DIVIDE] = {nonzero_compactlong_float_guard, compactlong_float_true_div}, + [NB_MULTIPLY] = {compactlong_float_guard, compactlong_float_multiply}, +}; + +static int +binary_op_extended_specialization(PyObject *lhs, PyObject *rhs, int oparg, + _PyBinaryOpSpecializationDescr **descr) +{ +#define LOOKUP_SPEC(TABLE, OPARG) \ + if ((TABLE)[(OPARG)].action) { \ + if ((TABLE)[(OPARG)].guard(lhs, rhs)) { \ + *descr = &((TABLE)[OPARG]); \ + return 1; \ + } \ + } + + LOOKUP_SPEC(compactlong_float_specs, oparg); + LOOKUP_SPEC(float_compactlong_specs, oparg); +#undef LOOKUP_SPEC + return 0; +} + void _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *instr, int oparg, _PyStackRef *locals) @@ -2420,6 +2524,12 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in PyObject *rhs = 
PyStackRef_AsPyObjectBorrow(rhs_st); assert(ENABLE_SPECIALIZATION_FT); assert(_PyOpcode_Caches[BINARY_OP] == INLINE_CACHE_ENTRIES_BINARY_OP); + + _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(instr + 1); + if (instr->op.code == BINARY_OP_EXTEND) { + write_ptr(cache->external_cache, NULL); + } + switch (oparg) { case NB_ADD: case NB_INPLACE_ADD: @@ -2474,8 +2584,17 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in } break; } + + _PyBinaryOpSpecializationDescr *descr; + if (binary_op_extended_specialization(lhs, rhs, oparg, &descr)) { + specialize(instr, BINARY_OP_EXTEND); + write_ptr(cache->external_cache, (void*)descr); + return; + } + SPECIALIZATION_FAIL(BINARY_OP, binary_op_fail_kind(oparg, lhs, rhs)); unspecialize(instr); + return; } diff --git a/Python/traceback.c b/Python/traceback.c index e819909b6045c3..62387f12392265 100644 --- a/Python/traceback.c +++ b/Python/traceback.c @@ -890,7 +890,7 @@ _Py_DumpASCII(int fd, PyObject *text) static void dump_frame(int fd, _PyInterpreterFrame *frame) { - assert(frame->owner != FRAME_OWNED_BY_CSTACK); + assert(frame->owner < FRAME_OWNED_BY_INTERPRETER); PyCodeObject *code =_PyFrame_GetCode(frame); PUTS(fd, " File "); @@ -965,7 +965,7 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) unsigned int depth = 0; while (1) { - if (frame->owner == FRAME_OWNED_BY_CSTACK) { + if (frame->owner == FRAME_OWNED_BY_INTERPRETER) { /* Trampoline frame */ frame = frame->previous; if (frame == NULL) { @@ -973,7 +973,7 @@ dump_traceback(int fd, PyThreadState *tstate, int write_header) } /* Can't have more than one shim frame in a row */ - assert(frame->owner != FRAME_OWNED_BY_CSTACK); + assert(frame->owner != FRAME_OWNED_BY_INTERPRETER); } if (MAX_FRAME_DEPTH <= depth) { diff --git a/Python/tracemalloc.c b/Python/tracemalloc.c index f661d69c0312fa..d69b0ebd585a7f 100644 --- a/Python/tracemalloc.c +++ b/Python/tracemalloc.c @@ -2,6 +2,8 @@ #include "pycore_fileutils.h" // _Py_write_noraise() #include "pycore_gc.h" // PyGC_Head #include "pycore_hashtable.h" // _Py_hashtable_t +#include "pycore_initconfig.h" // _PyStatus_NO_MEMORY() +#include "pycore_lock.h" // PyMutex_LockFlags() #include "pycore_object.h" // _PyType_PreHeaderSize() #include "pycore_pymem.h" // _Py_tracemalloc_config #include "pycore_runtime.h" // _Py_ID() @@ -19,6 +21,8 @@ _Py_DECLARE_STR(anon_unknown, ""); /* Forward declaration */ static void* raw_malloc(size_t size); static void raw_free(void *ptr); +static int _PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, + void* Py_UNUSED(ignore)); #ifdef Py_DEBUG # define TRACE_DEBUG @@ -30,18 +34,12 @@ static void raw_free(void *ptr); #define allocators _PyRuntime.tracemalloc.allocators -#if defined(TRACE_RAW_MALLOC) /* This lock is needed because tracemalloc_free() is called without the GIL held from PyMem_RawFree(). It cannot acquire the lock because it would introduce a deadlock in _PyThreadState_DeleteCurrent(). */ -# define tables_lock _PyRuntime.tracemalloc.tables_lock -# define TABLES_LOCK() PyThread_acquire_lock(tables_lock, 1) -# define TABLES_UNLOCK() PyThread_release_lock(tables_lock) -#else - /* variables are protected by the GIL */ -# define TABLES_LOCK() -# define TABLES_UNLOCK() -#endif +#define tables_lock _PyRuntime.tracemalloc.tables_lock +#define TABLES_LOCK() PyMutex_LockFlags(&tables_lock, _Py_LOCK_DONT_DETACH) +#define TABLES_UNLOCK() PyMutex_Unlock(&tables_lock) #define DEFAULT_DOMAIN 0 @@ -95,9 +93,6 @@ tracemalloc_error(const char *format, ...) 
#endif -#if defined(TRACE_RAW_MALLOC) -#define REENTRANT_THREADLOCAL - #define tracemalloc_reentrant_key _PyRuntime.tracemalloc.reentrant_key /* Any non-NULL pointer can be used */ @@ -106,16 +101,16 @@ tracemalloc_error(const char *format, ...) static int get_reentrant(void) { - void *ptr; - assert(PyThread_tss_is_created(&tracemalloc_reentrant_key)); - ptr = PyThread_tss_get(&tracemalloc_reentrant_key); + + void *ptr = PyThread_tss_get(&tracemalloc_reentrant_key); if (ptr != NULL) { assert(ptr == REENTRANT); return 1; } - else + else { return 0; + } } static void @@ -134,25 +129,6 @@ set_reentrant(int reentrant) } } -#else - -/* TRACE_RAW_MALLOC not defined: variable protected by the GIL */ -static int tracemalloc_reentrant = 0; - -static int -get_reentrant(void) -{ - return tracemalloc_reentrant; -} - -static void -set_reentrant(int reentrant) -{ - assert(reentrant != tracemalloc_reentrant); - tracemalloc_reentrant = reentrant; -} -#endif - static Py_uhash_t hashtable_hash_pyobject(const void *key) @@ -252,6 +228,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) { assert(PyStackRef_CodeCheck(pyframe->f_executable)); frame->filename = &_Py_STR(anon_unknown); + int lineno = PyUnstable_InterpreterFrame_GetLine(pyframe); if (lineno < 0) { lineno = 0; @@ -259,7 +236,6 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) frame->lineno = (unsigned int)lineno; PyObject *filename = filename = _PyFrame_GetCode(pyframe)->co_filename; - if (filename == NULL) { #ifdef TRACE_DEBUG tracemalloc_error("failed to get the filename of the code object"); @@ -275,7 +251,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) } if (!PyUnicode_IS_READY(filename)) { /* Don't make a Unicode string ready to avoid reentrant calls - to tracemalloc_malloc() or tracemalloc_realloc() */ + to tracemalloc_alloc() or tracemalloc_realloc() */ #ifdef TRACE_DEBUG tracemalloc_error("filename is not a ready unicode string"); #endif @@ -309,7 +285,7 @@ tracemalloc_get_frame(_PyInterpreterFrame *pyframe, frame_t *frame) static Py_uhash_t traceback_hash(traceback_t *traceback) { - /* code based on tuplehash() of Objects/tupleobject.c */ + /* code based on tuple_hash() of Objects/tupleobject.c */ Py_uhash_t x, y; /* Unsigned for defined overflow behavior. 
*/
int len = traceback->nframe;
Py_uhash_t mult = PyHASH_MULTIPLIER;
@@ -335,13 +311,8 @@ traceback_hash(traceback_t *traceback)
static void
traceback_get_frames(traceback_t *traceback)
{
- PyThreadState *tstate = PyGILState_GetThisThreadState();
- if (tstate == NULL) {
-#ifdef TRACE_DEBUG
- tracemalloc_error("failed to get the current thread state");
-#endif
- return;
- }
+ PyThreadState *tstate = _PyThreadState_GET();
+ assert(tstate != NULL);
_PyInterpreterFrame *pyframe = _PyThreadState_GetFrame(tstate);
while (pyframe) {
@@ -364,7 +335,7 @@ traceback_new(void)
traceback_t *traceback;
_Py_hashtable_entry_t *entry;
- assert(PyGILState_Check());
+ _Py_AssertHoldsTstate();
/* get frames */
traceback = tracemalloc_traceback;
@@ -440,7 +411,7 @@ tracemalloc_get_traces_table(unsigned int domain)
static void
-tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr)
+tracemalloc_remove_trace_unlocked(unsigned int domain, uintptr_t ptr)
{
assert(tracemalloc_config.tracing);
@@ -459,12 +430,12 @@ tracemalloc_remove_trace(unsigned int domain, uintptr_t ptr)
}
#define REMOVE_TRACE(ptr) \
- tracemalloc_remove_trace(DEFAULT_DOMAIN, (uintptr_t)(ptr))
+ tracemalloc_remove_trace_unlocked(DEFAULT_DOMAIN, (uintptr_t)(ptr))
static int
-tracemalloc_add_trace(unsigned int domain, uintptr_t ptr,
- size_t size)
+tracemalloc_add_trace_unlocked(unsigned int domain, uintptr_t ptr,
+ size_t size)
{
assert(tracemalloc_config.tracing);
@@ -519,82 +490,147 @@ tracemalloc_add_trace(unsigned int domain, uintptr_t ptr,
}
#define ADD_TRACE(ptr, size) \
- tracemalloc_add_trace(DEFAULT_DOMAIN, (uintptr_t)(ptr), size)
+ tracemalloc_add_trace_unlocked(DEFAULT_DOMAIN, (uintptr_t)(ptr), size)
static void*
-tracemalloc_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize)
+tracemalloc_alloc(int need_gil, int use_calloc,
+ void *ctx, size_t nelem, size_t elsize)
{
- PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
- void *ptr;
-
assert(elsize == 0 || nelem <= SIZE_MAX / elsize);
- if (use_calloc)
+ int reentrant = get_reentrant();
+
+ // Ignore reentrant call.
+ //
+ // For example, PyObject_Malloc() calls
+ // PyMem_Malloc() for allocations larger than 512 bytes: don't trace the
+ // same memory allocation twice.
+ //
+ // If reentrant calls are not ignored, PyGILState_Ensure() can call
+ // PyMem_RawMalloc() which would call PyGILState_Ensure() again in a loop.
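+ // Set the reentrant flag around the call into the underlying allocator so
+ // that any nested allocation made while this call is in progress is not
+ // traced a second time.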
+ if (!reentrant) {
+ set_reentrant(1);
+ }
+
+ PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
+ void *ptr;
+ if (use_calloc) {
ptr = alloc->calloc(alloc->ctx, nelem, elsize);
- else
+ }
+ else {
ptr = alloc->malloc(alloc->ctx, nelem * elsize);
- if (ptr == NULL)
- return NULL;
+ }
+
+ if (ptr == NULL) {
+ goto done;
+ }
+ if (reentrant) {
+ goto done;
+ }
+ PyGILState_STATE gil_state;
+ if (need_gil) {
+ gil_state = PyGILState_Ensure();
+ }
TABLES_LOCK();
- if (ADD_TRACE(ptr, nelem * elsize) < 0) {
- /* Failed to allocate a trace for the new memory block */
- TABLES_UNLOCK();
- alloc->free(alloc->ctx, ptr);
- return NULL;
+
+ if (tracemalloc_config.tracing) {
+ if (ADD_TRACE(ptr, nelem * elsize) < 0) {
+ // Failed to allocate a trace for the new memory block
+ alloc->free(alloc->ctx, ptr);
+ ptr = NULL;
+ }
}
+ // else: gh-128679: tracemalloc.stop() was called by another thread
+
TABLES_UNLOCK();
+ if (need_gil) {
+ PyGILState_Release(gil_state);
+ }
+
+done:
+ if (!reentrant) {
+ set_reentrant(0);
+ }
return ptr;
}
static void*
-tracemalloc_realloc(void *ctx, void *ptr, size_t new_size)
+tracemalloc_realloc(int need_gil, void *ctx, void *ptr, size_t new_size)
{
+ int reentrant = get_reentrant();
+
+ // Ignore reentrant call. PyObject_Realloc() calls PyMem_Realloc() for
+ // allocations larger than 512 bytes: don't trace the same memory block
+ // twice.
+ if (!reentrant) {
+ set_reentrant(1);
+ }
+
PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx;
- void *ptr2;
+ void *ptr2 = alloc->realloc(alloc->ctx, ptr, new_size);
- ptr2 = alloc->realloc(alloc->ctx, ptr, new_size);
- if (ptr2 == NULL)
- return NULL;
+ if (ptr2 == NULL) {
+ goto done;
+ }
+ if (reentrant) {
+ goto done;
+ }
- if (ptr != NULL) {
- /* an existing memory block has been resized */
+ PyGILState_STATE gil_state;
+ if (need_gil) {
+ gil_state = PyGILState_Ensure();
+ }
+ TABLES_LOCK();
- TABLES_LOCK();
+ if (!tracemalloc_config.tracing) {
+ // gh-128679: tracemalloc.stop() was called by another thread
+ goto unlock;
+ }
+
+ if (ptr != NULL) {
+ // An existing memory block has been resized
- /* tracemalloc_add_trace() updates the trace if there is already
- a trace at address ptr2 */
+ // tracemalloc_add_trace_unlocked() updates the trace if there is
+ // already a trace at address ptr2.
if (ptr2 != ptr) {
REMOVE_TRACE(ptr);
}
if (ADD_TRACE(ptr2, new_size) < 0) {
- /* Memory allocation failed. The error cannot be reported to
- the caller, because realloc() may already have shrunk the
- memory block and so removed bytes.
-
- This case is very unlikely: a hash entry has just been
- released, so the hash table should have at least one free entry.
-
- The GIL and the table lock ensures that only one thread is
- allocating memory. */
+ // Memory allocation failed. The error cannot be reported to the
+ // caller, because realloc() may already have shrunk the memory block
+ // and so removed bytes.
+ //
+ // This case is very unlikely: a hash entry has just been released,
+ // so the hash table should have at least one free entry.
+ //
+ // The GIL and the table lock ensure that only one thread is
+ // allocating memory.
Py_FatalError("tracemalloc_realloc() failed to allocate a trace"); } - TABLES_UNLOCK(); } else { - /* new allocation */ + // New allocation - TABLES_LOCK(); if (ADD_TRACE(ptr2, new_size) < 0) { - /* Failed to allocate a trace for the new memory block */ - TABLES_UNLOCK(); + // Failed to allocate a trace for the new memory block alloc->free(alloc->ctx, ptr2); - return NULL; + ptr2 = NULL; } - TABLES_UNLOCK(); + } + +unlock: + TABLES_UNLOCK(); + if (need_gil) { + PyGILState_Release(gil_state); + } + +done: + if (!reentrant) { + set_reentrant(0); } return ptr2; } @@ -603,170 +639,68 @@ tracemalloc_realloc(void *ctx, void *ptr, size_t new_size) static void tracemalloc_free(void *ctx, void *ptr) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - - if (ptr == NULL) + if (ptr == NULL) { return; + } - /* GIL cannot be locked in PyMem_RawFree() because it would introduce - a deadlock in _PyThreadState_DeleteCurrent(). */ - + PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; alloc->free(alloc->ctx, ptr); - TABLES_LOCK(); - REMOVE_TRACE(ptr); - TABLES_UNLOCK(); -} - - -static void* -tracemalloc_alloc_gil(int use_calloc, void *ctx, size_t nelem, size_t elsize) -{ - void *ptr; - if (get_reentrant()) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (use_calloc) - return alloc->calloc(alloc->ctx, nelem, elsize); - else - return alloc->malloc(alloc->ctx, nelem * elsize); + return; } - /* Ignore reentrant call. PyObjet_Malloc() calls PyMem_Malloc() for - allocations larger than 512 bytes, don't trace the same memory - allocation twice. */ - set_reentrant(1); + TABLES_LOCK(); - ptr = tracemalloc_alloc(use_calloc, ctx, nelem, elsize); + if (tracemalloc_config.tracing) { + REMOVE_TRACE(ptr); + } + // else: gh-128679: tracemalloc.stop() was called by another thread - set_reentrant(0); - return ptr; + TABLES_UNLOCK(); } static void* tracemalloc_malloc_gil(void *ctx, size_t size) { - return tracemalloc_alloc_gil(0, ctx, 1, size); + return tracemalloc_alloc(0, 0, ctx, 1, size); } static void* tracemalloc_calloc_gil(void *ctx, size_t nelem, size_t elsize) { - return tracemalloc_alloc_gil(1, ctx, nelem, elsize); + return tracemalloc_alloc(0, 1, ctx, nelem, elsize); } static void* tracemalloc_realloc_gil(void *ctx, void *ptr, size_t new_size) { - void *ptr2; - - if (get_reentrant()) { - /* Reentrant call to PyMem_Realloc() and PyMem_RawRealloc(). - Example: PyMem_RawRealloc() is called internally by pymalloc - (_PyObject_Malloc() and _PyObject_Realloc()) to allocate a new - arena (new_arena()). */ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - - ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); - if (ptr2 != NULL && ptr != NULL) { - TABLES_LOCK(); - REMOVE_TRACE(ptr); - TABLES_UNLOCK(); - } - return ptr2; - } - - /* Ignore reentrant call. PyObjet_Realloc() calls PyMem_Realloc() for - allocations larger than 512 bytes. Don't trace the same memory - allocation twice. */ - set_reentrant(1); - - ptr2 = tracemalloc_realloc(ctx, ptr, new_size); - - set_reentrant(0); - return ptr2; -} - - -#ifdef TRACE_RAW_MALLOC -static void* -tracemalloc_raw_alloc(int use_calloc, void *ctx, size_t nelem, size_t elsize) -{ - PyGILState_STATE gil_state; - void *ptr; - - if (get_reentrant()) { - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - if (use_calloc) - return alloc->calloc(alloc->ctx, nelem, elsize); - else - return alloc->malloc(alloc->ctx, nelem * elsize); - } - - /* Ignore reentrant call. 
PyGILState_Ensure() may call PyMem_RawMalloc() - indirectly which would call PyGILState_Ensure() if reentrant are not - disabled. */ - set_reentrant(1); - - gil_state = PyGILState_Ensure(); - ptr = tracemalloc_alloc(use_calloc, ctx, nelem, elsize); - PyGILState_Release(gil_state); - - set_reentrant(0); - return ptr; + return tracemalloc_realloc(0, ctx, ptr, new_size); } static void* tracemalloc_raw_malloc(void *ctx, size_t size) { - return tracemalloc_raw_alloc(0, ctx, 1, size); + return tracemalloc_alloc(1, 0, ctx, 1, size); } static void* tracemalloc_raw_calloc(void *ctx, size_t nelem, size_t elsize) { - return tracemalloc_raw_alloc(1, ctx, nelem, elsize); + return tracemalloc_alloc(1, 1, ctx, nelem, elsize); } static void* tracemalloc_raw_realloc(void *ctx, void *ptr, size_t new_size) { - PyGILState_STATE gil_state; - void *ptr2; - - if (get_reentrant()) { - /* Reentrant call to PyMem_RawRealloc(). */ - PyMemAllocatorEx *alloc = (PyMemAllocatorEx *)ctx; - - ptr2 = alloc->realloc(alloc->ctx, ptr, new_size); - - if (ptr2 != NULL && ptr != NULL) { - TABLES_LOCK(); - REMOVE_TRACE(ptr); - TABLES_UNLOCK(); - } - return ptr2; - } - - /* Ignore reentrant call. PyGILState_Ensure() may call PyMem_RawMalloc() - indirectly which would call PyGILState_Ensure() if reentrant calls are - not disabled. */ - set_reentrant(1); - - gil_state = PyGILState_Ensure(); - ptr2 = tracemalloc_realloc(ctx, ptr, new_size); - PyGILState_Release(gil_state); - - set_reentrant(0); - return ptr2; + return tracemalloc_realloc(1, ctx, ptr, new_size); } -#endif /* TRACE_RAW_MALLOC */ static void @@ -777,60 +711,36 @@ tracemalloc_clear_filename(void *value) } -/* reentrant flag must be set to call this function and GIL must be held */ static void -tracemalloc_clear_traces(void) +tracemalloc_clear_traces_unlocked(void) { - /* The GIL protects variables against concurrent access */ - assert(PyGILState_Check()); + // Clearing tracemalloc_filenames requires the GIL to call Py_DECREF() + _Py_AssertHoldsTstate(); + + set_reentrant(1); - TABLES_LOCK(); _Py_hashtable_clear(tracemalloc_traces); _Py_hashtable_clear(tracemalloc_domains); + _Py_hashtable_clear(tracemalloc_tracebacks); + _Py_hashtable_clear(tracemalloc_filenames); + tracemalloc_traced_memory = 0; tracemalloc_peak_traced_memory = 0; - TABLES_UNLOCK(); - - _Py_hashtable_clear(tracemalloc_tracebacks); - _Py_hashtable_clear(tracemalloc_filenames); + set_reentrant(0); } -int +PyStatus _PyTraceMalloc_Init(void) { - if (tracemalloc_config.initialized == TRACEMALLOC_FINALIZED) { - PyErr_SetString(PyExc_RuntimeError, - "the tracemalloc module has been unloaded"); - return -1; - } - - if (tracemalloc_config.initialized == TRACEMALLOC_INITIALIZED) - return 0; + assert(tracemalloc_config.initialized == TRACEMALLOC_NOT_INITIALIZED); PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); -#ifdef REENTRANT_THREADLOCAL if (PyThread_tss_create(&tracemalloc_reentrant_key) != 0) { -#ifdef MS_WINDOWS - PyErr_SetFromWindowsErr(0); -#else - PyErr_SetFromErrno(PyExc_OSError); -#endif - return -1; - } -#endif - -#if defined(TRACE_RAW_MALLOC) - if (tables_lock == NULL) { - tables_lock = PyThread_allocate_lock(); - if (tables_lock == NULL) { - PyErr_SetString(PyExc_RuntimeError, "cannot allocate lock"); - return -1; - } + return _PyStatus_NO_MEMORY(); } -#endif tracemalloc_filenames = hashtable_new(hashtable_hash_pyobject, hashtable_compare_unicode, @@ -844,9 +754,9 @@ _PyTraceMalloc_Init(void) tracemalloc_domains = tracemalloc_create_domains_table(); if (tracemalloc_filenames == NULL || 
tracemalloc_tracebacks == NULL - || tracemalloc_traces == NULL || tracemalloc_domains == NULL) { - PyErr_NoMemory(); - return -1; + || tracemalloc_traces == NULL || tracemalloc_domains == NULL) + { + return _PyStatus_NO_MEMORY(); } tracemalloc_empty_traceback.nframe = 1; @@ -857,7 +767,7 @@ _PyTraceMalloc_Init(void) tracemalloc_empty_traceback.hash = traceback_hash(&tracemalloc_empty_traceback); tracemalloc_config.initialized = TRACEMALLOC_INITIALIZED; - return 0; + return _PyStatus_OK(); } @@ -876,25 +786,13 @@ tracemalloc_deinit(void) _Py_hashtable_destroy(tracemalloc_tracebacks); _Py_hashtable_destroy(tracemalloc_filenames); -#if defined(TRACE_RAW_MALLOC) - if (tables_lock != NULL) { - PyThread_free_lock(tables_lock); - tables_lock = NULL; - } -#endif - -#ifdef REENTRANT_THREADLOCAL PyThread_tss_delete(&tracemalloc_reentrant_key); -#endif } int _PyTraceMalloc_Start(int max_nframe) { - PyMemAllocatorEx alloc; - size_t size; - if (max_nframe < 1 || (unsigned long) max_nframe > MAX_NFRAME) { PyErr_Format(PyExc_ValueError, "the number of frames must be in range [1; %lu]", @@ -902,23 +800,15 @@ _PyTraceMalloc_Start(int max_nframe) return -1; } - if (_PyTraceMalloc_Init() < 0) { - return -1; - } - - if (PyRefTracer_SetTracer(_PyTraceMalloc_TraceRef, NULL) < 0) { - return -1; - } - - if (tracemalloc_config.tracing) { - /* hook already installed: do nothing */ + if (_PyTraceMalloc_IsTracing()) { + /* hooks already installed: do nothing */ return 0; } tracemalloc_config.max_nframe = max_nframe; /* allocate a buffer to store a new traceback */ - size = TRACEBACK_SIZE(max_nframe); + size_t size = TRACEBACK_SIZE(max_nframe); assert(tracemalloc_traceback == NULL); tracemalloc_traceback = raw_malloc(size); if (tracemalloc_traceback == NULL) { @@ -926,7 +816,7 @@ _PyTraceMalloc_Start(int max_nframe) return -1; } -#ifdef TRACE_RAW_MALLOC + PyMemAllocatorEx alloc; alloc.malloc = tracemalloc_raw_malloc; alloc.calloc = tracemalloc_raw_calloc; alloc.realloc = tracemalloc_raw_realloc; @@ -935,7 +825,6 @@ _PyTraceMalloc_Start(int max_nframe) alloc.ctx = &allocators.raw; PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &alloc); -#endif alloc.malloc = tracemalloc_malloc_gil; alloc.calloc = tracemalloc_calloc_gil; @@ -950,8 +839,14 @@ _PyTraceMalloc_Start(int max_nframe) PyMem_GetAllocator(PYMEM_DOMAIN_OBJ, &allocators.obj); PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &alloc); + if (PyRefTracer_SetTracer(_PyTraceMalloc_TraceRef, NULL) < 0) { + return -1; + } + /* everything is ready: start tracing Python memory allocations */ + TABLES_LOCK(); tracemalloc_config.tracing = 1; + TABLES_UNLOCK(); return 0; } @@ -960,24 +855,30 @@ _PyTraceMalloc_Start(int max_nframe) void _PyTraceMalloc_Stop(void) { - if (!tracemalloc_config.tracing) - return; + TABLES_LOCK(); + + if (!tracemalloc_config.tracing) { + goto done; + } /* stop tracing Python memory allocations */ tracemalloc_config.tracing = 0; /* unregister the hook on memory allocators */ -#ifdef TRACE_RAW_MALLOC PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &allocators.raw); -#endif PyMem_SetAllocator(PYMEM_DOMAIN_MEM, &allocators.mem); PyMem_SetAllocator(PYMEM_DOMAIN_OBJ, &allocators.obj); - tracemalloc_clear_traces(); + tracemalloc_clear_traces_unlocked(); /* release memory */ raw_free(tracemalloc_traceback); tracemalloc_traceback = NULL; + + (void)PyRefTracer_SetTracer(NULL, NULL); + +done: + TABLES_UNLOCK(); } @@ -985,15 +886,16 @@ _PyTraceMalloc_Stop(void) static PyObject* frame_to_pyobject(frame_t *frame) { - PyObject *frame_obj, 
*lineno_obj; + assert(get_reentrant()); - frame_obj = PyTuple_New(2); - if (frame_obj == NULL) + PyObject *frame_obj = PyTuple_New(2); + if (frame_obj == NULL) { return NULL; + } PyTuple_SET_ITEM(frame_obj, 0, Py_NewRef(frame->filename)); - lineno_obj = PyLong_FromUnsignedLong(frame->lineno); + PyObject *lineno_obj = PyLong_FromUnsignedLong(frame->lineno); if (lineno_obj == NULL) { Py_DECREF(frame_obj); return NULL; @@ -1008,7 +910,6 @@ static PyObject* traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) { PyObject *frames; - if (intern_table != NULL) { frames = _Py_hashtable_get(intern_table, (const void *)traceback); if (frames) { @@ -1017,8 +918,9 @@ traceback_to_pyobject(traceback_t *traceback, _Py_hashtable_t *intern_table) } frames = PyTuple_New(traceback->nframe); - if (frames == NULL) + if (frames == NULL) { return NULL; + } for (int i=0; i < traceback->nframe; i++) { PyObject *frame = frame_to_pyobject(&traceback->frames[i]); @@ -1046,14 +948,14 @@ static PyObject* trace_to_pyobject(unsigned int domain, const trace_t *trace, _Py_hashtable_t *intern_tracebacks) { - PyObject *trace_obj = NULL; - PyObject *obj; + assert(get_reentrant()); - trace_obj = PyTuple_New(4); - if (trace_obj == NULL) + PyObject *trace_obj = PyTuple_New(4); + if (trace_obj == NULL) { return NULL; + } - obj = PyLong_FromSize_t(domain); + PyObject *obj = PyLong_FromSize_t(domain); if (obj == NULL) { Py_DECREF(trace_obj); return NULL; @@ -1100,7 +1002,6 @@ tracemalloc_copy_trace(_Py_hashtable_t *traces, void *user_data) { _Py_hashtable_t *traces2 = (_Py_hashtable_t *)user_data; - trace_t *trace = (trace_t *)value; trace_t *trace2 = raw_malloc(sizeof(trace_t)); @@ -1141,7 +1042,6 @@ tracemalloc_copy_domain(_Py_hashtable_t *domains, void *user_data) { _Py_hashtable_t *domains2 = (_Py_hashtable_t *)user_data; - unsigned int domain = (unsigned int)FROM_PTR(key); _Py_hashtable_t *traces = (_Py_hashtable_t *)value; @@ -1182,7 +1082,6 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, void *user_data) { get_traces_t *get_traces = user_data; - const trace_t *trace = (const trace_t *)value; PyObject *tuple = trace_to_pyobject(get_traces->domain, trace, @@ -1196,7 +1095,6 @@ tracemalloc_get_traces_fill(_Py_hashtable_t *traces, if (res < 0) { return 1; } - return 0; } @@ -1207,7 +1105,6 @@ tracemalloc_get_traces_domain(_Py_hashtable_t *domains, void *user_data) { get_traces_t *get_traces = user_data; - unsigned int domain = (unsigned int)FROM_PTR(key); _Py_hashtable_t *traces = (_Py_hashtable_t *)value; @@ -1227,27 +1124,21 @@ tracemalloc_pyobject_decref(void *value) static traceback_t* -tracemalloc_get_traceback(unsigned int domain, uintptr_t ptr) +tracemalloc_get_traceback_unlocked(unsigned int domain, uintptr_t ptr) { - - if (!tracemalloc_config.tracing) + if (!tracemalloc_config.tracing) { return NULL; + } - trace_t *trace; - TABLES_LOCK(); _Py_hashtable_t *traces = tracemalloc_get_traces_table(domain); - if (traces) { - trace = _Py_hashtable_get(traces, TO_PTR(ptr)); - } - else { - trace = NULL; + if (!traces) { + return NULL; } - TABLES_UNLOCK(); + trace_t *trace = _Py_hashtable_get(traces, TO_PTR(ptr)); if (!trace) { return NULL; } - return trace->traceback; } @@ -1269,24 +1160,28 @@ _PyMem_DumpFrame(int fd, frame_t * frame) void _PyMem_DumpTraceback(int fd, const void *ptr) { - traceback_t *traceback; - int i; - + TABLES_LOCK(); if (!tracemalloc_config.tracing) { PUTS(fd, "Enable tracemalloc to get the memory block " "allocation traceback\n\n"); - return; + goto done; } - traceback = 
tracemalloc_get_traceback(DEFAULT_DOMAIN, (uintptr_t)ptr); - if (traceback == NULL) - return; + traceback_t *traceback; + traceback = tracemalloc_get_traceback_unlocked(DEFAULT_DOMAIN, + (uintptr_t)ptr); + if (traceback == NULL) { + goto done; + } PUTS(fd, "Memory block allocated at (most recent call first):\n"); - for (i=0; i < traceback->nframe; i++) { + for (int i=0; i < traceback->nframe; i++) { _PyMem_DumpFrame(fd, &traceback->frames[i]); } PUTS(fd, "\n"); + +done: + TABLES_UNLOCK(); } #undef PUTS @@ -1307,45 +1202,48 @@ int PyTraceMalloc_Track(unsigned int domain, uintptr_t ptr, size_t size) { - int res; - PyGILState_STATE gil_state; + PyGILState_STATE gil_state = PyGILState_Ensure(); + TABLES_LOCK(); - if (!tracemalloc_config.tracing) { + int result; + if (tracemalloc_config.tracing) { + result = tracemalloc_add_trace_unlocked(domain, ptr, size); + } + else { /* tracemalloc is not tracing: do nothing */ - return -2; + result = -2; } - gil_state = PyGILState_Ensure(); - - TABLES_LOCK(); - res = tracemalloc_add_trace(domain, ptr, size); TABLES_UNLOCK(); - PyGILState_Release(gil_state); - return res; + return result; } int PyTraceMalloc_Untrack(unsigned int domain, uintptr_t ptr) { - if (!tracemalloc_config.tracing) { + TABLES_LOCK(); + + int result; + if (tracemalloc_config.tracing) { + tracemalloc_remove_trace_unlocked(domain, ptr); + result = 0; + } + else { /* tracemalloc is not tracing: do nothing */ - return -2; + result = -2; } - TABLES_LOCK(); - tracemalloc_remove_trace(domain, ptr); TABLES_UNLOCK(); - - return 0; + return result; } void _PyTraceMalloc_Fini(void) { - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); tracemalloc_deinit(); } @@ -1355,87 +1253,102 @@ _PyTraceMalloc_Fini(void) Do nothing if tracemalloc is not tracing memory allocations or if the object memory block is not already traced. 
*/ -int -_PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, void* Py_UNUSED(ignore)) +static int +_PyTraceMalloc_TraceRef(PyObject *op, PyRefTracerEvent event, + void* Py_UNUSED(ignore)) { if (event != PyRefTracer_CREATE) { return 0; } + if (get_reentrant()) { + return 0; + } - assert(PyGILState_Check()); + _Py_AssertHoldsTstate(); + TABLES_LOCK(); if (!tracemalloc_config.tracing) { - /* tracemalloc is not tracing: do nothing */ - return -1; + goto done; } PyTypeObject *type = Py_TYPE(op); const size_t presize = _PyType_PreHeaderSize(type); uintptr_t ptr = (uintptr_t)((char *)op - presize); - int res = -1; - - TABLES_LOCK(); trace_t *trace = _Py_hashtable_get(tracemalloc_traces, TO_PTR(ptr)); if (trace != NULL) { /* update the traceback of the memory block */ traceback_t *traceback = traceback_new(); if (traceback != NULL) { trace->traceback = traceback; - res = 0; } } /* else: cannot track the object, its memory block size is unknown */ - TABLES_UNLOCK(); - return res; +done: + TABLES_UNLOCK(); + return 0; } PyObject* _PyTraceMalloc_GetTraceback(unsigned int domain, uintptr_t ptr) { - traceback_t *traceback; + TABLES_LOCK(); - traceback = tracemalloc_get_traceback(domain, ptr); - if (traceback == NULL) - Py_RETURN_NONE; + traceback_t *traceback = tracemalloc_get_traceback_unlocked(domain, ptr); + PyObject *result; + if (traceback) { + set_reentrant(1); + result = traceback_to_pyobject(traceback, NULL); + set_reentrant(0); + } + else { + result = Py_NewRef(Py_None); + } - return traceback_to_pyobject(traceback, NULL); + TABLES_UNLOCK(); + return result; } int _PyTraceMalloc_IsTracing(void) { - return tracemalloc_config.tracing; + TABLES_LOCK(); + int tracing = tracemalloc_config.tracing; + TABLES_UNLOCK(); + return tracing; } void _PyTraceMalloc_ClearTraces(void) { - - if (!tracemalloc_config.tracing) { - return; + TABLES_LOCK(); + if (tracemalloc_config.tracing) { + tracemalloc_clear_traces_unlocked(); } - set_reentrant(1); - tracemalloc_clear_traces(); - set_reentrant(0); + TABLES_UNLOCK(); } PyObject * _PyTraceMalloc_GetTraces(void) { + TABLES_LOCK(); + set_reentrant(1); + get_traces_t get_traces; get_traces.domain = DEFAULT_DOMAIN; get_traces.traces = NULL; get_traces.domains = NULL; get_traces.tracebacks = NULL; get_traces.list = PyList_New(0); - if (get_traces.list == NULL) - goto error; + if (get_traces.list == NULL) { + goto finally; + } - if (!tracemalloc_config.tracing) - return get_traces.list; + if (!tracemalloc_config.tracing) { + goto finally; + } /* the traceback hash table is used temporarily to intern traceback tuple of (filename, lineno) tuples */ @@ -1449,24 +1362,17 @@ _PyTraceMalloc_GetTraces(void) // Copy all traces so tracemalloc_get_traces_fill() doesn't have to disable // temporarily tracemalloc which would impact other threads and so would // miss allocations while get_traces() is called. 
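+ // The tables lock is held for the whole of this function, so the copies
+ // below do not need any additional locking.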
- TABLES_LOCK(); get_traces.traces = tracemalloc_copy_traces(tracemalloc_traces); - TABLES_UNLOCK(); - if (get_traces.traces == NULL) { goto no_memory; } - TABLES_LOCK(); get_traces.domains = tracemalloc_copy_domains(tracemalloc_domains); - TABLES_UNLOCK(); - if (get_traces.domains == NULL) { goto no_memory; } // Convert traces to a list of tuples - set_reentrant(1); int err = _Py_hashtable_foreach(get_traces.traces, tracemalloc_get_traces_fill, &get_traces); @@ -1475,20 +1381,22 @@ _PyTraceMalloc_GetTraces(void) tracemalloc_get_traces_domain, &get_traces); } - set_reentrant(0); + if (err) { - goto error; + Py_CLEAR(get_traces.list); + goto finally; } - goto finally; no_memory: PyErr_NoMemory(); - -error: Py_CLEAR(get_traces.list); + goto finally; finally: + set_reentrant(0); + TABLES_UNLOCK(); + if (get_traces.tracebacks != NULL) { _Py_hashtable_destroy(get_traces.tracebacks); } @@ -1506,37 +1414,33 @@ PyObject * _PyTraceMalloc_GetObjectTraceback(PyObject *obj) /*[clinic end generated code: output=41ee0553a658b0aa input=29495f1b21c53212]*/ { - PyTypeObject *type; - traceback_t *traceback; - - type = Py_TYPE(obj); + PyTypeObject *type = Py_TYPE(obj); const size_t presize = _PyType_PreHeaderSize(type); uintptr_t ptr = (uintptr_t)((char *)obj - presize); - - traceback = tracemalloc_get_traceback(DEFAULT_DOMAIN, ptr); - if (traceback == NULL) { - Py_RETURN_NONE; - } - - return traceback_to_pyobject(traceback, NULL); + return _PyTraceMalloc_GetTraceback(DEFAULT_DOMAIN, ptr); } -int _PyTraceMalloc_GetTracebackLimit(void) { +int _PyTraceMalloc_GetTracebackLimit(void) +{ return tracemalloc_config.max_nframe; } size_t -_PyTraceMalloc_GetMemory(void) { - +_PyTraceMalloc_GetMemory(void) +{ + TABLES_LOCK(); size_t size; + if (tracemalloc_config.tracing) { + size = _Py_hashtable_size(tracemalloc_tracebacks); + size += _Py_hashtable_size(tracemalloc_filenames); - size = _Py_hashtable_size(tracemalloc_tracebacks); - size += _Py_hashtable_size(tracemalloc_filenames); - - TABLES_LOCK(); - size += _Py_hashtable_size(tracemalloc_traces); - _Py_hashtable_foreach(tracemalloc_domains, - tracemalloc_get_tracemalloc_memory_cb, &size); + size += _Py_hashtable_size(tracemalloc_traces); + _Py_hashtable_foreach(tracemalloc_domains, + tracemalloc_get_tracemalloc_memory_cb, &size); + } + else { + size = 0; + } TABLES_UNLOCK(); return size; } @@ -1545,26 +1449,27 @@ _PyTraceMalloc_GetMemory(void) { PyObject * _PyTraceMalloc_GetTracedMemory(void) { - Py_ssize_t size, peak_size; - - if (!tracemalloc_config.tracing) - return Py_BuildValue("ii", 0, 0); - TABLES_LOCK(); - size = tracemalloc_traced_memory; - peak_size = tracemalloc_peak_traced_memory; + Py_ssize_t traced, peak; + if (tracemalloc_config.tracing) { + traced = tracemalloc_traced_memory; + peak = tracemalloc_peak_traced_memory; + } + else { + traced = 0; + peak = 0; + } TABLES_UNLOCK(); - return Py_BuildValue("nn", size, peak_size); + return Py_BuildValue("nn", traced, peak); } void _PyTraceMalloc_ResetPeak(void) { - if (!tracemalloc_config.tracing) { - return; - } TABLES_LOCK(); - tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + if (tracemalloc_config.tracing) { + tracemalloc_peak_traced_memory = tracemalloc_traced_memory; + } TABLES_UNLOCK(); } diff --git a/Python/uniqueid.c b/Python/uniqueid.c index b9f30713feeb57..64c3e6cfbbe825 100644 --- a/Python/uniqueid.c +++ b/Python/uniqueid.c @@ -86,7 +86,7 @@ _PyObject_AssignUniqueId(PyObject *obj) if (pool->freelist == NULL) { if (resize_interp_type_id_pool(pool) < 0) { UNLOCK_POOL(pool); - return -1; + 
return _Py_INVALID_UNIQUE_ID; } } @@ -94,7 +94,9 @@ _PyObject_AssignUniqueId(PyObject *obj) pool->freelist = entry->next; entry->obj = obj; _PyObject_SetDeferredRefcount(obj); - Py_ssize_t unique_id = (entry - pool->table); + // The unique id is one plus the index of the entry in the table. + Py_ssize_t unique_id = (entry - pool->table) + 1; + assert(unique_id > 0); UNLOCK_POOL(pool); return unique_id; } @@ -106,8 +108,9 @@ _PyObject_ReleaseUniqueId(Py_ssize_t unique_id) struct _Py_unique_id_pool *pool = &interp->unique_ids; LOCK_POOL(pool); - assert(unique_id >= 0 && unique_id < pool->size); - _Py_unique_id_entry *entry = &pool->table[unique_id]; + assert(unique_id > 0 && unique_id <= pool->size); + Py_ssize_t idx = unique_id - 1; + _Py_unique_id_entry *entry = &pool->table[idx]; entry->next = pool->freelist; pool->freelist = entry; UNLOCK_POOL(pool); @@ -116,18 +119,18 @@ _PyObject_ReleaseUniqueId(Py_ssize_t unique_id) static Py_ssize_t clear_unique_id(PyObject *obj) { - Py_ssize_t id = -1; + Py_ssize_t id = _Py_INVALID_UNIQUE_ID; if (PyType_Check(obj)) { if (PyType_HasFeature((PyTypeObject *)obj, Py_TPFLAGS_HEAPTYPE)) { PyHeapTypeObject *ht = (PyHeapTypeObject *)obj; id = ht->unique_id; - ht->unique_id = -1; + ht->unique_id = _Py_INVALID_UNIQUE_ID; } } else if (PyCode_Check(obj)) { PyCodeObject *co = (PyCodeObject *)obj; id = co->_co_unique_id; - co->_co_unique_id = -1; + co->_co_unique_id = _Py_INVALID_UNIQUE_ID; } else if (PyDict_Check(obj)) { PyDictObject *mp = (PyDictObject *)obj; @@ -141,23 +144,23 @@ void _PyObject_DisablePerThreadRefcounting(PyObject *obj) { Py_ssize_t id = clear_unique_id(obj); - if (id >= 0) { + if (id != _Py_INVALID_UNIQUE_ID) { _PyObject_ReleaseUniqueId(id); } } void -_PyObject_ThreadIncrefSlow(PyObject *obj, Py_ssize_t unique_id) +_PyObject_ThreadIncrefSlow(PyObject *obj, size_t idx) { _PyThreadStateImpl *tstate = (_PyThreadStateImpl *)_PyThreadState_GET(); - if (unique_id < 0 || resize_local_refcounts(tstate) < 0) { + if (((Py_ssize_t)idx) < 0 || resize_local_refcounts(tstate) < 0) { // just incref the object directly. 
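+ // (idx is negative when cast to Py_ssize_t, meaning there is no valid
+ // per-thread refcount slot, or the refcount table could not be grown.)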
Py_INCREF(obj); return; } - assert(unique_id < tstate->refcounts.size); - tstate->refcounts.values[unique_id]++; + assert(idx < (size_t)tstate->refcounts.size); + tstate->refcounts.values[idx]++; #ifdef Py_REF_DEBUG _Py_IncRefTotal((PyThreadState *)tstate); #endif @@ -217,7 +220,7 @@ _PyObject_FinalizeUniqueIdPool(PyInterpreterState *interp) if (obj != NULL) { Py_ssize_t id = clear_unique_id(obj); (void)id; - assert(id == i); + assert(id == i + 1); } } PyMem_Free(pool->table); diff --git a/Tools/build/mypy.ini b/Tools/build/mypy.ini index cf1dac7fde5ac5..0e5d6e874a72e5 100644 --- a/Tools/build/mypy.ini +++ b/Tools/build/mypy.ini @@ -8,6 +8,6 @@ python_version = 3.10 # ...And be strict: strict = True -strict_concatenate = True +extra_checks = True enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined warn_unreachable = True diff --git a/Tools/c-analyzer/cpython/globals-to-fix.tsv b/Tools/c-analyzer/cpython/globals-to-fix.tsv index a74779803228c2..54954cfb5f83ff 100644 --- a/Tools/c-analyzer/cpython/globals-to-fix.tsv +++ b/Tools/c-analyzer/cpython/globals-to-fix.tsv @@ -407,7 +407,8 @@ Modules/_tkinter.c - trbInCmd - ## other Include/datetime.h - PyDateTimeAPI - -Modules/_ctypes/cfield.c _ctypes_get_fielddesc initialized - +Modules/_ctypes/cfield.c _ctypes_init_fielddesc initialized - +Modules/_ctypes/cfield.c - formattable - Modules/_ctypes/malloc_closure.c - _pagesize - Modules/_cursesmodule.c - curses_module_loaded - Modules/_cursesmodule.c - curses_initscr_called - @@ -422,7 +423,6 @@ Modules/readline.c - libedit_history_start - ##----------------------- ## state -Modules/_ctypes/cfield.c - formattable - Modules/_ctypes/malloc_closure.c - free_list - Modules/_curses_panel.c - lop - Modules/_ssl/debughelpers.c _PySSL_keylog_callback lock - diff --git a/Tools/c-analyzer/cpython/ignored.tsv b/Tools/c-analyzer/cpython/ignored.tsv index c8c30a7985aa2e..1aabe262eac480 100644 --- a/Tools/c-analyzer/cpython/ignored.tsv +++ b/Tools/c-analyzer/cpython/ignored.tsv @@ -53,6 +53,9 @@ Python/pyhash.c - _Py_HashSecret - ## thread-safe hashtable (internal locks) Python/parking_lot.c - buckets - +## data needed for introspecting asyncio state from debuggers and profilers +Modules/_asynciomodule.c - AsyncioDebug - + ################################## ## state tied to Py_Main() @@ -378,14 +381,14 @@ Python/pylifecycle.c - INTERPRETER_TRAMPOLINE_CODEDEF - Python/pystate.c - initial - Python/specialize.c - adaptive_opcodes - Python/specialize.c - cache_requirements - +Python/specialize.c - float_compactlong_specs - +Python/specialize.c - compactlong_float_specs - Python/stdlib_module_names.h - _Py_stdlib_module_names - Python/sysmodule.c - perf_map_state - Python/sysmodule.c - _PySys_ImplCacheTag - Python/sysmodule.c - _PySys_ImplName - Python/sysmodule.c - whatstrings - Python/optimizer.c - _PyDefaultOptimizer_Type - -Python/optimizer.c - _PyCounterExecutor_Type - -Python/optimizer.c - _PyCounterOptimizer_Type - Python/optimizer.c - _PyUOpExecutor_Type - Python/optimizer.c - _PyUOpOptimizer_Type - Python/optimizer.c - _PyOptimizer_Default - diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 4013b503502df6..bc9c42e045a610 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -258,6 +258,12 @@ def is_super(self) -> bool: return False +@dataclass +class Label: + name: str + body: list[lexer.Token] + + @dataclass class PseudoInstruction: name: str @@ -291,6 +297,7 @@ class Analysis: uops: dict[str, Uop] 
     families: dict[str, Family]
     pseudos: dict[str, PseudoInstruction]
+    labels: dict[str, Label]
     opmap: dict[str, int]
     have_arg: int
     min_instrumented: int
@@ -590,7 +597,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool:
     "_PyGen_GetGeneratorFromFrame",
     "_PyInterpreterState_GET",
     "_PyList_AppendTakeRef",
-    "_PyList_FromStackRefSteal",
+    "_PyList_FromStackRefStealOnSuccess",
     "_PyList_ITEMS",
     "_PyLong_Add",
     "_PyLong_CompactValue",
@@ -609,8 +616,7 @@ def has_error_without_pop(op: parser.InstDef) -> bool:
     "_PyObject_InlineValues",
     "_PyObject_ManagedDictPointer",
     "_PyThreadState_HasStackSpace",
-    "_PyTuple_FromArraySteal",
-    "_PyTuple_FromStackRefSteal",
+    "_PyTuple_FromStackRefStealOnSuccess",
     "_PyTuple_ITEMS",
     "_PyType_HasFeature",
     "_PyType_NewManagedObject",
@@ -1015,6 +1021,13 @@ def add_pseudo(
     )
 
 
+def add_label(
+    label: parser.LabelDef,
+    labels: dict[str, Label],
+) -> None:
+    labels[label.name] = Label(label.name, label.block.tokens)
+
+
 def assign_opcodes(
     instructions: dict[str, Instruction],
     families: dict[str, Family],
@@ -1133,6 +1146,7 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis:
     uops: dict[str, Uop] = {}
     families: dict[str, Family] = {}
     pseudos: dict[str, PseudoInstruction] = {}
+    labels: dict[str, Label] = {}
     for node in forest:
         match node:
             case parser.InstDef(name):
@@ -1147,6 +1161,8 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis:
                 pass
             case parser.Pseudo():
                 pass
+            case parser.LabelDef():
+                pass
             case _:
                 assert False
     for node in forest:
@@ -1158,6 +1174,8 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis:
                 add_family(node, instructions, families)
             case parser.Pseudo():
                 add_pseudo(node, instructions, pseudos)
+            case parser.LabelDef():
+                add_label(node, labels)
             case _:
                 pass
     for uop in uops.values():
@@ -1183,7 +1201,7 @@ def analyze_forest(forest: list[parser.AstNode]) -> Analysis:
             families["BINARY_OP"].members.append(inst)
     opmap, first_arg, min_instrumented = assign_opcodes(instructions, families, pseudos)
     return Analysis(
-        instructions, uops, families, pseudos, opmap, first_arg, min_instrumented
+        instructions, uops, families, pseudos, labels, opmap, first_arg, min_instrumented
     )
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index f54afbb880d2fa..f1f166ae104ba5 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -127,6 +127,7 @@ def __init__(self, out: CWriter):
             "DISPATCH": self.dispatch,
             "INSTRUCTION_SIZE": self.instruction_size,
             "POP_INPUT": self.pop_input,
+            "GO_TO_INSTRUCTION": self.go_to_instruction,
         }
         self.out = out
@@ -405,6 +406,23 @@ def sync_sp(
         self._print_storage(storage)
         return True
 
+    def go_to_instruction(
+        self,
+        tkn: Token,
+        tkn_iter: TokenIterator,
+        uop: Uop,
+        storage: Storage,
+        inst: Instruction | None,
+    ) -> bool:
+        next(tkn_iter)
+        name = next(tkn_iter)
+        next(tkn_iter)
+        next(tkn_iter)
+        assert name.kind == "IDENTIFIER"
+        self.emit("\n")
+        self.emit(f"goto PREDICTED_{name.text};\n")
+        return True
+
     def emit_save(self, storage: Storage) -> None:
         storage.save(self.out)
         self._print_storage(storage)
diff --git a/Tools/cases_generator/lexer.py b/Tools/cases_generator/lexer.py
index bee2a185745f4d..cf3c39762f29cb 100644
--- a/Tools/cases_generator/lexer.py
+++ b/Tools/cases_generator/lexer.py
@@ -213,6 +213,9 @@ def choice(*opts: str) -> str:
 # A macro in the DSL
 MACRO = "MACRO"
 kwds.append(MACRO)
+# A label in the DSL
+LABEL = "LABEL"
+kwds.append(LABEL)
 
 keywords = {name.lower(): name for name in kwds}
 ANNOTATION = "ANNOTATION"
diff --git a/Tools/cases_generator/mypy.ini b/Tools/cases_generator/mypy.ini
index 8e5a31851c596e..e54349bf54a954 100644
--- a/Tools/cases_generator/mypy.ini
+++ b/Tools/cases_generator/mypy.ini
@@ -8,7 +8,7 @@ python_version = 3.10
 
 # ...And be strict:
 strict = True
-strict_concatenate = True
+extra_checks = True
 enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined
 warn_unreachable = True
 allow_redefinition = True
diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py
index d08b621aed552b..5cfec4bfecbf07 100644
--- a/Tools/cases_generator/optimizer_generator.py
+++ b/Tools/cases_generator/optimizer_generator.py
@@ -36,10 +36,10 @@ def validate_uop(override: Uop, uop: Uop) -> None:
 
 def type_name(var: StackItem) -> str:
     if var.is_array():
-        return f"_Py_UopsSymbol **"
+        return f"JitOptSymbol **"
     if var.type:
         return var.type
-    return f"_Py_UopsSymbol *"
+    return f"JitOptSymbol *"
 
 
 def declare_variables(uop: Uop, out: CWriter, skip_inputs: bool) -> None:
@@ -126,7 +126,7 @@ def write_uop(
     try:
         out.start_line()
         if override:
-            code_list, storage = Storage.for_uop(stack, prototype, extract_bits=False)
+            code_list, storage = Storage.for_uop(stack, prototype)
             for code in code_list:
                 out.emit(code)
         if debug:
@@ -151,11 +151,11 @@ def write_uop(
                     var.defined = False
             storage = emitter.emit_tokens(override, storage, None)
             out.start_line()
-            storage.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=False)
+            storage.flush(out, cast_type="JitOptSymbol *")
         else:
             emit_default(out, uop, stack)
             out.start_line()
-            stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=False)
+            stack.flush(out, cast_type="JitOptSymbol *")
     except StackError as ex:
         raise analysis_error(ex.args[0], prototype.body[0]) # from None
@@ -198,7 +198,7 @@ def generate_abstract_interpreter(
             declare_variables(override, out, skip_inputs=False)
         else:
             declare_variables(uop, out, skip_inputs=True)
-        stack = Stack()
+        stack = Stack(False)
         write_uop(override, uop, out, stack, debug, skip_inputs=(override is None))
         out.start_line()
         out.emit("break;\n")
diff --git a/Tools/cases_generator/parser.py b/Tools/cases_generator/parser.py
index db672ad5501f15..68bbb88719e682 100644
--- a/Tools/cases_generator/parser.py
+++ b/Tools/cases_generator/parser.py
@@ -3,6 +3,7 @@
     Macro,
     Pseudo,
     Family,
+    LabelDef,
     Parser,
     Context,
     CacheEffect,
diff --git a/Tools/cases_generator/parsing.py b/Tools/cases_generator/parsing.py
index de31d9b232f9df..eb8c8a7ecd32e8 100644
--- a/Tools/cases_generator/parsing.py
+++ b/Tools/cases_generator/parsing.py
@@ -150,8 +150,13 @@ class Pseudo(Node):
     targets: list[str]  # opcodes this can be replaced by
     as_sequence: bool
 
+@dataclass
+class LabelDef(Node):
+    name: str
+    block: Block
 
-AstNode = InstDef | Macro | Pseudo | Family
+
+AstNode = InstDef | Macro | Pseudo | Family | LabelDef
 
 
 class Parser(PLexer):
@@ -165,6 +170,18 @@ def definition(self) -> AstNode | None:
             return pseudo
         if inst := self.inst_def():
             return inst
+        if label := self.label_def():
+            return label
+        return None
+
+    @contextual
+    def label_def(self) -> LabelDef | None:
+        if self.expect(lx.LABEL):
+            if self.expect(lx.LPAREN):
+                if tkn := self.expect(lx.IDENTIFIER):
+                    if self.expect(lx.RPAREN):
+                        if block := self.block():
+                            return LabelDef(tkn.text, block)
         return None
 
     @contextual
@@ -357,9 +374,12 @@ def uops(self) -> list[UOp] | None:
     def uop(self) -> UOp | None:
         if tkn := self.expect(lx.IDENTIFIER):
             if self.expect(lx.DIVIDE):
+                sign = 1
+                if negate := self.expect(lx.MINUS):
+                    sign = -1
                 if num := self.expect(lx.NUMBER):
                     try:
-                        size = int(num.text)
+                        size = sign * int(num.text)
                     except ValueError:
                         raise self.make_syntax_error(
                             f"Expected integer, got {num.text!r}"
diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py
index 286f47d0cfb11b..5121837ed8334b 100644
--- a/Tools/cases_generator/stack.py
+++ b/Tools/cases_generator/stack.py
@@ -224,13 +224,14 @@ def array_or_scalar(var: StackItem | Local) -> str:
     return "array" if var.is_array() else "scalar"
 
 class Stack:
-    def __init__(self) -> None:
+    def __init__(self, extract_bits: bool=True) -> None:
         self.top_offset = StackOffset.empty()
         self.base_offset = StackOffset.empty()
         self.variables: list[Local] = []
         self.defined: set[str] = set()
+        self.extract_bits = extract_bits
 
-    def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]:
+    def pop(self, var: StackItem) -> tuple[str, Local]:
         self.top_offset.pop(var)
         indirect = "&" if var.is_array() else ""
         if self.variables:
@@ -272,7 +273,7 @@ def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]:
             return "", Local.unused(var)
         self.defined.add(var.name)
         cast = f"({var.type})" if (not indirect and var.type) else ""
-        bits = ".bits" if cast and extract_bits else ""
+        bits = ".bits" if cast and self.extract_bits else ""
         assign = f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}]{bits};"
         if var.condition:
             if var.condition == "1":
@@ -315,7 +316,7 @@ def _adjust_stack_pointer(self, out: CWriter, number: str) -> None:
             out.emit("assert(WITHIN_STACK_BOUNDS());\n")
 
     def flush(
-        self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True
+        self, out: CWriter, cast_type: str = "uintptr_t"
     ) -> None:
         out.start_line()
         var_offset = self.base_offset.copy()
@@ -324,7 +325,7 @@ def flush(
                 var.defined
                 and not var.in_memory
             ):
-                Stack._do_emit(out, var.item, var_offset, cast_type, extract_bits)
+                Stack._do_emit(out, var.item, var_offset, cast_type, self.extract_bits)
                 var.in_memory = True
             var_offset.push(var.item)
         number = self.top_offset.to_c()
@@ -346,7 +347,7 @@ def as_comment(self) -> str:
         )
 
     def copy(self) -> "Stack":
-        other = Stack()
+        other = Stack(self.extract_bits)
         other.top_offset = self.top_offset.copy()
         other.base_offset = self.base_offset.copy()
         other.variables = [var.copy() for var in self.variables]
@@ -507,10 +508,10 @@ def locals_cached(self) -> bool:
                 return True
         return False
 
-    def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True) -> None:
+    def flush(self, out: CWriter, cast_type: str = "uintptr_t") -> None:
         self.clear_dead_inputs()
         self._push_defined_outputs()
-        self.stack.flush(out, cast_type, extract_bits)
+        self.stack.flush(out, cast_type)
 
     def save(self, out: CWriter) -> None:
         assert self.spilled >= 0
@@ -530,12 +531,12 @@ def reload(self, out: CWriter) -> None:
         out.emit("stack_pointer = _PyFrame_GetStackPointer(frame);\n")
 
     @staticmethod
-    def for_uop(stack: Stack, uop: Uop, extract_bits: bool = True) -> tuple[list[str], "Storage"]:
+    def for_uop(stack: Stack, uop: Uop) -> tuple[list[str], "Storage"]:
         code_list: list[str] = []
         inputs: list[Local] = []
         peeks: list[Local] = []
         for input in reversed(uop.stack.inputs):
-            code, local = stack.pop(input, extract_bits)
+            code, local = stack.pop(input)
             code_list.append(code)
             if input.peek:
                 peeks.append(local)
diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py
index 40562da99b20ea..59ce5c95852d28 100644
--- a/Tools/cases_generator/tier1_generator.py
+++ b/Tools/cases_generator/tier1_generator.py
@@ -32,6 +32,10 @@
 
 FOOTER = "#undef TIER_ONE\n"
+INSTRUCTION_START_MARKER = "/* BEGIN INSTRUCTIONS */"
+INSTRUCTION_END_MARKER = "/* END INSTRUCTIONS */"
+LABEL_START_MARKER = "/* BEGIN LABELS */"
+LABEL_END_MARKER = "/* END LABELS */"
 
 
 def declare_variable(var: StackItem, out: CWriter) -> None:
@@ -133,13 +137,64 @@ def generate_tier1(
 ) -> None:
     write_header(__file__, filenames, outfile)
     outfile.write(
-        """
+        f"""
 #ifdef TIER_TWO
     #error "This file is for Tier 1 only"
 #endif
 #define TIER_ONE 1
+
+#if !USE_COMPUTED_GOTOS
+    dispatch_opcode:
+        switch (opcode)
+#endif
+        {{
+            {INSTRUCTION_START_MARKER}
 """
     )
+    generate_tier1_cases(analysis, outfile, lines)
+    outfile.write(f"""
+            {INSTRUCTION_END_MARKER}
+#if USE_COMPUTED_GOTOS
+        _unknown_opcode:
+#else
+        EXTRA_CASES  // From pycore_opcode_metadata.h, a 'case' for each unused opcode
+#endif
+            /* Tell C compilers not to hold the opcode variable in the loop.
+               next_instr points the current instruction without TARGET(). */
+            opcode = next_instr->op.code;
+            _PyErr_Format(tstate, PyExc_SystemError,
+                          "%U:%d: unknown opcode %d",
+                          _PyFrame_GetCode(frame)->co_filename,
+                          PyUnstable_InterpreterFrame_GetLine(frame),
+                          opcode);
+            goto error;
+
+        }}
+
+        /* This should never be reached. Every opcode should end with DISPATCH()
+           or goto error. */
+        Py_UNREACHABLE();
+{LABEL_START_MARKER}
+""")
+    generate_tier1_labels(analysis, outfile, lines)
+    outfile.write(f"{LABEL_END_MARKER}\n")
+    outfile.write(FOOTER)
+
+def generate_tier1_labels(
+    analysis: Analysis, outfile: TextIO, lines: bool
+) -> None:
+    out = CWriter(outfile, 2, lines)
+    out.emit("\n")
+    for name, label in analysis.labels.items():
+        out.emit(f"{name}:\n")
+        for tkn in label.body:
+            out.emit(tkn)
+        out.emit("\n")
+    out.emit("\n")
+
+def generate_tier1_cases(
+    analysis: Analysis, outfile: TextIO, lines: bool
+) -> None:
     out = CWriter(outfile, 2, lines)
     emitter = Emitter(out)
     out.emit("\n")
@@ -161,7 +216,7 @@ def generate_tier1(
             out.emit(f"next_instr += {inst.size};\n")
             out.emit(f"INSTRUCTION_STATS({name});\n")
             if inst.is_target:
-                out.emit(f"PREDICTED({name});\n")
+                out.emit(f"PREDICTED_{name}:;\n")
             if needs_this:
                 out.emit(f"_Py_CODEUNIT* const this_instr = next_instr - {inst.size};\n")
                 out.emit(unused_guard)
@@ -185,7 +240,6 @@ def generate_tier1(
             out.start_line()
         out.emit("}")
         out.emit("\n")
-    outfile.write(FOOTER)
 
 
 arg_parser = argparse.ArgumentParser(
diff --git a/Tools/clinic/libclinic/converters.py b/Tools/clinic/libclinic/converters.py
index a65f73ba02fb5d..2998eb519648aa 100644
--- a/Tools/clinic/libclinic/converters.py
+++ b/Tools/clinic/libclinic/converters.py
@@ -1182,10 +1182,8 @@ def pre_render(self) -> None:
     @property
     def parser_type(self) -> str:
         assert self.type is not None
-        if self.function.kind in {METHOD_INIT, METHOD_NEW, STATIC_METHOD, CLASS_METHOD}:
-            tp, _ = correct_name_for_self(self.function)
-            return tp
-        return self.type
+        tp, _ = correct_name_for_self(self.function)
+        return tp
 
     def render(self, parameter: Parameter, data: CRenderData) -> None:
         """
diff --git a/Tools/clinic/mypy.ini b/Tools/clinic/mypy.ini
index b1fdad673c61a1..6520e05db0bc31 100644
--- a/Tools/clinic/mypy.ini
+++ b/Tools/clinic/mypy.ini
@@ -7,6 +7,6 @@ python_version = 3.10
 
 # and be strict!
 strict = True
-strict_concatenate = True
+extra_checks = True
 enable_error_code = ignore-without-code,redundant-expr,truthy-bool
 warn_unreachable = True
diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py
index 698ecbd3b549aa..e0d92e21dc42b3 100755
--- a/Tools/gdb/libpython.py
+++ b/Tools/gdb/libpython.py
@@ -99,7 +99,7 @@ def interp_frame_has_tlbc_index():
 Py_TPFLAGS_TYPE_SUBCLASS = (1 << 31)
 
 #From pycore_frame.h
-FRAME_OWNED_BY_CSTACK = 3
+FRAME_OWNED_BY_INTERPRETER = 3
 
 MAX_OUTPUT_LEN=1024
@@ -1113,7 +1113,7 @@ def _f_lasti(self):
         return int(instr_ptr - first_instr)
 
     def is_shim(self):
-        return self._f_special("owner", int) == FRAME_OWNED_BY_CSTACK
+        return self._f_special("owner", int) == FRAME_OWNED_BY_INTERPRETER
 
     def previous(self):
         return self._f_special("previous", PyFramePtr)
diff --git a/configure b/configure
index 70581e11b60682..885c2cf7828d6c 100755
--- a/configure
+++ b/configure
@@ -814,7 +814,7 @@ MODULE_TIME_TRUE
 MODULE__IO_FALSE
 MODULE__IO_TRUE
 MODULE_BUILDTYPE
-PYTHREAD_NAME_MAXLEN
+_PYTHREAD_NAME_MAXLEN
 TEST_MODULES
 OPENSSL_LDFLAGS
 OPENSSL_LIBS
@@ -9627,7 +9627,7 @@ fi
   as_fn_append LINKFORSHARED " -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js"
   as_fn_append LINKFORSHARED " -sEXPORTED_RUNTIME_METHODS=FS,callMain,ENV"
-  as_fn_append LINKFORSHARED " -sEXPORTED_FUNCTIONS=_main,_Py_Version"
+  as_fn_append LINKFORSHARED " -sEXPORTED_FUNCTIONS=_main,_Py_Version,__PyRuntime,__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET"
  as_fn_append LINKFORSHARED " -sSTACK_SIZE=5MB"
 
  if test "x$enable_wasm_dynamic_linking" = xyes
@@ -30403,17 +30403,17 @@ CPPFLAGS=$save_CPPFLAGS
 
 # gh-59705: Maximum length in bytes of a thread name
 case "$ac_sys_system" in
-    Linux*) PYTHREAD_NAME_MAXLEN=15;; # Linux and Android
-    SunOS*) PYTHREAD_NAME_MAXLEN=31;;
-    NetBSD*) PYTHREAD_NAME_MAXLEN=31;;
-    Darwin) PYTHREAD_NAME_MAXLEN=63;;
-    iOS) PYTHREAD_NAME_MAXLEN=63;;
-    FreeBSD*) PYTHREAD_NAME_MAXLEN=98;;
-    *) PYTHREAD_NAME_MAXLEN=;;
+    Linux*) _PYTHREAD_NAME_MAXLEN=15;; # Linux and Android
+    SunOS*) _PYTHREAD_NAME_MAXLEN=31;;
+    NetBSD*) _PYTHREAD_NAME_MAXLEN=31;;
+    Darwin) _PYTHREAD_NAME_MAXLEN=63;;
+    iOS) _PYTHREAD_NAME_MAXLEN=63;;
+    FreeBSD*) _PYTHREAD_NAME_MAXLEN=98;;
+    *) _PYTHREAD_NAME_MAXLEN=;;
 esac
-if test -n "$PYTHREAD_NAME_MAXLEN"; then
+if test -n "$_PYTHREAD_NAME_MAXLEN"; then
 
-printf "%s\n" "#define PYTHREAD_NAME_MAXLEN $PYTHREAD_NAME_MAXLEN" >>confdefs.h
+printf "%s\n" "#define _PYTHREAD_NAME_MAXLEN $_PYTHREAD_NAME_MAXLEN" >>confdefs.h
 
 fi
diff --git a/configure.ac b/configure.ac
index d7c3920d049d67..f89a0801948ca5 100644
--- a/configure.ac
+++ b/configure.ac
@@ -2369,7 +2369,7 @@ AS_CASE([$ac_sys_system],
     dnl Include file system support
     AS_VAR_APPEND([LINKFORSHARED], [" -sFORCE_FILESYSTEM -lidbfs.js -lnodefs.js -lproxyfs.js -lworkerfs.js"])
     AS_VAR_APPEND([LINKFORSHARED], [" -sEXPORTED_RUNTIME_METHODS=FS,callMain,ENV"])
-    AS_VAR_APPEND([LINKFORSHARED], [" -sEXPORTED_FUNCTIONS=_main,_Py_Version"])
+    AS_VAR_APPEND([LINKFORSHARED], [" -sEXPORTED_FUNCTIONS=_main,_Py_Version,__PyRuntime,__PyEM_EMSCRIPTEN_COUNT_ARGS_OFFSET"])
     AS_VAR_APPEND([LINKFORSHARED], [" -sSTACK_SIZE=5MB"])
 
     AS_VAR_IF([enable_wasm_dynamic_linking], [yes], [
@@ -7539,19 +7539,19 @@ _RESTORE_VAR([CPPFLAGS])
 
 # gh-59705: Maximum length in bytes of a thread name
 case "$ac_sys_system" in
-    Linux*) PYTHREAD_NAME_MAXLEN=15;; # Linux and Android
-    SunOS*) PYTHREAD_NAME_MAXLEN=31;;
-    NetBSD*) PYTHREAD_NAME_MAXLEN=31;;
-    Darwin) PYTHREAD_NAME_MAXLEN=63;;
-    iOS) PYTHREAD_NAME_MAXLEN=63;;
-    FreeBSD*) PYTHREAD_NAME_MAXLEN=98;;
-    *) PYTHREAD_NAME_MAXLEN=;;
+    Linux*) _PYTHREAD_NAME_MAXLEN=15;; # Linux and Android
+    SunOS*) _PYTHREAD_NAME_MAXLEN=31;;
+    NetBSD*) _PYTHREAD_NAME_MAXLEN=31;;
+    Darwin) _PYTHREAD_NAME_MAXLEN=63;;
+    iOS) _PYTHREAD_NAME_MAXLEN=63;;
+    FreeBSD*) _PYTHREAD_NAME_MAXLEN=98;;
+    *) _PYTHREAD_NAME_MAXLEN=;;
 esac
-if test -n "$PYTHREAD_NAME_MAXLEN"; then
-  AC_DEFINE_UNQUOTED([PYTHREAD_NAME_MAXLEN], [$PYTHREAD_NAME_MAXLEN],
+if test -n "$_PYTHREAD_NAME_MAXLEN"; then
+  AC_DEFINE_UNQUOTED([_PYTHREAD_NAME_MAXLEN], [$_PYTHREAD_NAME_MAXLEN],
                      [Maximum length in bytes of a thread name])
 fi
-AC_SUBST([PYTHREAD_NAME_MAXLEN])
+AC_SUBST([_PYTHREAD_NAME_MAXLEN])
 
 # stdlib
diff --git a/iOS/testbed/__main__.py b/iOS/testbed/__main__.py
index 068272835a5b95..b4499f5ac171a8 100644
--- a/iOS/testbed/__main__.py
+++ b/iOS/testbed/__main__.py
@@ -2,6 +2,7 @@
 import asyncio
 import json
 import plistlib
+import re
 import shutil
 import subprocess
 import sys
@@ -12,6 +13,18 @@
 DECODE_ARGS = ("UTF-8", "backslashreplace")
 
 
+# The system log prefixes each line:
+#   2025-01-17 16:14:29.090 Df iOSTestbed[23987:1fd393b4] (Python) ...
+#   2025-01-17 16:14:29.090 E iOSTestbed[23987:1fd393b4] (Python) ...
+
+LOG_PREFIX_REGEX = re.compile(
+    r"^\d{4}-\d{2}-\d{2}"  # YYYY-MM-DD
+    r"\s+\d+:\d{2}:\d{2}\.\d+"  # HH:MM:SS.sss
+    r"\s+\w+"  # Df/E
+    r"\s+iOSTestbed\[\d+:\w+\]"  # Process/thread ID
+    r"\s+\(Python\)\s"  # Logger name
+)
+
 # Work around a bug involving sys.exit and TaskGroups
 # (https://github.com/python/cpython/issues/101515).
@@ -131,6 +144,8 @@ async def log_stream_task(initial_devices):
         ) as process:
             suppress_dupes = False
             while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
+                # Strip the prefix from each log line
+                line = LOG_PREFIX_REGEX.sub("", line)
                 # The iOS log streamer can sometimes lag; when it does, it outputs
                 # a warning about messages being dropped... often multiple times.
                 # Only print the first of these duplicated warnings.
diff --git a/pyconfig.h.in b/pyconfig.h.in
index aaf52168c3d39d..30e55158bad4d6 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -1662,9 +1662,6 @@
 /* Define as the preferred size in bits of long digits */
 #undef PYLONG_BITS_IN_DIGIT
 
-/* Maximum length in bytes of a thread name */
-#undef PYTHREAD_NAME_MAXLEN
-
 /* enabled builtin hash modules */
 #undef PY_BUILTIN_HASHLIB_HASHES
 
@@ -1980,6 +1977,9 @@
 /* framework name */
 #undef _PYTHONFRAMEWORK
 
+/* Maximum length in bytes of a thread name */
+#undef _PYTHREAD_NAME_MAXLEN
+
 /* Define to force use of thread-safe errno, h_errno, and other functions */
 #undef _REENTRANT
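
As a quick illustration of the iOS/testbed/__main__.py change above, here is a minimal, self-contained sketch of how the new LOG_PREFIX_REGEX strips the system-log prefix from each captured line before the dropped-message filtering runs. The pattern is copied from the patch; the sample log line and its message text are invented for illustration and are not part of the patch.

    import re

    # Same pattern as LOG_PREFIX_REGEX in iOS/testbed/__main__.py (see patch above).
    LOG_PREFIX_REGEX = re.compile(
        r"^\d{4}-\d{2}-\d{2}"        # YYYY-MM-DD
        r"\s+\d+:\d{2}:\d{2}\.\d+"   # HH:MM:SS.sss
        r"\s+\w+"                    # Df/E
        r"\s+iOSTestbed\[\d+:\w+\]"  # Process/thread ID
        r"\s+\(Python\)\s"           # Logger name
    )

    # Hypothetical line in the format shown in the patch's comment.
    sample = "2025-01-17 16:14:29.090 Df iOSTestbed[23987:1fd393b4] (Python) test_example passed\n"
    print(LOG_PREFIX_REGEX.sub("", sample), end="")  # prints: test_example passed

Lines that do not start with the prefix are returned unchanged by re.sub, so any non-matching output still reaches the duplicate-warning check in log_stream_task.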