Drop python3.8 support core and providers (apache#42766)
* Drop Python 3.8 support in core

Add newsfragment

* Drop Python 3.8 support in provider packages
jscheffl authored Oct 8, 2024
1 parent 000d2da commit 63ff22f
Showing 193 changed files with 998 additions and 1,215 deletions.
3 changes: 0 additions & 3 deletions .github/actions/breeze/action.yml
@@ -21,9 +21,6 @@ description: 'Sets up Python and Breeze'
inputs:
python-version:
description: 'Python version to use'
# Version of Python used for reproducibility of the packages built
# Python 3.8 tarfile produces different tarballs than Python 3.9+ tarfile that's why we are forcing
# Python 3.9 for all release preparation commands to make sure that the tarballs are reproducible
default: "3.9"
outputs:
host-python-version:
18 changes: 9 additions & 9 deletions .github/workflows/build-images.yml
@@ -16,7 +16,7 @@
# under the License.
#
---
name: "Build Images"
name: Build Images
run-name: >
Build images for ${{ github.event.pull_request.title }} ${{ github.event.pull_request._links.html.href }}
on: # yamllint disable-line rule:truthy
@@ -54,7 +54,7 @@ concurrency:
jobs:
build-info:
timeout-minutes: 10
name: "Build Info"
name: Build Info
# At build-info stage we do not yet have outputs so we need to hard-code the runs-on to public runners
runs-on: ["ubuntu-22.04"]
env:
@@ -89,7 +89,7 @@ jobs:
}}"
if: github.repository == 'apache/airflow'
steps:
- name: "Cleanup repo"
- name: Cleanup repo
shell: bash
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
- name: Discover PR merge commit
@@ -154,13 +154,13 @@ jobs:
# COMPOSITE ACTIONS. WE CAN RUN ANYTHING THAT IS IN THE TARGET BRANCH AND THERE IS NO RISK THAT
# CODE WILL BE RUN FROM THE PR.
####################################################################################################
- name: "Cleanup docker"
- name: Cleanup docker
run: ./scripts/ci/cleanup_docker.sh
- name: "Setup python"
- name: Setup python
uses: actions/setup-python@v5
with:
python-version: 3.8
- name: "Install Breeze"
python-version: "3.9"
- name: Install Breeze
uses: ./.github/actions/breeze
####################################################################################################
# WE RUN SELECTIVE CHECKS HERE USING THE TARGET COMMIT AND ITS PARENT TO BE ABLE TO COMPARE THEM
@@ -212,7 +212,7 @@ jobs:
docker-cache: ${{ needs.build-info.outputs.docker-cache }}

generate-constraints:
name: "Generate constraints"
name: Generate constraints
needs: [build-info, build-ci-images]
uses: ./.github/workflows/generate-constraints.yml
with:
Expand Down Expand Up @@ -247,7 +247,7 @@ jobs:
push-image: "true"
use-uv: "true"
image-tag: ${{ needs.build-info.outputs.image-tag }}
platform: "linux/amd64"
platform: linux/amd64
python-versions: ${{ needs.build-info.outputs.python-versions }}
default-python-version: ${{ needs.build-info.outputs.default-python-version }}
branch: ${{ needs.build-info.outputs.default-branch }}
5 changes: 2 additions & 3 deletions .github/workflows/check-providers.yml
@@ -108,10 +108,9 @@ jobs:
run: >
breeze release-management generate-issue-content-providers
--only-available-in-dist --disable-progress
- name: >
Remove incompatible Python ${{ matrix.python-version }} provider packages
- name: Remove Python 3.9-incompatible provider packages
run: |
echo "Removing Python 3.8-incompatible provider: cloudant"
echo "Removing Python 3.9-incompatible provider: cloudant"
rm -vf dist/apache_airflow_providers_cloudant*
- name: "Generate source constraints from CI image"
shell: bash
7 changes: 0 additions & 7 deletions .pre-commit-config.yaml
@@ -1066,13 +1066,6 @@ repos:
files: \.py$
exclude: ^.*/.*_vendor/
additional_dependencies: ['rich>=12.4.4']
- id: check-compat-cache-on-methods
name: Check that compat cache do not use on class methods
entry: ./scripts/ci/pre_commit/compat_cache_on_methods.py
language: python
pass_filenames: true
files: ^airflow/.*\.py$
exclude: ^.*/.*_vendor/
- id: check-code-deprecations
name: Check deprecations categories in decorators
entry: ./scripts/ci/pre_commit/check_deprecations.py
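For background on the hook removed above: applying `cache` to an instance method stores `self` in the cache key, so every cached instance stays referenced for the lifetime of the class and the cache is shared across instances. A minimal illustration of the pitfall the check guarded against (hypothetical code, not from the repository):

```python
from functools import cache


class Fetcher:
    @cache  # caution: the cache key includes `self`, so instances are never released
    def fetch(self, url: str) -> str:
        # stand-in for an expensive call
        return f"fetched {url}"


f = Fetcher()
f.fetch("https://example.com")
# Even after `f` goes out of scope, Fetcher.fetch's cache still holds a reference to it.
```

With the compat shim gone (see the deleted `airflow/compat/functools.py` below), the dedicated pre-commit check is presumably removed along with the module it targeted.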
2 changes: 1 addition & 1 deletion .readthedocs.yml
@@ -20,7 +20,7 @@ formats: []
sphinx:
configuration: docs/rtd-deprecation/conf.py
python:
version: "3.8"
version: "3.9"
install:
- method: pip
path: .
2 changes: 1 addition & 1 deletion Dockerfile
@@ -47,7 +47,7 @@ ARG AIRFLOW_USER_HOME_DIR=/home/airflow
# latest released version here
ARG AIRFLOW_VERSION="2.10.2"

ARG PYTHON_BASE_IMAGE="python:3.8-slim-bookworm"
ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm"

ARG AIRFLOW_PIP_VERSION=24.2
ARG AIRFLOW_UV_VERSION=0.4.17
4 changes: 2 additions & 2 deletions Dockerfile.ci
@@ -16,7 +16,7 @@
#
# WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT.
#
ARG PYTHON_BASE_IMAGE="python:3.8-slim-bookworm"
ARG PYTHON_BASE_IMAGE="python:3.9-slim-bookworm"

##############################################################################################
# This is the script image where we keep all inlined bash scripts needed in other segments
@@ -810,7 +810,7 @@ chmod 1777 /tmp

AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd)

PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.8}
PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.9}

export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}}

12 changes: 5 additions & 7 deletions INSTALL
@@ -141,9 +141,7 @@ This is what it shows currently:
┏━━━━━━━━━━━━━┳━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓
┃ Name ┃ Type ┃ Description ┃
┡━━━━━━━━━━━━━╇━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩
│ default │ virtual │ Default environment with Python 3.8 for maximum compatibility │
├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤
│ airflow-38 │ virtual │ Environment with Python 3.8. No devel installed. │
│ default │ virtual │ Default environment with Python 3.9 for maximum compatibility │
├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤
│ airflow-39 │ virtual │ Environment with Python 3.9. No devel installed. │
├─────────────┼─────────┼───────────────────────────────────────────────────────────────┤
@@ -154,7 +152,7 @@ This is what it shows currently:
│ airflow-312 │ virtual │ Environment with Python 3.12. No devel installed │
└─────────────┴─────────┴───────────────────────────────────────────────────────────────┘

The default env (if you have not used one explicitly) is `default` and it is a Python 3.8
The default env (if you have not used one explicitly) is `default` and it is a Python 3.9
virtualenv for maximum compatibility with `devel` extra installed - this devel extra contains the minimum set
of dependencies and tools that should be used during unit testing of core Airflow and running all `airflow`
CLI commands - without support for providers or databases.
@@ -228,15 +226,15 @@ to avoid "works-for-me" syndrome, where you use different versions of dependencies
that are used in main CI tests and by other contributors.

There are different constraint files for different Python versions. For example, this command will install
all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.8:
all basic devel requirements and requirements of Google provider as last successfully tested for Python 3.9:

pip install -e ".[devel,google]" \
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt"
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.9.txt"

Using the 'constraints-no-providers' constraint files, you can upgrade Airflow without paying attention to the provider's dependencies. This allows you to keep installed provider dependencies and install the latest supported ones using pure Airflow core.

pip install -e ".[devel]" \
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt"
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.9.txt"
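Constraint files are published per Python minor version, so the URL has to match the interpreter you are installing into. A small helper for building that URL from the running interpreter (a sketch, assuming you track the `constraints-main` branch):

```python
# Build the constraints URL for the running interpreter (sketch; assumes constraints-main).
import sys

python_version = f"{sys.version_info.major}.{sys.version_info.minor}"  # e.g. "3.9"
constraints_url = (
    "https://raw.githubusercontent.com/apache/airflow/"
    f"constraints-main/constraints-{python_version}.txt"
)
print(constraints_url)  # pass to: pip install -e ".[devel]" --constraint <url>
```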

Airflow extras
==============
8 changes: 4 additions & 4 deletions README.md
@@ -99,7 +99,7 @@ Apache Airflow is tested with:

| | Main version (dev) | Stable version (2.10.2) |
|------------|----------------------------|----------------------------|
| Python | 3.8, 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 |
| Python | 3.9, 3.10, 3.11, 3.12 | 3.8, 3.9, 3.10, 3.11, 3.12 |
| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) |
| Kubernetes | 1.28, 1.29, 1.30, 1.31 | 1.27, 1.28, 1.29, 1.30 |
| PostgreSQL | 12, 13, 14, 15, 16, 17 | 12, 13, 14, 15, 16 |
@@ -178,14 +178,14 @@ them to the appropriate format and workflow that your tool requires.

```bash
pip install 'apache-airflow==2.10.2' \
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt"
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt"
```

2. Installing with extras (i.e., postgres, google)

```bash
pip install 'apache-airflow[postgres,google]==2.10.2' \
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.8.txt"
--constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.10.2/constraints-3.9.txt"
```

For information on installing provider packages, check
@@ -313,7 +313,7 @@ They are based on the official release schedule of Python and Kubernetes, nicely
1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a
version stays supported by Airflow if two major cloud providers still provide support for it. We drop
support for those EOL versions in main right after EOL date, and it is effectively removed when we release
the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.8 it
the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.9 it
means that we will drop support in main right after 27.06.2023, and the first MAJOR or MINOR version of
Airflow released after will not have it.

2 changes: 1 addition & 1 deletion airflow/cli/commands/connection_command.py
@@ -21,6 +21,7 @@
import json
import os
import warnings
from functools import cache
from pathlib import Path
from typing import Any
from urllib.parse import urlsplit, urlunsplit
@@ -30,7 +31,6 @@

from airflow.cli.simple_table import AirflowConsole
from airflow.cli.utils import is_stdout, print_export_output
from airflow.compat.functools import cache
from airflow.configuration import conf
from airflow.exceptions import AirflowNotFoundException
from airflow.hooks.base import BaseHook
33 changes: 0 additions & 33 deletions airflow/compat/functools.py

This file was deleted.
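The deleted `airflow/compat/functools.py` was a small compatibility shim around `functools.cache`, originally needed because `functools.cache` was only added in Python 3.9. A sketch of the kind of code it provided (not the verbatim deleted file):

```python
# Sketch of the removed compat shim (approximate, not the verbatim deleted file).
import sys

if sys.version_info >= (3, 9):
    from functools import cache
else:
    # Python 3.8: functools.cache does not exist yet; lru_cache(maxsize=None) is equivalent.
    from functools import lru_cache

    cache = lru_cache(maxsize=None)

__all__ = ["cache"]
```

With 3.8 dropped, the imports of `airflow.compat.functools.cache` elsewhere in this diff are replaced by `from functools import cache`.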

4 changes: 1 addition & 3 deletions airflow/configuration.py
@@ -1790,9 +1790,7 @@ def load_providers_configuration(self):
)
self._default_values = create_default_config_parser(self.configuration_description)
# sensitive_config_values needs to be refreshed here. This is a cached_property, so we can delete
# the cached values, and it will be refreshed on next access. This has been an implementation
# detail in Python 3.8 but as of Python 3.9 it is documented behaviour.
# See https://docs.python.org/3/library/functools.html#functools.cached_property
# the cached values, and it will be refreshed on next access.
try:
del self.sensitive_config_values
except AttributeError:
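The trimmed comment relies on behaviour that is documented from Python 3.9 onwards: deleting a `cached_property` attribute discards the cached value so it is recomputed on the next access. A minimal illustration (hypothetical class, not Airflow's code):

```python
from functools import cached_property


class SettingsHolder:
    def __init__(self) -> None:
        self.raw = {"db_password": "x"}

    @cached_property
    def sensitive_values(self) -> set:
        # expensive scan, cached after the first access
        return {key for key in self.raw if "password" in key}


holder = SettingsHolder()
holder.sensitive_values              # computed and cached
holder.raw["smtp_password"] = "y"
del holder.sensitive_values          # drops the cached value (AttributeError if not yet cached)
holder.sensitive_values              # recomputed, now includes "smtp_password"
```

The surrounding `try`/`except AttributeError` in `configuration.py` covers the case where the property has not been computed yet.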
2 changes: 1 addition & 1 deletion airflow/io/__init__.py
@@ -18,6 +18,7 @@

import inspect
import logging
from functools import cache
from typing import (
TYPE_CHECKING,
Callable,
@@ -26,7 +27,6 @@

from fsspec.implementations.local import LocalFileSystem

from airflow.compat.functools import cache
from airflow.providers_manager import ProvidersManager
from airflow.stats import Stats
from airflow.utils.module_loading import import_string
2 changes: 1 addition & 1 deletion airflow/models/taskinstance.py
@@ -31,6 +31,7 @@
from contextlib import nullcontext
from datetime import timedelta
from enum import Enum
from functools import cache
from typing import TYPE_CHECKING, Any, Callable, Collection, Generator, Iterable, Mapping, Tuple
from urllib.parse import quote

@@ -69,7 +70,6 @@
from airflow.api_internal.internal_api_call import InternalApiConfig, internal_api_call
from airflow.assets import Asset, AssetAlias
from airflow.assets.manager import asset_manager
from airflow.compat.functools import cache
from airflow.configuration import conf
from airflow.exceptions import (
AirflowException,
2 changes: 1 addition & 1 deletion airflow/operators/python.py
@@ -30,13 +30,13 @@
import warnings
from abc import ABCMeta, abstractmethod
from collections.abc import Container
from functools import cache
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable, Mapping, NamedTuple, Sequence

import lazy_object_proxy

from airflow.compat.functools import cache
from airflow.exceptions import (
AirflowConfigException,
AirflowException,
2 changes: 1 addition & 1 deletion airflow/providers/MANAGING_PROVIDERS_LIFECYCLE.rst
@@ -454,7 +454,7 @@ If you have pre-commit installed, pre-commit will be run automatically on commit
manually after commit, you can run it via ``breeze static-checks --last-commit`` some of the tests might fail
because suspension of the provider might cause changes in the dependencies, so if you see errors about
missing dependencies imports, non-usable classes etc., you will need to build the CI image locally
via ``breeze build-image --python 3.8 --upgrade-to-newer-dependencies`` after the first pre-commit run
via ``breeze build-image --python 3.9 --upgrade-to-newer-dependencies`` after the first pre-commit run
and then run the static checks again.

If you want to be absolutely sure to run all static checks you can always do this via
Expand Down
9 changes: 3 additions & 6 deletions airflow/providers/amazon/aws/transfers/sql_to_s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,12 +223,9 @@ def _partition_dataframe(self, df: pd.DataFrame) -> Iterable[tuple[str, pd.DataF
for group_label in (grouped_df := df.groupby(**self.groupby_kwargs)).groups:
yield (
cast(str, group_label),
cast(
"pd.DataFrame",
grouped_df.get_group(group_label)
.drop(random_column_name, axis=1, errors="ignore")
.reset_index(drop=True),
),
grouped_df.get_group(group_label)
.drop(random_column_name, axis=1, errors="ignore")
.reset_index(drop=True),
)

def _get_hook(self) -> DbApiHook:
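After this change `_partition_dataframe` yields plain DataFrames without the `typing.cast` wrapper. Roughly the shape of the resulting generator and how it is consumed (a standalone sketch using pandas directly; `groupby_kwargs` and `random_column_name` stand in for the operator's attributes):

```python
# Standalone sketch of the groupby-partitioning pattern (assumes pandas is installed).
from typing import Iterable, Tuple

import pandas as pd


def partition_dataframe(
    df: pd.DataFrame, groupby_kwargs: dict, random_column_name: str
) -> Iterable[Tuple[str, pd.DataFrame]]:
    grouped_df = df.groupby(**groupby_kwargs)
    for group_label in grouped_df.groups:
        yield (
            str(group_label),
            grouped_df.get_group(group_label)
            .drop(random_column_name, axis=1, errors="ignore")
            .reset_index(drop=True),
        )


frame = pd.DataFrame({"team": ["a", "a", "b"], "value": [1, 2, 3]})
for label, part in partition_dataframe(frame, {"by": "team"}, "_airflow_rand"):
    print(label, len(part))  # a 2 / b 1
```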
3 changes: 1 addition & 2 deletions airflow/providers/amazon/aws/utils/mixins.py
@@ -27,12 +27,11 @@

from __future__ import annotations

from functools import cached_property
from functools import cache, cached_property
from typing import Any, Generic, NamedTuple, TypeVar

from typing_extensions import final

from airflow.compat.functools import cache
from airflow.providers.amazon.aws.hooks.base_aws import AwsGenericHook

AwsHookType = TypeVar("AwsHookType", bound=AwsGenericHook)
3 changes: 1 addition & 2 deletions airflow/providers/cloudant/provider.yaml
@@ -51,10 +51,9 @@ dependencies:

excluded-python-versions:
# ibmcloudant transitively brings in urllib3 2.x, but the snowflake provider has a dependency that pins
# urllib3 to 1.x on Python 3.8 and 3.9; thus we exclude those Python versions from taking the update
# urllib3 to 1.x on Python 3.9; thus we exclude those Python versions from taking the update
# to ibmcloudant.
# See #21004, #41555, and https://github.com/snowflakedb/snowflake-connector-python/issues/2016
- "3.8"
- "3.9"

integrations:
@@ -19,14 +19,14 @@
import logging
import secrets
import string
from functools import cache
from typing import TYPE_CHECKING

import pendulum
from deprecated import deprecated
from kubernetes.client.rest import ApiException
from slugify import slugify

from airflow.compat.functools import cache
from airflow.configuration import conf
from airflow.exceptions import AirflowProviderDeprecationWarning
