From 9fa75aaf7a391ebf0e6b6949445c060f6de2ceb9 Mon Sep 17 00:00:00 2001 From: Jarek Potiuk Date: Mon, 29 May 2023 12:01:11 +0200 Subject: [PATCH] Remove Python 3.7 support (#30963) --- .github/actions/breeze/action.yml | 4 +- .github/actions/build-prod-images/action.yml | 2 +- .readthedocs.yml | 2 +- BREEZE.rst | 43 +++- CI.rst | 6 +- CONTRIBUTING.rst | 10 +- Dockerfile | 2 +- Dockerfile.ci | 4 +- IMAGES.rst | 56 ++--- INSTALL | 12 +- LOCAL_VIRTUALENV.rst | 20 +- README.md | 30 +-- STATIC_CODE_CHECKS.rst | 2 +- TESTING.rst | 90 ++++---- .../endpoints/rpc_api_endpoint.py | 2 +- airflow/compat/functools.py | 8 +- airflow/compat/functools.pyi | 3 +- airflow/configuration.py | 5 +- airflow/decorators/base.py | 2 +- airflow/jobs/job.py | 2 +- airflow/models/abstractoperator.py | 3 +- airflow/models/dag.py | 5 +- airflow/models/xcom.py | 3 +- airflow/operators/bash.py | 2 +- .../SUSPENDING_AND_RESUMING_PROVIDERS.rst | 8 +- .../alibaba/cloud/log/oss_task_handler.py | 2 +- .../alibaba/cloud/sensors/oss_key.py | 2 +- airflow/providers/amazon/aws/hooks/appflow.py | 2 +- .../providers/amazon/aws/hooks/base_aws.py | 3 +- .../amazon/aws/hooks/glue_crawler.py | 2 +- .../providers/amazon/aws/hooks/quicksight.py | 2 +- .../amazon/aws/hooks/redshift_sql.py | 2 +- .../amazon/aws/log/cloudwatch_task_handler.py | 2 +- .../amazon/aws/log/s3_task_handler.py | 2 +- .../providers/amazon/aws/operators/appflow.py | 2 +- .../providers/amazon/aws/operators/athena.py | 2 +- .../providers/amazon/aws/operators/batch.py | 2 +- airflow/providers/amazon/aws/operators/ecs.py | 2 +- airflow/providers/amazon/aws/operators/emr.py | 3 +- .../amazon/aws/operators/glue_crawler.py | 2 +- .../amazon/aws/operators/lambda_function.py | 2 +- .../amazon/aws/operators/redshift_data.py | 2 +- .../amazon/aws/operators/sagemaker.py | 2 +- .../amazon/aws/secrets/secrets_manager.py | 2 +- .../amazon/aws/secrets/systems_manager.py | 2 +- .../providers/amazon/aws/sensors/athena.py | 2 +- airflow/providers/amazon/aws/sensors/batch.py | 2 +- .../amazon/aws/sensors/cloud_formation.py | 2 +- airflow/providers/amazon/aws/sensors/dms.py | 2 +- .../providers/amazon/aws/sensors/dynamodb.py | 2 +- airflow/providers/amazon/aws/sensors/ec2.py | 2 +- airflow/providers/amazon/aws/sensors/ecs.py | 2 +- airflow/providers/amazon/aws/sensors/eks.py | 2 +- airflow/providers/amazon/aws/sensors/emr.py | 3 +- .../providers/amazon/aws/sensors/glacier.py | 2 +- airflow/providers/amazon/aws/sensors/glue.py | 2 +- .../aws/sensors/glue_catalog_partition.py | 2 +- .../amazon/aws/sensors/glue_crawler.py | 2 +- .../amazon/aws/sensors/lambda_function.py | 2 +- .../amazon/aws/sensors/quicksight.py | 2 +- airflow/providers/amazon/aws/sensors/rds.py | 2 +- .../amazon/aws/sensors/redshift_cluster.py | 2 +- airflow/providers/amazon/aws/sensors/s3.py | 2 +- .../providers/amazon/aws/sensors/sagemaker.py | 2 +- airflow/providers/amazon/aws/sensors/sqs.py | 5 +- .../amazon/aws/sensors/step_function.py | 2 +- .../amazon/aws/transfers/dynamodb_to_s3.py | 2 +- .../amazon/aws/transfers/s3_to_sql.py | 2 +- .../amazon/aws/transfers/sql_to_s3.py | 12 +- .../providers/amazon/aws/triggers/batch.py | 2 +- airflow/providers/amazon/aws/triggers/ec2.py | 2 +- .../amazon/aws/triggers/redshift_cluster.py | 2 +- .../amazon/aws/triggers/sagemaker.py | 2 +- .../amazon/aws/utils/connection_wrapper.py | 2 +- .../flink/operators/flink_kubernetes.py | 2 +- airflow/providers/apache/kafka/hooks/base.py | 2 +- airflow/providers/arangodb/hooks/arangodb.py | 2 +- airflow/providers/asana/hooks/asana.py | 3 +- 
.../cncf/kubernetes/hooks/kubernetes.py | 2 +- .../cncf/kubernetes/operators/pod.py | 2 +- airflow/providers/common/sql/hooks/sql.py | 3 +- airflow/providers/common/sql/operators/sql.py | 2 +- .../databricks/hooks/databricks_base.py | 2 +- .../databricks/operators/databricks.py | 2 +- .../databricks/operators/databricks_repos.py | 2 +- .../sensors/databricks_partition.py | 2 +- .../databricks/sensors/databricks_sql.py | 2 +- airflow/providers/dbt/cloud/hooks/dbt.py | 3 +- airflow/providers/docker/hooks/docker.py | 2 +- airflow/providers/docker/operators/docker.py | 2 +- .../elasticsearch/hooks/elasticsearch.py | 2 +- airflow/providers/facebook/ads/hooks/ads.py | 2 +- airflow/providers/ftp/operators/ftp.py | 2 +- airflow/providers/google/ads/hooks/ads.py | 2 +- .../_internal_client/secret_manager_client.py | 2 +- .../providers/google/cloud/hooks/automl.py | 2 +- .../google/cloud/hooks/compute_ssh.py | 2 +- .../google/cloud/hooks/kubernetes_engine.py | 2 +- .../providers/google/cloud/hooks/pubsub.py | 2 +- .../providers/google/cloud/hooks/vision.py | 2 +- .../google/cloud/log/gcs_task_handler.py | 2 +- .../cloud/log/stackdriver_task_handler.py | 2 +- .../google/cloud/operators/bigquery_dts.py | 2 +- .../google/cloud/operators/dataflow.py | 2 +- .../cloud/operators/kubernetes_engine.py | 2 +- .../_internal_client/vault_client.py | 3 +- .../microsoft/azure/log/wasb_task_handler.py | 2 +- .../microsoft/azure/secrets/key_vault.py | 2 +- .../microsoft/azure/transfers/sftp_to_wasb.py | 2 +- .../providers/salesforce/hooks/salesforce.py | 2 +- airflow/providers/slack/hooks/slack.py | 3 +- .../providers/slack/hooks/slack_webhook.py | 3 +- .../providers/slack/notifications/slack.py | 2 +- airflow/providers/slack/operators/slack.py | 2 +- .../slack/operators/slack_webhook.py | 2 +- airflow/providers/ssh/hooks/ssh.py | 2 +- airflow/ti_deps/deps/trigger_rule_dep.py | 4 +- airflow/timetables/_cron.py | 2 +- airflow/typing_compat.py | 8 +- airflow/utils/context.py | 3 +- airflow/utils/file.py | 3 +- airflow/utils/log/file_task_handler.py | 2 +- airflow/utils/log/log_reader.py | 2 +- airflow/utils/log/secrets_masker.py | 3 +- airflow/www/extensions/init_views.py | 2 +- airflow/www/fab_security/manager.py | 2 +- airflow/www/views.py | 3 +- chart/values.schema.json | 2 +- constraints/README.md | 6 +- dev/README_RELEASE_AIRFLOW.md | 6 +- dev/README_RELEASE_PROVIDER_PACKAGES.md | 4 +- dev/breeze/README.md | 2 +- dev/breeze/SELECTIVE_CHECKS.md | 12 +- ...002-implement-standalone-python-command.md | 6 +- dev/breeze/setup.cfg | 5 +- .../commands/minor_release_command.py | 2 +- .../commands/release_candidate_command.py | 2 +- .../src/airflow_breeze/global_constants.py | 4 +- .../params/common_build_params.py | 2 +- .../airflow_breeze/utils/kubernetes_utils.py | 6 +- .../src/airflow_breeze/utils/run_utils.py | 2 +- .../airflow_breeze/utils/selective_checks.py | 34 ++- dev/breeze/tests/test_cache.py | 4 +- dev/breeze/tests/test_exclude_from_matrix.py | 10 +- dev/breeze/tests/test_selective_checks.py | 200 +++++++++--------- dev/check_files.py | 2 +- .../SETUP_TEMPLATE.cfg.jinja2 | 2 +- .../prepare_provider_packages.py | 4 +- dev/retag_docker_images.py | 2 +- dev/stats/get_important_pr_candidates.py | 7 +- ...validate_version_added_fields_in_config.py | 2 +- docker_tests/docker_tests_utils.py | 6 +- .../installing-from-pypi.rst | 2 +- .../modules_management.rst | 16 +- docs/apache-airflow/extra-packages-ref.rst | 2 +- .../installation/installing-from-pypi.rst | 18 +- .../installation/prerequisites.rst | 2 +- 
.../installation/supported-versions.rst | 4 +- docs/apache-airflow/start.rst | 6 +- docs/docker-stack/README.md | 8 +- docs/docker-stack/build-arg-ref.rst | 2 +- docs/docker-stack/build.rst | 16 +- .../customizing/add-build-essential-custom.sh | 2 +- .../customizing/custom-sources.sh | 2 +- .../customizing/github-main.sh | 2 +- .../customizing/pypi-dev-runtime-deps.sh | 2 +- .../customizing/pypi-selected-version.sh | 2 +- .../restricted/restricted_environments.sh | 12 +- docs/docker-stack/entrypoint.rst | 8 +- docs/docker-stack/index.rst | 8 +- images/breeze/output-commands-hash.txt | 66 +++--- images/breeze/output-commands.svg | 92 ++++---- images/breeze/output_ci-image_build.svg | 6 +- images/breeze/output_ci-image_pull.svg | 60 +++--- images/breeze/output_ci-image_verify.svg | 32 ++- images/breeze/output_k8s_build-k8s-image.svg | 52 +++-- .../breeze/output_k8s_configure-cluster.svg | 60 +++--- images/breeze/output_k8s_create-cluster.svg | 6 +- images/breeze/output_k8s_delete-cluster.svg | 34 ++- images/breeze/output_k8s_deploy-airflow.svg | 70 +++--- images/breeze/output_k8s_k9s.svg | 32 ++- images/breeze/output_k8s_logs.svg | 34 ++- .../breeze/output_k8s_run-complete-tests.svg | 68 +++--- images/breeze/output_k8s_shell.svg | 40 ++-- images/breeze/output_k8s_status.svg | 36 ++-- images/breeze/output_k8s_tests.svg | 68 +++--- images/breeze/output_k8s_upload-k8s-image.svg | 58 +++-- images/breeze/output_prod-image_build.svg | 6 +- images/breeze/output_prod-image_pull.svg | 60 +++--- images/breeze/output_prod-image_verify.svg | 34 ++- ...elease-management_generate-constraints.svg | 6 +- ...release-management_release-prod-images.svg | 2 +- images/breeze/output_setup_config.svg | 4 +- images/breeze/output_shell.svg | 110 +++++----- images/breeze/output_start-airflow.svg | 4 +- .../output_testing_docker-compose-tests.svg | 4 +- .../output_testing_integration-tests.svg | 52 +++-- images/breeze/output_testing_tests.svg | 82 ++++--- pyproject.toml | 6 +- scripts/ci/docker-compose/devcontainer.env | 4 +- scripts/ci/docker-compose/devcontainer.yml | 2 +- .../pre_commit_check_provider_yaml_files.py | 2 +- .../pre_commit_migration_reference.py | 2 +- scripts/ci/pre_commit/pre_commit_mypy.py | 2 +- .../pre_commit_update_er_diagram.py | 2 +- scripts/docker/entrypoint_ci.sh | 2 +- scripts/in_container/verify_providers.py | 7 +- setup.cfg | 8 +- setup.py | 3 - tests/executors/test_debug_executor.py | 6 +- .../aws/deferrable/hooks/test_base_aws.py | 6 +- .../deferrable/hooks/test_redshift_cluster.py | 20 +- .../triggers/test_redshift_cluster.py | 13 +- .../amazon/aws/triggers/test_batch.py | 12 +- .../providers/amazon/aws/triggers/test_ec2.py | 19 +- .../aws/triggers/test_redshift_cluster.py | 150 +++++++------ .../amazon/aws/triggers/test_sagemaker.py | 15 +- tests/providers/amazon/aws/utils/compat.py | 37 ---- tests/providers/apache/livy/compat.py | 37 ---- .../providers/apache/livy/hooks/test_livy.py | 46 ++-- .../apache/livy/triggers/test_livy.py | 20 +- .../databricks/hooks/test_databricks.py | 10 +- .../dbt/cloud/triggers/test_dbt_cloud.py | 39 ++-- .../google/cloud/hooks/test_bigquery.py | 16 +- .../google/cloud/hooks/test_bigquery_dts.py | 21 +- .../google/cloud/hooks/test_cloud_build.py | 7 +- .../google/cloud/hooks/test_cloud_composer.py | 10 +- .../google/cloud/hooks/test_dataproc.py | 37 ++-- .../google/cloud/operators/test_mlengine.py | 2 +- .../google/cloud/triggers/test_bigquery.py | 69 +++--- .../cloud/triggers/test_bigquery_dts.py | 20 +- .../google/cloud/triggers/test_cloud_build.py | 
10 +- .../google/cloud/triggers/test_dataflow.py | 10 +- .../google/cloud/triggers/test_datafusion.py | 10 +- .../google/cloud/triggers/test_dataproc.py | 26 +-- .../google/cloud/triggers/test_gcs.py | 57 ++--- .../google/cloud/triggers/test_mlengine.py | 12 +- tests/providers/google/cloud/utils/compat.py | 37 ---- .../azure/hooks/test_azure_data_factory.py | 48 ++--- .../azure/triggers/test_azure_data_factory.py | 38 ++-- .../cloud/ml_engine/example_mlengine.py | 6 +- .../cloud/ml_engine/example_mlengine_async.py | 6 +- .../providers/papermill/input_notebook.ipynb | 2 +- 243 files changed, 1424 insertions(+), 1680 deletions(-) delete mode 100644 tests/providers/amazon/aws/utils/compat.py delete mode 100644 tests/providers/apache/livy/compat.py delete mode 100644 tests/providers/google/cloud/utils/compat.py diff --git a/.github/actions/breeze/action.yml b/.github/actions/breeze/action.yml index 351e17eb2c550..77de5f2d295b4 100644 --- a/.github/actions/breeze/action.yml +++ b/.github/actions/breeze/action.yml @@ -37,8 +37,8 @@ runs: path: ~/.local/pipx # README has the latest breeze's hash and python location is used to distinguish between # different minor versions of python - key: "breeze-${{ env.pythonLocation }}-${{ hashFiles('dev/breeze/README.md') }}" - restore-keys: breeze-${{ env.pythonLocation }} + key: "breeze-3.8-${{ env.pythonLocation }}-${{ hashFiles('dev/breeze/README.md') }}" + restore-keys: breeze-3.8-${{ env.pythonLocation }} - name: "Install Breeze" shell: bash run: ./scripts/ci/install_breeze.sh diff --git a/.github/actions/build-prod-images/action.yml b/.github/actions/build-prod-images/action.yml index b4cec13ddc1a1..6d0ca1b550896 100644 --- a/.github/actions/build-prod-images/action.yml +++ b/.github/actions/build-prod-images/action.yml @@ -37,7 +37,7 @@ runs: shell: bash run: breeze ci-image pull --tag-as-latest env: - PYTHON_MAJOR_MINOR_VERSION: "3.7" + PYTHON_MAJOR_MINOR_VERSION: "3.8" - name: "Cleanup dist and context file" shell: bash run: rm -fv ./dist/* ./docker-context-files/* diff --git a/.readthedocs.yml b/.readthedocs.yml index 592c9da64e1cf..aa16e3a8e3d57 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ formats: [] sphinx: configuration: docs/rtd-deprecation/conf.py python: - version: "3.7" + version: "3.8" install: - method: pip path: . diff --git a/BREEZE.rst b/BREEZE.rst index f629afed47b48..5994cea06af1c 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -245,6 +245,17 @@ Run this command to install Breeze (make sure to use ``-e`` flag): pipx install -e ./dev/breeze + +.. note:: Note for Windows users + + The ``./dev/breeze`` in the command above is the path to the sub-folder where the Breeze source packages are. + If you are on Windows, you should use the Windows way to point to the ``dev/breeze`` sub-folder + of Airflow, either as an absolute or a relative path. For example: + + .. code-block:: bash + + pipx install -e dev\breeze + Once this is complete, you should have ``breeze`` binary on your PATH and available to run by ``breeze`` command. @@ -278,6 +289,26 @@ where it was installed. You can run ``breeze setup version`` command to see where breeze installed from and what are the current sources that Breeze works on +.. warning:: Upgrading from earlier Python version + + If you used Breeze with Python 3.7, it will complain when run that it now needs Python 3.8. In this + case you should force-reinstall Breeze with ``pipx``: + + .. code-block:: bash + + pipx install --force -e ./dev/breeze + + ..
note:: Note for Windows users + + The ``./dev/breeze`` in the command above is the path to the sub-folder where the Breeze source packages are. + If you are on Windows, you should use the Windows way to point to the ``dev/breeze`` sub-folder + of Airflow, either as an absolute or a relative path. For example: + + .. code-block:: bash + + pipx install --force -e dev\breeze + + Running Breeze for the first time --------------------------------- @@ -360,12 +391,12 @@ You can use additional ``breeze`` flags to choose your environment. You can spec version to use, and backend (the meta-data database). Thanks to that, with Breeze, you can recreate the same environments as we have in matrix builds in the CI. -For example, you can choose to run Python 3.7 tests with MySQL as backend and with mysql version 8 +For example, you can choose to run Python 3.8 tests with MySQL as backend and with mysql version 8 as follows: .. code-block:: bash - breeze --python 3.7 --backend mysql --mysql-version 8 + breeze --python 3.8 --backend mysql --mysql-version 8 The choices you make are persisted in the ``./.build/`` cache directory so that next time when you use the ``breeze`` script, it could use the values that were used previously. This way you do not have to specify @@ -526,7 +557,7 @@ When you are starting airflow from local sources, www asset compilation is autom .. code-block:: bash - breeze --python 3.7 --backend mysql start-airflow + breeze --python 3.8 --backend mysql start-airflow You can also use it to start any released version of Airflow from ``PyPI`` with the @@ -534,7 +565,7 @@ You can also use it to start any released version of Airflow from ``PyPI`` with .. code-block:: bash - breeze start-airflow --python 3.7 --backend mysql --use-airflow-version 2.2.5 + breeze start-airflow --python 3.8 --backend mysql --use-airflow-version 2.2.5 Those are all available flags of ``start-airflow`` command: @@ -1422,10 +1453,10 @@ suffix and they need to also be paired with corresponding runtime dependency add .. code-block:: bash - breeze prod-image build --python 3.7 --additional-dev-deps "libasound2-dev" \ --additional-runtime-apt-deps "libasound2" + breeze prod-image build --python 3.8 --additional-dev-deps "libasound2-dev" \ --additional-runtime-apt-deps "libasound2" -Same as above but uses python 3.7. +Same as above but uses python 3.8. Building PROD image ................... diff --git a/CI.rst b/CI.rst index 2484e8dc28360..80e158981548b 100644 --- a/CI.rst +++ b/CI.rst @@ -145,7 +145,7 @@ have to be percent-encoded when you access them via UI (/ = %2F) +--------------+----------------------------------------------------------+----------------------------------------------------------+ * might be either "main" or "v2-*-test" -* - Python version (Major + Minor).Should be one of ["3.7", "3.8", "3.9"]. +* - Python version (Major + Minor). Should be one of ["3.8", "3.9", "3.10", "3.11"]. * - full-length SHA of commit either from the tip of the branch (for pushes/schedule) or commit from the tip of the branch used for the PR. @@ -523,9 +523,9 @@ For example knowing that the CI job was for commit ``cd27124534b46c9688a1d89e75f ..
code-block:: bash - docker pull ghcr.io/apache/airflow/main/ci/python3.7:cd27124534b46c9688a1d89e75fcd137ab5137e3 + docker pull ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3 - docker run -it ghcr.io/apache/airflow/main/ci/python3.7:cd27124534b46c9688a1d89e75fcd137ab5137e3 + docker run -it ghcr.io/apache/airflow/main/ci/python3.8:cd27124534b46c9688a1d89e75fcd137ab5137e3 But you usually need to pass more variables and complex setup if you want to connect to a database or diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index a3f8afe63136d..120adb2b26a43 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -414,7 +414,7 @@ these guidelines: - Run tests locally before opening PR. - You can use any supported python version to run the tests, but the best is to check - if it works for the oldest supported version (Python 3.7 currently). In rare cases + if it works for the oldest supported version (Python 3.8 currently). In rare cases tests might fail with the oldest version when you use features that are available in newer Python versions. For that purpose we have ``airflow.compat`` package where we keep back-ported useful features from newer versions. @@ -840,7 +840,7 @@ from the PyPI package: .. code-block:: bash pip install apache-airflow[google,amazon,async]==2.2.5 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.2.5/constraints-3.8.txt" The last one can be used to install Airflow in "minimal" mode - i.e when bare Airflow is installed without extras. @@ -852,7 +852,7 @@ requirements). .. code-block:: bash pip install -e . \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" This works also with extras - for example: @@ -860,7 +860,7 @@ This works also with extras - for example: .. code-block:: bash pip install ".[ssh]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" There are different set of fixed constraint files for different python major/minor versions and you should @@ -872,7 +872,7 @@ If you want to update just airflow dependencies, without paying attention to pro .. code-block:: bash pip install . --upgrade \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-no-providers-3.8.txt" The ``constraints-.txt`` and ``constraints-no-providers-.txt`` diff --git a/Dockerfile b/Dockerfile index 8ddf2b5574bbd..8245cbfd46af0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -46,7 +46,7 @@ ARG AIRFLOW_USER_HOME_DIR=/home/airflow # latest released version here ARG AIRFLOW_VERSION="2.6.1" -ARG PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" +ARG PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" ARG AIRFLOW_PIP_VERSION=23.1.2 ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" diff --git a/Dockerfile.ci b/Dockerfile.ci index 0c5cda1395b3b..a8eb998fa2ac0 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -16,7 +16,7 @@ # # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. 
# -ARG PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" +ARG PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" ############################################################################################## # This is the script image where we keep all inlined bash scripts needed in other segments @@ -615,7 +615,7 @@ chmod 1777 /tmp AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) -PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.7} +PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.8} export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} diff --git a/IMAGES.rst b/IMAGES.rst index 9034077ce30b7..cb049bf005712 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -99,18 +99,18 @@ image version for the chosen Python version. The images are build with default extras - different extras for CI and production image and you can change the extras via the ``--extras`` parameters and add new ones with ``--additional-extras``. -For example if you want to build Python 3.7 version of production image with +For example if you want to build Python 3.8 version of production image with "all" extras installed you should run this command: .. code-block:: bash - breeze prod-image build --python 3.7 --extras "all" + breeze prod-image build --python 3.8 --extras "all" If you just want to add new extras you can add them like that: .. code-block:: bash - breeze prod-image build --python 3.7 --additional-extras "all" + breeze prod-image build --python 3.8 --additional-extras "all" The command that builds the CI image is optimized to minimize the time needed to rebuild the image when the source code of Airflow evolves. This means that if you already have the image locally downloaded and @@ -128,7 +128,7 @@ parameter to Breeze: .. code-block:: bash - breeze prod-image build --python 3.7 --additional-extras=trino --install-airflow-version=2.0.0 + breeze prod-image build --python 3.8 --additional-extras=trino --install-airflow-version=2.0.0 This will build the image using command similar to: @@ -136,7 +136,7 @@ This will build the image using command similar to: pip install \ apache-airflow[async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv]==2.0.0 \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.0/constraints-3.8.txt" .. note:: @@ -163,14 +163,14 @@ HEAD of development for constraints): .. code-block:: bash pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" You can also skip installing airflow and install it from locally provided files by using ``--install-packages-from-context`` parameter to Breeze: .. code-block:: bash - breeze prod-image build --python 3.7 --additional-extras=trino --install-packages-from-context + breeze prod-image build --python 3.8 --additional-extras=trino --install-packages-from-context In this case you airflow and all packages (.whl files) should be placed in ``docker-context-files`` folder. @@ -197,21 +197,21 @@ or ``disabled`` flags when you run Breeze commands. For example: .. 
code-block:: bash - breeze ci-image build --python 3.7 --docker-cache local + breeze ci-image build --python 3.8 --docker-cache local Will build the CI image using local build cache (note that it will take quite a long time the first time you run it). .. code-block:: bash - breeze prod-image build --python 3.7 --docker-cache registry + breeze prod-image build --python 3.8 --docker-cache registry Will build the production image with cache used from registry. .. code-block:: bash - breeze prod-image build --python 3.7 --docker-cache disabled + breeze prod-image build --python 3.8 --docker-cache disabled Will build the production image from the scratch. @@ -309,7 +309,7 @@ in the `<#ci-image-build-arguments>`_ chapter below. Here just a few examples are presented which should give you general understanding of what you can customize. -This builds the production image in version 3.7 with additional airflow extras from 2.0.0 PyPI package and +This builds the production image in version 3.8 with additional airflow extras from 2.0.0 PyPI package and additional apt dev and runtime dependencies. As of Airflow 2.3.0, it is required to build images with ``DOCKER_BUILDKIT=1`` variable @@ -320,7 +320,7 @@ you have ``buildx`` plugin installed. DOCKER_BUILDKIT=1 docker build . -f Dockerfile.ci \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ @@ -331,7 +331,7 @@ the same image can be built using ``breeze`` (it supports auto-completion of the .. code-block:: bash - breeze ci-image build --python 3.7 --additional-extras=jdbc --additional-python-deps="pandas" \ + breeze ci-image build --python 3.8 --additional-extras=jdbc --additional-python-deps="pandas" \ --additional-dev-apt-deps="gcc g++" You can customize more aspects of the image - such as additional commands executed before apt dependencies @@ -343,7 +343,7 @@ based on example in `this comment ]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" This will install Airflow in 'editable' mode - where sources of Airflow are taken directly from the source code rather than moved to the installation directory. During the installation airflow will install - but then @@ -173,9 +173,9 @@ You can also install Airflow in non-editable mode: .. code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, 3.9, 3.10 or 3.11 + # use the same version of python as you are working with, 3.8, 3.9, 3.10 or 3.11 pip install ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" This will copy the sources to directory where usually python packages are installed. You can see the list of directories via ``python -m site`` command. In this case the providers are installed from PyPI, not from @@ -183,9 +183,9 @@ sources, unless you set ``INSTALL_PROVIDERS_FROM_SOURCES`` environment variable .. 
code-block:: bash - # use the same version of python as you are working with, 3.7, 3.8, 3.9, 3.10 or 3.11 + # use the same version of python as you are working with, 3.8, 3.9, 3.10 or 3.11 INSTALL_PROVIDERS_FROM_SOURCES="true" pip install ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-source-providers-3.8.txt" Note: when you first initialize database (the next step), you may encounter some problems. @@ -244,7 +244,7 @@ before running ``pip install`` command: .. code-block:: bash INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -U -e ".[devel,]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt" This way no providers packages will be installed and they will always be imported from the "airflow/providers" folder. diff --git a/README.md b/README.md index 291d0add6232f..c206d410dd44a 100644 --- a/README.md +++ b/README.md @@ -86,15 +86,15 @@ Airflow is not a streaming solution, but it is often used to process real-time d Apache Airflow is tested with: -| | Main version (dev) | Stable version (2.6.1) | -|------------|------------------------------|------------------------| -| Python | 3.7, 3.8, 3.9, 3.10, 3.11 | 3.7, 3.8, 3.9, 3.10 | -| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | -| Kubernetes | 1.23, 1.24, 1.25, 1.26, 1.27 | 1.23, 1.24, 1.25, 1.26 | -| PostgreSQL | 11, 12, 13, 14, 15 | 11, 12, 13, 14, 15 | -| MySQL | 5.7, 8 | 5.7, 8 | -| SQLite | 3.15.0+ | 3.15.0+ | -| MSSQL | 2017(\*), 2019(\*) | 2017(\*), 2019(\*) | +| | Main version (dev) | Stable version (2.6.1) | +|-------------|------------------------------|-------------------------| +| Python | 3.8, 3.9, 3.10, 3.11 | 3.8, 3.9, 3.10 | +| Platform | AMD64/ARM64(\*) | AMD64/ARM64(\*) | +| Kubernetes | 1.23, 1.24, 1.25, 1.26, 1.27 | 1.23, 1.24, 1.25, 1.26 | +| PostgreSQL | 11, 12, 13, 14, 15 | 11, 12, 13, 14, 15 | +| MySQL | 5.7, 8 | 5.7, 8 | +| SQLite | 3.15.0+ | 3.15.0+ | +| MSSQL | 2017(\*), 2019(\*) | 2017(\*), 2019(\*) | \* Experimental @@ -164,14 +164,14 @@ them to the appropriate format and workflow that your tool requires. ```bash pip install 'apache-airflow==2.6.1' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.1/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.1/constraints-3.8.txt" ``` 2. Installing with extras (i.e., postgres, google) ```bash pip install 'apache-airflow[postgres,google]==2.6.1' \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.1/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.6.1/constraints-3.8.txt" ``` For information on installing provider packages, check @@ -303,16 +303,16 @@ They are based on the official release schedule of Python and Kubernetes, nicely 1. We drop support for Python and Kubernetes versions when they reach EOL. Except for Kubernetes, a version stays supported by Airflow if two major cloud providers still provide support for it. We drop support for those EOL versions in main right after EOL date, and it is effectively removed when we release - the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. 
For example, for Python 3.7 it + the first new MINOR (Or MAJOR if there is no new MINOR version) of Airflow. For example, for Python 3.8 it means that we will drop support in main right after 27.06.2023, and the first MAJOR or MINOR version of Airflow released after will not have it. 2. The "oldest" supported version of Python/Kubernetes is the default one until we decide to switch to later version. "Default" is only meaningful in terms of "smoke tests" in CI PRs, which are run using this default version and the default reference image available. Currently `apache/airflow:latest` - and `apache/airflow:2.6.1` images are Python 3.7 images. This means that default reference image will - become the default at the time when we start preparing for dropping 3.7 support which is few months - before the end of life for Python 3.7. + and `apache/airflow:2.6.1` images are Python 3.8 images. This means that the default reference image will + become the default at the time when we start preparing for dropping 3.8 support, which is a few months + before the end of life for Python 3.8. 3. We support a new version of Python/Kubernetes in main after they are officially released, as soon as we make them work in our CI pipeline (which might not be immediate due to dependencies catching up with diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index c7bb2528a4c28..657976939ff5e 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -41,7 +41,7 @@ use. So, you can be sure your modifications will also work for CI if they pass pre-commit hooks. We have integrated the fantastic `pre-commit `__ framework -in our development workflow. To install and use it, you need at least Python 3.7 locally. +in our development workflow. To install and use it, you need at least Python 3.8 locally. Installing pre-commit hooks ........................... diff --git a/TESTING.rst b/TESTING.rst index 37d9bbe081bd5..0d4312a3e2a07 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -789,7 +789,7 @@ per each combination of Python and Kubernetes version. This is used during CI wh tests against those different clusters - even in parallel. The cluster name follows the pattern ``airflow-python-X.Y-vA.B.C`` where X.Y is a major/minor Python version -and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.7-v1.24.0`` +and A.B.C is Kubernetes version. Example cluster name: ``airflow-python-3.8-v1.24.0`` Most of the commands can be executed in parallel for multiple images/clusters by adding ``--run-in-parallel`` to create clusters or deploy airflow. Similarly checking for status, dumping logs and deleting clusters @@ -957,7 +957,7 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.7-v1.24.2/.kindconfig.yaml: + Config created in /Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml: # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file @@ -993,7 +993,7 @@ Should result in KinD creating the K8S cluster. - Creating cluster "airflow-python-3.7-v1.24.2" ... + Creating cluster "airflow-python-3.8-v1.24.2" ... ✓ Ensuring node image (kindest/node:v1.24.2) 🖼 ✓ Preparing nodes 📦 📦 ✓ Writing configuration 📜 @@ -1001,10 +1001,10 @@ Should result in KinD creating the K8S cluster.
✓ Installing CNI 🔌 ✓ Installing StorageClass 💾 ✓ Joining worker nodes 🚜 - Set kubectl context to "kind-airflow-python-3.7-v1.24.2" + Set kubectl context to "kind-airflow-python-3.8-v1.24.2" You can now use your cluster with: - kubectl cluster-info --context kind-airflow-python-3.7-v1.24.2 + kubectl cluster-info --context kind-airflow-python-3.8-v1.24.2 Not sure what to do next? 😅 Check out https://kind.sigs.k8s.io/docs/user/quick-start/ @@ -1012,9 +1012,9 @@ Should result in KinD creating the K8S cluster. Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.7 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow - KinD cluster airflow-python-3.7-v1.24.2 created! + KinD cluster airflow-python-3.8-v1.24.2 created! NEXT STEP: You might now configure your cluster by: @@ -1028,20 +1028,20 @@ Should result in KinD creating the K8S cluster. .. code-block:: text - Configuring airflow-python-3.7-v1.24.2 to be ready for Airflow deployment - Deleting K8S namespaces for kind-airflow-python-3.7-v1.24.2 + Configuring airflow-python-3.8-v1.24.2 to be ready for Airflow deployment + Deleting K8S namespaces for kind-airflow-python-3.8-v1.24.2 Error from server (NotFound): namespaces "airflow" not found Error from server (NotFound): namespaces "test-namespace" not found Creating namespaces namespace/airflow created namespace/test-namespace created - Created K8S namespaces for cluster kind-airflow-python-3.7-v1.24.2 + Created K8S namespaces for cluster kind-airflow-python-3.8-v1.24.2 - Deploying test resources for cluster kind-airflow-python-3.7-v1.24.2 + Deploying test resources for cluster kind-airflow-python-3.8-v1.24.2 persistentvolume/test-volume created persistentvolumeclaim/test-volume created service/airflow-webserver-node-port created - Deployed test resources for cluster kind-airflow-python-3.7-v1.24.2 + Deployed test resources for cluster kind-airflow-python-3.8-v1.24.2 NEXT STEP: You might now build your k8s image by: @@ -1059,45 +1059,45 @@ Should show the status of current KinD cluster. .. code-block:: text ======================================================================================================================== - Cluster: airflow-python-3.7-v1.24.2 + Cluster: airflow-python-3.8-v1.24.2 - * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.7-v1.24.2/.kubeconfig - * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.7-v1.24.2/.kindconfig.yaml + * KUBECONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kubeconfig + * KINDCONFIG=/Users/jarek/IdeaProjects/airflow/.build/.k8s-clusters/airflow-python-3.8-v1.24.2/.kindconfig.yaml - Cluster info: airflow-python-3.7-v1.24.2 + Cluster info: airflow-python-3.8-v1.24.2 Kubernetes control plane is running at https://127.0.0.1:48366 CoreDNS is running at https://127.0.0.1:48366/api/v1/namespaces/kube-system/services/kube-dns:dns/proxy To further debug and diagnose cluster problems, use 'kubectl cluster-info dump'. 
- Storage class for airflow-python-3.7-v1.24.2 + Storage class for airflow-python-3.8-v1.24.2 NAME PROVISIONER RECLAIMPOLICY VOLUMEBINDINGMODE ALLOWVOLUMEEXPANSION AGE standard (default) rancher.io/local-path Delete WaitForFirstConsumer false 83s - Running pods for airflow-python-3.7-v1.24.2 + Running pods for airflow-python-3.8-v1.24.2 NAME READY STATUS RESTARTS AGE coredns-6d4b75cb6d-rwp9d 1/1 Running 0 71s coredns-6d4b75cb6d-vqnrc 1/1 Running 0 71s - etcd-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s + etcd-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s kindnet-ckc8l 1/1 Running 0 69s kindnet-qqt8k 1/1 Running 0 71s - kube-apiserver-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s - kube-controller-manager-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s + kube-apiserver-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s + kube-controller-manager-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s kube-proxy-6g7hn 1/1 Running 0 69s kube-proxy-dwfvp 1/1 Running 0 71s - kube-scheduler-airflow-python-3.7-v1.24.2-control-plane 1/1 Running 0 84s + kube-scheduler-airflow-python-3.8-v1.24.2-control-plane 1/1 Running 0 84s KinD Cluster API server URL: http://localhost:48366 Connecting to localhost:18150. Num try: 1 Error when connecting to localhost:18150 : ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response')) - Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.7 --kubernetes-version v1.24.2` to (re)deploy airflow + Airflow webserver is not available at port 18150. Run `breeze k8s deploy-airflow --python 3.8 --kubernetes-version v1.24.2` to (re)deploy airflow - Cluster healthy: airflow-python-3.7-v1.24.2 + Cluster healthy: airflow-python-3.8-v1.24.2 5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will guide you if you did not - either by running the build separately or passing ``--rebuild-base-image`` flag @@ -1108,15 +1108,15 @@ Should show the status of current KinD cluster. .. code-block:: text - Building the K8S image for Python 3.7 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.7:latest + Building the K8S image for Python 3.8 using airflow base image: ghcr.io/apache/airflow/main/prod/python3.8:latest [+] Building 0.1s (8/8) FINISHED => [internal] load build definition from Dockerfile 0.0s => => transferring dockerfile: 301B 0.0s => [internal] load .dockerignore 0.0s => => transferring context: 35B 0.0s - => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.7:latest 0.0s - => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.7:latest 0.0s + => [internal] load metadata for ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s + => [1/3] FROM ghcr.io/apache/airflow/main/prod/python3.8:latest 0.0s => [internal] load build context 0.0s => => transferring context: 3.00kB 0.0s => CACHED [2/3] COPY airflow/example_dags/ /opt/airflow/dags/ 0.0s @@ -1124,7 +1124,7 @@ Should show the status of current KinD cluster. => exporting to image 0.0s => => exporting layers 0.0s => => writing image sha256:c0bdd363c549c3b0731b8e8ce34153d081f239ee2b582355b7b3ffd5394c40bb 0.0s - => => naming to ghcr.io/apache/airflow/main/prod/python3.7-kubernetes:latest + => => naming to ghcr.io/apache/airflow/main/prod/python3.8-kubernetes:latest NEXT STEP: You might now upload your k8s image by: @@ -1144,9 +1144,9 @@ Should show the status of current KinD cluster. 
Good version of kubectl installed: 1.25.0 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Good version of helm installed: 3.9.2 in /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin Stable repo is already added - Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.7-kubernetes to cluster airflow-python-3.7-v1.24.2 - Image: "ghcr.io/apache/airflow/main/prod/python3.7-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.7-v1.24.2-worker", loading... - Image: "ghcr.io/apache/airflow/main/prod/python3.7-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.7-v1.24.2-control-plane", loading... + Uploading Airflow image ghcr.io/apache/airflow/main/prod/python3.8-kubernetes to cluster airflow-python-3.8-v1.24.2 + Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-worker", loading... + Image: "ghcr.io/apache/airflow/main/prod/python3.8-kubernetes" with ID "sha256:fb6195f7c2c2ad97788a563a3fe9420bf3576c85575378d642cd7985aff97412" not yet present on node "airflow-python-3.8-v1.24.2-control-plane", loading... NEXT STEP: You might now deploy airflow by: @@ -1161,8 +1161,8 @@ Should show the status of current KinD cluster. .. code-block:: text - Deploying Airflow for cluster airflow-python-3.7-v1.24.2 - Deploying kind-airflow-python-3.7-v1.24.2 with airflow Helm Chart. + Deploying Airflow for cluster airflow-python-3.8-v1.24.2 + Deploying kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. Copied chart sources to /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart Deploying Airflow from /private/var/folders/v3/gvj4_mw152q556w2rrh7m46w0000gn/T/chart_edu__kir/chart NAME: airflow @@ -1204,12 +1204,12 @@ Should show the status of current KinD cluster. Information on how to set a static webserver secret key can be found here: https://airflow.apache.org/docs/helm-chart/stable/production-guide.html#webserver-secret-key - Deployed kind-airflow-python-3.7-v1.24.2 with airflow Helm Chart. + Deployed kind-airflow-python-3.8-v1.24.2 with airflow Helm Chart. - Airflow for Python 3.7 and K8S version v1.24.2 has been successfully deployed. + Airflow for Python 3.8 and K8S version v1.24.2 has been successfully deployed. - The KinD cluster name: airflow-python-3.7-v1.24.2 - The kubectl cluster name: kind-airflow-python-3.7-v1.24.2. + The KinD cluster name: airflow-python-3.8-v1.24.2 + The kubectl cluster name: kind-airflow-python-3.8-v1.24.2. KinD Cluster API server URL: http://localhost:48366 @@ -1243,7 +1243,7 @@ The virtualenv required will be created automatically when the scripts are run. .. code-block:: text - Running tests with kind-airflow-python-3.7-v1.24.2 cluster. + Running tests with kind-airflow-python-3.8-v1.24.2 cluster. Command to run: pytest kubernetes_tests ========================================================================================= test session starts ========================================================================================== platform darwin -- Python 3.9.9, pytest-6.2.5, py-1.11.0, pluggy-1.0.0 -- /Users/jarek/IdeaProjects/airflow/.build/.k8s-env/bin/python @@ -1272,7 +1272,7 @@ Once you enter the environment, you receive this information: Entering interactive k8s shell. 
- (kind-airflow-python-3.7-v1.24.2:KubernetesExecutor)> + (kind-airflow-python-3.8-v1.24.2:KubernetesExecutor)> In a separate terminal you can open the k9s CLI: @@ -1382,9 +1382,9 @@ Kind has also useful commands to inspect your running cluster: .. code-block:: text - Deleting KinD cluster airflow-python-3.7-v1.24.2! - Deleting cluster "airflow-python-3.7-v1.24.2" ... - KinD cluster airflow-python-3.7-v1.24.2 deleted! + Deleting KinD cluster airflow-python-3.8-v1.24.2! + Deleting cluster "airflow-python-3.8-v1.24.2" ... + KinD cluster airflow-python-3.8-v1.24.2 deleted! Running complete k8s tests @@ -1543,13 +1543,13 @@ Here is the typical session that you need to do to run system tests: .. code-block:: bash breeze down - breeze --python 3.7 --db-reset --forward-credentials + breeze --python 3.8 --db-reset --forward-credentials This will: * stop the whole environment (i.e. recreates metadata database from the scratch) * run Breeze with: - * python 3.7 version + * python 3.8 version * resetting the Airflow database * forward your local credentials to Breeze @@ -1599,7 +1599,7 @@ Breeze session. They are usually expensive to run. .. code-block:: bash breeze down - breeze --python 3.7 --db-reset --forward-credentials + breeze --python 3.8 --db-reset --forward-credentials 2. Run create action in helper (to create slowly created resources): diff --git a/airflow/api_internal/endpoints/rpc_api_endpoint.py b/airflow/api_internal/endpoints/rpc_api_endpoint.py index 50e72c614c1e7..b6ac604c05393 100644 --- a/airflow/api_internal/endpoints/rpc_api_endpoint.py +++ b/airflow/api_internal/endpoints/rpc_api_endpoint.py @@ -30,7 +30,7 @@ log = logging.getLogger(__name__) -@functools.lru_cache() +@functools.lru_cache def _initialize_map() -> dict[str, Callable]: from airflow.dag_processing.manager import DagFileProcessorManager from airflow.dag_processing.processor import DagFileProcessor diff --git a/airflow/compat/functools.py b/airflow/compat/functools.py index dc0c520b796c0..7b521f9f9da2e 100644 --- a/airflow/compat/functools.py +++ b/airflow/compat/functools.py @@ -19,11 +19,6 @@ import sys -if sys.version_info >= (3, 8): - from functools import cached_property -else: - from cached_property import cached_property - if sys.version_info >= (3, 9): from functools import cache else: @@ -31,5 +26,8 @@ cache = lru_cache(maxsize=None) +# We need to keep it around, in case it was used in the code of old providers, but since we are +# Python 3.8+ we can directly import the functools one +from functools import cached_property # type: ignore __all__ = ["cache", "cached_property"] diff --git a/airflow/compat/functools.pyi b/airflow/compat/functools.pyi index 32cbbaa431e1e..7d1bef5939e6e 100644 --- a/airflow/compat/functools.pyi +++ b/airflow/compat/functools.pyi @@ -20,9 +20,8 @@ # TODO: Remove this file after the upstream fix is available in our toolchain. from __future__ import annotations -from typing import Callable, TypeVar +from typing import TypeVar T = TypeVar("T") -def cached_property(f: Callable[..., T]) -> T: ... def cache(f: T) -> T: ... 
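The ``functools`` hunks above are the crux of the compatibility cleanup: on Python 3.8+ ``cached_property`` lives in the standard library, so the third-party ``cached_property`` backport can be dropped and ``airflow.compat.functools`` shrinks to a thin re-export kept only for old provider code; only ``cache`` still needs a fallback until Python 3.9 becomes the floor. A minimal sketch of the resulting shim, paraphrasing the hunk above:

.. code-block:: python

    from __future__ import annotations

    import sys
    from functools import lru_cache

    if sys.version_info >= (3, 9):
        from functools import cache
    else:
        # functools.cache only exists from Python 3.9; an unbounded
        # lru_cache behaves the same way on Python 3.8.
        cache = lru_cache(maxsize=None)

    # Kept only so that old provider code doing
    # ``from airflow.compat.functools import cached_property`` keeps working;
    # on Python 3.8+ this is simply the stdlib implementation.
    from functools import cached_property  # noqa: E402

    __all__ = ["cache", "cached_property"]

The same floor-raise is why the ``rpc_api_endpoint.py`` hunk can drop the parentheses from ``@functools.lru_cache()``: since Python 3.8, ``lru_cache`` is usable directly as a bare decorator.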
diff --git a/airflow/configuration.py b/airflow/configuration.py index be988e01b7977..694032cd9ed34 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -42,7 +42,6 @@ from typing_extensions import overload -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowConfigException from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend from airflow.utils import yaml @@ -258,11 +257,11 @@ class AirflowConfigParser(ConfigParser): # Now build the inverse so we can go from old_section/old_key to new_section/new_key # if someone tries to retrieve it based on old_section/old_key - @cached_property + @functools.cached_property def inversed_deprecated_options(self): return {(sec, name): key for key, (sec, name, ver) in self.deprecated_options.items()} - @cached_property + @functools.cached_property def inversed_deprecated_sections(self): return { old_section: new_section for new_section, (old_section, ver) in self.deprecated_sections.items() diff --git a/airflow/decorators/base.py b/airflow/decorators/base.py index e79f13af37d8d..616a7eab887f6 100644 --- a/airflow/decorators/base.py +++ b/airflow/decorators/base.py @@ -19,6 +19,7 @@ import inspect import re import warnings +from functools import cached_property from itertools import chain from textwrap import dedent from typing import ( @@ -41,7 +42,6 @@ from sqlalchemy.orm import Session from airflow import Dataset -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models.abstractoperator import DEFAULT_RETRIES, DEFAULT_RETRY_DELAY from airflow.models.baseoperator import ( diff --git a/airflow/jobs/job.py b/airflow/jobs/job.py index 0451092bd7289..733ed60667141 100644 --- a/airflow/jobs/job.py +++ b/airflow/jobs/job.py @@ -17,6 +17,7 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from time import sleep from typing import Callable, NoReturn @@ -25,7 +26,6 @@ from sqlalchemy.orm import backref, foreign, relationship from sqlalchemy.orm.session import Session, make_transient -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.executors.executor_loader import ExecutorLoader diff --git a/airflow/models/abstractoperator.py b/airflow/models/abstractoperator.py index 42e1cc688e174..155316d14df1e 100644 --- a/airflow/models/abstractoperator.py +++ b/airflow/models/abstractoperator.py @@ -19,9 +19,10 @@ import datetime import inspect +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, ClassVar, Collection, Iterable, Iterator, Sequence -from airflow.compat.functools import cache, cached_property +from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import AirflowException from airflow.models.expandinput import NotFullyPopulated diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 4009893fee77b..757ef6b6ed5a0 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -64,7 +64,6 @@ import airflow.templates from airflow import settings, utils from airflow.api_internal.internal_api_call import internal_api_call -from airflow.compat.functools import cached_property from airflow.configuration import conf, secrets_backend_list from airflow.exceptions import ( AirflowDagInconsistent, @@ -961,7 +960,7 @@ def next_dagrun_after_date(self, date_last_automated_dagrun: pendulum.DateTime | return None return info.run_after - @cached_property + @functools.cached_property def _time_restriction(self) -> TimeRestriction: start_dates = [t.start_date for t in self.tasks if t.start_date] if self.start_date is not None: @@ -2417,7 +2416,7 @@ def has_task(self, task_id: str): def has_task_group(self, task_group_id: str) -> bool: return task_group_id in self.task_group_dict - @cached_property + @functools.cached_property def task_group_dict(self): return {k: v for k, v in self._task_group.get_task_group_dict().items() if k is not None} diff --git a/airflow/models/xcom.py b/airflow/models/xcom.py index 826172f4f5a97..981d0d4c585fd 100644 --- a/airflow/models/xcom.py +++ b/airflow/models/xcom.py @@ -26,7 +26,7 @@ import logging import pickle import warnings -from functools import wraps +from functools import cached_property, wraps from typing import TYPE_CHECKING, Any, Generator, Iterable, cast, overload import attr @@ -47,7 +47,6 @@ from airflow import settings from airflow.api_internal.internal_api_call import internal_api_call -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning from airflow.models.base import COLLATION_ARGS, ID_LEN, Base diff --git a/airflow/operators/bash.py b/airflow/operators/bash.py index e474bd8d68754..6ec1b0e80d6fc 100644 --- a/airflow/operators/bash.py +++ b/airflow/operators/bash.py @@ -20,9 +20,9 @@ import os import shutil import warnings +from functools import cached_property from typing import Container, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowSkipException from airflow.hooks.subprocess import SubprocessHook from airflow.models.baseoperator import BaseOperator diff --git 
a/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst b/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst index a6b5a01247ea6..9fa649052d7f7 100644 --- a/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst +++ b/airflow/providers/SUSPENDING_AND_RESUMING_PROVIDERS.rst @@ -32,7 +32,7 @@ If you have pre-commit installed, pre-commit will be run automatically on commit manually after commit, you can run it via ``breeze static-checks --last-commit`` some of the tests might fail because suspension of the provider might cause changes in the dependencies, so if you see errors about missing dependencies imports, non-usable classes etc., you will need to build the CI image locally -via ``breeze build-image --python 3.7 --upgrade-to-newer-dependencies`` after the first pre-commit run +via ``breeze build-image --python 3.8 --upgrade-to-newer-dependencies`` after the first pre-commit run and then run the static checks again. If you want to be absolutely sure to run all static checks you can always do this via @@ -73,7 +73,7 @@ Example failing collection after ``google`` provider has been suspended: ImportError while importing test module '/opt/airflow/tests/providers/apache/beam/operators/test_beam.py'. Hint: make sure your test modules/packages have valid Python names. Traceback: - /usr/local/lib/python3.7/importlib/__init__.py:127: in import_module + /usr/local/lib/python3.8/importlib/__init__.py:127: in import_module return _bootstrap._gcd_import(name[level:], package, level) tests/providers/apache/beam/operators/test_beam.py:25: in from airflow.providers.apache.beam.operators.beam import ( @@ -101,7 +101,7 @@ The fix is to add this line at the top of the ``tests/providers/apache/beam/oper Traceback (most recent call last): File "/opt/airflow/scripts/in_container/verify_providers.py", line 266, in import_all_classes _module = importlib.import_module(modinfo.name) - File "/usr/local/lib/python3.7/importlib/__init__.py", line 127, in import_module + File "/usr/local/lib/python3.8/importlib/__init__.py", line 127, in import_module return _bootstrap._gcd_import(name, package, level) File "", line 1006, in _gcd_import File "", line 983, in _find_and_load @@ -109,7 +109,7 @@ The fix is to add this line at the top of the ``tests/providers/apache/beam/oper File "", line 677, in _load_unlocked File "", line 728, in exec_module File "", line 219, in _call_with_frames_removed - File "/usr/local/lib/python3.7/site-packages/airflow/providers/mysql/transfers/s3_to_mysql.py", line 23, in + File "/usr/local/lib/python3.8/site-packages/airflow/providers/mysql/transfers/s3_to_mysql.py", line 23, in from airflow.providers.amazon.aws.hooks.s3 import S3Hook ModuleNotFoundError: No module named 'airflow.providers.amazon' diff --git a/airflow/providers/alibaba/cloud/log/oss_task_handler.py b/airflow/providers/alibaba/cloud/log/oss_task_handler.py index 512eda90c69e1..f70c6378cb5d9 100644 --- a/airflow/providers/alibaba/cloud/log/oss_task_handler.py +++ b/airflow/providers/alibaba/cloud/log/oss_task_handler.py @@ -21,10 +21,10 @@ import os import pathlib import shutil +from functools import cached_property from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.providers.alibaba.cloud.hooks.oss import OSSHook from airflow.utils.log.file_task_handler import FileTaskHandler diff --git a/airflow/providers/alibaba/cloud/sensors/oss_key.py b/airflow/providers/alibaba/cloud/sensors/oss_key.py index 98b2c25beaee1..1395c76de3c3a 
100644 --- a/airflow/providers/alibaba/cloud/sensors/oss_key.py +++ b/airflow/providers/alibaba/cloud/sensors/oss_key.py @@ -17,10 +17,10 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from urllib.parse import urlsplit -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.alibaba.cloud.hooks.oss import OSSHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/hooks/appflow.py b/airflow/providers/amazon/aws/hooks/appflow.py index 14dee2ef10206..aaaf1cc5ace18 100644 --- a/airflow/providers/amazon/aws/hooks/appflow.py +++ b/airflow/providers/amazon/aws/hooks/appflow.py @@ -16,9 +16,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook if TYPE_CHECKING: diff --git a/airflow/providers/amazon/aws/hooks/base_aws.py b/airflow/providers/amazon/aws/hooks/base_aws.py index c04b68704b1b8..0d871779de4bb 100644 --- a/airflow/providers/amazon/aws/hooks/base_aws.py +++ b/airflow/providers/amazon/aws/hooks/base_aws.py @@ -31,7 +31,7 @@ import os import uuid from copy import deepcopy -from functools import wraps +from functools import cached_property, wraps from os import PathLike from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, Union @@ -49,7 +49,6 @@ from dateutil.tz import tzlocal from slugify import slugify -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import ( AirflowException, diff --git a/airflow/providers/amazon/aws/hooks/glue_crawler.py b/airflow/providers/amazon/aws/hooks/glue_crawler.py index 83ca6b9167011..0393cadc9757a 100644 --- a/airflow/providers/amazon/aws/hooks/glue_crawler.py +++ b/airflow/providers/amazon/aws/hooks/glue_crawler.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from time import sleep -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.amazon.aws.hooks.sts import StsHook diff --git a/airflow/providers/amazon/aws/hooks/quicksight.py b/airflow/providers/amazon/aws/hooks/quicksight.py index 11ea728e5ef8c..74c77652a5ed9 100644 --- a/airflow/providers/amazon/aws/hooks/quicksight.py +++ b/airflow/providers/amazon/aws/hooks/quicksight.py @@ -18,11 +18,11 @@ from __future__ import annotations import time +from functools import cached_property from botocore.exceptions import ClientError from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.amazon.aws.hooks.sts import StsHook diff --git a/airflow/providers/amazon/aws/hooks/redshift_sql.py b/airflow/providers/amazon/aws/hooks/redshift_sql.py index e9c2b7fecc78b..11c7dbce26c0c 100644 --- a/airflow/providers/amazon/aws/hooks/redshift_sql.py +++ b/airflow/providers/amazon/aws/hooks/redshift_sql.py @@ -16,6 +16,7 @@ # under the License. 
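# A hedged sketch related to the SUSPENDING_AND_RESUMING_PROVIDERS guide
# above: the exact line the guide tells you to add to test_beam.py is not
# visible in these hunks, so nothing here quotes it. As an illustration only,
# a common pytest pattern for keeping a test module collectable when a
# suspended provider's dependency is missing is importorskip, which skips the
# whole module at collection time instead of failing with the tracebacks
# shown above.
import pytest

# "apache_beam" is a hypothetical guard target chosen for this sketch.
pytest.importorskip("apache_beam")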
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING import redshift_connector @@ -23,7 +24,6 @@ from sqlalchemy import create_engine from sqlalchemy.engine.url import URL -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.common.sql.hooks.sql import DbApiHook diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index d2f54cc780da3..5d1074b8402a6 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -18,10 +18,10 @@ from __future__ import annotations from datetime import datetime +from functools import cached_property import watchtower -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook from airflow.utils.log.file_task_handler import FileTaskHandler diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 20754075a2dac..a45c228ac0328 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -20,10 +20,10 @@ import os import pathlib import shutil +from functools import cached_property from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.utils.log.file_task_handler import FileTaskHandler diff --git a/airflow/providers/amazon/aws/operators/appflow.py b/airflow/providers/amazon/aws/operators/appflow.py index ec55ebbda4d11..f2fe75f39501e 100644 --- a/airflow/providers/amazon/aws/operators/appflow.py +++ b/airflow/providers/amazon/aws/operators/appflow.py @@ -17,10 +17,10 @@ from __future__ import annotations from datetime import datetime, timedelta +from functools import cached_property from time import sleep from typing import TYPE_CHECKING, cast -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.operators.python import ShortCircuitOperator diff --git a/airflow/providers/amazon/aws/operators/athena.py b/airflow/providers/amazon/aws/operators/athena.py index b7c0954f15557..5525641f23935 100644 --- a/airflow/providers/amazon/aws/operators/athena.py +++ b/airflow/providers/amazon/aws/operators/athena.py @@ -17,9 +17,9 @@ # under the License. 
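# The pattern nearly every hunk in this patch applies, as a minimal
# self-contained sketch; ExampleHook is a hypothetical stand-in for the
# hooks, operators and sensors touched here. functools.cached_property has
# shipped in the standard library since Python 3.8, so once 3.7 support is
# gone the airflow.compat.functools re-export can give way to the stdlib
# import.
from functools import cached_property


class ExampleHook:
    @cached_property
    def conn(self) -> dict:
        # Runs once, on first attribute access; the result is stored in the
        # instance __dict__ and returned directly on every later access.
        return {"connected": True}


hook = ExampleHook()
assert hook.conn is hook.conn  # the same cached object, computed only once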
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.athena import AthenaHook diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py index cbeb0cbcba1a9..e6cb549871276 100644 --- a/airflow/providers/amazon/aws/operators/batch.py +++ b/airflow/providers/amazon/aws/operators/batch.py @@ -26,9 +26,9 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook diff --git a/airflow/providers/amazon/aws/operators/ecs.py b/airflow/providers/amazon/aws/operators/ecs.py index eb031d24148f6..d4f0bdf045c76 100644 --- a/airflow/providers/amazon/aws/operators/ecs.py +++ b/airflow/providers/amazon/aws/operators/ecs.py @@ -20,11 +20,11 @@ import re import sys from datetime import timedelta +from functools import cached_property from typing import TYPE_CHECKING, Sequence import boto3 -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator, XCom from airflow.providers.amazon.aws.exceptions import EcsOperatorError, EcsTaskFailToStart diff --git a/airflow/providers/amazon/aws/operators/emr.py b/airflow/providers/amazon/aws/operators/emr.py index 3e36231993c12..55ad270d27cc6 100644 --- a/airflow/providers/amazon/aws/operators/emr.py +++ b/airflow/providers/amazon/aws/operators/emr.py @@ -19,6 +19,7 @@ import ast import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from uuid import uuid4 @@ -33,8 +34,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property - class EmrAddStepsOperator(BaseOperator): """ diff --git a/airflow/providers/amazon/aws/operators/glue_crawler.py b/airflow/providers/amazon/aws/operators/glue_crawler.py index 59ba2031fdd7e..426ca2f084d04 100644 --- a/airflow/providers/amazon/aws/operators/glue_crawler.py +++ b/airflow/providers/amazon/aws/operators/glue_crawler.py @@ -17,12 +17,12 @@ # under the License. 
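# Why the abstractoperator.py hunk near the start of this section keeps
# ``from airflow.compat.functools import cache`` while dropping the
# cached_property import: cached_property joined functools in Python 3.8,
# but functools.cache only arrived in 3.9. A shim along these lines (an
# assumption about what airflow.compat.functools contains, shown purely for
# illustration) therefore remains necessary while 3.8 is supported.
import sys

if sys.version_info >= (3, 9):
    from functools import cache
else:
    from functools import lru_cache

    # lru_cache with an unbounded size is exactly what 3.9 names "cache".
    cache = lru_cache(maxsize=None)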
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook diff --git a/airflow/providers/amazon/aws/operators/lambda_function.py b/airflow/providers/amazon/aws/operators/lambda_function.py index 4e3e475976ba8..d99651343680b 100644 --- a/airflow/providers/amazon/aws/operators/lambda_function.py +++ b/airflow/providers/amazon/aws/operators/lambda_function.py @@ -18,9 +18,9 @@ from __future__ import annotations import json +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook diff --git a/airflow/providers/amazon/aws/operators/redshift_data.py b/airflow/providers/amazon/aws/operators/redshift_data.py index b0fad66faedfb..6d6ef9d103b77 100644 --- a/airflow/providers/amazon/aws/operators/redshift_data.py +++ b/airflow/providers/amazon/aws/operators/redshift_data.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.redshift_data import RedshiftDataHook diff --git a/airflow/providers/amazon/aws/operators/sagemaker.py b/airflow/providers/amazon/aws/operators/sagemaker.py index f4041b465a1ed..0d4ba9fcabb34 100644 --- a/airflow/providers/amazon/aws/operators/sagemaker.py +++ b/airflow/providers/amazon/aws/operators/sagemaker.py @@ -19,11 +19,11 @@ import json import time import warnings +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Sequence from botocore.exceptions import ClientError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook diff --git a/airflow/providers/amazon/aws/secrets/secrets_manager.py b/airflow/providers/amazon/aws/secrets/secrets_manager.py index f075f6e5e9376..1ccc95a21b4be 100644 --- a/airflow/providers/amazon/aws/secrets/secrets_manager.py +++ b/airflow/providers/amazon/aws/secrets/secrets_manager.py @@ -21,10 +21,10 @@ import json import re import warnings +from functools import cached_property from typing import Any from urllib.parse import unquote -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.amazon.aws.utils import trim_none_values from airflow.secrets import BaseSecretsBackend diff --git a/airflow/providers/amazon/aws/secrets/systems_manager.py b/airflow/providers/amazon/aws/secrets/systems_manager.py index e4ec9a391f9bf..f15ee384aaacb 100644 --- a/airflow/providers/amazon/aws/secrets/systems_manager.py +++ b/airflow/providers/amazon/aws/secrets/systems_manager.py @@ -19,8 +19,8 @@ from __future__ import annotations import re +from functools import cached_property -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.utils import trim_none_values from airflow.secrets import BaseSecretsBackend from 
airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/amazon/aws/sensors/athena.py b/airflow/providers/amazon/aws/sensors/athena.py index 1954d15a8e004..40dc80a924bfb 100644 --- a/airflow/providers/amazon/aws/sensors/athena.py +++ b/airflow/providers/amazon/aws/sensors/athena.py @@ -17,12 +17,12 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.athena import AthenaHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/batch.py b/airflow/providers/amazon/aws/sensors/batch.py index 26a5e910a7e1a..594be1e55ed93 100644 --- a/airflow/providers/amazon/aws/sensors/batch.py +++ b/airflow/providers/amazon/aws/sensors/batch.py @@ -16,11 +16,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/cloud_formation.py b/airflow/providers/amazon/aws/sensors/cloud_formation.py index d2bd45592654f..df383cf2407a1 100644 --- a/airflow/providers/amazon/aws/sensors/cloud_formation.py +++ b/airflow/providers/amazon/aws/sensors/cloud_formation.py @@ -18,12 +18,12 @@ """This module contains sensors for AWS CloudFormation.""" from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.cloud_formation import CloudFormationHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/dms.py b/airflow/providers/amazon/aws/sensors/dms.py index 9e2e9ea63cc56..35563d4346292 100644 --- a/airflow/providers/amazon/aws/sensors/dms.py +++ b/airflow/providers/amazon/aws/sensors/dms.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Iterable, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.dms import DmsHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/dynamodb.py b/airflow/providers/amazon/aws/sensors/dynamodb.py index a63c97177edb5..26a38c0bbdadd 100644 --- a/airflow/providers/amazon/aws/sensors/dynamodb.py +++ b/airflow/providers/amazon/aws/sensors/dynamodb.py @@ -16,9 +16,9 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/ec2.py b/airflow/providers/amazon/aws/sensors/ec2.py index 5ce49c2c0ad71..08e8e59fb7088 100644 --- a/airflow/providers/amazon/aws/sensors/ec2.py +++ b/airflow/providers/amazon/aws/sensors/ec2.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook from airflow.providers.amazon.aws.triggers.ec2 import EC2StateSensorTrigger diff --git a/airflow/providers/amazon/aws/sensors/ecs.py b/airflow/providers/amazon/aws/sensors/ecs.py index d3cfacbd414a5..a150a8e87a739 100644 --- a/airflow/providers/amazon/aws/sensors/ecs.py +++ b/airflow/providers/amazon/aws/sensors/ecs.py @@ -16,11 +16,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence import boto3 -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.ecs import ( EcsClusterStates, diff --git a/airflow/providers/amazon/aws/sensors/eks.py b/airflow/providers/amazon/aws/sensors/eks.py index d01a21f3a30d8..e5be3340f7de4 100644 --- a/airflow/providers/amazon/aws/sensors/eks.py +++ b/airflow/providers/amazon/aws/sensors/eks.py @@ -18,9 +18,9 @@ from __future__ import annotations from abc import abstractmethod +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.eks import ( ClusterStates, diff --git a/airflow/providers/amazon/aws/sensors/emr.py b/airflow/providers/amazon/aws/sensors/emr.py index 8bb97f0ac46ae..ff134dc33d867 100644 --- a/airflow/providers/amazon/aws/sensors/emr.py +++ b/airflow/providers/amazon/aws/sensors/emr.py @@ -17,6 +17,7 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Iterable, Sequence from deprecated import deprecated @@ -29,8 +30,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property - class EmrBaseSensor(BaseSensorOperator): """ diff --git a/airflow/providers/amazon/aws/sensors/glacier.py b/airflow/providers/amazon/aws/sensors/glacier.py index 222027b2792be..0ae22fbce61c2 100644 --- a/airflow/providers/amazon/aws/sensors/glacier.py +++ b/airflow/providers/amazon/aws/sensors/glacier.py @@ -18,9 +18,9 @@ from __future__ import annotations from enum import Enum +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.glacier import GlacierHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/glue.py b/airflow/providers/amazon/aws/sensors/glue.py index 761a51609bedc..30e44a9f8e585 100644 --- a/airflow/providers/amazon/aws/sensors/glue.py +++ b/airflow/providers/amazon/aws/sensors/glue.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.glue import GlueJobHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py index d86136746687c..6a4856b6c950b 100644 --- a/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py +++ b/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.glue_catalog import GlueCatalogHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/glue_crawler.py b/airflow/providers/amazon/aws/sensors/glue_crawler.py index 6b8b4fcaea26d..b830fbffa16ec 100644 --- a/airflow/providers/amazon/aws/sensors/glue_crawler.py +++ b/airflow/providers/amazon/aws/sensors/glue_crawler.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.glue_crawler import GlueCrawlerHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/lambda_function.py b/airflow/providers/amazon/aws/sensors/lambda_function.py index 44cb487352e85..2febaba7a6cac 100644 --- a/airflow/providers/amazon/aws/sensors/lambda_function.py +++ b/airflow/providers/amazon/aws/sensors/lambda_function.py @@ -17,6 +17,7 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from airflow.providers.amazon.aws.hooks.lambda_function import LambdaHook @@ -25,7 +26,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/quicksight.py b/airflow/providers/amazon/aws/sensors/quicksight.py index 9145e886bf267..fed0faf3dadce 100644 --- a/airflow/providers/amazon/aws/sensors/quicksight.py +++ b/airflow/providers/amazon/aws/sensors/quicksight.py @@ -17,9 +17,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.quicksight import QuickSightHook from airflow.providers.amazon.aws.hooks.sts import StsHook diff --git a/airflow/providers/amazon/aws/sensors/rds.py b/airflow/providers/amazon/aws/sensors/rds.py index 50f197ef0c48d..45a48e965a35b 100644 --- a/airflow/providers/amazon/aws/sensors/rds.py +++ b/airflow/providers/amazon/aws/sensors/rds.py @@ -16,9 +16,9 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowNotFoundException from airflow.providers.amazon.aws.hooks.rds import RdsHook from airflow.providers.amazon.aws.utils.rds import RdsDbType diff --git a/airflow/providers/amazon/aws/sensors/redshift_cluster.py b/airflow/providers/amazon/aws/sensors/redshift_cluster.py index 653ccaf0015ef..9734e98bc75b3 100644 --- a/airflow/providers/amazon/aws/sensors/redshift_cluster.py +++ b/airflow/providers/amazon/aws/sensors/redshift_cluster.py @@ -16,11 +16,11 @@ # under the License. 
from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/s3.py b/airflow/providers/amazon/aws/sensors/s3.py index 407a054184748..5d18b4619baee 100644 --- a/airflow/providers/amazon/aws/sensors/s3.py +++ b/airflow/providers/amazon/aws/sensors/s3.py @@ -21,6 +21,7 @@ import os import re from datetime import datetime +from functools import cached_property from typing import TYPE_CHECKING, Callable, Sequence from deprecated import deprecated @@ -28,7 +29,6 @@ if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.s3 import S3Hook from airflow.sensors.base import BaseSensorOperator, poke_mode_only diff --git a/airflow/providers/amazon/aws/sensors/sagemaker.py b/airflow/providers/amazon/aws/sensors/sagemaker.py index b02ea8902b136..7e8340ed90e3c 100644 --- a/airflow/providers/amazon/aws/sensors/sagemaker.py +++ b/airflow/providers/amazon/aws/sensors/sagemaker.py @@ -17,11 +17,11 @@ from __future__ import annotations import time +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.sagemaker import LogState, SageMakerHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/sensors/sqs.py b/airflow/providers/amazon/aws/sensors/sqs.py index 6dc032c3fe95b..be6698dcac334 100644 --- a/airflow/providers/amazon/aws/sensors/sqs.py +++ b/airflow/providers/amazon/aws/sensors/sqs.py @@ -19,13 +19,12 @@ from __future__ import annotations import json -from typing import TYPE_CHECKING, Any, Collection, Sequence +from functools import cached_property +from typing import TYPE_CHECKING, Any, Collection, Literal, Sequence from deprecated import deprecated from jsonpath_ng import parse -from typing_extensions import Literal -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.base_aws import BaseAwsConnection from airflow.providers.amazon.aws.hooks.sqs import SqsHook diff --git a/airflow/providers/amazon/aws/sensors/step_function.py b/airflow/providers/amazon/aws/sensors/step_function.py index 2a0c8b10db993..e3345b4337312 100644 --- a/airflow/providers/amazon/aws/sensors/step_function.py +++ b/airflow/providers/amazon/aws/sensors/step_function.py @@ -17,11 +17,11 @@ from __future__ import annotations import json +from functools import cached_property from typing import TYPE_CHECKING, Sequence from deprecated import deprecated -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.amazon.aws.hooks.step_function import StepFunctionHook from airflow.sensors.base import BaseSensorOperator diff --git a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py index 48067a666fad2..d7930222155c1 100644 --- a/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py +++ 
b/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py @@ -25,12 +25,12 @@ from copy import copy from datetime import datetime from decimal import Decimal +from functools import cached_property from os.path import getsize from tempfile import NamedTemporaryFile from typing import IO, TYPE_CHECKING, Any, Callable, Sequence from uuid import uuid4 -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook from airflow.providers.amazon.aws.hooks.s3 import S3Hook diff --git a/airflow/providers/amazon/aws/transfers/s3_to_sql.py b/airflow/providers/amazon/aws/transfers/s3_to_sql.py index 99d4fab6bd4ed..e52ebbaa1bc68 100644 --- a/airflow/providers/amazon/aws/transfers/s3_to_sql.py +++ b/airflow/providers/amazon/aws/transfers/s3_to_sql.py @@ -16,10 +16,10 @@ # under the License. from __future__ import annotations +from functools import cached_property from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Callable, Iterable, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook from airflow.models import BaseOperator diff --git a/airflow/providers/amazon/aws/transfers/sql_to_s3.py b/airflow/providers/amazon/aws/transfers/sql_to_s3.py index 8cee9b6cffb15..04aafdd67c0f5 100644 --- a/airflow/providers/amazon/aws/transfers/sql_to_s3.py +++ b/airflow/providers/amazon/aws/transfers/sql_to_s3.py @@ -20,9 +20,7 @@ import enum from collections import namedtuple from tempfile import NamedTemporaryFile -from typing import TYPE_CHECKING, Iterable, Mapping, Sequence - -from typing_extensions import Literal +from typing import TYPE_CHECKING, Iterable, Literal, Mapping, Sequence from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook @@ -157,11 +155,15 @@ def _fix_dtypes(df: DataFrame, file_format: FILE_FORMAT) -> None: notna_series = df[col].dropna().values if np.equal(notna_series, notna_series.astype(int)).all(): # set to dtype that retains integers and supports NaNs - df[col] = np.where(df[col].isnull(), None, df[col]) + # The type ignore can be removed here if https://github.com/numpy/numpy/pull/23690 + # is merged and released as currently NumPy does not consider None as valid for x/y. 
+ df[col] = np.where(df[col].isnull(), None, df[col]) # type: ignore[call-overload] df[col] = df[col].astype(Int64Dtype()) elif np.isclose(notna_series, notna_series.astype(int)).all(): # set to float dtype that retains floats and supports NaNs - df[col] = np.where(df[col].isnull(), None, df[col]) + # The type ignore can be removed here if https://github.com/numpy/numpy/pull/23690 + # is merged and released + df[col] = np.where(df[col].isnull(), None, df[col]) # type: ignore[call-overload] df[col] = df[col].astype(Float64Dtype()) def execute(self, context: Context) -> None: diff --git a/airflow/providers/amazon/aws/triggers/batch.py b/airflow/providers/amazon/aws/triggers/batch.py index fb60b7ea916a8..dc858a80fd710 100644 --- a/airflow/providers/amazon/aws/triggers/batch.py +++ b/airflow/providers/amazon/aws/triggers/batch.py @@ -17,11 +17,11 @@ from __future__ import annotations import asyncio +from functools import cached_property from typing import Any from botocore.exceptions import WaiterError -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.batch_client import BatchClientHook from airflow.triggers.base import BaseTrigger, TriggerEvent diff --git a/airflow/providers/amazon/aws/triggers/ec2.py b/airflow/providers/amazon/aws/triggers/ec2.py index 79bae2895d94f..6372f2854d08c 100644 --- a/airflow/providers/amazon/aws/triggers/ec2.py +++ b/airflow/providers/amazon/aws/triggers/ec2.py @@ -17,9 +17,9 @@ from __future__ import annotations import asyncio +from functools import cached_property from typing import Any -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.ec2 import EC2Hook from airflow.triggers.base import BaseTrigger, TriggerEvent diff --git a/airflow/providers/amazon/aws/triggers/redshift_cluster.py b/airflow/providers/amazon/aws/triggers/redshift_cluster.py index 3224350e5403e..6b1a16bc827e1 100644 --- a/airflow/providers/amazon/aws/triggers/redshift_cluster.py +++ b/airflow/providers/amazon/aws/triggers/redshift_cluster.py @@ -17,11 +17,11 @@ from __future__ import annotations import asyncio +from functools import cached_property from typing import Any, AsyncIterator from botocore.exceptions import WaiterError -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftAsyncHook, RedshiftHook from airflow.triggers.base import BaseTrigger, TriggerEvent diff --git a/airflow/providers/amazon/aws/triggers/sagemaker.py b/airflow/providers/amazon/aws/triggers/sagemaker.py index 773a3243e9049..92266cad5ffdb 100644 --- a/airflow/providers/amazon/aws/triggers/sagemaker.py +++ b/airflow/providers/amazon/aws/triggers/sagemaker.py @@ -17,9 +17,9 @@ from __future__ import annotations +from functools import cached_property from typing import Any -from airflow.compat.functools import cached_property from airflow.providers.amazon.aws.hooks.sagemaker import SageMakerHook from airflow.triggers.base import BaseTrigger, TriggerEvent diff --git a/airflow/providers/amazon/aws/utils/connection_wrapper.py b/airflow/providers/amazon/aws/utils/connection_wrapper.py index 9a62dc2c84ef4..3fbc479abfe14 100644 --- a/airflow/providers/amazon/aws/utils/connection_wrapper.py +++ b/airflow/providers/amazon/aws/utils/connection_wrapper.py @@ -20,11 +20,11 @@ import warnings from copy import deepcopy from dataclasses import MISSING, InitVar, dataclass, field, fields +from functools import cached_property from typing import TYPE_CHECKING, Any from botocore.config 
import Config -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.providers.amazon.aws.utils import trim_none_values from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/apache/flink/operators/flink_kubernetes.py b/airflow/providers/apache/flink/operators/flink_kubernetes.py index 3d365f40be57b..245964a730f4d 100644 --- a/airflow/providers/apache/flink/operators/flink_kubernetes.py +++ b/airflow/providers/apache/flink/operators/flink_kubernetes.py @@ -17,11 +17,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Sequence from kubernetes.client import CoreV1Api -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.cncf.kubernetes.hooks.kubernetes import KubernetesHook diff --git a/airflow/providers/apache/kafka/hooks/base.py b/airflow/providers/apache/kafka/hooks/base.py index eb5429a6a56b7..777beb116e136 100644 --- a/airflow/providers/apache/kafka/hooks/base.py +++ b/airflow/providers/apache/kafka/hooks/base.py @@ -16,11 +16,11 @@ # under the License. from __future__ import annotations +from functools import cached_property from typing import Any from confluent_kafka.admin import AdminClient -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook diff --git a/airflow/providers/arangodb/hooks/arangodb.py b/airflow/providers/arangodb/hooks/arangodb.py index 2f51ac061806a..23b3aa41245ed 100644 --- a/airflow/providers/arangodb/hooks/arangodb.py +++ b/airflow/providers/arangodb/hooks/arangodb.py @@ -18,12 +18,12 @@ """This module allows connecting to a ArangoDB.""" from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any from arango import AQLQueryExecuteError, ArangoClient as ArangoDBClient from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook if TYPE_CHECKING: diff --git a/airflow/providers/asana/hooks/asana.py b/airflow/providers/asana/hooks/asana.py index 544a5afb59961..9a8a83f5b682b 100644 --- a/airflow/providers/asana/hooks/asana.py +++ b/airflow/providers/asana/hooks/asana.py @@ -18,13 +18,12 @@ """Connect to Asana.""" from __future__ import annotations -from functools import wraps +from functools import cached_property, wraps from typing import Any from asana import Client # type: ignore[attr-defined] from asana.error import NotFoundError # type: ignore[attr-defined] -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py index bdd0cbc02c08a..03f45ae30b38c 100644 --- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py @@ -18,6 +18,7 @@ import contextlib import tempfile +from functools import cached_property from typing import TYPE_CHECKING, Any, Generator from asgiref.sync import sync_to_async @@ -27,7 +28,6 @@ from kubernetes_asyncio import client as async_client, config as async_config from urllib3.exceptions import HTTPError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowNotFoundException from airflow.hooks.base import BaseHook from 
airflow.kubernetes.kube_client import _disable_verify_ssl, _enable_tcp_keepalive diff --git a/airflow/providers/cncf/kubernetes/operators/pod.py b/airflow/providers/cncf/kubernetes/operators/pod.py index d30794b066a1d..160169d05baba 100644 --- a/airflow/providers/cncf/kubernetes/operators/pod.py +++ b/airflow/providers/cncf/kubernetes/operators/pod.py @@ -25,13 +25,13 @@ import string from collections.abc import Container from contextlib import AbstractContextManager +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from kubernetes.client import CoreV1Api, models as k8s from slugify import slugify from urllib3.exceptions import HTTPError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowSkipException from airflow.kubernetes import pod_generator from airflow.kubernetes.pod_generator import PodGenerator diff --git a/airflow/providers/common/sql/hooks/sql.py b/airflow/providers/common/sql/hooks/sql.py index 48e1a996ff438..32bd42caed523 100644 --- a/airflow/providers/common/sql/hooks/sql.py +++ b/airflow/providers/common/sql/hooks/sql.py @@ -18,12 +18,11 @@ from contextlib import closing from datetime import datetime -from typing import Any, Callable, Iterable, Mapping, Sequence, cast +from typing import Any, Callable, Iterable, Mapping, Protocol, Sequence, cast import sqlparse from packaging.version import Version from sqlalchemy import create_engine -from typing_extensions import Protocol from airflow import AirflowException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/common/sql/operators/sql.py b/airflow/providers/common/sql/operators/sql.py index 723afe6b8a17c..43a7ff0f31b59 100644 --- a/airflow/providers/common/sql/operators/sql.py +++ b/airflow/providers/common/sql/operators/sql.py @@ -19,9 +19,9 @@ import ast import re +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, NoReturn, Sequence, SupportsAbs -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowFailException from airflow.hooks.base import BaseHook from airflow.models import BaseOperator, SkipMixin diff --git a/airflow/providers/databricks/hooks/databricks_base.py b/airflow/providers/databricks/hooks/databricks_base.py index 50ab2eff6a251..0d347eee4c02d 100644 --- a/airflow/providers/databricks/hooks/databricks_base.py +++ b/airflow/providers/databricks/hooks/databricks_base.py @@ -27,6 +27,7 @@ import copy import platform import time +from functools import cached_property from typing import Any from urllib.parse import urlsplit @@ -45,7 +46,6 @@ ) from airflow import __version__ -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook from airflow.models import Connection diff --git a/airflow/providers/databricks/operators/databricks.py b/airflow/providers/databricks/operators/databricks.py index bfeb1c42ba207..3ff1233622cc2 100644 --- a/airflow/providers/databricks/operators/databricks.py +++ b/airflow/providers/databricks/operators/databricks.py @@ -20,10 +20,10 @@ import time import warnings +from functools import cached_property from logging import Logger from typing import TYPE_CHECKING, Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator, BaseOperatorLink, XCom from 
airflow.providers.databricks.hooks.databricks import DatabricksHook, RunState diff --git a/airflow/providers/databricks/operators/databricks_repos.py b/airflow/providers/databricks/operators/databricks_repos.py index f42114d474267..6ca348ffe142a 100644 --- a/airflow/providers/databricks/operators/databricks_repos.py +++ b/airflow/providers/databricks/operators/databricks_repos.py @@ -19,10 +19,10 @@ from __future__ import annotations import re +from functools import cached_property from typing import TYPE_CHECKING, Sequence from urllib.parse import urlsplit -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.databricks.hooks.databricks import DatabricksHook diff --git a/airflow/providers/databricks/sensors/databricks_partition.py b/airflow/providers/databricks/sensors/databricks_partition.py index 94708df9950d0..b848415292bc7 100644 --- a/airflow/providers/databricks/sensors/databricks_partition.py +++ b/airflow/providers/databricks/sensors/databricks_partition.py @@ -21,11 +21,11 @@ from __future__ import annotations from datetime import datetime +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Sequence from databricks.sql.utils import ParamEscaper -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.common.sql.hooks.sql import fetch_all_handler from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook diff --git a/airflow/providers/databricks/sensors/databricks_sql.py b/airflow/providers/databricks/sensors/databricks_sql.py index 0cb6f2a88d431..8381f75d19024 100644 --- a/airflow/providers/databricks/sensors/databricks_sql.py +++ b/airflow/providers/databricks/sensors/databricks_sql.py @@ -20,9 +20,9 @@ from __future__ import annotations +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Iterable, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.common.sql.hooks.sql import fetch_all_handler from airflow.providers.databricks.hooks.databricks_sql import DatabricksSqlHook diff --git a/airflow/providers/dbt/cloud/hooks/dbt.py b/airflow/providers/dbt/cloud/hooks/dbt.py index f0f3e6d0dbdd2..e648f5a59564d 100644 --- a/airflow/providers/dbt/cloud/hooks/dbt.py +++ b/airflow/providers/dbt/cloud/hooks/dbt.py @@ -19,7 +19,7 @@ import json import time from enum import Enum -from functools import wraps +from functools import cached_property, wraps from inspect import signature from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast @@ -29,7 +29,6 @@ from requests.auth import AuthBase from requests.sessions import Session -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.http.hooks.http import HttpHook from airflow.typing_compat import TypedDict diff --git a/airflow/providers/docker/hooks/docker.py b/airflow/providers/docker/hooks/docker.py index d67846e9ec040..35a8077250e31 100644 --- a/airflow/providers/docker/hooks/docker.py +++ b/airflow/providers/docker/hooks/docker.py @@ -18,13 +18,13 @@ from __future__ import annotations import json +from functools import cached_property from typing import TYPE_CHECKING, Any from docker import APIClient, TLSConfig from docker.constants import DEFAULT_TIMEOUT_SECONDS from docker.errors import APIError -from 
airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowNotFoundException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/docker/operators/docker.py b/airflow/providers/docker/operators/docker.py index de3ab20b082c6..55208ed6ab76e 100644 --- a/airflow/providers/docker/operators/docker.py +++ b/airflow/providers/docker/operators/docker.py @@ -23,6 +23,7 @@ import tarfile import warnings from collections.abc import Container +from functools import cached_property from io import BytesIO, StringIO from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, Iterable, Sequence @@ -32,7 +33,6 @@ from docker.types import LogConfig, Mount from dotenv import dotenv_values -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning, AirflowSkipException from airflow.models import BaseOperator from airflow.providers.docker.hooks.docker import DockerHook diff --git a/airflow/providers/elasticsearch/hooks/elasticsearch.py b/airflow/providers/elasticsearch/hooks/elasticsearch.py index ae54db037db2d..63463a178216c 100644 --- a/airflow/providers/elasticsearch/hooks/elasticsearch.py +++ b/airflow/providers/elasticsearch/hooks/elasticsearch.py @@ -18,12 +18,12 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import Any from elasticsearch import Elasticsearch from es.elastic.api import Connection as ESConnection, connect -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.models.connection import Connection as AirflowConnection diff --git a/airflow/providers/facebook/ads/hooks/ads.py b/airflow/providers/facebook/ads/hooks/ads.py index 86048712c965b..e0a6e67778aa1 100644 --- a/airflow/providers/facebook/ads/hooks/ads.py +++ b/airflow/providers/facebook/ads/hooks/ads.py @@ -20,6 +20,7 @@ import time from enum import Enum +from functools import cached_property from typing import Any from facebook_business.adobjects.adaccount import AdAccount @@ -27,7 +28,6 @@ from facebook_business.adobjects.adsinsights import AdsInsights from facebook_business.api import FacebookAdsApi -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/ftp/operators/ftp.py b/airflow/providers/ftp/operators/ftp.py index 41b913a64f495..baec63373b6fa 100644 --- a/airflow/providers/ftp/operators/ftp.py +++ b/airflow/providers/ftp/operators/ftp.py @@ -19,10 +19,10 @@ from __future__ import annotations import os +from functools import cached_property from pathlib import Path from typing import Any, Sequence -from airflow.compat.functools import cached_property from airflow.models import BaseOperator from airflow.providers.ftp.hooks.ftp import FTPHook, FTPSHook diff --git a/airflow/providers/google/ads/hooks/ads.py b/airflow/providers/google/ads/hooks/ads.py index 44e2a7d1dcf10..3c132d8db06fe 100644 --- a/airflow/providers/google/ads/hooks/ads.py +++ b/airflow/providers/google/ads/hooks/ads.py @@ -18,6 +18,7 @@ """This module contains Google Ad hook.""" from __future__ import annotations +from functools import cached_property from tempfile import NamedTemporaryFile from typing import IO, Any @@ -30,7 +31,6 @@ from google.auth.exceptions import GoogleAuthError from airflow import AirflowException 
-from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook from airflow.providers.google.common.hooks.base_google import get_field diff --git a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py index 0de1abfdd90ac..c2269e0d9dcec 100644 --- a/airflow/providers/google/cloud/_internal_client/secret_manager_client.py +++ b/airflow/providers/google/cloud/_internal_client/secret_manager_client.py @@ -17,12 +17,12 @@ from __future__ import annotations import re +from functools import cached_property import google from google.api_core.exceptions import InvalidArgument, NotFound, PermissionDenied from google.cloud.secretmanager_v1 import SecretManagerServiceClient -from airflow.compat.functools import cached_property from airflow.providers.google.common.consts import CLIENT_INFO from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py index e14bb9044fcbb..c8b2c950f964d 100644 --- a/airflow/providers/google/cloud/hooks/automl.py +++ b/airflow/providers/google/cloud/hooks/automl.py @@ -24,6 +24,7 @@ """ from __future__ import annotations +from functools import cached_property from typing import Sequence from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault @@ -49,7 +50,6 @@ from google.protobuf.field_mask_pb2 import FieldMask from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.google.common.consts import CLIENT_INFO from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook diff --git a/airflow/providers/google/cloud/hooks/compute_ssh.py b/airflow/providers/google/cloud/hooks/compute_ssh.py index d2b2971c6546e..d602b92c65e7e 100644 --- a/airflow/providers/google/cloud/hooks/compute_ssh.py +++ b/airflow/providers/google/cloud/hooks/compute_ssh.py @@ -18,13 +18,13 @@ import shlex import time +from functools import cached_property from io import StringIO from typing import Any from google.api_core.retry import exponential_sleep_generator from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.google.cloud.hooks.compute import ComputeEngineHook from airflow.providers.google.cloud.hooks.os_login import OSLoginHook from airflow.providers.ssh.hooks.ssh import SSHHook diff --git a/airflow/providers/google/cloud/hooks/kubernetes_engine.py b/airflow/providers/google/cloud/hooks/kubernetes_engine.py index 27e9d6043b184..a40a1b597e551 100644 --- a/airflow/providers/google/cloud/hooks/kubernetes_engine.py +++ b/airflow/providers/google/cloud/hooks/kubernetes_engine.py @@ -29,6 +29,7 @@ import json import time import warnings +from functools import cached_property from typing import Sequence import google.auth.credentials @@ -49,7 +50,6 @@ from urllib3.exceptions import HTTPError from airflow import version -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.providers.cncf.kubernetes.utils.pod_manager import PodOperatorHookProtocol from airflow.providers.google.common.consts import CLIENT_INFO diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index 88c9a0262d884..a12c76d547a12 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ 
b/airflow/providers/google/cloud/hooks/pubsub.py @@ -27,6 +27,7 @@ import warnings from base64 import b64decode +from functools import cached_property from typing import Sequence from uuid import uuid4 @@ -46,7 +47,6 @@ ) from googleapiclient.errors import HttpError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.google.common.consts import CLIENT_INFO from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook diff --git a/airflow/providers/google/cloud/hooks/vision.py b/airflow/providers/google/cloud/hooks/vision.py index 20c0c81f75431..2b17dd2002db1 100644 --- a/airflow/providers/google/cloud/hooks/vision.py +++ b/airflow/providers/google/cloud/hooks/vision.py @@ -19,6 +19,7 @@ from __future__ import annotations from copy import deepcopy +from functools import cached_property from typing import Any, Callable, Sequence from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault @@ -35,7 +36,6 @@ from google.protobuf import field_mask_pb2 from google.protobuf.json_format import MessageToDict -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.providers.google.common.consts import CLIENT_INFO from airflow.providers.google.common.hooks.base_google import PROVIDE_PROJECT_ID, GoogleBaseHook diff --git a/airflow/providers/google/cloud/log/gcs_task_handler.py b/airflow/providers/google/cloud/log/gcs_task_handler.py index 303145310f10a..c8852a6e8ca3f 100644 --- a/airflow/providers/google/cloud/log/gcs_task_handler.py +++ b/airflow/providers/google/cloud/log/gcs_task_handler.py @@ -20,6 +20,7 @@ import logging import os import shutil +from functools import cached_property from pathlib import Path from typing import Collection @@ -27,7 +28,6 @@ from google.cloud import storage # type: ignore[attr-defined] from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import AirflowNotFoundException from airflow.providers.google.cloud.hooks.gcs import GCSHook, _parse_gcs_url diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index ca3ad9bad35e1..a18a34a97b027 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -19,6 +19,7 @@ import logging from contextvars import ContextVar +from functools import cached_property from typing import Collection from urllib.parse import urlencode @@ -29,7 +30,6 @@ from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse -from airflow.compat.functools import cached_property from airflow.models import TaskInstance from airflow.providers.google.cloud.utils.credentials_provider import get_credentials_and_project_id from airflow.providers.google.common.consts import CLIENT_INFO diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py b/airflow/providers/google/cloud/operators/bigquery_dts.py index d230e6c157aff..d9e013afa68c3 100644 --- a/airflow/providers/google/cloud/operators/bigquery_dts.py +++ b/airflow/providers/google/cloud/operators/bigquery_dts.py @@ -19,6 +19,7 @@ from __future__ import annotations import time +from functools import cached_property from typing 
import TYPE_CHECKING, Sequence from google.api_core.gapic_v1.method import DEFAULT, _MethodDefault @@ -31,7 +32,6 @@ ) from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook, get_object_id from airflow.providers.google.cloud.links.bigquery_dts import BigQueryDataTransferConfigLink from airflow.providers.google.cloud.operators.cloud_base import GoogleCloudBaseOperator diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index df32f68596ef3..5197fbcc2e3eb 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -24,10 +24,10 @@ import warnings from contextlib import ExitStack from enum import Enum +from functools import cached_property from typing import TYPE_CHECKING, Any, Sequence from airflow import AirflowException -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType from airflow.providers.google.cloud.hooks.dataflow import ( diff --git a/airflow/providers/google/cloud/operators/kubernetes_engine.py b/airflow/providers/google/cloud/operators/kubernetes_engine.py index 546eba1070635..043eda20ff1c1 100644 --- a/airflow/providers/google/cloud/operators/kubernetes_engine.py +++ b/airflow/providers/google/cloud/operators/kubernetes_engine.py @@ -19,13 +19,13 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import TYPE_CHECKING, Sequence from google.api_core.exceptions import AlreadyExists from google.cloud.container_v1.types import Cluster from kubernetes.client.models import V1Pod -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning try: diff --git a/airflow/providers/hashicorp/_internal_client/vault_client.py b/airflow/providers/hashicorp/_internal_client/vault_client.py index ea8aaf0071230..8a463cf1b9188 100644 --- a/airflow/providers/hashicorp/_internal_client/vault_client.py +++ b/airflow/providers/hashicorp/_internal_client/vault_client.py @@ -16,12 +16,13 @@ # under the License. 
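# A worked illustration, on assumed toy data, of what the _fix_dtypes hunks
# in sql_to_s3.py earlier in this patch do: for a float column whose non-null
# values are all integral, NaNs are swapped for None via the np.where call
# the hunks annotate with ``type: ignore``, and the column is then cast to
# pandas' nullable Int64 dtype, which keeps integer semantics while still
# representing missing values as <NA>.
import numpy as np
import pandas as pd

df = pd.DataFrame({"col": [1.0, 2.0, np.nan]})

notna_series = df["col"].dropna().values
if np.equal(notna_series, notna_series.astype(int)).all():
    # np.where yields an object array of (1.0, 2.0, None), which converts
    # cleanly to the nullable integer dtype below.
    df["col"] = np.where(df["col"].isnull(), None, df["col"])
    df["col"] = df["col"].astype(pd.Int64Dtype())

print(df["col"].dtype)  # Int64, holding 1, 2 and <NA>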
from __future__ import annotations +from functools import cached_property + import hvac from hvac.api.auth_methods import Kubernetes from hvac.exceptions import InvalidPath, VaultError from requests import Response -from airflow.compat.functools import cached_property from airflow.utils.log.logging_mixin import LoggingMixin DEFAULT_KUBERNETES_JWT_PATH = "/var/run/secrets/kubernetes.io/serviceaccount/token" diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index aab84a04c5499..cb5d0cfcc909c 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -19,13 +19,13 @@ import os import shutil +from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING, Any from azure.core.exceptions import HttpResponseError from packaging.version import Version -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.utils.log.file_task_handler import FileTaskHandler from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/microsoft/azure/secrets/key_vault.py b/airflow/providers/microsoft/azure/secrets/key_vault.py index 3ab8a663555ae..34b78b5031446 100644 --- a/airflow/providers/microsoft/azure/secrets/key_vault.py +++ b/airflow/providers/microsoft/azure/secrets/key_vault.py @@ -18,12 +18,12 @@ import re import warnings +from functools import cached_property from azure.core.exceptions import ResourceNotFoundError from azure.identity import DefaultAzureCredential from azure.keyvault.secrets import SecretClient -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.secrets import BaseSecretsBackend from airflow.utils.log.logging_mixin import LoggingMixin diff --git a/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py b/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py index 7f8aa7eb5bb00..37d642036c520 100644 --- a/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py +++ b/airflow/providers/microsoft/azure/transfers/sftp_to_wasb.py @@ -20,13 +20,13 @@ import os from collections import namedtuple +from functools import cached_property from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Sequence if TYPE_CHECKING: from airflow.utils.context import Context -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException from airflow.models import BaseOperator from airflow.providers.microsoft.azure.hooks.wasb import WasbHook diff --git a/airflow/providers/salesforce/hooks/salesforce.py b/airflow/providers/salesforce/hooks/salesforce.py index 6d2ccd50277c4..7bd2620909d68 100644 --- a/airflow/providers/salesforce/hooks/salesforce.py +++ b/airflow/providers/salesforce/hooks/salesforce.py @@ -26,13 +26,13 @@ import logging import time +from functools import cached_property from typing import Any, Iterable import pandas as pd from requests import Session from simple_salesforce import Salesforce, api -from airflow.compat.functools import cached_property from airflow.hooks.base import BaseHook log = logging.getLogger(__name__) diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py index 2ba9d786dc71a..9d0bc588a377f 100644 --- a/airflow/providers/slack/hooks/slack.py +++ b/airflow/providers/slack/hooks/slack.py @@ -19,14 +19,13 @@ import json import 
warnings -from functools import wraps +from functools import cached_property, wraps from pathlib import Path from typing import TYPE_CHECKING, Any, Sequence from slack_sdk import WebClient from slack_sdk.errors import SlackApiError -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowNotFoundException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.providers.slack.utils import ConnectionExtraConfig diff --git a/airflow/providers/slack/hooks/slack_webhook.py b/airflow/providers/slack/hooks/slack_webhook.py index ba2356f59cf23..aef5f29e715a9 100644 --- a/airflow/providers/slack/hooks/slack_webhook.py +++ b/airflow/providers/slack/hooks/slack_webhook.py @@ -19,13 +19,12 @@ import json import warnings -from functools import wraps +from functools import cached_property, wraps from typing import TYPE_CHECKING, Any, Callable from urllib.parse import urlsplit from slack_sdk import WebhookClient -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.models import Connection diff --git a/airflow/providers/slack/notifications/slack.py b/airflow/providers/slack/notifications/slack.py index b48ed3670b06f..d1c0f256f101f 100644 --- a/airflow/providers/slack/notifications/slack.py +++ b/airflow/providers/slack/notifications/slack.py @@ -18,9 +18,9 @@ from __future__ import annotations import json +from functools import cached_property from typing import Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowOptionalProviderFeatureException try: diff --git a/airflow/providers/slack/operators/slack.py b/airflow/providers/slack/operators/slack.py index bd6ea4c1a3930..15348494e5c6f 100644 --- a/airflow/providers/slack/operators/slack.py +++ b/airflow/providers/slack/operators/slack.py @@ -19,9 +19,9 @@ import json import warnings +from functools import cached_property from typing import Any, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.slack.hooks.slack import SlackHook diff --git a/airflow/providers/slack/operators/slack_webhook.py b/airflow/providers/slack/operators/slack_webhook.py index cd11db7ae4a8e..4eac057af3e5c 100644 --- a/airflow/providers/slack/operators/slack_webhook.py +++ b/airflow/providers/slack/operators/slack_webhook.py @@ -18,9 +18,9 @@ from __future__ import annotations import warnings +from functools import cached_property from typing import TYPE_CHECKING, Sequence -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.models import BaseOperator from airflow.providers.slack.hooks.slack_webhook import SlackWebhookHook diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py index 6d914c6eab691..21f743dd6ebea 100644 --- a/airflow/providers/ssh/hooks/ssh.py +++ b/airflow/providers/ssh/hooks/ssh.py @@ -21,6 +21,7 @@ import os import warnings from base64 import decodebytes +from functools import cached_property from io import StringIO from select import select from typing import Any, Sequence @@ -30,7 +31,6 @@ from sshtunnel import SSHTunnelForwarder from tenacity import Retrying, stop_after_attempt, wait_fixed, wait_random -from airflow.compat.functools 
import cached_property from airflow.exceptions import AirflowException, AirflowProviderDeprecationWarning from airflow.hooks.base import BaseHook from airflow.utils.platform import getuser diff --git a/airflow/ti_deps/deps/trigger_rule_dep.py b/airflow/ti_deps/deps/trigger_rule_dep.py index 62588c15b5f0f..df0101a4f7379 100644 --- a/airflow/ti_deps/deps/trigger_rule_dep.py +++ b/airflow/ti_deps/deps/trigger_rule_dep.py @@ -130,7 +130,7 @@ def _evaluate_trigger_rule( upstream_tasks = {t.task_id: t for t in task.upstream_list} trigger_rule = task.trigger_rule - @functools.lru_cache() + @functools.lru_cache def _get_expanded_ti_count() -> int: """Get how many tis the current task is supposed to be expanded into. @@ -139,7 +139,7 @@ def _get_expanded_ti_count() -> int: """ return task.get_mapped_ti_count(ti.run_id, session=session) - @functools.lru_cache() + @functools.lru_cache def _get_relevant_upstream_map_indexes(upstream_id: str) -> int | range | None: """Get the given task's map indexes relevant to the current ti. diff --git a/airflow/timetables/_cron.py b/airflow/timetables/_cron.py index b1e315a7d14f1..49f5771966adc 100644 --- a/airflow/timetables/_cron.py +++ b/airflow/timetables/_cron.py @@ -17,6 +17,7 @@ from __future__ import annotations import datetime +from functools import cached_property from typing import Any from cron_descriptor import CasingTypeEnum, ExpressionDescriptor, FormatException, MissingFieldException @@ -24,7 +25,6 @@ from pendulum import DateTime from pendulum.tz.timezone import Timezone -from airflow.compat.functools import cached_property from airflow.exceptions import AirflowTimetableInvalid from airflow.utils.dates import cron_presets from airflow.utils.timezone import convert_to_utc, make_aware, make_naive diff --git a/airflow/typing_compat.py b/airflow/typing_compat.py index e7bc6de304e65..a17c6b621ec6f 100644 --- a/airflow/typing_compat.py +++ b/airflow/typing_compat.py @@ -28,17 +28,13 @@ ] import sys - -if sys.version_info >= (3, 8): - from typing import Protocol, TypedDict, runtime_checkable -else: - from typing_extensions import Protocol, TypedDict, runtime_checkable +from typing import Protocol, TypedDict, runtime_checkable # Literal in 3.8 is limited to one single argument, not e.g. "Literal[1, 2]". if sys.version_info >= (3, 9): from typing import Literal else: from typing_extensions import Literal if sys.version_info >= (3, 10): from typing import ParamSpec, TypeGuard diff --git a/airflow/utils/context.py b/airflow/utils/context.py index 4543a8066a970..3eb4b2b234c46 100644 --- a/airflow/utils/context.py +++ b/airflow/utils/context.py @@ -31,6 +31,7 @@ KeysView, Mapping, MutableMapping, + SupportsIndex, ValuesView, ) @@ -186,7 +187,7 @@ def __init__(self, context: MutableMapping[str, Any] | None = None, **kwargs: An def __repr__(self) -> str: return repr(self._context) - def __reduce_ex__(self, protocol: int) -> tuple[Any, ...]: + def __reduce_ex__(self, protocol: SupportsIndex) -> tuple[Any, ...]: """Pickle the context as a dict.
We are intentionally going through ``__getitem__`` in this function, diff --git a/airflow/utils/file.py b/airflow/utils/file.py index 496ae8ba02e40..af3f45175ff5b 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -25,10 +25,9 @@ import zipfile from collections import OrderedDict from pathlib import Path -from typing import TYPE_CHECKING, Generator, NamedTuple, Pattern, overload +from typing import TYPE_CHECKING, Generator, NamedTuple, Pattern, Protocol, overload from pathspec.patterns import GitWildMatchPattern -from typing_extensions import Protocol from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning diff --git a/airflow/utils/log/file_task_handler.py b/airflow/utils/log/file_task_handler.py index 194045c62635a..f2ea57a28ea53 100644 --- a/airflow/utils/log/file_task_handler.py +++ b/airflow/utils/log/file_task_handler.py @@ -23,13 +23,13 @@ import warnings from contextlib import suppress from enum import Enum +from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Iterable from urllib.parse import urljoin import pendulum -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning from airflow.executors.executor_loader import ExecutorLoader diff --git a/airflow/utils/log/log_reader.py b/airflow/utils/log/log_reader.py index 97fe173e39368..a4589ebca0afa 100644 --- a/airflow/utils/log/log_reader.py +++ b/airflow/utils/log/log_reader.py @@ -18,11 +18,11 @@ import logging import time +from functools import cached_property from typing import Iterator from sqlalchemy.orm.session import Session -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.models.taskinstance import TaskInstance from airflow.utils.helpers import render_log_filename diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py index 381ba5aeaba77..54547c1e6e6e1 100644 --- a/airflow/utils/log/secrets_masker.py +++ b/airflow/utils/log/secrets_masker.py @@ -21,6 +21,7 @@ import logging import re import sys +from functools import cached_property from typing import ( TYPE_CHECKING, Any, @@ -37,7 +38,7 @@ ) from airflow import settings -from airflow.compat.functools import cache, cached_property +from airflow.compat.functools import cache from airflow.typing_compat import TypeGuard if TYPE_CHECKING: diff --git a/airflow/www/extensions/init_views.py b/airflow/www/extensions/init_views.py index 71efdbcf4b5f8..492f325bb9c99 100644 --- a/airflow/www/extensions/init_views.py +++ b/airflow/www/extensions/init_views.py @@ -18,6 +18,7 @@ import logging import warnings +from functools import cached_property from os import path from connexion import FlaskApi, ProblemException, Resolver @@ -26,7 +27,6 @@ from flask import Flask, request from airflow.api_connexion.exceptions import common_error_handler -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.exceptions import RemovedInAirflow3Warning from airflow.security import permissions diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py index a26b5977fbcbb..7145128e21b50 100644 --- a/airflow/www/fab_security/manager.py +++ b/airflow/www/fab_security/manager.py @@ -23,6 +23,7 @@ import json import logging import re +from functools import cached_property from typing import Any from uuid import uuid4 @@ -72,7 +73,6 @@ from 
flask_login import AnonymousUserMixin, LoginManager, current_user from werkzeug.security import check_password_hash, generate_password_hash -from airflow.compat.functools import cached_property from airflow.configuration import conf from airflow.www.fab_security.sqla.models import Action, Permission, RegisterUser, Resource, Role, User diff --git a/airflow/www/views.py b/airflow/www/views.py index a43c5db7c2dde..7770246831319 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -30,7 +30,7 @@ import warnings from bisect import insort_left from collections import defaultdict -from functools import wraps +from functools import cached_property, wraps from json import JSONDecodeError from typing import Any, Callable, Collection, Iterator, Mapping, MutableMapping, Sequence from urllib.parse import unquote, urljoin, urlsplit @@ -84,7 +84,6 @@ set_dag_run_state_to_success, set_state, ) -from airflow.compat.functools import cached_property from airflow.configuration import AIRFLOW_CONFIG, conf from airflow.datasets import Dataset from airflow.exceptions import AirflowException, ParamValidationError, RemovedInAirflow3Warning diff --git a/chart/values.schema.json b/chart/values.schema.json index e90fffd024087..59194b8ac4b0b 100644 --- a/chart/values.schema.json +++ b/chart/values.schema.json @@ -2076,7 +2076,7 @@ "additionalProperties": false, "properties": { "enabled": { - "description": "Enable triggerer (requires Python 3.7+).", + "description": "Enable triggerer", "type": "boolean", "default": true }, diff --git a/constraints/README.md b/constraints/README.md index 1ddfc1a3dfafc..791450d1bd7c9 100644 --- a/constraints/README.md +++ b/constraints/README.md @@ -29,12 +29,12 @@ This allows you to iterate on dependencies without having to run `--upgrade-to-n Typical workflow in this case is: * download and copy the constraint file to the folder (for example via -[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.7.txt) +[The GitHub Raw Link](https://raw.githubusercontent.com/apache/airflow/constraints-main/constraints-3.8.txt) * modify the constraint file in "constraints" folder * build the image using this command ```bash -breeze ci-image build --python 3.7 --airflow-constraints-location constraints/constraints-3.7txt +breeze ci-image build --python 3.8 --airflow-constraints-location constraints/constraints-3.8.txt ``` You can continue iterating and updating the constraint file (and rebuilding the image) @@ -46,7 +46,7 @@ pip freeze | sort | \ grep -v "apache_airflow" | \ grep -v "apache-airflow==" | \ grep -v "@" | \ - grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.7.txt + grep -v "/opt/airflow" > /opt/airflow/constraints/constraints-3.8.txt ``` If you are working with others on updating the dependencies, you can also commit the constraint diff --git a/dev/README_RELEASE_AIRFLOW.md b/dev/README_RELEASE_AIRFLOW.md index 174bc071f862a..309c6c49f48ef 100644 --- a/dev/README_RELEASE_AIRFLOW.md +++ b/dev/README_RELEASE_AIRFLOW.md @@ -571,7 +571,7 @@ Optionally it can be followed with constraints ```shell script pip install apache-airflow==rc \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.7.txt"` + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-/constraints-3.8.txt"` ``` Note that the constraints contain python version that you are installing it with.
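All of the `cached_property` hunks above apply one mechanical substitution: the decorator is now imported from the standard library `functools` (available since Python 3.8) instead of the `airflow.compat.functools` shim that back-filled it on Python 3.7. A minimal sketch of the resulting pattern; the `MyHook` class and its connection id are illustrative assumptions, not code from this patch:

```python
from functools import cached_property  # stdlib since Python 3.8; replaces airflow.compat.functools

from airflow.hooks.base import BaseHook


class MyHook(BaseHook):
    """Hypothetical hook showing the import pattern applied across the hunks above."""

    def __init__(self, conn_id: str = "my_conn_id") -> None:
        super().__init__()
        self.conn_id = conn_id

    @cached_property
    def conn(self):
        # Evaluated once on first access, then cached on the instance;
        # the same behaviour the removed compat import provided on Python 3.7.
        return self.get_connection(self.conn_id)
```

Because `cached_property` stores the computed value in the instance `__dict__`, repeated accesses of `hook.conn` reuse the first connection lookup.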
@@ -583,7 +583,7 @@ There is also an easy way of installation with Breeze if you have the latest sou Running the following command will use tmux inside breeze, create `admin` user and run Webserver & Scheduler: ```shell script -breeze start-airflow --use-airflow-version rc --python 3.7 --backend postgres +breeze start-airflow --use-airflow-version rc --python 3.8 --backend postgres ``` Once you install and run Airflow, you should perform any verification you see as necessary to check @@ -670,7 +670,7 @@ the older branches, you should set the "skip" field to true. ## Verify production images ```shell script -for PYTHON in 3.7 3.8 3.9 3.10 3.11 +for PYTHON in 3.8 3.9 3.10 3.11 do docker pull apache/airflow:${VERSION}-python${PYTHON} breeze prod-image verify --image-name apache/airflow:${VERSION}-python${PYTHON} diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 45f44e40f7dd3..4317543755368 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -721,7 +721,7 @@ pip install apache-airflow-providers-==rc ### Installing with Breeze ```shell -breeze start-airflow --use-airflow-version 2.2.4 --python 3.7 --backend postgres \ +breeze start-airflow --use-airflow-version 2.2.4 --python 3.8 --backend postgres \ --load-example-dags --load-default-connections ``` @@ -852,7 +852,7 @@ do svn mv "${file}" "${base_file//rc[0-9]/}" done -# Check which old packages will be removed (you need Python 3.7+ and dev/requirements.txt installed) +# Check which old packages will be removed (you need Python 3.8+ and dev/requirements.txt installed) python ${AIRFLOW_REPO_ROOT}/dev/provider_packages/remove_old_releases.py --directory . # Remove those packages diff --git a/dev/breeze/README.md b/dev/breeze/README.md index ad0f24771d291..69bdaf4cede6a 100644 --- a/dev/breeze/README.md +++ b/dev/breeze/README.md @@ -52,6 +52,6 @@ PLEASE DO NOT MODIFY THE HASH BELOW! IT IS AUTOMATICALLY UPDATED BY PRE-COMMIT. --------------------------------------------------------------------------------------------------------- -Package config hash: f799f7c7b42d8bb6dcee79f989d4af33b96b531bcc5c9a46002e444d8f4c4b316e7ab77f9dfd6a6b396cba8226a3cbfeb450128bd55ffa443d15139a4cc74d22 +Package config hash: 5a58d062de9c220a74215d4beedb98228f93c5b9fc1ef590b3176605ab50197e446e6e38e5e889e1c4e46ba720314c54a0b1efb87964b55f3856bdceaa9852ce --------------------------------------------------------------------------------------------------------- diff --git a/dev/breeze/SELECTIVE_CHECKS.md b/dev/breeze/SELECTIVE_CHECKS.md index ada5f359bb9e0..f2d2ab72b6725 100644 --- a/dev/breeze/SELECTIVE_CHECKS.md +++ b/dev/breeze/SELECTIVE_CHECKS.md @@ -122,8 +122,8 @@ Github Actions to pass the list of parameters to a command to execute | Output | Meaning of the output | Example value | List as string | |------------------------------------|---------------------------------------------------------------------------------------------------------|------------------------------------------------------------|----------------| | affected-providers-list-as-string | List of providers affected when they are selectively affected. 
| airbyte http | * | -| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.7', '3.8', '3.9', '3.10'] | | -| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.7 3.8 3.9 3.10 | * | +| all-python-versions | List of all python versions there are available in the form of JSON array | ['3.8', '3.9', '3.10'] | | +| all-python-versions-list-as-string | List of all python versions there are available in the form of space separated string | 3.8 3.9 3.10 | * | | basic-checks-only | Whether to run all static checks ("false") or only basic set of static checks ("true") | false | | | cache-directive | Which cache should be be used for images ("registry", "local" , "disabled") | registry | | | debug-resources | Whether resources usage should be printed during parallel job execution ("true"/ "false") | false | | @@ -135,14 +135,14 @@ Github Actions to pass the list of parameters to a command to execute | default-mssql-version | Which MsSQL version to use as default | 2017-latest | | | default-mysql-version | Which MySQL version to use as default | 5.7 | | | default-postgres-version | Which Postgres version to use as default | 10 | | -| default-python-version | Which Python version to use as default | 3.7 | | +| default-python-version | Which Python version to use as default | 3.8 | | | docs-build | Whether to build documentation ("true"/"false") | true | | | docs-filter-list-as-string | What filter to apply to docs building - based on which documentation packages should be built | --package-filter apache-airflow -package-filter-helm-chart | | | full-tests-needed | Whether this build runs complete set of tests or only subset (for faster PR builds) | false | | | helm-version | Which Helm version to use for tests | v3.9.4 | | | image-build | Whether CI image build is needed | true | | | kind-version | Which Kind version to use for tests | v0.16.0 | | -| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.7-v1.25.2 3.8-v1.26.4 | * | +| kubernetes-combos-list-as-string | All combinations of Python version and Kubernetes version to use for tests as space-separated string | 3.8-v1.25.2 3.9-v1.26.4 | * | | kubernetes-versions | All Kubernetes versions to use for tests as JSON array | ['v1.25.2'] | | | kubernetes-versions-list-as-string | All Kubernetes versions to use for tests as space-separated string | v1.25.2 | * | | mssql-exclude | Which versions of MsSQL to exclude for tests as JSON array | [] | | @@ -157,8 +157,8 @@ Github Actions to pass the list of parameters to a command to execute | parallel-test-types-list-as-string | Which test types should be run for unit tests | API Always Providers\[amazon\] Providers\[-amazon\] | * | | postgres-exclude | Which versions of Postgres to exclude for tests as JSON array | [] | | | postgres-versions | Which versions of Postgres to use for tests as JSON array | ['10'] | | -| python-versions | Which versions of Python to use for tests as JSON array | ['3.7'] | | -| python-versions-list-as-string | Which versions of MySQL to use for tests as space-separated string | 3.7 | * | +| python-versions | Which versions of Python to use for tests as JSON array | ['3.8'] | | +| python-versions-list-as-string | Which versions of Python to use for tests as space-separated string | 3.8 | * | | run-kubernetes-tests | Whether Kubernetes tests should be run ("true"/"false") | 
true | | | run-tests | Whether unit tests should be run ("true"/"false") | true | | | run-www-tests | Whether WWW tests should be run ("true"/"false") | true | | diff --git a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md index 8aab0db417534..d0818ad2bb913 100644 --- a/dev/breeze/doc/adr/0002-implement-standalone-python-command.md +++ b/dev/breeze/doc/adr/0002-implement-standalone-python-command.md @@ -138,9 +138,9 @@ There are a few properties of Breeze/CI scripts that should be maintained though run a command and get everything done with the least number of prerequisites * The prerequisites for Breeze and CI are: - * Python 3.7+ (Python 3.7 end of life is Jun 2023) - * Docker (TBD which minimum version supported) - * Docker Compose (TBD which minimum version supported) + * Python 3.8+ (Python 3.8 end of life is October 2024) + * Docker (23.0+) + * Docker Compose (2.16.0+) * No other tools and CLI commands should be needed * The python requirements should be automatically installed when missing in a "Breeze" venv and updated automatically when needed. The number of Python dependencies needed to run Breeze and CI scripts diff --git a/dev/breeze/setup.cfg b/dev/breeze/setup.cfg index 4e59505678969..c1993ffe8ecef 100644 --- a/dev/breeze/setup.cfg +++ b/dev/breeze/setup.cfg @@ -33,7 +33,6 @@ classifiers = Environment :: Console Intended Audience :: Developers License :: OSI Approved :: Apache Software License - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 @@ -48,16 +47,14 @@ project_urls = [options] zip_safe = False include_package_data = True -python_requires = ~=3.7 +python_requires = ~=3.8 package_dir= =src packages = find: install_requires = - cached_property>=1.5.0;python_version<="3.7" click filelock inputimeout - importlib-metadata>=4.4; python_version < "3.8" jinja2 packaging pendulum diff --git a/dev/breeze/src/airflow_breeze/commands/minor_release_command.py b/dev/breeze/src/airflow_breeze/commands/minor_release_command.py index 76ff59c34c84f..3df2936257aa9 100644 --- a/dev/breeze/src/airflow_breeze/commands/minor_release_command.py +++ b/dev/breeze/src/airflow_breeze/commands/minor_release_command.py @@ -173,7 +173,7 @@ def create_minor_version_branch(version_branch): create_branch(version_branch) # Build ci image if confirm_action("Build latest breeze image?"): - run_command(["breeze", "ci-image", "build", "--python", "3.7"], dry_run_override=DRY_RUN, check=True) + run_command(["breeze", "ci-image", "build", "--python", "3.8"], dry_run_override=DRY_RUN, check=True) # Update default branches update_default_branch(version_branch) # Commit changes diff --git a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py index ec6ab1db1f916..5c954583d5e7a 100644 --- a/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py +++ b/dev/breeze/src/airflow_breeze/commands/release_candidate_command.py @@ -347,7 +347,7 @@ def publish_release_candidate(version, previous_version, github_token): git_clean() # Build the latest image if confirm_action("Build latest breeze image?"): - run_command(["breeze", "ci-image", "build", "--python", "3.7"], dry_run_override=DRY_RUN, check=True) + run_command(["breeze", "ci-image", "build", "--python", "3.8"], dry_run_override=DRY_RUN, check=True) # Create the tarball 
tarball_release(version, version_without_rc) # Create the artifacts diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py index 69f6f2a586165..fc5ba7c8e44e7 100644 --- a/dev/breeze/src/airflow_breeze/global_constants.py +++ b/dev/breeze/src/airflow_breeze/global_constants.py @@ -36,7 +36,7 @@ APACHE_AIRFLOW_GITHUB_REPOSITORY = "apache/airflow" # Checked before putting in build cache -ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] DEFAULT_PYTHON_MAJOR_MINOR_VERSION = ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS[0] ALLOWED_ARCHITECTURES = [Architecture.X86_64, Architecture.ARM] ALLOWED_BACKENDS = ["sqlite", "mysql", "postgres", "mssql"] @@ -174,7 +174,7 @@ def get_default_platform_machine() -> str: PYTHONDONTWRITEBYTECODE = True PRODUCTION_IMAGE = False -ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_PYTHON_MAJOR_MINOR_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] CURRENT_PYTHON_MAJOR_MINOR_VERSIONS = ALL_PYTHON_MAJOR_MINOR_VERSIONS CURRENT_POSTGRES_VERSIONS = ["11", "12", "13", "14", "15"] DEFAULT_POSTGRES_VERSION = CURRENT_POSTGRES_VERSIONS[0] diff --git a/dev/breeze/src/airflow_breeze/params/common_build_params.py b/dev/breeze/src/airflow_breeze/params/common_build_params.py index 220ef183f3970..9c3b56c24ef46 100644 --- a/dev/breeze/src/airflow_breeze/params/common_build_params.py +++ b/dev/breeze/src/airflow_breeze/params/common_build_params.py @@ -61,7 +61,7 @@ class CommonBuildParams: prepare_buildx_cache: bool = False python_image: str | None = None push: bool = False - python: str = "3.7" + python: str = "3.8" tag_as_latest: bool = False upgrade_to_newer_dependencies: bool = False upgrade_on_failure: bool = False diff --git a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py index 2b84090008152..65458e6208642 100644 --- a/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/kubernetes_utils.py @@ -170,11 +170,7 @@ def _download_tool_if_needed( get_console().print(f"[info]Downloading from:[/] {url}") if get_dry_run(): return - try: - # we can add missing_ok when we drop Python 3.7 - path.unlink() - except OSError: - pass + path.unlink(missing_ok=True) path.parent.mkdir(parents=True, exist_ok=True) num_tries = 4 if not uncompress_file: diff --git a/dev/breeze/src/airflow_breeze/utils/run_utils.py b/dev/breeze/src/airflow_breeze/utils/run_utils.py index f29ca43006545..5a46b5db6e03b 100644 --- a/dev/breeze/src/airflow_breeze/utils/run_utils.py +++ b/dev/breeze/src/airflow_breeze/utils/run_utils.py @@ -379,7 +379,7 @@ def check_if_image_exists(image: str) -> bool: def get_ci_image_for_pre_commits() -> str: github_repository = os.environ.get("GITHUB_REPOSITORY", APACHE_AIRFLOW_GITHUB_REPOSITORY) - python_version = "3.7" + python_version = "3.8" airflow_image = f"ghcr.io/{github_repository}/{AIRFLOW_BRANCH}/ci/python{python_version}" skip_image_pre_commits = os.environ.get("SKIP_IMAGE_PRE_COMMITS", "false") if skip_image_pre_commits[0].lower() == "t": diff --git a/dev/breeze/src/airflow_breeze/utils/selective_checks.py b/dev/breeze/src/airflow_breeze/utils/selective_checks.py index 984e406d33fc1..fde48b74ecf2d 100644 --- a/dev/breeze/src/airflow_breeze/utils/selective_checks.py +++ b/dev/breeze/src/airflow_breeze/utils/selective_checks.py @@ -20,33 +20,14 @@ import os import sys from enum import Enum - -from 
airflow_breeze.utils.exclude_from_matrix import excluded_combos -from airflow_breeze.utils.github_actions import get_ga_output -from airflow_breeze.utils.kubernetes_utils import get_kubernetes_python_combos -from airflow_breeze.utils.path_utils import ( - AIRFLOW_PROVIDERS_ROOT, - AIRFLOW_SOURCES_ROOT, - DOCS_DIR, - SYSTEM_TESTS_PROVIDERS_ROOT, - TESTS_PROVIDERS_ROOT, -) -from airflow_breeze.utils.provider_dependencies import DEPENDENCIES, get_related_providers - -if sys.version_info >= (3, 8): - from functools import cached_property -else: - # noinspection PyUnresolvedReferences - from cached_property import cached_property - -from functools import lru_cache +from functools import cached_property, lru_cache from re import match from typing import Any, Dict, List, TypeVar if sys.version_info >= (3, 9): from typing import Literal else: from typing_extensions import Literal from airflow_breeze.global_constants import ( ALL_PYTHON_MAJOR_MINOR_VERSIONS, @@ -72,6 +53,17 @@ all_selective_test_types, ) from airflow_breeze.utils.console import get_console +from airflow_breeze.utils.exclude_from_matrix import excluded_combos +from airflow_breeze.utils.github_actions import get_ga_output +from airflow_breeze.utils.kubernetes_utils import get_kubernetes_python_combos +from airflow_breeze.utils.path_utils import ( + AIRFLOW_PROVIDERS_ROOT, + AIRFLOW_SOURCES_ROOT, + DOCS_DIR, + SYSTEM_TESTS_PROVIDERS_ROOT, + TESTS_PROVIDERS_ROOT, +) +from airflow_breeze.utils.provider_dependencies import DEPENDENCIES, get_related_providers FULL_TESTS_NEEDED_LABEL = "full tests needed" DEBUG_CI_RESOURCES_LABEL = "debug ci resources" diff --git a/dev/breeze/tests/test_cache.py b/dev/breeze/tests/test_cache.py index 1db27494abd07..27d7eef487f96 100644 --- a/dev/breeze/tests/test_cache.py +++ b/dev/breeze/tests/test_cache.py @@ -36,8 +36,8 @@ [ ("backend", "mysql", (True, ["sqlite", "mysql", "postgres", "mssql"]), None), ("backend", "xxx", (False, ["sqlite", "mysql", "postgres", "mssql"]), None), - ("python_major_minor_version", "3.8", (True, ["3.7", "3.8", "3.9", "3.10", "3.11"]), None), - ("python_major_minor_version", "3.5", (False, ["3.7", "3.8", "3.9", "3.10", "3.11"]), None), + ("python_major_minor_version", "3.8", (True, ["3.8", "3.9", "3.10", "3.11"]), None), + ("python_major_minor_version", "3.7", (False, ["3.8", "3.9", "3.10", "3.11"]), None), ("missing", "value", None, AttributeError), ], ) diff --git a/dev/breeze/tests/test_exclude_from_matrix.py b/dev/breeze/tests/test_exclude_from_matrix.py index 49bb0e57345d9..7280e0dd550ac 100644 --- a/dev/breeze/tests/test_exclude_from_matrix.py +++ b/dev/breeze/tests/test_exclude_from_matrix.py @@ -24,13 +24,13 @@ @pytest.mark.parametrize( "list_1, list_2, expected_representative_list", [ - (["3.6", "3.7"], ["1", "2"], [("3.6", "1"), ("3.7", "2")]), - (["3.6", "3.7"], ["1", "2", "3"], [("3.6", "1"), ("3.7", "2"), ("3.6", "3")]), - (["3.6", "3.7"], ["1", "2", "3", "4"], [("3.6", "1"), ("3.7", "2"), ("3.6", "3"), ("3.7", "4")]), + (["3.8", "3.9"], ["1", "2"], [("3.8", "1"), ("3.9", "2")]), + (["3.8", "3.9"], ["1", "2", "3"], [("3.8", "1"), ("3.9", "2"), ("3.8", "3")]), + (["3.8", "3.9"], ["1", "2", "3", "4"], [("3.8", "1"), ("3.9", "2"), ("3.8", "3"), ("3.9", "4")]), ( - ["3.6", "3.7", "3.8"], + ["3.8", "3.9", "3.10"], ["1", "2", "3", "4"], - [("3.6", "1"), ("3.7", "2"), ("3.8", "3"), ("3.6", "4")], + [("3.8", "1"), ("3.9", "2"), ("3.10", "3"), ("3.8", "4")], ), ], ) diff --git a/dev/breeze/tests/test_selective_checks.py
b/dev/breeze/tests/test_selective_checks.py index 7a23c3c551e65..621c1009022df 100644 --- a/dev/breeze/tests/test_selective_checks.py +++ b/dev/breeze/tests/test_selective_checks.py @@ -55,10 +55,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("INTHEWILD.md",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "false", "needs-helm-tests": "false", "run-tests": "false", @@ -75,10 +75,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("airflow/api/file.py",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -98,10 +98,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": "amazon common.sql google postgres", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -119,10 +119,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("tests/providers/apache/beam/file.py",), { "affected-providers-list-as-string": "apache.beam google", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -140,10 +140,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("docs/file.rst",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "false", @@ -164,10 +164,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": "amazon common.sql google postgres", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": 
"true", "run-tests": "true", @@ -192,10 +192,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): { "affected-providers-list-as-string": "airbyte apache.livy " "dbt.cloud dingding discord http", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -219,10 +219,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": "airbyte http", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -245,10 +245,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -266,10 +266,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("setup.py",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", - "python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", + "python-versions": "['3.8', '3.9', '3.10', '3.11']", + "python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -289,10 +289,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("generated/provider_dependencies.json",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", - "python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", + "python-versions": "['3.8', '3.9', '3.10', '3.11']", + "python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -312,10 +312,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.sql exasol ftp google imap " "mongo mysql postgres salesforce ssh", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - 
"python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -333,10 +333,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): ("tests/providers/airbyte/__init__.py",), { "affected-providers-list-as-string": "airbyte http", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -354,10 +354,10 @@ def assert_outputs_are_printed(expected_outputs: dict[str, str], stderr: str): "affected-providers-list-as-string": "amazon apache.hive cncf.kubernetes " "common.sql exasol ftp google imap " "mongo mysql postgres salesforce ssh", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", - "python-versions": "['3.7']", - "python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", + "python-versions": "['3.8']", + "python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -397,10 +397,10 @@ def test_expected_output_pull_request_main( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", - "python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", + "python-versions": "['3.8', '3.9', '3.10', '3.11']", + "python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -424,10 +424,10 @@ def test_expected_output_pull_request_main( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", - "python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", + "python-versions": "['3.8', '3.9', '3.10', '3.11']", + "python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -449,10 +449,10 @@ def test_expected_output_pull_request_main( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", - "python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", + "python-versions": "['3.8', '3.9', '3.10', '3.11']", + "python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": 
"true", "run-tests": "true", "docs-build": "true", @@ -474,10 +474,10 @@ def test_expected_output_pull_request_main( "v2-3-stable", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", - "python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", + "python-versions": "['3.8', '3.9', '3.10', '3.11']", + "python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "run-tests": "true", "docs-build": "true", @@ -515,8 +515,8 @@ def test_expected_output_full_tests_needed( ("INTHEWILD.md",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "false", "needs-helm-tests": "false", "run-tests": "false", @@ -538,8 +538,8 @@ def test_expected_output_full_tests_needed( "affected-providers-list-as-string": "amazon apache.beam apache.cassandra cncf.kubernetes " "common.sql facebook google hashicorp microsoft.azure microsoft.mssql " "mysql oracle postgres presto salesforce sftp ssh trino", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "needs-helm-tests": "false", "image-build": "true", "run-tests": "true", @@ -564,8 +564,8 @@ def test_expected_output_full_tests_needed( "cncf.kubernetes common.sql facebook google " "hashicorp microsoft.azure microsoft.mssql mysql oracle postgres presto " "salesforce sftp ssh trino", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -586,8 +586,8 @@ def test_expected_output_full_tests_needed( ), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -624,8 +624,8 @@ def test_expected_output_pull_request_v2_3( ("INTHEWILD.md",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "false", "needs-helm-tests": "false", "run-tests": "false", @@ -641,8 +641,8 @@ def test_expected_output_pull_request_v2_3( ("tests/system/any_file.py",), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -665,8 +665,8 @@ def test_expected_output_pull_request_v2_3( "cncf.kubernetes common.sql " "facebook google hashicorp microsoft.azure microsoft.mssql mysql " "oracle postgres presto salesforce sftp ssh trino", - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + 
"all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -708,8 +708,8 @@ def test_expected_output_pull_request_v2_3( ), { "affected-providers-list-as-string": None, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -726,8 +726,8 @@ def test_expected_output_pull_request_v2_3( ("airflow/models/test.py",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -746,8 +746,8 @@ def test_expected_output_pull_request_v2_3( ("airflow/file.py",), { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7']", - "all-python-versions-list-as-string": "3.7", + "all-python-versions": "['3.8']", + "all-python-versions-list-as-string": "3.8", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -787,8 +787,8 @@ def test_expected_output_pull_request_target( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -807,8 +807,8 @@ def test_expected_output_pull_request_target( "v2-3-stable", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "needs-helm-tests": "false", "run-tests": "true", @@ -826,8 +826,8 @@ def test_expected_output_pull_request_target( "main", { "affected-providers-list-as-string": ALL_PROVIDERS_AFFECTED, - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", @@ -878,8 +878,8 @@ def test_no_commit_provided_trigger_full_build_for_any_event_type(github_event): ) assert_outputs_are_printed( { - "all-python-versions": "['3.7', '3.8', '3.9', '3.10', '3.11']", - "all-python-versions-list-as-string": "3.7 3.8 3.9 3.10 3.11", + "all-python-versions": "['3.8', '3.9', '3.10', '3.11']", + "all-python-versions-list-as-string": "3.8 3.9 3.10 3.11", "image-build": "true", "needs-helm-tests": "true", "run-tests": "true", diff --git a/dev/check_files.py b/dev/check_files.py index 158ac5c046a38..c8085c806670b 100644 --- a/dev/check_files.py +++ b/dev/check_files.py @@ -33,7 +33,7 @@ """ AIRFLOW_DOCKER = """\ -FROM python:3.7 +FROM python:3.8 # Upgrade RUN pip install "apache-airflow=={}" diff --git a/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 b/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 index a4023b675876a..5fb6a9c74d321 100644 --- 
a/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 +++ b/dev/provider_packages/SETUP_TEMPLATE.cfg.jinja2 @@ -63,7 +63,7 @@ python_tag=py3 [options] zip_safe = False include_package_data = True -python_requires = ~=3.7 +python_requires = ~=3.8 packages = find: setup_requires = {{ SETUP_REQUIREMENTS }} install_requires = {{ INSTALL_REQUIREMENTS }} diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py index bf7da691c432e..76fd3f99a1275 100755 --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -52,7 +52,7 @@ from rich.syntax import Syntax from yaml import safe_load -ALL_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] MIN_AIRFLOW_VERSION = "2.4.0" # In case you have some providers that you want to have different min-airflow version for, @@ -1116,7 +1116,7 @@ def get_provider_jinja_context( supported_python_versions = [ p for p in ALL_PYTHON_VERSIONS if p not in provider_details.excluded_python_versions ] - python_requires = "~=3.7" + python_requires = "~=3.8" for p in provider_details.excluded_python_versions: python_requires += f", !={p}" min_airflow_version = MIN_AIRFLOW_VERSION diff --git a/dev/retag_docker_images.py b/dev/retag_docker_images.py index ecc304eaa8b4c..ba71d3ac284ef 100755 --- a/dev/retag_docker_images.py +++ b/dev/retag_docker_images.py @@ -31,7 +31,7 @@ import rich_click as click -PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +PYTHON_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] GHCR_IO_PREFIX = "ghcr.io" diff --git a/dev/stats/get_important_pr_candidates.py b/dev/stats/get_important_pr_candidates.py index 28a8081e01f7a..760e9ced0e2ec 100755 --- a/dev/stats/get_important_pr_candidates.py +++ b/dev/stats/get_important_pr_candidates.py @@ -21,9 +21,9 @@ import math import pickle import re -import sys import textwrap from datetime import datetime +from functools import cached_property import pendulum import rich_click as click @@ -31,11 +31,6 @@ from github.PullRequest import PullRequest from rich.console import Console -if sys.version_info >= (3, 8): - from functools import cached_property -else: - from cached_property import cached_property - logger = logging.getLogger(__name__) diff --git a/dev/validate_version_added_fields_in_config.py b/dev/validate_version_added_fields_in_config.py index 40679331b7ff1..d68ce36a0de3a 100755 --- a/dev/validate_version_added_fields_in_config.py +++ b/dev/validate_version_added_fields_in_config.py @@ -83,7 +83,7 @@ def parse_config_template_old_format(config_content: str) -> set[tuple[str, str, } -@functools.lru_cache() +@functools.lru_cache def fetch_config_options_for_version(version_str: str) -> set[tuple[str, str]]: r = requests.get( f"https://raw.githubusercontent.com/apache/airflow/{version_str}/airflow/config_templates/config.yml" diff --git a/docker_tests/docker_tests_utils.py b/docker_tests/docker_tests_utils.py index 01cfb1dc603da..8d95805f6459c 100644 --- a/docker_tests/docker_tests_utils.py +++ b/docker_tests/docker_tests_utils.py @@ -20,7 +20,7 @@ from docker_tests.command_utils import run_command -DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.7" +DEFAULT_PYTHON_MAJOR_MINOR_VERSION = "3.8" docker_image = os.environ.get( "DOCKER_IMAGE", f"ghcr.io/apache/airflow/main/prod/python{DEFAULT_PYTHON_MAJOR_MINOR_VERSION}:latest" @@ -87,11 +87,11 @@ def display_dependency_conflict_message(): CI image: - breeze ci-image build 
--upgrade-to-newer-dependencies --python 3.7 + breeze ci-image build --upgrade-to-newer-dependencies --python 3.8 Production image: - breeze ci-image build --production-image --upgrade-to-newer-dependencies --python 3.7 + breeze ci-image build --production-image --upgrade-to-newer-dependencies --python 3.8 * You will see error messages there telling which requirements are conflicting and which packages caused the conflict. Add the limitation that caused the conflict to EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS diff --git a/docs/apache-airflow-providers/installing-from-pypi.rst b/docs/apache-airflow-providers/installing-from-pypi.rst index 7d3605915580a..f053138705960 100644 --- a/docs/apache-airflow-providers/installing-from-pypi.rst +++ b/docs/apache-airflow-providers/installing-from-pypi.rst @@ -45,6 +45,6 @@ Typical command to install airflow from PyPI looks like below (you need to use t .. code-block:: - pip install "apache-airflow-providers-celery" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.3.0/constraints-3.7.txt" + pip install "apache-airflow-providers-celery" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.3.0/constraints-3.8.txt" This is an example, see :doc:`apache-airflow:installation/installing-from-pypi` for more examples, including how to upgrade the providers. diff --git a/docs/apache-airflow/administration-and-deployment/modules_management.rst b/docs/apache-airflow/administration-and-deployment/modules_management.rst index e3cf953e46043..ec1a9b11e4195 100644 --- a/docs/apache-airflow/administration-and-deployment/modules_management.rst +++ b/docs/apache-airflow/administration-and-deployment/modules_management.rst @@ -56,10 +56,10 @@ by running an interactive terminal as in the example below: >>> from pprint import pprint >>> pprint(sys.path) ['', - '/home/arch/.pyenv/versions/3.7.4/lib/python37.zip', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7/lib-dynload', - '/home/arch/venvs/airflow/lib/python3.7/site-packages'] + '/home/arch/.pyenv/versions/3.8.4/lib/python38.zip', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8/lib-dynload', + '/home/arch/venvs/airflow/lib/python3.8/site-packages'] ``sys.path`` is initialized during program startup.
The first precedence is given to the current directory, i.e, ``path[0]`` is the directory containing @@ -290,10 +290,10 @@ The ``sys.path`` variable will look like below: >>> pprint(sys.path) ['', '/home/arch/projects/airflow_operators' - '/home/arch/.pyenv/versions/3.7.4/lib/python37.zip', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7', - '/home/arch/.pyenv/versions/3.7.4/lib/python3.7/lib-dynload', - '/home/arch/venvs/airflow/lib/python3.7/site-packages'] + '/home/arch/.pyenv/versions/3.8.4/lib/python38.zip', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8', + '/home/arch/.pyenv/versions/3.8.4/lib/python3.8/lib-dynload', + '/home/arch/venvs/airflow/lib/python3.8/site-packages'] As we can see that our provided directory is now added to the path, let's try to import the package now: diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst index 4477afdc1ae59..a4e95cd78ffac 100644 --- a/docs/apache-airflow/extra-packages-ref.rst +++ b/docs/apache-airflow/extra-packages-ref.rst @@ -100,7 +100,7 @@ with a consistent set of dependencies based on constraint files provided by Airf :substitutions: pip install apache-airflow[google,amazon,apache.spark]==|version| \ - --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.7.txt" + --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.8.txt" Note, that this will install providers in the versions that were released at the time of Airflow |version| release. You can later upgrade those providers manually if you want to use latest versions of the providers. diff --git a/docs/apache-airflow/installation/installing-from-pypi.rst b/docs/apache-airflow/installation/installing-from-pypi.rst index 0ae5991f10e81..193bea6900cb6 100644 --- a/docs/apache-airflow/installation/installing-from-pypi.rst +++ b/docs/apache-airflow/installation/installing-from-pypi.rst @@ -44,7 +44,7 @@ Typical command to install airflow from PyPI looks like below: .. code-block:: - pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.7.txt" + pip install "apache-airflow[celery]==|version|" --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.8.txt" This is an example, see further for more explanation. @@ -77,7 +77,7 @@ You can create the URL to the file substituting the variables in the template be where: - ``AIRFLOW_VERSION`` - Airflow version (e.g. :subst-code:`|version|`) or ``main``, ``2-0``, for latest development version -- ``PYTHON_VERSION`` Python version e.g. ``3.8``, ``3.7`` +- ``PYTHON_VERSION`` Python version e.g. ``3.8``, ``3.9`` There is also a ``constraints-no-providers`` constraint file, which contains just constraints required to install Airflow core. This allows to install and upgrade airflow separately and independently from providers. @@ -93,7 +93,7 @@ constraints always points to the "latest" released Airflow version constraints: .. code-block:: - https://raw.githubusercontent.com/apache/airflow/constraints-latest/constraints-3.7.txt + https://raw.githubusercontent.com/apache/airflow/constraints-latest/constraints-3.8.txt Fixing Constraint files at release time @@ -208,9 +208,9 @@ If you don't want to install any extra providers, initially you can use the comm AIRFLOW_VERSION=|version| PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "."
-f 1-2)" - # For example: 3.7 + # For example: 3.8 CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-no-providers-${PYTHON_VERSION}.txt" - # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.7.txt + # For example: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-no-providers-3.8.txt pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" @@ -235,7 +235,7 @@ Symbol not found: ``_Py_GetArgcArgv`` ===================================== If you see ``Symbol not found: _Py_GetArgcArgv`` while starting or importing Airflow, this may mean that you are using an incompatible version of Python. -For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.7``: ``/usr/local/opt/python@3.7/Frameworks/Python.framework/Versions/3.7``). +For a homebrew installed version of Python, this is generally caused by using Python in ``/usr/local/opt/bin`` rather than the Frameworks installation (e.g. for ``python 3.8``: ``/usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8``). The crux of the issue is that a library Airflow depends on, ``setproctitle``, uses a non-public Python API which is not available from the standard installation ``/usr/local/opt/`` (which symlinks to a path under ``/usr/local/Cellar``). @@ -244,9 +244,9 @@ An easy fix is just to ensure you use a version of Python that has a dylib of th .. code-block:: bash - # Note: these instructions are for python3.7 but can be loosely modified for other versions - brew install python@3.7 - virtualenv -p /usr/local/opt/python@3.7/Frameworks/Python.framework/Versions/3.7/bin/python3 .toy-venv + # Note: these instructions are for python3.8 but can be loosely modified for other versions + brew install python@3.8 + virtualenv -p /usr/local/opt/python@3.8/Frameworks/Python.framework/Versions/3.8/bin/python3 .toy-venv source .toy-venv/bin/activate pip install apache-airflow python diff --git a/docs/apache-airflow/installation/prerequisites.rst b/docs/apache-airflow/installation/prerequisites.rst index 3df5bb7f8ea6b..573bbb8164360 100644 --- a/docs/apache-airflow/installation/prerequisites.rst +++ b/docs/apache-airflow/installation/prerequisites.rst @@ -20,7 +20,7 @@ Prerequisites Airflow is tested with: -* Python: 3.7, 3.8, 3.9, 3.10, 3.11 +* Python: 3.8, 3.9, 3.10, 3.11 * Databases: diff --git a/docs/apache-airflow/installation/supported-versions.rst b/docs/apache-airflow/installation/supported-versions.rst index 00607489a4158..106745bf818e1 100644 --- a/docs/apache-airflow/installation/supported-versions.rst +++ b/docs/apache-airflow/installation/supported-versions.rst @@ -61,8 +61,8 @@ They are based on the official release schedule of Python and Kubernetes, nicely 2. The "oldest" supported version of Python/Kubernetes is the default one. "Default" is only meaningful in terms of "smoke tests" in CI PRs which are run using this default version and default reference image available in DockerHub. Currently the ``apache/airflow:latest`` and ``apache/airflow:2.5.2`` images - are Python 3.7 images, however, in the first MINOR/MAJOR release of Airflow released after 27.06.2023, they will - become Python 3.8 images. + are Python 3.8 images, however, in the first MINOR/MAJOR release of Airflow released after 14.19.2023, + they will become Python 3.9 images. 3. 
We support a new version of Python/Kubernetes in main after they are officially released, as soon as we make them work in our CI pipeline (which might not be immediate due to dependencies catching up with diff --git a/docs/apache-airflow/start.rst b/docs/apache-airflow/start.rst index 2ea8c6d4b9c48..9a21bd113bcf1 100644 --- a/docs/apache-airflow/start.rst +++ b/docs/apache-airflow/start.rst @@ -24,8 +24,8 @@ This quick start guide will help you bootstrap an Airflow standalone instance on .. note:: - Successful installation requires a Python 3 environment. Starting with Airflow 2.3.0, Airflow is tested with Python 3.7, 3.8, 3.9, 3.10. Note that - Python 3.11 is not yet supported. + Successful installation requires a Python 3 environment. Starting with Airflow 2.3.0, Airflow is tested with Python 3.8, 3.9, 3.10, + and 3.11, matching the support matrix in the installation prerequisites. Only ``pip`` installation is currently officially supported. @@ -65,7 +65,7 @@ constraint files to enable reproducible installation, so using ``pip`` and const PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" - # For example this would install |version| with python 3.7: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.7.txt + # For example this would install |version| with python 3.8: https://raw.githubusercontent.com/apache/airflow/constraints-|version|/constraints-3.8.txt pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" diff --git a/docs/docker-stack/README.md b/docs/docker-stack/README.md index 98776d4b29d48..ce877622cbfe9 100644 --- a/docs/docker-stack/README.md +++ b/docs/docker-stack/README.md @@ -33,9 +33,9 @@ for all the supported Python versions. You can find the following images there (Assuming Airflow version `2.7.0.dev0`): -* `apache/airflow:latest` - the latest released Airflow image with default Python version (3.7 currently) +* `apache/airflow:latest` - the latest released Airflow image with default Python version (3.8 currently) * `apache/airflow:latest-pythonX.Y` - the latest released Airflow image with specific Python version -* `apache/airflow:2.7.0.dev0` - the versioned Airflow image with default Python version (3.7 currently) +* `apache/airflow:2.7.0.dev0` - the versioned Airflow image with default Python version (3.8 currently) * `apache/airflow:2.7.0.dev0-pythonX.Y` - the versioned Airflow image with specific Python version Those are "reference" regular images. They contain the most common set of extras, dependencies and providers that are @@ -45,9 +45,9 @@ You can also use "slim" images that contain only core airflow and are about half but you need to add all the [Reference for package extras](https://airflow.apache.org/docs/apache-airflow/stable/extra-packages-ref.html) and providers that you need separately via [Building the image](https://airflow.apache.org/docs/docker-stack/build.html#build-build-image).
-* `apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.7 currently) +* `apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.8 currently) * `apache/airflow:slim-latest-pythonX.Y` - the latest released Airflow image with specific Python version -* `apache/airflow:slim-2.7.0.dev0` - the versioned Airflow image with default Python version (3.7 currently) +* `apache/airflow:slim-2.7.0.dev0` - the versioned Airflow image with default Python version (3.8 currently) * `apache/airflow:slim-2.7.0.dev0-pythonX.Y` - the versioned Airflow image with specific Python version The Apache Airflow image provided as convenience package is optimized for size, and diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst index 8d5c8224dea5d..6da9cdf145e2e 100644 --- a/docs/docker-stack/build-arg-ref.rst +++ b/docs/docker-stack/build-arg-ref.rst @@ -30,7 +30,7 @@ Those are the most common arguments that you use when you want to build a custom +------------------------------------------+------------------------------------------+---------------------------------------------+ | Build argument | Default value | Description | +==========================================+==========================================+=============================================+ -| ``PYTHON_BASE_IMAGE`` | ``python:3.7-slim-bullseye`` | Base python image. | +| ``PYTHON_BASE_IMAGE`` | ``python:3.8-slim-bullseye`` | Base python image. | +------------------------------------------+------------------------------------------+---------------------------------------------+ | ``AIRFLOW_VERSION`` | :subst-code:`|airflow-version|` | version of Airflow. | +------------------------------------------+------------------------------------------+---------------------------------------------+ diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst index 4193271bbcc1f..f6f14ead56669 100644 --- a/docs/docker-stack/build.rst +++ b/docs/docker-stack/build.rst @@ -291,13 +291,13 @@ Naming conventions for the images: +----------------+-----------------------+---------------------------------+--------------------------------------+ | Image | Python | Standard image | Slim image | +================+=======================+=================================+======================================+ -| Latest default | 3.7 | apache/airflow:latest | apache/airflow:slim-latest | +| Latest default | 3.8 | apache/airflow:latest | apache/airflow:slim-latest | +----------------+-----------------------+---------------------------------+--------------------------------------+ -| Default | 3.7 | apache/airflow:X.Y.Z | apache/airflow:slim-X.Y.Z | +| Default | 3.8 | apache/airflow:X.Y.Z | apache/airflow:slim-X.Y.Z | +----------------+-----------------------+---------------------------------+--------------------------------------+ -| Latest | 3.7,3.8,3.9,3.10,3.11 | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M | +| Latest | 3.8,3.9,3.10,3.11 | apache/airflow:latest-pythonN.M | apache/airflow:slim-latest-pythonN.M | +----------------+-----------------------+---------------------------------+--------------------------------------+ -| Specific | 3.7,3.8,3.9,3.10,3.11 | apache/airflow:X.Y.Z-pythonN.M | apache/airflow:slim-X.Y.Z-pythonN.M | +| Specific | 3.8,3.9,3.10,3.11 | apache/airflow:X.Y.Z-pythonN.M | apache/airflow:slim-X.Y.Z-pythonN.M | 
+----------------+-----------------------+---------------------------------+--------------------------------------+ * The "latest" image is always the latest released stable version available. @@ -668,7 +668,7 @@ Building from PyPI packages This is the basic way of building the custom images from sources. -The following example builds the production image in version ``3.7`` with latest PyPI-released Airflow, +The following example builds the production image in version ``3.8`` with latest PyPI-released Airflow, with default set of Airflow extras and dependencies. The latest PyPI-released Airflow constraints are used automatically. .. exampleinclude:: docker-examples/customizing/stable-airflow.sh :start-after: [START build] :end-before: [END build] -The following example builds the production image in version ``3.7`` with default extras from ``2.3.0`` Airflow +The following example builds the production image in version ``3.8`` with default extras from ``2.3.0`` Airflow package. The ``2.3.0`` constraints are used automatically. .. exampleinclude:: docker-examples/customizing/pypi-selected-version.sh @@ -711,7 +711,7 @@ have more complex dependencies to build. Building optimized images ......................... -The following example the production image in version ``3.7`` with additional airflow extras from ``2.0.2`` +The following example builds the production image in version ``3.8`` with additional airflow extras from ``2.0.2`` PyPI package but it includes additional apt dev and runtime dependencies. The dev dependencies are those that require ``build-essential`` and usually need to involve recompiling @@ -739,7 +739,7 @@ a branch or tag in your repository and use the tag or branch in the URL that you In case of GitHub builds you need to pass the constraints reference manually in case you want to use specific constraints, otherwise the default ``constraints-main`` is used. -The following example builds the production image in version ``3.7`` with default extras from the latest main version and +The following example builds the production image in version ``3.8`` with default extras from the latest main version and constraints are taken from latest version of the constraints-main branch in GitHub. .. exampleinclude:: docker-examples/customizing/github-main.sh diff --git a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh index 5e264a758f36e..29e8762cf1c24 100755 --- a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh +++ b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh @@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \ --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \ diff --git a/docs/docker-stack/docker-examples/customizing/custom-sources.sh b/docs/docker-stack/docker-examples/customizing/custom-sources.sh index 50f06545323b4..7299dce829fb3 100755 --- a/docs/docker-stack/docker-examples/customizing/custom-sources.sh +++ b/docs/docker-stack/docker-examples/customizing/custom-sources.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build .
-f Dockerfile \ --pull \ --platform 'linux/amd64' \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \ --build-arg ADDITIONAL_PYTHON_DEPS=" \ diff --git a/docs/docker-stack/docker-examples/customizing/github-main.sh b/docs/docker-stack/docker-examples/customizing/github-main.sh index c78c91c35be2e..3a7e1ec073b7e 100755 --- a/docs/docker-stack/docker-examples/customizing/github-main.sh +++ b/docs/docker-stack/docker-examples/customizing/github-main.sh @@ -30,7 +30,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/main.tar.gz#egg=apache-airflow" \ --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \ --tag "my-github-main:0.0.1" diff --git a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh index f8823fa570d61..0323a5103166d 100755 --- a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh +++ b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh @@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ diff --git a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh index 0bb943599e30c..d0c62c11b8363 100755 --- a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh +++ b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh @@ -30,7 +30,7 @@ export AIRFLOW_VERSION=2.3.4 export DOCKER_BUILDKIT=1 docker build . \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --tag "my-pypi-selected-version:0.0.1" # [END build] diff --git a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh index f811a268b6e47..012841bffed50 100755 --- a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh +++ b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh @@ -26,11 +26,11 @@ cp "${AIRFLOW_SOURCES}/Dockerfile" "${TEMP_DOCKER_DIR}" # [START download] mkdir -p docker-context-files -export AIRFLOW_VERSION="2.2.4" +export AIRFLOW_VERSION="2.5.3" rm docker-context-files/*.whl docker-context-files/*.tar.gz docker-context-files/*.txt || true -curl -Lo "docker-context-files/constraints-3.7.txt" \ - "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.7.txt" +curl -Lo "docker-context-files/constraints-3.8.txt" \ + "https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-3.8.txt" echo echo "Make sure you use the right python version here (should be same as in constraints)!" 
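The warning echoed above is worth enforcing rather than just printing. A minimal sketch, assuming Python 3.8 to match the ``constraints-3.8.txt`` file downloaded in the previous step, aborts before ``pip download`` runs with the wrong interpreter:

.. code-block:: bash

    # Fail fast if the local interpreter does not match the constraints file;
    # "python --version" prints e.g. "Python 3.8.16" for a 3.8 interpreter.
    python --version | grep -q " 3\.8\." || {
        echo "Interpreter does not match constraints-3.8.txt" >&2
        exit 1
    }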
@@ -38,7 +38,7 @@ echo python --version pip download --dest docker-context-files \ - --constraint docker-context-files/constraints-3.7.txt \ + --constraint docker-context-files/constraints-3.8.txt \ "apache-airflow[async,celery,elasticsearch,kubernetes,postgres,redis,ssh,statsd,virtualenv]==${AIRFLOW_VERSION}" # [END download] @@ -47,7 +47,7 @@ export DOCKER_BUILDKIT=1 docker build . \ --pull \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-bullseye" \ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bullseye" \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg INSTALL_MYSQL_CLIENT="false" \ @@ -56,7 +56,7 @@ docker build . \ --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \ --build-arg DOCKER_CONTEXT_FILES="docker-context-files" \ --build-arg INSTALL_PACKAGES_FROM_CONTEXT="true" \ - --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.7.txt" \ + --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.8.txt" \ --tag airflow-my-restricted-environment:0.0.1 # [END build] diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst index 2b9aa64b1bb2f..8b741da1b7fc4 100644 --- a/docs/docker-stack/entrypoint.rst +++ b/docs/docker-stack/entrypoint.rst @@ -132,7 +132,7 @@ if you specify extra arguments. For example: .. code-block:: bash - docker run -it apache/airflow:2.7.0.dev0-python3.7 bash -c "ls -la" + docker run -it apache/airflow:2.7.0.dev0-python3.8 bash -c "ls -la" total 16 drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 . drwxr-xr-x 1 root root 4096 Jun 5 18:12 .. @@ -144,7 +144,7 @@ you pass extra parameters. For example: .. code-block:: bash - > docker run -it apache/airflow:2.7.0.dev0-python3.7 python -c "print('test')" + > docker run -it apache/airflow:2.7.0.dev0-python3.8 python -c "print('test')" test If first argument equals to "airflow" - the rest of the arguments is treated as an airflow command @@ -152,13 +152,13 @@ to execute. Example: .. code-block:: bash - docker run -it apache/airflow:2.7.0.dev0-python3.7 airflow webserver + docker run -it apache/airflow:2.7.0.dev0-python3.8 airflow webserver If there are any other arguments - they are simply passed to the "airflow" command .. code-block:: bash - > docker run -it apache/airflow:2.7.0.dev0-python3.7 help + > docker run -it apache/airflow:2.7.0.dev0-python3.8 help usage: airflow [-h] GROUP_OR_COMMAND ... positional arguments: diff --git a/docs/docker-stack/index.rst b/docs/docker-stack/index.rst index e7cf2cce81534..67c914da9f00c 100644 --- a/docs/docker-stack/index.rst +++ b/docs/docker-stack/index.rst @@ -50,9 +50,9 @@ for all the supported Python versions. 
You can find the following images there (Assuming Airflow version :subst-code:`|airflow-version|`): -* :subst-code:`apache/airflow:latest` - the latest released Airflow image with default Python version (3.7 currently) +* :subst-code:`apache/airflow:latest` - the latest released Airflow image with default Python version (3.8 currently) * :subst-code:`apache/airflow:latest-pythonX.Y` - the latest released Airflow image with specific Python version -* :subst-code:`apache/airflow:|airflow-version|` - the versioned Airflow image with default Python version (3.7 currently) +* :subst-code:`apache/airflow:|airflow-version|` - the versioned Airflow image with default Python version (3.8 currently) * :subst-code:`apache/airflow:|airflow-version|-pythonX.Y` - the versioned Airflow image with specific Python version Those are "reference" regular images. They contain the most common set of extras, dependencies and providers that are @@ -62,9 +62,9 @@ You can also use "slim" images that contain only core airflow and are about half but you need to add all the :doc:`apache-airflow:extra-packages-ref` and providers that you need separately via :ref:`Building the image `. -* :subst-code:`apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.7 currently) +* :subst-code:`apache/airflow:slim-latest` - the latest released Airflow image with default Python version (3.8 currently) * :subst-code:`apache/airflow:slim-latest-pythonX.Y` - the latest released Airflow image with specific Python version -* :subst-code:`apache/airflow:slim-|airflow-version|` - the versioned Airflow image with default Python version (3.7 currently) +* :subst-code:`apache/airflow:slim-|airflow-version|` - the versioned Airflow image with default Python version (3.8 currently) * :subst-code:`apache/airflow:slim-|airflow-version|-pythonX.Y` - the versioned Airflow image with specific Python version The Apache Airflow image provided as convenience package is optimized for size, and diff --git a/images/breeze/output-commands-hash.txt b/images/breeze/output-commands-hash.txt index 79046e5eb0e78..d6961793049e7 100644 --- a/images/breeze/output-commands-hash.txt +++ b/images/breeze/output-commands-hash.txt @@ -1,7 +1,7 @@ # This file is automatically generated by pre-commit. If you have a conflict with this file # Please do not solve it but run `breeze setup regenerate-command-images`. # This command should fix the conflict and regenerate help images that you have conflict with. 
-main:5bd7ecf7e8fdd34879f24ad8ac390405 +main:b27f70a44bf5697c3852fa932120774d build-docs:3b89efaf5551b1782227cd382c019990 ci:fix-ownership:fee2c9ec9ef19686792002ae054fecdd ci:free-space:47234aa0a60b0efd84972e6e797379f8 @@ -9,56 +9,56 @@ ci:get-workflow-info:01ee34c33ad62fa5dc33e0ac8773223f ci:resource-check:1d4fe47dff9fc64ac1648ec4beb2d85c ci:selective-check:8a39978ee69d496dae2533d37a48b137 ci:2868dbcdd482663e9d6ccd00055b9cac -ci-image:build:4c7702285269907eaa7d48ab7fd242e3 -ci-image:pull:68fb55a81db9fc1615424c9bef438bd7 -ci-image:verify:3c2588eddc2bef7dd14c142a1f56a56f -ci-image:587a537fed951c401d2a3cd82ce1b3de +ci-image:build:0028d028f86dc407233cddb063440b55 +ci-image:pull:3b0789fe9ec4cf6131bbec69903334d2 +ci-image:verify:915a5d346ce6abca6d58443737df6199 +ci-image:8acb2b04df97f0171f9f4147e1478a34 cleanup:231de69d5f47ba29c883164e4575e310 compile-www-assets:c8a8c4f002f7246d0541897fc7c70313 down:99efb2055475cd40656d5498aee74ef6 exec:42bbd3c1659128b0341ae118c3482da2 -k8s:build-k8s-image:7ef9f8d1fa725b7d6dc7daad72d66825 -k8s:configure-cluster:1c64a309a58dea6baf61a5ed223f1ac5 -k8s:create-cluster:49b63399485b06ffe5424c737a1e8ab3 -k8s:delete-cluster:0741154a3988a3c8591ff9af409ddf10 -k8s:deploy-airflow:1e5038d63fe6b77f017fb47ab470a860 -k8s:k9s:e2ffb041b003faabfafe2aff3b48f023 -k8s:logs:89503ad44dc0b480146ca2978719f585 -k8s:run-complete-tests:6adc95e5f48cee11aebeac6bcd3c7e7f +k8s:build-k8s-image:a8fae58f9ec8cf0541d3f0b47cca5c5a +k8s:configure-cluster:b64fa4bd8a9e6b65f68334029e1abd3a +k8s:create-cluster:57ba532fca29ee88f09ef255ed6f15db +k8s:delete-cluster:cde553e781ead5abb38d23a7aa70415f +k8s:deploy-airflow:4c08c43a309657bed31da5f21391e3b3 +k8s:k9s:db8c9188399654cd26c6775fa455e157 +k8s:logs:3df4d8d354053bf12239f6063fef7d68 +k8s:run-complete-tests:89f5cbe325c40e41bde24fff22c5b072 k8s:setup-env:b0ea72ef1819f831b1f80e8bd4d299ce -k8s:shell:f9f4a460a0b94df5d28c7522a19f76b8 -k8s:status:f4a5a7c657825ce6ab76e7d9152d98f4 -k8s:tests:3ae800a20447049eafdc10db122a10bf -k8s:upload-k8s-image:aa691f32e5963fe7a0435ef2ca8b59ae -k8s:aa5348c18c987567bbd7800fc2aff378 -prod-image:build:4e0aac2327f8e068c0db70ecdd11dfa3 -prod-image:pull:722a843b29ca251aed1a5e001cf48db2 -prod-image:verify:4edd26d1126761777e934eae2b8d57dd -prod-image:9ff9f57f7b7ff65718e95f5468cdb615 +k8s:shell:bdbeeeeb98e57a044e22b87e64d840bf +k8s:status:24a9a337f69a68153dd7afa2b2e25018 +k8s:tests:29aeaf52b081a0bb9a2764c638a6dc42 +k8s:upload-k8s-image:815ef797f73838fa9c5daf4459f68571 +k8s:db5b8bdd821571df1284b5fb6ae744c8 +prod-image:build:a7f7e915189369309eeaaea62ce46625 +prod-image:pull:451622b1399f3b181ec2b8620b856e3f +prod-image:verify:2d952d5c1f3df371e31ddfda63616fa8 +prod-image:ae04fe3aa01789ddf767295c905add04 release-management:create-minor-branch:6a01066dce15e09fb269a8385626657c -release-management:generate-constraints:0c8e5c1b57b486724760d4afdfd571f7 +release-management:generate-constraints:876bbc97000b5146ed29d031ec2b2fa1 release-management:generate-issue-content-providers:421c1b186818a6251c16f7f3b7807292 release-management:install-provider-packages:5838b06b78e3c5c6e8380024867a1a8d release-management:prepare-airflow-package:3ac14ea6d2b09614959c0ec4fd564789 release-management:prepare-provider-documentation:13f5fa922825a922c3525b5a57b2a80c release-management:prepare-provider-packages:cf41c33c6d6121efef1f1d97333e8710 -release-management:release-prod-images:dff5d068a253dcc3ac1e65a6af150e3d +release-management:release-prod-images:9bf4731e91c435e9df17199c395a23fb release-management:start-rc-process:6aafbaceabd7b67b9a1af4c2f59abc4c 
release-management:start-release:acb384d86e02ff5fde1bf971897be17c release-management:verify-provider-packages:566c60fb1bfdc5ed7c4be590736891b2 -release-management:dbf5a361d4e573c4dcf730237b2351b0 +release-management:e30735d4b3903a168d8542fca0d92b6d setup:autocomplete:03343478bf1d0cf9c101d454cdb63b68 setup:check-all-params-in-groups:1a6a07dcef12a55bf42bcbcef0d86adc -setup:config:584023d1d225879a5d19e347fe9db1d3 +setup:config:2db1944d8fcb030d4db329a1ff28dd19 setup:regenerate-command-images:873006338375c95a3d683da214366191 setup:self-upgrade:d02f70c7a230eae3463ceec2056b63fa setup:version:123b462a421884dc2320ffc5e54b2478 -setup:df504f6ad27442c86f9afaa8f82c2eef -shell:e566b8314bc7fee8580afdbacb55cd3a -start-airflow:3bf24a94653fa9236a70a94a499b5b7c +setup:20e12e09d796ffb8c5b45daed12eac2f +shell:0b778f51e9bff94b4402b0967c9e4446 +start-airflow:ccf33a13ca55e0894d33a35cdc941a05 static-checks:9985d1db64592e29ab71b8a000ce302e -testing:docker-compose-tests:fb26a48e083b0a5c6eae2ebd052eaf72 +testing:docker-compose-tests:70167e67853cacd9ca784695d65a7846 testing:helm-tests:936cf28fd84ce4ff5113795fdae9624b -testing:integration-tests:372b0e421ebcefb1ea7a1008658134c3 -testing:tests:7ea8d4eaaa62e5ae32fe636b54d96903 -testing:fe3c5b6f3c634e29be1302580ffe686f +testing:integration-tests:35f0ac57157bf8fe227fd080cf216622 +testing:tests:e10175d901c49117cecaab2680cbc617 +testing:4f192de479d7958b441fce157b546e36 diff --git a/images/breeze/output-commands.svg b/images/breeze/output-commands.svg index c6642829144b5..382b9f150a324 100644 --- a/images/breeze/output-commands.svg +++ b/images/breeze/output-commands.svg @@ -1,4 +1,4 @@ - +
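Both generated artifacts above (``output-commands-hash.txt`` and the command SVGs) are maintained by pre-commit, and the header of the hash file already names the remedy for merge conflicts. A sketch of that workflow, assuming a working Breeze setup:

.. code-block:: bash

    # Never resolve conflicts in these generated files by hand; regenerate them
    # so the recorded hashes match the current breeze command definitions.
    breeze setup regenerate-command-images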