diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml
index da76fcbeffdaf..42dc2da0acc5d 100644
--- a/.github/workflows/build-images.yml
+++ b/.github/workflows/build-images.yml
@@ -180,6 +180,7 @@ jobs:
RUNS_ON: "${{ needs.build-info.outputs.runs-on }}"
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -258,6 +259,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
INCLUDE_NOT_READY_PROVIDERS: "true"
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 1cb19201ddb05..461be73498d82 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -282,6 +282,7 @@ jobs:
# Force more parallelism for build even on public images
PARALLELISM: 6
VERSION_SUFFIX_FOR_PYPI: "dev0"
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -1863,6 +1864,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -1898,6 +1900,58 @@ jobs:
PYTHON_VERSIONS: ${{needs.build-info.outputs.all-python-versions-list-as-string}}
DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }}
+ build-prod-images-pip:
+ strategy:
+ matrix:
+ python-version: ${{ fromJson(needs.build-info.outputs.python-versions) }}
+ timeout-minutes: 80
+ name: ${{needs.build-info.outputs.build-job-description}} PROD image pip (main) ${{matrix.python-version}}
+ runs-on: ["ubuntu-22.04"]
+ needs: [build-info, build-ci-images]
+ env:
+ DEFAULT_BRANCH: ${{ needs.build-info.outputs.default-branch }}
+ DEFAULT_CONSTRAINTS_BRANCH: ${{ needs.build-info.outputs.default-constraints-branch }}
+ RUNS_ON: "${{needs.build-info.outputs.runs-on}}"
+ BACKEND: sqlite
+ VERSION_SUFFIX_FOR_PYPI: "dev0"
+ DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "false"
+ steps:
+ - name: Cleanup repo
+ run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
+ if: >
+ needs.build-info.outputs.in-workflow-build == 'true' &&
+ needs.build-info.outputs.default-branch == 'main'
+ - uses: actions/checkout@v4
+ with:
+ ref: ${{ needs.build-info.outputs.targetCommitSha }}
+ persist-credentials: false
+ if: >
+ needs.build-info.outputs.in-workflow-build == 'true' &&
+ needs.build-info.outputs.default-branch == 'main'
+ - name: "Install Breeze"
+ uses: ./.github/actions/breeze
+ with:
+ python-version: ${{ env.REPRODUCIBLE_PYTHON_VERSION }}
+ if: >
+ needs.build-info.outputs.in-workflow-build == 'true' &&
+ needs.build-info.outputs.default-branch == 'main'
+ - name: Build PROD Image pip ${{ matrix.python-version }}:${{env.IMAGE_TAG}}
+ uses: ./.github/actions/build-prod-images
+ if: >
+ needs.build-info.outputs.in-workflow-build == 'true' &&
+ needs.build-info.outputs.default-branch == 'main'
+ with:
+ build-provider-packages: ${{ needs.build-info.outputs.default-branch == 'main' }}
+ chicken-egg-providers: ${{ needs.build-info.outputs.chicken-egg-providers }}
+ python-version: ${{ matrix.python-version }}
+ env:
+ UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgrade-to-newer-dependencies }}
+ DOCKER_CACHE: ${{ needs.build-info.outputs.cache-directive }}
+ PYTHON_VERSIONS: ${{needs.build-info.outputs.all-python-versions-list-as-string}}
+ DEBUG_RESOURCES: ${{ needs.build-info.outputs.debug-resources }}
+ IMAGE_TAG: "pip-${{ github.event.pull_request.head.sha || github.sha }}"
+
build-prod-images-bullseye:
strategy:
matrix:
@@ -1914,6 +1968,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -1970,6 +2025,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -2027,6 +2083,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -2078,6 +2135,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -2134,6 +2192,7 @@ jobs:
BACKEND: sqlite
VERSION_SUFFIX_FOR_PYPI: "dev0"
DEBUG_RESOURCES: ${{needs.build-info.outputs.debug-resources}}
+ USE_UV: "true"
steps:
- name: Cleanup repo
run: docker run -v "${GITHUB_WORKSPACE}:/workspace" -u 0:0 bash -c "rm -rf /workspace/*"
@@ -2530,6 +2589,7 @@ jobs:
RUNS_ON: "${{needs.build-info.outputs.runs-on}}"
# Force more parallelism for build even on small instances
PARALLELISM: 6
+ USE_UV: "true"
if: >
needs.build-info.outputs.in-workflow-build == 'true' &&
needs.build-info.outputs.canary-run != 'true'
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a5a8efe06aef9..bdef4440abbcd 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -452,7 +452,7 @@ repos:
name: Update extras in documentation
entry: ./scripts/ci/pre_commit/pre_commit_insert_extras.py
language: python
- files: ^setup\.py$|^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^airflow/providers/.*/provider\.yaml$
+ files: ^contributing-docs/12_airflow_dependencies_and_extras.rst$|^INSTALL$|^airflow/providers/.*/provider\.yaml$|^Dockerfile.*
pass_filenames: false
additional_dependencies: ['rich>=12.4.4', 'tomli']
- id: check-extras-order
diff --git a/Dockerfile b/Dockerfile
index 34d80b2e2cd8c..cc824b91b05b0 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -23,8 +23,9 @@
# airflow-build-image - there all airflow dependencies can be installed (and
# built - for those dependencies that require
# build essentials). Airflow is installed there with
-# --user switch so that all the dependencies are
-# installed to ${HOME}/.local
+#                                                the ${HOME}/.local virtualenv, which Python also
+#                                                treats as the --user folder when the venv is
+#                                                created with --system-site-packages
#
# main - this is the actual production image that is much
# smaller because it does not contain all the build
@@ -35,7 +36,7 @@
# much smaller.
#
# Use the same builder frontend version for everyone
-ARG AIRFLOW_EXTRAS="aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,docker,elasticsearch,ftp,google,google-auth,graphviz,grpc,hashicorp,http,ldap,microsoft-azure,mysql,odbc,openlineage,pandas,postgres,redis,sendgrid,sftp,slack,snowflake,ssh,statsd,virtualenv"
+ARG AIRFLOW_EXTRAS="aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,docker,elasticsearch,ftp,google,google-auth,graphviz,grpc,hashicorp,http,ldap,microsoft-azure,mysql,odbc,openlineage,pandas,postgres,redis,sendgrid,sftp,slack,snowflake,ssh,statsd,uv,virtualenv"
ARG ADDITIONAL_AIRFLOW_EXTRAS=""
ARG ADDITIONAL_PYTHON_DEPS=""
@@ -54,6 +55,16 @@ ARG AIRFLOW_USE_UV="false"
ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow"
ARG AIRFLOW_IMAGE_README_URL="https://raw.githubusercontent.com/apache/airflow/main/docs/docker-stack/README.md"
+# By default we install the latest Airflow from PyPI, so we do not need to copy Airflow sources
+# from the host - instead we use the Dockerfile itself and copy it to /Dockerfile in the target image,
+# because it is the only file we know exists locally. This way you can build the image from PyPI with
+# **just** the Dockerfile, without any other files from the Airflow repository.
+# However, for breeze/development builds we use the latest sources and override
+# AIRFLOW_SOURCES_FROM/TO with "." and "/opt/airflow" respectively - so that the sources of Airflow
+# (and all providers) are used to build the PROD image used in tests.
+ARG AIRFLOW_SOURCES_FROM="Dockerfile"
+ARG AIRFLOW_SOURCES_TO="/Dockerfile"
+
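+# A quick sketch of how these two args are meant to be used (flag values illustrative,
+# not taken from the CI scripts): the default PyPI build needs nothing but the Dockerfile,
+# while a breeze/development-style build overrides both args to point at the local sources:
+#
+#     # Default build: Airflow comes from PyPI, only the Dockerfile is copied.
+#     docker build . -f Dockerfile
+#
+#     # Development-style build from local sources (illustrative override):
+#     docker build . -f Dockerfile \
+#         --build-arg AIRFLOW_SOURCES_FROM="." \
+#         --build-arg AIRFLOW_SOURCES_TO="/opt/airflow"
+#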
# By default latest released version of airflow is installed (when empty) but this value can be overridden
# and we can install version according to specification (For example ==2.0.2 or <3.0.0).
ARG AIRFLOW_VERSION_SPECIFICATION=""
@@ -419,10 +430,7 @@ COPY <<"EOF" /install_packaging_tools.sh
common::get_colors
common::get_packaging_tool
-common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::show_packaging_tool_version_and_location
-
common::install_packaging_tools
EOF
@@ -457,23 +465,23 @@ function install_airflow_dependencies_from_branch_tip() {
set +x
common::install_packaging_tools
set -x
+ echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}"
# Uninstall airflow and providers to keep only the dependencies. In the future when
# planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this
# flag and skip the remove step.
- ${PACKAGING_TOOL_CMD} freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} 2>/dev/null || true
+ pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true
set +x
echo
echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}"
echo
set +x
- ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow || true
+ ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow
set -x
}
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
@@ -513,10 +521,15 @@ function common::get_packaging_tool() {
echo
export PACKAGING_TOOL="uv"
export PACKAGING_TOOL_CMD="uv pip"
- export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
- export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
- export RESOLUTION_HIGHEST_FLAG="--resolution highest"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--resolution lowest-direct"
+ if [[ -z ${VIRTUAL_ENV=} ]]; then
+ export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
+ export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
+ else
+ export EXTRA_INSTALL_FLAGS=""
+ export EXTRA_UNINSTALL_FLAGS=""
+ fi
+ export UPGRADE_EAGERLY="--upgrade --resolution highest"
+ export UPGRADE_IF_NEEDED="--upgrade --resolution lowest-direct"
else
echo
echo "${COLOR_BLUE}Using 'pip' to install Airflow${COLOR_RESET}"
@@ -525,8 +538,8 @@ function common::get_packaging_tool() {
export PACKAGING_TOOL_CMD="pip"
export EXTRA_INSTALL_FLAGS="--root-user-action ignore"
export EXTRA_UNINSTALL_FLAGS="--yes"
- export RESOLUTION_HIGHEST_FLAG="--upgrade-strategy eager"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--upgrade --upgrade-strategy only-if-needed"
+ export UPGRADE_EAGERLY="--upgrade --upgrade-strategy eager"
+ export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed"
fi
}
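
To make the two branches above concrete, here is a minimal sketch of how the exported
variables compose into a single install command (the PYTHON_BIN value and the package
name are illustrative):

    # uv outside a virtualenv (AIRFLOW_USE_UV=true, VIRTUAL_ENV unset):
    #   uv pip install --python /usr/local/bin/python --upgrade --resolution highest <spec>
    # uv inside a virtualenv (VIRTUAL_ENV set): the --python flags are dropped.
    # pip:
    #   pip install --root-user-action ignore --upgrade --upgrade-strategy eager <spec>
    ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} some-package
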
@@ -538,14 +551,6 @@ function common::get_airflow_version_specification() {
fi
}
-function common::override_pip_version_if_needed() {
- if [[ -n ${AIRFLOW_VERSION} ]]; then
- if [[ ${AIRFLOW_VERSION} =~ ^2\.0.* || ${AIRFLOW_VERSION} =~ ^1\.* ]]; then
- export AIRFLOW_PIP_VERSION=24.0
- fi
- fi
-}
-
function common::get_constraints_location() {
if [[ -f "${HOME}/constraints.txt" ]]; then
# constraints are already downloaded, do not calculate/override again
@@ -592,6 +597,15 @@ function common::show_packaging_tool_version_and_location() {
}
function common::install_packaging_tools() {
+ if [[ "${VIRTUAL_ENV=}" != "" ]]; then
+ echo
+ echo "${COLOR_BLUE}Checking packaging tools in venv: ${VIRTUAL_ENV}${COLOR_RESET}"
+ echo
+ else
+ echo
+ echo "${COLOR_BLUE}Checking packaging tools for system Python installation: $(which python)${COLOR_RESET}"
+ echo
+ fi
if [[ ! ${AIRFLOW_PIP_VERSION} =~ [0-9.]* ]]; then
echo
echo "${COLOR_BLUE}Installing pip version from spec ${AIRFLOW_PIP_VERSION}${COLOR_RESET}"
@@ -684,6 +698,7 @@ COPY <<"EOF" /install_from_docker_context_files.sh
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
+
function install_airflow_and_providers_from_docker_context_files(){
if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
@@ -699,46 +714,36 @@ function install_airflow_and_providers_from_docker_context_files(){
exit 1
fi
- # shellcheck disable=SC2206
- local packaging_flags=(
- # Don't quote this -- if it is empty we don't want it to create an
- # empty array element
- --find-links="file:///docker-context-files"
- )
+ # This is needed to get package names for local context packages
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} --constraint ${HOME}/constraints.txt packaging
- # Find Apache Airflow packages in docker-context files
- local reinstalling_apache_airflow_package
- reinstalling_apache_airflow_package=$(ls \
- /docker-context-files/apache?airflow?[0-9]*.{whl,tar.gz} 2>/dev/null || true)
- # Add extras when installing airflow
- if [[ -n "${reinstalling_apache_airflow_package}" ]]; then
- # When a provider depends on a dev version of Airflow, we need to
- # specify `apache-airflow==$VER`, otherwise pip will look for it on
- # pip, and fail to find it
-
- # This will work as long as the wheel file is correctly named, which it
- # will be if it was build by wheel tooling
- local ver
- ver=$(basename "$reinstalling_apache_airflow_package" | cut -d "-" -f 2)
- reinstalling_apache_airflow_package="apache-airflow[${AIRFLOW_EXTRAS}]==$ver"
+ if [[ -n ${AIRFLOW_EXTRAS=} ]]; then
+ AIRFLOW_EXTRAS_TO_INSTALL="[${AIRFLOW_EXTRAS}]"
+ else
+ AIRFLOW_EXTRAS_TO_INSTALL=""
fi
- if [[ -z "${reinstalling_apache_airflow_package}" && ${AIRFLOW_VERSION=} != "" ]]; then
+ # Find Apache Airflow package in docker-context files
+ readarray -t install_airflow_package < <(EXTRAS="${AIRFLOW_EXTRAS_TO_INSTALL}" \
+ python /scripts/docker/get_package_specs.py /docker-context-files/apache?airflow?[0-9]*.{whl,tar.gz} 2>/dev/null || true)
+ echo
+ echo "${COLOR_BLUE}Found airflow packages in docker-context-files folder: ${install_airflow_package[*]}${COLOR_RESET}"
+ echo
+
+ if [[ -z "${install_airflow_package[*]}" && ${AIRFLOW_VERSION=} != "" ]]; then
# When we install only provider packages from docker-context files, we need to still
# install airflow from PyPI when AIRFLOW_VERSION is set. This handles the case where
# pre-release dockerhub image of airflow is built, but we want to install some providers from
# docker-context files
- reinstalling_apache_airflow_package="apache-airflow[${AIRFLOW_EXTRAS}]==${AIRFLOW_VERSION}"
- fi
- # Find Apache Airflow packages in docker-context files
- local reinstalling_apache_airflow_providers_packages
- reinstalling_apache_airflow_providers_packages=$(ls \
- /docker-context-files/apache?airflow?providers*.{whl,tar.gz} 2>/dev/null || true)
- if [[ -z "${reinstalling_apache_airflow_package}" && \
- -z "${reinstalling_apache_airflow_providers_packages}" ]]; then
- return
+ install_airflow_package=("apache-airflow[${AIRFLOW_EXTRAS}]==${AIRFLOW_VERSION}")
fi
+ # Find Provider packages in docker-context files
+ readarray -t installing_providers_packages < <(python /scripts/docker/get_package_specs.py /docker-context-files/apache?airflow?providers*.{whl,tar.gz} 2>/dev/null || true)
+ echo
+ echo "${COLOR_BLUE}Found provider packages in docker-context-files folder: ${installing_providers_packages[*]}${COLOR_RESET}"
+ echo
+
if [[ ${USE_CONSTRAINTS_FOR_CONTEXT_PACKAGES=} == "true" ]]; then
local python_version
python_version=$(python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')
@@ -750,19 +755,19 @@ function install_airflow_and_providers_from_docker_context_files(){
echo
# force reinstall all airflow + provider packages with constraints found in
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "${packaging_flags[@]}" --upgrade \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade \
${ADDITIONAL_PIP_INSTALL_FLAGS} --constraint "${local_constraints_file}" \
- ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages}
+ "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
set +x
else
echo
echo "${COLOR_BLUE}Installing docker-context-files packages with constraints from GitHub${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "${packaging_flags[@]}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
--constraint "${HOME}/constraints.txt" \
- ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages}
+ "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
set +x
fi
else
@@ -770,9 +775,9 @@ function install_airflow_and_providers_from_docker_context_files(){
echo "${COLOR_BLUE}Installing docker-context-files packages without constraints${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "${packaging_flags[@]}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages}
+ "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
set +x
fi
common::install_packaging_tools
@@ -799,7 +804,6 @@ function install_all_other_packages_from_docker_context_files() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
@@ -808,6 +812,41 @@ install_airflow_and_providers_from_docker_context_files
install_all_other_packages_from_docker_context_files
EOF
+# The content below is automatically copied from scripts/docker/get_package_specs.py
+COPY <<"EOF" /get_package_specs.py
+#!/usr/bin/env python
+from __future__ import annotations
+
+import os
+import sys
+from pathlib import Path
+
+from packaging.utils import (
+ InvalidSdistFilename,
+ InvalidWheelFilename,
+ parse_sdist_filename,
+ parse_wheel_filename,
+)
+
+
+def print_package_specs(extras: str = "") -> None:
+ for package_path in sys.argv[1:]:
+ try:
+ package, _, _, _ = parse_wheel_filename(Path(package_path).name)
+ except InvalidWheelFilename:
+ try:
+ package, _ = parse_sdist_filename(Path(package_path).name)
+ except InvalidSdistFilename:
+ print(f"Could not parse package name from {package_path}", file=sys.stderr)
+ continue
+ print(f"{package}{extras} @ file://{package_path}")
+
+
+if __name__ == "__main__":
+ print_package_specs(extras=os.environ.get("EXTRAS", ""))
+EOF
+
+
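A usage sketch for the helper above (the wheel filename is invented for illustration):
given a wheel in docker-context-files and the EXTRAS variable, the script prints a
direct-reference requirement that pip/uv can install:

    $ EXTRAS="[google]" python /get_package_specs.py \
        /docker-context-files/apache_airflow-2.9.0.dev0-py3-none-any.whl
    apache-airflow[google] @ file:///docker-context-files/apache_airflow-2.9.0.dev0-py3-none-any.whl
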
# The content below is automatically copied from scripts/docker/install_airflow.sh
COPY <<"EOF" /install_airflow.sh
#!/usr/bin/env bash
@@ -815,12 +854,21 @@ COPY <<"EOF" /install_airflow.sh
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
function install_airflow() {
- # Coherence check for editable installation mode.
- if [[ ${AIRFLOW_INSTALLATION_METHOD} != "." && \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} == "--editable" ]]; then
+ # Determine the installation_command_flags based on the AIRFLOW_INSTALLATION_METHOD
+ local installation_command_flags
+ if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then
+ # When installing from sources - we always use `--editable` mode
+ installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then
+ installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then
+ installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}] @ ${AIRFLOW_VERSION_SPECIFICATION/apache-airflow @//}"
+ else
+ echo
+ echo "${COLOR_RED}The '${INSTALLATION_METHOD}' installation method is not supported${COLOR_RESET}"
+ echo
+ echo "${COLOR_YELLOW}Supported methods are ('.', 'apache-airflow', 'apache-airflow @ URL')${COLOR_RESET}"
echo
- echo "${COLOR_RED}ERROR! You can only use --editable flag when installing airflow from sources!${COLOR_RESET}"
- echo "${COLOR_RED} Current installation method is '${AIRFLOW_INSTALLATION_METHOD} and should be '.'${COLOR_RESET}"
exit 1
fi
# Remove mysql from extras if client is not going to be installed
@@ -841,14 +889,10 @@ function install_airflow() {
${PACKAGING_TOOL_CMD} freeze | grep apache-airflow | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} 2>/dev/null || true
set +x
echo
- echo "${COLOR_BLUE}Installing all packages with eager upgrade with ${AIRFLOW_INSTALL_EDITABLE_FLAG} mode${COLOR_RESET}"
+ echo "${COLOR_BLUE}Installing all packages in eager upgrade mode. Installation method: ${AIRFLOW_INSTALLATION_METHOD}${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_HIGHEST_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
- ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
set +x
common::install_packaging_tools
echo
@@ -857,14 +901,11 @@ function install_airflow() {
pip check
else
echo
- echo "${COLOR_BLUE}Installing all packages with constraints${COLOR_RESET}"
+ echo "${COLOR_BLUE}Installing all packages with constraints. Installation method: ${AIRFLOW_INSTALLATION_METHOD}${COLOR_RESET}"
echo
set -x
# Install all packages with constraints
- if ! ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
- --constraint "${HOME}/constraints.txt"; then
+ if ! ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags} --constraint "${HOME}/constraints.txt"; then
set +x
echo
echo "${COLOR_YELLOW}Likely pyproject.toml has new dependencies conflicting with constraints.${COLOR_RESET}"
@@ -872,10 +913,7 @@ function install_airflow() {
echo "${COLOR_BLUE}Falling back to no-constraints, lowest-direct resolution installation.${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_LOWEST_DIRECT_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_IF_NEEDED} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags}
fi
set +x
common::install_packaging_tools
@@ -890,7 +928,6 @@ function install_airflow() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
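
As a summary of the branching in install_airflow above, each supported installation
method maps to install flags roughly as follows (extras and version values illustrative):

    # AIRFLOW_INSTALLATION_METHOD="."              -> --editable .[google,amazon]
    # AIRFLOW_INSTALLATION_METHOD="apache-airflow" -> apache-airflow[google,amazon]==2.9.0
    # AIRFLOW_INSTALLATION_METHOD="apache-airflow @ <URL>"
    #                                              -> apache-airflow[google,amazon] @ <URL>
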
@@ -913,7 +950,7 @@ function install_additional_dependencies() {
echo "${COLOR_BLUE}Installing additional dependencies while upgrading to newer dependencies${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_HIGHEST_FLAG} \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
${ADDITIONAL_PYTHON_DEPS} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
set +x
@@ -927,7 +964,7 @@ function install_additional_dependencies() {
echo "${COLOR_BLUE}Installing additional dependencies upgrading only if needed${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "${RESOLUTION_LOWEST_DIRECT_FLAG}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_IF_NEEDED} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
${ADDITIONAL_PYTHON_DEPS}
set +x
@@ -942,13 +979,53 @@ function install_additional_dependencies() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
install_additional_dependencies
EOF
+# The content below is automatically copied from scripts/docker/create_prod_venv.sh
+COPY <<"EOF" /create_prod_venv.sh
+#!/usr/bin/env bash
+. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
+
+function create_prod_venv() {
+ echo
+ echo "${COLOR_BLUE}Removing ${HOME}/.local and re-creating it as virtual environment.${COLOR_RESET}"
+ rm -rf ~/.local
+ python -m venv ~/.local
+ echo "${COLOR_BLUE}The ${HOME}/.local virtualenv created.${COLOR_RESET}"
+}
+
+common::get_colors
+common::get_packaging_tool
+common::show_packaging_tool_version_and_location
+create_prod_venv
+common::install_packaging_tools
+EOF
+
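+# Note that the venv created above is never activated via a shell script; the image
+# instead relies on the standard equivalence that exporting VIRTUAL_ENV and putting the
+# venv's bin directory first on PATH activates it. A minimal sketch of what the ENV
+# lines later in this Dockerfile accomplish:
+#
+#     export VIRTUAL_ENV="${AIRFLOW_USER_HOME_DIR}/.local"
+#     export PATH="${AIRFLOW_USER_HOME_DIR}/.local/bin:${PATH}"
+#     # python/pip now resolve inside the venv, and common::get_packaging_tool
+#     # can drop the explicit --python flags for uv.
+#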
+
# The content below is automatically copied from scripts/docker/entrypoint_prod.sh
COPY <<"EOF" /entrypoint_prod.sh
@@ -1330,9 +1407,6 @@ ARG INSTALL_MYSQL_CLIENT="true"
ARG INSTALL_MYSQL_CLIENT_TYPE="mariadb"
ARG INSTALL_MSSQL_CLIENT="true"
ARG INSTALL_POSTGRES_CLIENT="true"
-ARG AIRFLOW_PIP_VERSION
-ARG AIRFLOW_UV_VERSION
-ARG AIRFLOW_USE_UV
ENV INSTALL_MYSQL_CLIENT=${INSTALL_MYSQL_CLIENT} \
INSTALL_MYSQL_CLIENT_TYPE=${INSTALL_MYSQL_CLIENT_TYPE} \
@@ -1353,9 +1427,6 @@ ENV PATH=${PATH}:/opt/mssql-tools/bin
# By default we do not install from docker context files but if we decide to install from docker context
# files, we should override those variables to "docker-context-files"
ARG DOCKER_CONTEXT_FILES="Dockerfile"
-
-COPY ${DOCKER_CONTEXT_FILES} /docker-context-files
-
ARG AIRFLOW_HOME
ARG AIRFLOW_USER_HOME_DIR
ARG AIRFLOW_UID
@@ -1364,6 +1435,8 @@ RUN adduser --gecos "First Last,RoomNumber,WorkPhone,HomePhone" --disabled-passw
--quiet "airflow" --uid "${AIRFLOW_UID}" --gid "0" --home "${AIRFLOW_USER_HOME_DIR}" && \
mkdir -p ${AIRFLOW_HOME} && chown -R "airflow:0" "${AIRFLOW_USER_HOME_DIR}" ${AIRFLOW_HOME}
+COPY --chown=${AIRFLOW_UID}:0 ${DOCKER_CONTEXT_FILES} /docker-context-files
+
USER airflow
ARG AIRFLOW_REPO=apache/airflow
@@ -1395,11 +1468,8 @@ ARG AIRFLOW_VERSION_SPECIFICATION
ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow"
# By default we do not upgrade to latest dependencies
ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
-# By default we install latest airflow from PyPI so we do not need to copy sources of Airflow
-# but in case of breeze/CI builds we use latest sources and we override those
-# those SOURCES_FROM/TO with "." and "/opt/airflow" respectively
-ARG AIRFLOW_SOURCES_FROM="Dockerfile"
-ARG AIRFLOW_SOURCES_TO="/Dockerfile"
+ARG AIRFLOW_SOURCES_FROM
+ARG AIRFLOW_SOURCES_TO
RUN if [[ -f /docker-context-files/pip.conf ]]; then \
@@ -1413,6 +1483,10 @@ RUN if [[ -f /docker-context-files/pip.conf ]]; then \
# Additional PIP flags passed to all pip install commands except reinstalling pip itself
ARG ADDITIONAL_PIP_INSTALL_FLAGS=""
+ARG AIRFLOW_PIP_VERSION
+ARG AIRFLOW_UV_VERSION
+ARG AIRFLOW_USE_UV
+
ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \
AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \
AIRFLOW_USE_UV=${AIRFLOW_USE_UV} \
@@ -1430,21 +1504,19 @@ ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \
AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE} \
AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION} \
DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH} \
- PATH=${PATH}:${AIRFLOW_USER_HOME_DIR}/.local/bin \
+ PATH=${AIRFLOW_USER_HOME_DIR}/.local/bin:${PATH} \
PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} \
ADDITIONAL_PIP_INSTALL_FLAGS=${ADDITIONAL_PIP_INSTALL_FLAGS} \
AIRFLOW_USER_HOME_DIR=${AIRFLOW_USER_HOME_DIR} \
AIRFLOW_HOME=${AIRFLOW_HOME} \
AIRFLOW_UID=${AIRFLOW_UID} \
- AIRFLOW_INSTALL_EDITABLE_FLAG="" \
- UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES} \
- # By default PIP installs everything to ~/.local
- PIP_USER="true"
+ UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}
+
# Copy all scripts required for installation - changing any of those should lead to
# rebuilding from here
COPY --from=scripts common.sh install_packaging_tools.sh \
- install_airflow_dependencies_from_branch_tip.sh /scripts/docker/
+ install_airflow_dependencies_from_branch_tip.sh create_prod_venv.sh /scripts/docker/
# We can set this value to true in case we want to install .whl/.tar.gz packages placed in the
# docker-context-files folder. This can be done for both additional packages you want to install
@@ -1463,13 +1535,19 @@ ARG USE_CONSTRAINTS_FOR_CONTEXT_PACKAGES="false"
ARG AIRFLOW_CI_BUILD_EPOCH="10"
ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH}
+
# In case of Production build image segment we want to pre-install main version of airflow
# dependencies from GitHub so that we do not have to always reinstall it from the scratch.
# The Airflow and providers are uninstalled, only dependencies remain
# the cache is only used when "upgrade to newer dependencies" is not set to automatically
# account for removed dependencies (we do not install them in the first place) and in case
# INSTALL_PACKAGES_FROM_CONTEXT is not set (because then caching it from main makes no sense).
+
+# By default PIP installs everything to ~/.local, which is also treated as a virtualenv
+ENV VIRTUAL_ENV="${AIRFLOW_USER_HOME_DIR}/.local"
+
RUN bash /scripts/docker/install_packaging_tools.sh; \
+ bash /scripts/docker/create_prod_venv.sh; \
if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \
${INSTALL_PACKAGES_FROM_CONTEXT} == "false" && \
${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \
@@ -1492,7 +1570,7 @@ ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} \
WORKDIR ${AIRFLOW_HOME}
COPY --from=scripts install_from_docker_context_files.sh install_airflow.sh \
- install_additional_dependencies.sh /scripts/docker/
+ install_additional_dependencies.sh create_prod_venv.sh get_package_specs.py /scripts/docker/
# Useful for creating a cache id based on the underlying architecture, preventing the use of cached python packages from
# an incorrect architecture.
@@ -1502,7 +1580,7 @@ ARG PIP_CACHE_EPOCH="9"
# hadolint ignore=SC2086, SC2010, DL3042
RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \
- if [[ ${INSTALL_PACKAGES_FROM_CONTEXT} == "true" ]]; then \
+ if [[ ${INSTALL_PACKAGES_FROM_CONTEXT} == "true" ]]; then \
bash /scripts/docker/install_from_docker_context_files.sh; \
fi; \
if ! airflow version 2>/dev/null >/dev/null; then \
@@ -1514,8 +1592,8 @@ RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$P
find "${AIRFLOW_USER_HOME_DIR}/.local/" -name '*.pyc' -print0 | xargs -0 rm -f || true ; \
find "${AIRFLOW_USER_HOME_DIR}/.local/" -type d -name '__pycache__' -print0 | xargs -0 rm -rf || true ; \
# make sure that all directories and files in .local are also group accessible
- find "${AIRFLOW_USER_HOME_DIR}/.local" -executable -print0 | xargs --null chmod g+x; \
- find "${AIRFLOW_USER_HOME_DIR}/.local" -print0 | xargs --null chmod g+rw
+ find "${AIRFLOW_USER_HOME_DIR}/.local" -executable ! -type l -print0 | xargs --null chmod g+x; \
+ find "${AIRFLOW_USER_HOME_DIR}/.local" ! -type l -print0 | xargs --null chmod g+rw
# In case there is a requirements.txt file in "docker-context-files" it will be installed
# during the build additionally to whatever has been installed so far. It is recommended that
@@ -1523,7 +1601,7 @@ RUN --mount=type=cache,id=$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$P
# hadolint ignore=DL3042
RUN --mount=type=cache,id=additional-requirements-$PYTHON_BASE_IMAGE-$AIRFLOW_PIP_VERSION-$TARGETARCH-$PIP_CACHE_EPOCH,target=/tmp/.cache/pip,uid=${AIRFLOW_UID} \
if [[ -f /docker-context-files/requirements.txt ]]; then \
- pip install --user -r /docker-context-files/requirements.txt; \
+ pip install -r /docker-context-files/requirements.txt; \
fi
##############################################################################################
@@ -1545,9 +1623,6 @@ LABEL org.apache.airflow.distro="debian" \
org.apache.airflow.uid="${AIRFLOW_UID}"
ARG PYTHON_BASE_IMAGE
-ARG AIRFLOW_PIP_VERSION
-ARG AIRFLOW_UV_VERSION
-ARG AIRFLOW_USE_UV
ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \
# Make sure noninteractive debian install is used and language variables set
@@ -1588,6 +1663,7 @@ ARG AIRFLOW_HOME
# By default PIP installs everything to ~/.local
ENV PATH="${AIRFLOW_USER_HOME_DIR}/.local/bin:${PATH}" \
+ VIRTUAL_ENV="${AIRFLOW_USER_HOME_DIR}/.local" \
AIRFLOW_UID=${AIRFLOW_UID} \
AIRFLOW_USER_HOME_DIR=${AIRFLOW_USER_HOME_DIR} \
AIRFLOW_HOME=${AIRFLOW_HOME}
@@ -1613,20 +1689,24 @@ RUN bash /scripts/docker/install_mysql.sh prod \
&& mkdir -pv "${AIRFLOW_HOME}/logs" \
&& chown -R airflow:0 "${AIRFLOW_USER_HOME_DIR}" "${AIRFLOW_HOME}" \
&& chmod -R g+rw "${AIRFLOW_USER_HOME_DIR}" "${AIRFLOW_HOME}" \
- && find "${AIRFLOW_HOME}" -executable -print0 | xargs --null chmod g+x \
- && find "${AIRFLOW_USER_HOME_DIR}" -executable -print0 | xargs --null chmod g+x
+ && find "${AIRFLOW_HOME}" -executable ! -type l -print0 | xargs --null chmod g+x \
+ && find "${AIRFLOW_USER_HOME_DIR}" -executable ! -type l -print0 | xargs --null chmod g+x
+
+ARG AIRFLOW_SOURCES_FROM
+ARG AIRFLOW_SOURCES_TO
COPY --from=airflow-build-image --chown=airflow:0 \
"${AIRFLOW_USER_HOME_DIR}/.local" "${AIRFLOW_USER_HOME_DIR}/.local"
+COPY --from=airflow-build-image --chown=airflow:0 \
+ "${AIRFLOW_USER_HOME_DIR}/constraints.txt" "${AIRFLOW_USER_HOME_DIR}/constraints.txt"
+# In case of editable build also copy airflow sources so that they are available in the main image
+# For regular image (non-editable) this will be just Dockerfile copied to /Dockerfile
+COPY --from=airflow-build-image --chown=airflow:0 "${AIRFLOW_SOURCES_TO}" "${AIRFLOW_SOURCES_TO}"
+
COPY --from=scripts entrypoint_prod.sh /entrypoint
COPY --from=scripts clean-logs.sh /clean-logs
COPY --from=scripts airflow-scheduler-autorestart.sh /airflow-scheduler-autorestart
-
-ARG AIRFLOW_PIP_VERSION
-ARG AIRFLOW_UV_VERSION
-ARG AIRFLOW_USE_UV
-
# Make /etc/passwd root-group-writeable so that user can be dynamically added by OpenShift
# See https://github.com/apache/airflow/issues/9248
# Set default groups for airflow and root user
@@ -1638,12 +1718,13 @@ RUN chmod a+rx /entrypoint /clean-logs \
# make sure that the venv is activated for all users
# including plain sudo, sudo with --interactive flag
-RUN sed --in-place=.bak "s/secure_path=\"/secure_path=\"\/.venv\/bin:/" /etc/sudoers
+RUN sed --in-place=.bak "s/secure_path=\"/secure_path=\"$(echo -n ${AIRFLOW_USER_HOME_DIR} | \
+ sed 's/\//\\\//g')\/.local\/bin:/" /etc/sudoers
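
Assuming AIRFLOW_USER_HOME_DIR=/home/airflow and the stock Debian sudoers entry, the sed
above rewrites secure_path roughly like this (shown for illustration only):

    # before:
    #   Defaults secure_path="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
    # after:
    #   Defaults secure_path="/home/airflow/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
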
ARG AIRFLOW_VERSION
-
-COPY --from=scripts install_packaging_tools.sh /scripts/docker/
-RUN bash /scripts/docker/install_packaging_tools.sh
+ARG AIRFLOW_PIP_VERSION
+ARG AIRFLOW_UV_VERSION
+ARG AIRFLOW_USE_UV
# See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
# to learn more about the way how signals are handled by the image
@@ -1652,7 +1733,6 @@ ENV DUMB_INIT_SETSID="1" \
PS1="(airflow)" \
AIRFLOW_VERSION=${AIRFLOW_VERSION} \
AIRFLOW__CORE__LOAD_EXAMPLES="false" \
- PIP_USER="true" \
PATH="/root/bin:${PATH}" \
AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \
AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \
@@ -1698,5 +1778,6 @@ LABEL org.apache.airflow.distro="debian" \
org.opencontainers.image.ref.name="airflow" \
org.opencontainers.image.title="Production Airflow Image" \
org.opencontainers.image.description="Reference, production-ready Apache Airflow image"
+
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"]
CMD []
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 233dd86efd335..cc6e3786dbedb 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -377,10 +377,7 @@ COPY <<"EOF" /install_packaging_tools.sh
common::get_colors
common::get_packaging_tool
-common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::show_packaging_tool_version_and_location
-
common::install_packaging_tools
EOF
@@ -415,23 +412,23 @@ function install_airflow_dependencies_from_branch_tip() {
set +x
common::install_packaging_tools
set -x
+ echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}"
# Uninstall airflow and providers to keep only the dependencies. In the future when
# planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this
# flag and skip the remove step.
- ${PACKAGING_TOOL_CMD} freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} 2>/dev/null || true
+ pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true
set +x
echo
echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}"
echo
set +x
- ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow || true
+ ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow
set -x
}
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
@@ -471,10 +468,15 @@ function common::get_packaging_tool() {
echo
export PACKAGING_TOOL="uv"
export PACKAGING_TOOL_CMD="uv pip"
- export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
- export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
- export RESOLUTION_HIGHEST_FLAG="--resolution highest"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--resolution lowest-direct"
+ if [[ -z ${VIRTUAL_ENV=} ]]; then
+ export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
+ export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
+ else
+ export EXTRA_INSTALL_FLAGS=""
+ export EXTRA_UNINSTALL_FLAGS=""
+ fi
+ export UPGRADE_EAGERLY="--upgrade --resolution highest"
+ export UPGRADE_IF_NEEDED="--upgrade --resolution lowest-direct"
else
echo
echo "${COLOR_BLUE}Using 'pip' to install Airflow${COLOR_RESET}"
@@ -483,8 +485,8 @@ function common::get_packaging_tool() {
export PACKAGING_TOOL_CMD="pip"
export EXTRA_INSTALL_FLAGS="--root-user-action ignore"
export EXTRA_UNINSTALL_FLAGS="--yes"
- export RESOLUTION_HIGHEST_FLAG="--upgrade-strategy eager"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--upgrade --upgrade-strategy only-if-needed"
+ export UPGRADE_EAGERLY="--upgrade --upgrade-strategy eager"
+ export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed"
fi
}
@@ -496,14 +498,6 @@ function common::get_airflow_version_specification() {
fi
}
-function common::override_pip_version_if_needed() {
- if [[ -n ${AIRFLOW_VERSION} ]]; then
- if [[ ${AIRFLOW_VERSION} =~ ^2\.0.* || ${AIRFLOW_VERSION} =~ ^1\.* ]]; then
- export AIRFLOW_PIP_VERSION=24.0
- fi
- fi
-}
-
function common::get_constraints_location() {
if [[ -f "${HOME}/constraints.txt" ]]; then
# constraints are already downloaded, do not calculate/override again
@@ -550,6 +544,15 @@ function common::show_packaging_tool_version_and_location() {
}
function common::install_packaging_tools() {
+ if [[ "${VIRTUAL_ENV=}" != "" ]]; then
+ echo
+ echo "${COLOR_BLUE}Checking packaging tools in venv: ${VIRTUAL_ENV}${COLOR_RESET}"
+ echo
+ else
+ echo
+ echo "${COLOR_BLUE}Checking packaging tools for system Python installation: $(which python)${COLOR_RESET}"
+ echo
+ fi
if [[ ! ${AIRFLOW_PIP_VERSION} =~ [0-9.]* ]]; then
echo
echo "${COLOR_BLUE}Installing pip version from spec ${AIRFLOW_PIP_VERSION}${COLOR_RESET}"
@@ -655,12 +658,21 @@ COPY <<"EOF" /install_airflow.sh
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
function install_airflow() {
- # Coherence check for editable installation mode.
- if [[ ${AIRFLOW_INSTALLATION_METHOD} != "." && \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} == "--editable" ]]; then
+ # Determine the installation_command_flags based on the AIRFLOW_INSTALLATION_METHOD
+ local installation_command_flags
+ if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then
+ # When installing from sources - we always use `--editable` mode
+ installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then
+ installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then
+ installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}] @ ${AIRFLOW_VERSION_SPECIFICATION/apache-airflow @//}"
+ else
+ echo
+ echo "${COLOR_RED}The '${INSTALLATION_METHOD}' installation method is not supported${COLOR_RESET}"
+ echo
+ echo "${COLOR_YELLOW}Supported methods are ('.', 'apache-airflow', 'apache-airflow @ URL')${COLOR_RESET}"
echo
- echo "${COLOR_RED}ERROR! You can only use --editable flag when installing airflow from sources!${COLOR_RESET}"
- echo "${COLOR_RED} Current installation method is '${AIRFLOW_INSTALLATION_METHOD} and should be '.'${COLOR_RESET}"
exit 1
fi
# Remove mysql from extras if client is not going to be installed
@@ -681,14 +693,10 @@ function install_airflow() {
${PACKAGING_TOOL_CMD} freeze | grep apache-airflow | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} 2>/dev/null || true
set +x
echo
- echo "${COLOR_BLUE}Installing all packages with eager upgrade with ${AIRFLOW_INSTALL_EDITABLE_FLAG} mode${COLOR_RESET}"
+ echo "${COLOR_BLUE}Installing all packages in eager upgrade mode. Installation method: ${AIRFLOW_INSTALLATION_METHOD}${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_HIGHEST_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
- ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
set +x
common::install_packaging_tools
echo
@@ -697,14 +705,11 @@ function install_airflow() {
pip check
else
echo
- echo "${COLOR_BLUE}Installing all packages with constraints${COLOR_RESET}"
+ echo "${COLOR_BLUE}Installing all packages with constraints. Installation method: ${AIRFLOW_INSTALLATION_METHOD}${COLOR_RESET}"
echo
set -x
# Install all packages with constraints
- if ! ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
- --constraint "${HOME}/constraints.txt"; then
+ if ! ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags} --constraint "${HOME}/constraints.txt"; then
set +x
echo
echo "${COLOR_YELLOW}Likely pyproject.toml has new dependencies conflicting with constraints.${COLOR_RESET}"
@@ -712,10 +717,7 @@ function install_airflow() {
echo "${COLOR_BLUE}Falling back to no-constraints, lowest-direct resolution installation.${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_LOWEST_DIRECT_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_IF_NEEDED} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags}
fi
set +x
common::install_packaging_tools
@@ -730,7 +732,6 @@ function install_airflow() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
@@ -753,7 +754,7 @@ function install_additional_dependencies() {
echo "${COLOR_BLUE}Installing additional dependencies while upgrading to newer dependencies${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_HIGHEST_FLAG} \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
${ADDITIONAL_PYTHON_DEPS} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
set +x
@@ -767,7 +768,7 @@ function install_additional_dependencies() {
echo "${COLOR_BLUE}Installing additional dependencies upgrading only if needed${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "${RESOLUTION_LOWEST_DIRECT_FLAG}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_IF_NEEDED} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
${ADDITIONAL_PYTHON_DEPS}
set +x
@@ -782,7 +783,6 @@ function install_additional_dependencies() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
@@ -1243,18 +1243,11 @@ ENV AIRFLOW_REPO=${AIRFLOW_REPO}\
AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \
AIRFLOW_UV_VERSION=${AIRFLOW_UV_VERSION} \
AIRFLOW_USE_UV=${AIRFLOW_USE_UV} \
-# In the CI image we always:
-# * install MySQL, MsSQL
-# * install airflow from current sources, not from PyPI package
-# * install airflow without `--user` flag
-# * install airflow in editable mode
-# * install always current version of airflow
INSTALL_MYSQL_CLIENT="true" \
INSTALL_MYSQL_CLIENT_TYPE=${INSTALL_MYSQL_CLIENT_TYPE} \
INSTALL_MSSQL_CLIENT="true" \
INSTALL_POSTGRES_CLIENT="true" \
AIRFLOW_INSTALLATION_METHOD="." \
- AIRFLOW_INSTALL_EDITABLE_FLAG="--editable" \
AIRFLOW_VERSION_SPECIFICATION="" \
PIP_NO_CACHE_DIR=${PIP_NO_CACHE_DIR} \
PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} \
diff --git a/INSTALL b/INSTALL
index 0f1ba985901fc..a1e2034b69937 100644
--- a/INSTALL
+++ b/INSTALL
@@ -255,7 +255,7 @@ microsoft-mssql, microsoft-psrp, microsoft-winrm, mongo, mssql, mysql, neo4j, od
openfaas, openlineage, opensearch, opsgenie, oracle, otel, pagerduty, pandas, papermill, password,
pgvector, pinecone, pinot, postgres, presto, pydantic, qdrant, rabbitmq, redis, s3, s3fs,
salesforce, samba, saml, segment, sendgrid, sentry, sftp, singularity, slack, smtp, snowflake,
-spark, sqlite, ssh, statsd, tableau, tabular, telegram, teradata, trino, vertica, virtualenv,
+spark, sqlite, ssh, statsd, tableau, tabular, telegram, teradata, trino, uv, vertica, virtualenv,
weaviate, webhdfs, winrm, yandex, zendesk
# END REGULAR EXTRAS HERE
diff --git a/contributing-docs/12_airflow_dependencies_and_extras.rst b/contributing-docs/12_airflow_dependencies_and_extras.rst
index 6dcb92575a37a..bf16efd91de12 100644
--- a/contributing-docs/12_airflow_dependencies_and_extras.rst
+++ b/contributing-docs/12_airflow_dependencies_and_extras.rst
@@ -211,7 +211,7 @@ microsoft-mssql, microsoft-psrp, microsoft-winrm, mongo, mssql, mysql, neo4j, od
openfaas, openlineage, opensearch, opsgenie, oracle, otel, pagerduty, pandas, papermill, password,
pgvector, pinecone, pinot, postgres, presto, pydantic, qdrant, rabbitmq, redis, s3, s3fs,
salesforce, samba, saml, segment, sendgrid, sentry, sftp, singularity, slack, smtp, snowflake,
-spark, sqlite, ssh, statsd, tableau, tabular, telegram, teradata, trino, vertica, virtualenv,
+spark, sqlite, ssh, statsd, tableau, tabular, telegram, teradata, trino, uv, vertica, virtualenv,
weaviate, webhdfs, winrm, yandex, zendesk
.. END REGULAR EXTRAS HERE
diff --git a/contributing-docs/testing/k8s_tests.rst b/contributing-docs/testing/k8s_tests.rst
index 8a7383f90c2be..4c00ec42a6893 100644
--- a/contributing-docs/testing/k8s_tests.rst
+++ b/contributing-docs/testing/k8s_tests.rst
@@ -358,7 +358,14 @@ Should show the status of current KinD cluster.
Cluster healthy: airflow-python-3.8-v1.24.2
5. Build the image base on PROD Airflow image. You need to build the PROD image first (the command will
- guide you if you did not - either by running the build separately or passing ``--rebuild-base-image`` flag
+ guide you if you did not) either by running the build separately or passing the
+ ``--rebuild-base-image`` flag. Generally speaking, you should not need to rebuild the base
+ image unless you changed some dependencies in ``pyproject.toml``.
+
+ Note that this command uses the ``--use-uv`` flag by default, so the image is built with ``uv``
+ instead of ``pip``. This makes rebuilding the image and iterating on your code much faster
+ (about 50% faster), but if you built your PROD image without the ``--use-uv`` flag, the first
+ build might take a bit longer. You can also switch to a ``pip``-based image by specifying the
+ ``--no-use-uv`` flag together with ``--rebuild-base-image``.
.. code-block:: bash
diff --git a/dev/breeze/doc/images/output_ci-image_build.svg b/dev/breeze/doc/images/output_ci-image_build.svg
index f617eb05e72aa..48b98e5904acc 100644
--- a/dev/breeze/doc/images/output_ci-image_build.svg
+++ b/dev/breeze/doc/images/output_ci-image_build.svg
@@ -390,7 +390,7 @@
│--python-imageIf specified this is the base python image used to build the image. Should be ││something like: python:VERSION-slim-bookworm. ││(TEXT) │
-│--use-uv/--no-use-uvUse uv instead of pip as packaging tool.│
+│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: use-uv]│╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯╭─ Selecting constraint location (for power users) ────────────────────────────────────────────────────────────────────╮│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or local context file).(TEXT)│
diff --git a/dev/breeze/doc/images/output_ci-image_build.txt b/dev/breeze/doc/images/output_ci-image_build.txt
index b59fb9fc0b24d..d32ec3ed79812 100644
--- a/dev/breeze/doc/images/output_ci-image_build.txt
+++ b/dev/breeze/doc/images/output_ci-image_build.txt
@@ -1 +1 @@
-f535999147ac00393852eb3b28d7125b
+775924a9beade1c361b7b0d127e21321
diff --git a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg
index 362f24d0e8360..c54802ce41769 100644
--- a/dev/breeze/doc/images/output_k8s_build-k8s-image.svg
+++ b/dev/breeze/doc/images/output_k8s_build-k8s-image.svg
@@ -1,4 +1,4 @@
-Build Production image. Include building multiple images for all or selected Python versions sequentially.╭─ Basic usage ────────────────────────────────────────────────────────────────────────────────────────────────────────╮
-│--python-pPython major/minor version used in Airflow image for images.│
-│(>3.8< | 3.9 | 3.10 | 3.11) │
-│[default: 3.8] │
-│--install-airflow-version-VInstall version of Airflow from PyPI.(TEXT)│
-│--image-tagTag the image after building it.(TEXT)[default: latest]│
-│--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful when │
-│you build or pull image with --image-tag. │
-│--docker-cache-cCache option for image used during the build.(registry | local | disabled)│
-│[default: registry] │
-│--version-suffix-for-pypiVersion suffix used for PyPI packages (alpha, beta, rc1, etc.).(TEXT)│
-│--build-progressBuild progress.(auto | plain | tty)[default: auto]│
-│--docker-hostOptional - docker host to use when running docker commands. When set, the `--builder`│
-│option is ignored when building images. │
-│(TEXT) │
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮
-│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│
-│--parallelismMaximum number of processes to use while running the operation in parallel.│
-│(INTEGER RANGE) │
-│[default: 4; 1<=x<=8] │
-│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│
-│[default: 3.8 3.9 3.10 3.11] │
-│--skip-cleanupSkip cleanup of temporary files created during parallel run.│
-│--debug-resourcesWhether to show resource information while running in parallel.│
-│--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default).│
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Advanced build options (for power users) ───────────────────────────────────────────────────────────────────────────╮
-│--additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip` │
-│itself). │
-│(TEXT) │
-│--commit-shaCommit SHA that is used to build the images.(TEXT)│
-│--debian-versionDebian version used in Airflow image as base for building images.│
-│(bookworm | bullseye) │
-│[default: bookworm] │
-│--python-imageIf specified this is the base python image used to build the image. Should be │
-│something like: python:VERSION-slim-bookworm. │
-│(TEXT) │
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Selecting constraint location (for power users) ────────────────────────────────────────────────────────────────────╮
-│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or local context file).(TEXT)│
-│--airflow-constraints-modeMode of constraints for Airflow for PROD image building. │
-│(constraints | constraints-no-providers | constraints-source-providers)│
-│[default: constraints] │
-│--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT)│
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Choosing dependencies and extras (for power users) ─────────────────────────────────────────────────────────────────╮
-│--airflow-extrasExtras to install by default. │
-│(TEXT) │
-│[default: │
-│aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,docker,elasticsearch,…│
-│--additional-airflow-extrasAdditional extra package while installing Airflow in the image.(TEXT)│
-│--additional-python-depsAdditional python dependencies to use when building the images.(TEXT)│
-│--dev-apt-depsApt dev dependencies to use when building the images.(TEXT)│
-│--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT)│
-│--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT)│
-│--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT)│
-│--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT)│
-│--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT)│
-│--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT)│
-│--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT)│
-│--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT)│
-│--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT)│
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Advanced customization options (for specific customization needs) ──────────────────────────────────────────────────╮
-│--installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow)[default: .]│
-│--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT)│
-│--install-packages-from-contextInstall wheels from local docker-context-files when building image. │
-│Implies --disable-airflow-repo-cache. │
-│--install-mysql-client-typeWhich client to choose when installing.(mariadb | mysql)│
-│--cleanup-contextClean up docker context files before running build (cannot be used │
-│together with --install-packages-from-context). │
-│--use-constraints-for-context-packagesUses constraints for context packages installation - either from │
-│constraints store in docker-context-files or from github. │
-│--disable-airflow-repo-cacheDisable cache from Airflow repository during building.│
-│--disable-mysql-client-installationDo not install MySQL client.│
-│--disable-mssql-client-installationDo not install MsSQl client.│
-│--disable-postgres-client-installationDo not install Postgres client.│
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮
-│--builderBuildx builder used to perform `docker buildx build` commands.(TEXT)│
-│[default: autodetect] │
-│--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64)│
-│--pushPush image after building it.│
-│--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the │
-│image). │
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Github authentication ──────────────────────────────────────────────────────────────────────────────────────────────╮
-│--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│
-│--github-tokenThe token used to authenticate to GitHub.(TEXT)│
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
-╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮
-│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│
-│--dry-run-DIf dry-run is set, commands are only printed, not executed.│
-│--verbose-vPrint verbose information about performed steps.│
-│--help-hShow this message and exit.│
-╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+│--build-progressBuild progress.(auto | plain | tty)[default: auto]│
+│--docker-cache-cCache option for image used during the build.(registry | local | disabled)│
+│[default: registry] │
+│--docker-hostOptional - docker host to use when running docker commands. When set, the `--builder`│
+│option is ignored when building images. │
+│(TEXT) │
+│--image-tagTag the image after building it.(TEXT)[default: latest]│
+│--install-airflow-version-VInstall version of Airflow from PyPI.(TEXT)│
+│--python-pPython major/minor version used in Airflow image for images.│
+│(>3.8< | 3.9 | 3.10 | 3.11) │
+│[default: 3.8] │
+│--tag-as-latestTags the image as latest and update checksum of all files after pulling. Useful when │
+│you build or pull image with --image-tag. │
+│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: no-use-uv]│
+│--version-suffix-for-pypiVersion suffix used for PyPI packages (alpha, beta, rc1, etc.).(TEXT)│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Building images in parallel ────────────────────────────────────────────────────────────────────────────────────────╮
+│--run-in-parallelRun the operation in parallel on all or selected subset of parameters.│
+│--parallelismMaximum number of processes to use while running the operation in parallel.│
+│(INTEGER RANGE) │
+│[default: 4; 1<=x<=8] │
+│--python-versionsSpace separated list of python versions used for build with multiple versions.(TEXT)│
+│[default: 3.8 3.9 3.10 3.11] │
+│--skip-cleanupSkip cleanup of temporary files created during parallel run.│
+│--debug-resourcesWhether to show resource information while running in parallel.│
+│--include-success-outputsWhether to include outputs of successful parallel runs (skipped by default).│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Advanced build options (for power users) ───────────────────────────────────────────────────────────────────────────╮
+│--additional-pip-install-flagsAdditional flags added to `pip install` commands (except reinstalling `pip` │
+│itself). │
+│(TEXT) │
+│--commit-shaCommit SHA that is used to build the images.(TEXT)│
+│--debian-versionDebian version used in Airflow image as base for building images.│
+│(bookworm | bullseye) │
+│[default: bookworm] │
+│--python-imageIf specified this is the base python image used to build the image. Should be │
+│something like: python:VERSION-slim-bookworm. │
+│(TEXT) │
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Selecting constraint location (for power users) ────────────────────────────────────────────────────────────────────╮
+│--airflow-constraints-locationLocation of airflow constraints to use (remote URL or local context file).(TEXT)│
+│--airflow-constraints-modeMode of constraints for Airflow for PROD image building. │
+│(constraints | constraints-no-providers | constraints-source-providers)│
+│[default: constraints] │
+│--airflow-constraints-referenceConstraint reference to use when building the image.(TEXT)│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Choosing dependencies and extras (for power users) ─────────────────────────────────────────────────────────────────╮
+│--airflow-extrasExtras to install by default. │
+│(TEXT) │
+│[default: │
+│aiobotocore,amazon,async,celery,cncf-kubernetes,common-io,docker,elasticsearch,…│
+│--additional-airflow-extrasAdditional extra package while installing Airflow in the image.(TEXT)│
+│--additional-python-depsAdditional python dependencies to use when building the images.(TEXT)│
+│--dev-apt-depsApt dev dependencies to use when building the images.(TEXT)│
+│--additional-dev-apt-depsAdditional apt dev dependencies to use when building the images.(TEXT)│
+│--dev-apt-commandCommand executed before dev apt deps are installed.(TEXT)│
+│--additional-dev-apt-commandAdditional command executed before dev apt deps are installed.(TEXT)│
+│--additional-dev-apt-envAdditional environment variables set when adding dev dependencies.(TEXT)│
+│--runtime-apt-depsApt runtime dependencies to use when building the images.(TEXT)│
+│--additional-runtime-apt-depsAdditional apt runtime dependencies to use when building the images.(TEXT)│
+│--runtime-apt-commandCommand executed before runtime apt deps are installed.(TEXT)│
+│--additional-runtime-apt-commandAdditional command executed before runtime apt deps are installed.(TEXT)│
+│--additional-runtime-apt-envAdditional environment variables set when adding runtime dependencies.(TEXT)│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Advanced customization options (for specific customization needs) ──────────────────────────────────────────────────╮
+│--installation-methodInstall Airflow from: sources or PyPI.(. | apache-airflow)[default: .]│
+│--install-airflow-referenceInstall Airflow using GitHub tag or branch.(TEXT)│
+│--install-packages-from-contextInstall wheels from local docker-context-files when building image. │
+│Implies --disable-airflow-repo-cache. │
+│--install-mysql-client-typeWhich client to choose when installing.(mariadb | mysql)│
+│--cleanup-contextClean up docker context files before running build (cannot be used │
+│together with --install-packages-from-context). │
+│--use-constraints-for-context-packagesUses constraints for context packages installation - either from │
+│constraints store in docker-context-files or from github. │
+│--disable-airflow-repo-cacheDisable cache from Airflow repository during building.│
+│--disable-mysql-client-installationDo not install MySQL client.│
+│--disable-mssql-client-installationDo not install MsSQL client.│
+│--disable-postgres-client-installationDo not install Postgres client.│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Preparing cache and push (for maintainers and CI) ──────────────────────────────────────────────────────────────────╮
+│--builderBuildx builder used to perform `docker buildx build` commands.(TEXT)│
+│[default: autodetect] │
+│--platformPlatform for Airflow image.(linux/amd64 | linux/arm64 | linux/amd64,linux/arm64)│
+│--pushPush image after building it.│
+│--prepare-buildx-cachePrepares build cache (this is done as separate per-platform steps instead of building the │
+│image). │
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Github authentication ──────────────────────────────────────────────────────────────────────────────────────────────╮
+│--github-repository-gGitHub repository used to pull, push run images.(TEXT)[default: apache/airflow]│
+│--github-tokenThe token used to authenticate to GitHub.(TEXT)│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+╭─ Common options ─────────────────────────────────────────────────────────────────────────────────────────────────────╮
+│--answer-aForce answer to questions.(y | n | q | yes | no | quit)│
+│--dry-run-DIf dry-run is set, commands are only printed, not executed.│
+│--verbose-vPrint verbose information about performed steps.│
+│--help-hShow this message and exit.│
+╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
diff --git a/dev/breeze/doc/images/output_prod-image_build.txt b/dev/breeze/doc/images/output_prod-image_build.txt
index 86876deb7fc75..914e4431506e3 100644
--- a/dev/breeze/doc/images/output_prod-image_build.txt
+++ b/dev/breeze/doc/images/output_prod-image_build.txt
@@ -1 +1 @@
-07693b2597b00fdb156949c753dae783
+346557d2a2db8a07c6387d8bb3b41472
diff --git a/dev/breeze/doc/images/output_release-management_generate-constraints.svg b/dev/breeze/doc/images/output_release-management_generate-constraints.svg
index 96976e4d5567c..2cb3768a00e26 100644
--- a/dev/breeze/doc/images/output_release-management_generate-constraints.svg
+++ b/dev/breeze/doc/images/output_release-management_generate-constraints.svg
@@ -185,7 +185,7 @@
│--python-pPython major/minor version used in Airflow image for images.│
│(>3.8< | 3.9 | 3.10 | 3.11) │
│[default: 3.8] │
-│--use-uv/--no-use-uvUse uv instead of pip as packaging tool.│
+│--use-uv/--no-use-uvUse uv instead of pip as packaging tool to build the image.[default: use-uv]│
╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
╭─ Parallel running ───────────────────────────────────────────────────────────────────────────────────────────────────╮
│--debug-resourcesWhether to show resource information while running in parallel.│
diff --git a/dev/breeze/doc/images/output_release-management_generate-constraints.txt b/dev/breeze/doc/images/output_release-management_generate-constraints.txt
index 06c7108c2e15e..2891a4b17b896 100644
--- a/dev/breeze/doc/images/output_release-management_generate-constraints.txt
+++ b/dev/breeze/doc/images/output_release-management_generate-constraints.txt
@@ -1 +1 @@
-839a8605b97ed77f000b907f62752037
+040797cc2ecc3d56fcbb981bbe970c33
diff --git a/dev/breeze/src/airflow_breeze/commands/common_options.py b/dev/breeze/src/airflow_breeze/commands/common_options.py
index af30a9d2995e8..6222e28cbd11b 100644
--- a/dev/breeze/src/airflow_breeze/commands/common_options.py
+++ b/dev/breeze/src/airflow_breeze/commands/common_options.py
@@ -341,7 +341,16 @@ def _set_default_from_parent(ctx: click.core.Context, option: click.core.Option,
"--use-uv/--no-use-uv",
is_flag=True,
default=True,
- help="Use uv instead of pip as packaging tool.",
+ show_default=True,
+ help="Use uv instead of pip as packaging tool to build the image.",
+ envvar="USE_UV",
+)
+option_use_uv_default_disabled = click.option(
+ "--use-uv/--no-use-uv",
+ is_flag=True,
+ default=False,
+ show_default=True,
+ help="Use uv instead of pip as packaging tool to build the image.",
envvar="USE_UV",
)
option_pydantic = click.option(
diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
index 9ae00220d8125..d9a6ddf19fad1 100644
--- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands.py
@@ -29,6 +29,7 @@
import click
from airflow_breeze.commands.common_options import (
+ option_answer,
option_debug_resources,
option_dry_run,
option_include_success_outputs,
@@ -37,6 +38,7 @@
option_python_versions,
option_run_in_parallel,
option_skip_cleanup,
+ option_use_uv,
option_verbose,
)
from airflow_breeze.commands.production_image_commands import run_build_production_image
@@ -555,9 +557,10 @@ def _rebuild_k8s_image(
python: str,
image_tag: str,
rebuild_base_image: bool,
+ use_uv: bool,
output: Output | None,
) -> tuple[int, str]:
- params = BuildProdParams(python=python, image_tag=image_tag)
+ params = BuildProdParams(python=python, image_tag=image_tag, use_uv=use_uv)
if rebuild_base_image:
run_build_production_image(prod_image_params=params, output=output)
else:
@@ -583,6 +586,7 @@ def _rebuild_k8s_image(
docker_image_for_kubernetes_tests = f"""
FROM {params.airflow_image_name_with_tag}
+COPY . /opt/airflow/
COPY airflow/example_dags/ /opt/airflow/dags/
COPY airflow/providers/cncf/kubernetes/kubernetes_executor_templates/ /opt/airflow/pod_templates/
@@ -627,27 +631,30 @@ def _upload_k8s_image(python: str, kubernetes_version: str, output: Output | Non
name="build-k8s-image",
help="Build k8s-ready airflow image (optionally all images in parallel).",
)
-@option_python
+@option_answer
+@option_debug_resources
+@option_dry_run
@option_image_tag
+@option_include_success_outputs
+@option_parallelism
+@option_python
+@option_python_versions
@option_rebuild_base_image
@option_run_in_parallel
-@option_parallelism
@option_skip_cleanup
-@option_debug_resources
-@option_include_success_outputs
-@option_python_versions
+@option_use_uv
@option_verbose
-@option_dry_run
def build_k8s_image(
- python: str,
+ debug_resources: bool,
image_tag: str,
+ include_success_outputs: bool,
+ parallelism: int,
+ python: str,
+ python_versions: str,
rebuild_base_image: bool,
run_in_parallel: bool,
- parallelism: int,
skip_cleanup: bool,
- debug_resources: bool,
- include_success_outputs: bool,
- python_versions: str,
+ use_uv: bool,
):
result = create_virtualenv(force_venv_setup=False)
if result.returncode != 0:
@@ -669,6 +676,7 @@ def build_k8s_image(
"python": _python,
"image_tag": image_tag,
"rebuild_base_image": rebuild_base_image,
+ "use_uv": use_uv,
"output": outputs[index],
},
)
@@ -686,6 +694,7 @@ def build_k8s_image(
python=python,
image_tag=image_tag,
rebuild_base_image=rebuild_base_image,
+ use_uv=use_uv,
output=None,
)
if return_code == 0:
@@ -1502,6 +1511,7 @@ def _run_complete_tests(
executor: str,
image_tag: str,
rebuild_base_image: bool,
+ use_uv: bool,
upgrade: bool,
wait_time_in_seconds: int,
force_recreate_cluster: bool,
@@ -1516,6 +1526,7 @@ def _run_complete_tests(
python=python,
output=output,
image_tag=image_tag,
+ use_uv=use_uv,
rebuild_base_image=rebuild_base_image,
)
if returncode != 0:
@@ -1629,45 +1640,47 @@ def _run_complete_tests(
ignore_unknown_options=True,
),
)
-@option_python
-@option_kubernetes_version
+@option_debug_resources
+@option_dry_run
@option_executor
+@option_force_recreate_cluster
+@option_force_venv_setup
@option_image_tag
+@option_include_success_outputs
+@option_kubernetes_version
+@option_kubernetes_versions
+@option_parallelism_cluster
+@option_python
+@option_python_versions
@option_rebuild_base_image
-@option_upgrade
-@option_wait_time_in_seconds
-@option_force_venv_setup
-@option_force_recreate_cluster
@option_run_in_parallel
-@option_parallelism_cluster
@option_skip_cleanup
-@option_debug_resources
-@option_include_success_outputs
+@option_upgrade
@option_use_standard_naming
-@option_python_versions
-@option_kubernetes_versions
+@option_use_uv
@option_verbose
-@option_dry_run
+@option_wait_time_in_seconds
@click.argument("test_args", nargs=-1, type=click.Path())
def run_complete_tests(
- python: str,
- kubernetes_version: str,
+ debug_resources: bool,
executor: str,
- image_tag: str,
- rebuild_base_image: bool,
- upgrade: bool,
- wait_time_in_seconds: int,
force_recreate_cluster: bool,
force_venv_setup: bool,
- run_in_parallel: bool,
- parallelism: int,
- skip_cleanup: bool,
- debug_resources: bool,
+ image_tag: str,
include_success_outputs: bool,
- use_standard_naming: bool,
- python_versions: str,
+ kubernetes_version: str,
kubernetes_versions: str,
+ parallelism: int,
+ python: str,
+ python_versions: str,
+ rebuild_base_image: bool,
+ run_in_parallel: bool,
+ skip_cleanup: bool,
test_args: tuple[str, ...],
+ upgrade: bool,
+ use_standard_naming: bool,
+ use_uv: bool,
+ wait_time_in_seconds: int,
):
result = create_virtualenv(force_venv_setup=force_venv_setup)
if result.returncode != 0:
@@ -1697,6 +1710,7 @@ def run_complete_tests(
"executor": executor,
"image_tag": image_tag,
"rebuild_base_image": rebuild_base_image,
+ "use_uv": use_uv,
"upgrade": upgrade,
"wait_time_in_seconds": wait_time_in_seconds,
"force_recreate_cluster": force_recreate_cluster,
@@ -1723,6 +1737,7 @@ def run_complete_tests(
executor=executor,
image_tag=image_tag,
rebuild_base_image=rebuild_base_image,
+ use_uv=use_uv,
upgrade=upgrade,
wait_time_in_seconds=wait_time_in_seconds,
force_recreate_cluster=force_recreate_cluster,
diff --git a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py
index 314fc92f2ebb7..84099ae9ec866 100644
--- a/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py
+++ b/dev/breeze/src/airflow_breeze/commands/kubernetes_commands_config.py
@@ -71,8 +71,9 @@
"name": "Build image flags",
"options": [
"--python",
- "--rebuild-base-image",
"--image-tag",
+ "--rebuild-base-image",
+ "--use-uv",
],
},
{
@@ -230,8 +231,9 @@
{
"name": "Build image flags",
"options": [
- "--rebuild-base-image",
"--image-tag",
+ "--rebuild-base-image",
+ "--use-uv",
],
},
{
diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
index a98462c1630f5..911e8b56a5b08 100644
--- a/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands.py
@@ -68,6 +68,7 @@
option_python_versions,
option_run_in_parallel,
option_skip_cleanup,
+ option_use_uv_default_disabled,
option_verbose,
option_version_suffix_for_pypi,
)
@@ -244,6 +245,7 @@ def prod_image():
@option_runtime_apt_deps
@option_skip_cleanup
@option_tag_as_latest
+@option_use_uv_default_disabled
@option_verbose
@option_version_suffix_for_pypi
def build(
@@ -296,6 +298,7 @@ def build(
skip_cleanup: bool,
tag_as_latest: bool,
use_constraints_for_context_packages: bool,
+ use_uv: bool,
version_suffix_for_pypi: str,
):
"""
@@ -354,6 +357,7 @@ def run_build(prod_image_params: BuildProdParams) -> None:
runtime_apt_deps=runtime_apt_deps,
tag_as_latest=tag_as_latest,
use_constraints_for_context_packages=use_constraints_for_context_packages,
+ use_uv=use_uv,
version_suffix_for_pypi=version_suffix_for_pypi,
)
if platform:
diff --git a/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py b/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py
index df780dbbd8c29..3be1c16026493 100644
--- a/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py
+++ b/dev/breeze/src/airflow_breeze/commands/production_image_commands_config.py
@@ -29,14 +29,15 @@
{
"name": "Basic usage",
"options": [
- "--python",
- "--install-airflow-version",
+ "--build-progress",
+ "--docker-cache",
+ "--docker-host",
"--image-tag",
+ "--install-airflow-version",
+ "--python",
"--tag-as-latest",
- "--docker-cache",
+ "--use-uv",
"--version-suffix-for-pypi",
- "--build-progress",
- "--docker-host",
],
},
{
diff --git a/dev/breeze/src/airflow_breeze/global_constants.py b/dev/breeze/src/airflow_breeze/global_constants.py
index e667f186bbb33..25b25bf21a38e 100644
--- a/dev/breeze/src/airflow_breeze/global_constants.py
+++ b/dev/breeze/src/airflow_breeze/global_constants.py
@@ -441,6 +441,7 @@ def get_airflow_extras():
"snowflake",
"ssh",
"statsd",
+ "uv",
"virtualenv",
# END OF EXTRAS LIST UPDATED BY PRE COMMIT
]
diff --git a/dev/breeze/src/airflow_breeze/params/build_prod_params.py b/dev/breeze/src/airflow_breeze/params/build_prod_params.py
index 342ac0c1435d1..b5cb80d2167e1 100644
--- a/dev/breeze/src/airflow_breeze/params/build_prod_params.py
+++ b/dev/breeze/src/airflow_breeze/params/build_prod_params.py
@@ -55,6 +55,7 @@ class BuildProdParams(CommonBuildParams):
runtime_apt_command: str | None = None
runtime_apt_deps: str | None = None
use_constraints_for_context_packages: bool = False
+ use_uv: bool = True
@property
def airflow_version(self) -> str:
@@ -206,6 +207,7 @@ def prepare_arguments_for_docker_build_command(self) -> list[str]:
self._req_arg("AIRFLOW_IMAGE_README_URL", self.airflow_image_readme_url)
self._req_arg("AIRFLOW_IMAGE_REPOSITORY", self.airflow_image_repository)
self._req_arg("AIRFLOW_PRE_CACHED_PIP_PACKAGES", self.airflow_pre_cached_pip_packages)
+ self._opt_arg("AIRFLOW_USE_UV", self.use_uv)
self._req_arg("AIRFLOW_VERSION", self.airflow_version)
self._req_arg("BUILD_ID", self.build_id)
self._req_arg("CONSTRAINTS_GITHUB_REPOSITORY", self.constraints_github_repository)
diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst
index 42d75f62fd287..67d2f6577739b 100644
--- a/docs/apache-airflow/extra-packages-ref.rst
+++ b/docs/apache-airflow/extra-packages-ref.rst
@@ -85,6 +85,8 @@ python dependencies for the provided package.
+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
| statsd | ``pip install 'apache-airflow[statsd]'`` | Needed by StatsD metrics |
+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| uv | ``pip install 'apache-airflow[uv]'`` | Install uv - fast, Rust-based package installer (experimental) |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
| virtualenv | ``pip install 'apache-airflow[virtualenv]'`` | Running python tasks in local virtualenv |
+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst
index 9a63e67213d26..a451746f57a11 100644
--- a/docs/docker-stack/build-arg-ref.rst
+++ b/docs/docker-stack/build-arg-ref.rst
@@ -49,7 +49,8 @@ Those are the most common arguments that you use when you want to build a custom
+------------------------------------------+------------------------------------------+---------------------------------------------+
| ``AIRFLOW_UV_VERSION`` | ```` | UV version used. |
+------------------------------------------+------------------------------------------+---------------------------------------------+
-| ``AIRFLOW_USE_UV`` | ``false`` | Whether to use UV. |
+| ``AIRFLOW_USE_UV`` | ``false`` | Whether to use UV to build the image. |
+| | | This is an experimental feature. |
+------------------------------------------+------------------------------------------+---------------------------------------------+
| ``ADDITIONAL_PIP_INSTALL_FLAGS`` | | additional ``pip`` flags passed to the |
| | | installation commands (except when |
@@ -113,6 +114,7 @@ List of default extras in the production Dockerfile:
* snowflake
* ssh
* statsd
+* uv
* virtualenv
.. END OF EXTRAS LIST UPDATED BY PRE COMMIT
diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst
index b94aaff59ff87..4c7772690ef8d 100644
--- a/docs/docker-stack/build.rst
+++ b/docs/docker-stack/build.rst
@@ -334,7 +334,7 @@ Naming conventions for the images:
Important notes for the base images
-----------------------------------
-You should be aware, about a few things:
+You should be aware of a few things:
* The production image of airflow uses "airflow" user, so if you want to add some of the tools
as ``root`` user, you need to switch to it with ``USER`` directive of the Dockerfile and switch back to
@@ -342,14 +342,19 @@ You should be aware, about a few things:
`best practices of Dockerfiles `_
to make sure your image is lean and small.
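+
+  For example, a minimal sketch of such a switch (``vim`` is used here only as an illustrative
+  package):
+
+  .. code-block:: docker
+
+     FROM apache/airflow:2.9.0
+     USER root
+     RUN apt-get update && apt-get install -y --no-install-recommends vim
+     USER airflow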
-* The PyPI dependencies in Apache Airflow are installed in the user library, of the "airflow" user, so
- PIP packages are installed to ``~/.local`` folder as if the ``--user`` flag was specified when running PIP.
- Note also that using ``--no-cache-dir`` is a good idea that can help to make your image smaller.
+* You can use regular ``pip install`` commands (and, as of the Dockerfile coming with Airflow 2.9, also
+  the experimental ``uv pip install``) to install PyPI packages. When you do, remember to add
+  ``apache-airflow==${AIRFLOW_VERSION}`` to the command, to avoid accidentally upgrading or downgrading
+  the version of Apache Airflow. Depending on the scenario, you might also want to use a constraints
+  file. As of the Dockerfile available in Airflow 2.9.0, the constraints file used to build the image
+  is available in ``${HOME}/constraints.txt``.
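+
+  For example, a minimal sketch of such an installation (``lxml`` here is just an illustration):
+
+  .. code-block:: docker
+
+     FROM apache/airflow:2.9.0
+     RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" lxml \
+         --constraint "${HOME}/constraints.txt"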
-.. note::
- Only as of ``2.0.1`` image the ``--user`` flag is turned on by default by setting ``PIP_USER`` environment
- variable to ``true``. This can be disabled by un-setting the variable or by setting it to ``false``. In the
- 2.0.0 image you had to add the ``--user`` flag as ``pip install --user`` command.
+* The PyPI dependencies in Apache Airflow are installed in the ``~/.local`` virtualenv of the "airflow"
+  user, so PIP packages are installed to the ``~/.local`` folder as if the ``--user`` flag was specified
+  when running PIP. This means that when you create a virtualenv with the ``--system-site-packages``
+  flag, the created virtualenv will automatically have all the same packages installed as the local
+  Airflow installation. Note also that using ``--no-cache-dir`` in ``pip`` or ``--no-cache`` in ``uv``
+  is a good idea and can help to make your image smaller.
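+
+  For example (a hypothetical sketch, run inside the container), a virtualenv created with
+  ``--system-site-packages`` sees the Airflow installation without re-installing it:
+
+  .. code-block:: bash
+
+     python -m venv --system-site-packages /tmp/my-venv
+     /tmp/my-venv/bin/python -c "import airflow; print(airflow.__version__)"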
* If your apt, or PyPI dependencies require some of the ``build-essential`` or other packages that need
to compile your python dependencies, then your best choice is to follow the "Customize the image" route,
@@ -373,23 +378,10 @@ You should be aware, about a few things:
``umask 0002`` is set as default when you enter the image, so any directories you create by default
in runtime, will have ``GID=0`` and will be group-writable.
-.. note::
- When you build image for Airflow version < ``2.1`` (for example 2.0.2 or 1.10.15) the image is built with
- PIP 20.2.4 because ``PIP21+`` is only supported for ``Airflow 2.1+``
-
-.. note::
- Only as of ``2.0.2`` the default group of ``airflow`` user is ``root``. Previously it was ``airflow``,
- so if you are building your images based on an earlier image, you need to manually change the default
- group for airflow user:
-
-.. code-block:: docker
-
- RUN usermod -g 0 airflow
-
Examples of image extending
---------------------------
-Example of customizing Airflow Provider packages
+Example of setting your own Provider packages
................................................
The :ref:`Airflow Providers ` are released independently of core
@@ -432,6 +424,32 @@ The following example adds ``lxml`` python package from PyPI to the image.
:start-after: [START Dockerfile]
:end-before: [END Dockerfile]
+Example of adding ``PyPI`` package with constraints
+...................................................
+
+The following example adds the ``lxml`` Python package from PyPI to the image, using the constraints
+that were used to install Airflow. This allows you to use the versions of packages that you know were
+tested with the given version of Airflow. You can also use it if you do not want to use potentially
+newer versions that were released after the version of Airflow you are using.
+
+.. exampleinclude:: docker-examples/extending/add-pypi-packages-constraints/Dockerfile
+ :language: Dockerfile
+ :start-after: [START Dockerfile]
+ :end-before: [END Dockerfile]
+
+
+Example of adding ``PyPI`` package with uv
+..........................................
+
+The following example adds the ``lxml`` Python package from PyPI to the image using ``uv``. This is an
+experimental feature, as ``uv`` is a very fast but also very new tool in the Python ecosystem.
+
+.. exampleinclude:: docker-examples/extending/add-pypi-packages-uv/Dockerfile
+ :language: Dockerfile
+ :start-after: [START Dockerfile]
+ :end-before: [END Dockerfile]
+
+
Example of adding packages from requirements.txt
................................................
@@ -775,6 +793,19 @@ The following example builds the production image in version ``3.8`` based on ``
:end-before: [END build]
+.. _image-build-uv:
+
+Building prod images using UV as the package installer
+......................................................
+
+The following example builds the production image with default settings, but uses ``uv`` to install
+packages while building the image. This is an experimental feature, as ``uv`` is a very fast but also
+very new tool in the Python ecosystem.
+
+.. exampleinclude:: docker-examples/customizing/use-uv.sh
+ :language: bash
+ :start-after: [START build]
+ :end-before: [END build]
+
.. _image-build-mysql:
Building images with MySQL client
diff --git a/docs/docker-stack/changelog.rst b/docs/docker-stack/changelog.rst
index 90930afdda34c..2e7e4fd28c4fe 100644
--- a/docs/docker-stack/changelog.rst
+++ b/docs/docker-stack/changelog.rst
@@ -45,13 +45,25 @@ Airflow 2.9
``apache/airflow:slim-2.9.0-python-3.8`` images respectively so while the change is potentially
breaking, it is very easy to switch to the previous behaviour.
-Airflow 2.9
-~~~~~~~~~~~
-
-The ``gosu`` binary was removed from the image. This is a potentially breaking change for users who relied on
-``gosu`` to change the user in the container. The ``gosu`` binary was removed because it was a source of
-security vulnerabilities as it was linked against older go standard libraries.
-
+ * The ``PIP_USER`` flag is removed and replaced by ``VIRTUAL_ENV`` pointing to ``~/.local``, where
+   Airflow is installed. This has the effect that the Airflow installation is treated as a regular
+   virtual environment, but unlike a regular virtualenv, the ``~/.local`` directory is seen as
+   "system level": when the worker dynamically creates a virtualenv with the ``--system-site-packages``
+   flag, the Airflow installation and all its packages are also present in the new virtualenv. When you
+   do not use the flag, they are not copied there, which is backwards-compatible with having
+   ``PIP_USER`` set.
+
+ * The image contains the latest ``uv`` binary (the latest available at the moment of release) - a new,
+   faster replacement for ``pip``. While the image still uses ``pip`` by default, you can use ``uv``
+   to install packages and - experimentally - you can also build custom images with
+   ``--build-arg AIRFLOW_USE_UV=true``, which will use ``uv`` to perform the installation. This
+   support is experimental, as ``uv`` is very fast but also a very new tool in the Python ecosystem.
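+
+   A sketch of such a build (assuming the released Dockerfile is in the build context):
+
+   .. code-block:: bash
+
+      docker build . --build-arg AIRFLOW_USE_UV="true" --tag "my-uv-airflow:0.0.1"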
+
+ * The constraints used to install the image are now available in ``${HOME}/constraints.txt`` - you can
+   use them to install additional packages in the image without having to find out which constraints
+   you should use.
+
+ * The ``gosu`` binary was removed from the image. This is a potentially breaking change for users who relied on
+ ``gosu`` to change the user in the container. The ``gosu`` binary was removed because it was a source of
+ security vulnerabilities as it was linked against older Go standard libraries.
Airflow 2.8
~~~~~~~~~~~
diff --git a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
index 4821e83ef3152..70c1bf9e331b4 100755
--- a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
+++ b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh
@@ -26,14 +26,14 @@ pushd "${TEMP_DOCKER_DIR}"
cp "${AIRFLOW_SOURCES}/Dockerfile" "${TEMP_DOCKER_DIR}"
# [START build]
-export AIRFLOW_VERSION=2.2.4
+export AIRFLOW_VERSION=2.8.2
export DOCKER_BUILDKIT=1
docker build . \
--pull \
--build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \
--build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \
- --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \
+ --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py==3.1.5" \
--build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \
--build-arg ADDITIONAL_RUNTIME_APT_DEPS="openmpi-common" \
--tag "my-build-essential-image:0.0.1"
diff --git a/docs/docker-stack/docker-examples/customizing/custom-pip.sh b/docs/docker-stack/docker-examples/customizing/custom-pip.sh
index aae3b6b198a3e..54802d7b40099 100755
--- a/docs/docker-stack/docker-examples/customizing/custom-pip.sh
+++ b/docs/docker-stack/docker-examples/customizing/custom-pip.sh
@@ -35,8 +35,6 @@ export DOCKER_BUILDKIT=1
docker build . \
--build-arg DOCKER_CONTEXT_FILES=./docker-context-files \
--tag "my-custom-pip-verbose-airflow:0.0.1"
-docker run -it my-beautifulsoup4-airflow:0.0.1 python -c 'import bs4; import sys; sys.exit(0)' && \
- echo "Success! Beautifulsoup4 installed" && echo
# [END build]
docker rmi --force "my-custom-pip-verbose-airflow:0.0.1"
popd
diff --git a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
index b0fc6716d0342..e88117d493072 100755
--- a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
+++ b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh
@@ -30,7 +30,7 @@ export DOCKER_BUILDKIT=1
docker build . \
--pull \
--build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \
- --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/potiuk/airflow/archive/main.tar.gz#egg=apache-airflow" \
+ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/potiuk/airflow/archive/main.tar.gz" \
--build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \
--build-arg CONSTRAINTS_GITHUB_REPOSITORY="potiuk/airflow" \
--tag "github-different-repository-image:0.0.1"
diff --git a/docs/docker-stack/docker-examples/customizing/github-main.sh b/docs/docker-stack/docker-examples/customizing/github-main.sh
index ef1da74773772..666b57081c0ae 100755
--- a/docs/docker-stack/docker-examples/customizing/github-main.sh
+++ b/docs/docker-stack/docker-examples/customizing/github-main.sh
@@ -31,7 +31,7 @@ export DOCKER_BUILDKIT=1
docker build . \
--pull \
--build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \
- --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/main.tar.gz#egg=apache-airflow" \
+ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/apache/airflow/archive/main.tar.gz" \
--build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-main" \
--tag "my-github-main:0.0.1"
# [END build]
diff --git a/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh b/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh
index 5a7f634a4b5dd..ab1ca26501143 100755
--- a/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh
+++ b/docs/docker-stack/docker-examples/customizing/github-v2-2-test.sh
@@ -32,7 +32,7 @@ export DOCKER_BUILDKIT=1
docker build . \
--pull \
--build-arg PYTHON_BASE_IMAGE="python:3.8-slim-bookworm" \
- --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/v2-2-test.tar.gz#egg=apache-airflow" \
+ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow @ https://github.com/apache/airflow/archive/v2-2-test.tar.gz" \
--build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-2" \
--tag "my-github-v2-2:0.0.1"
# [END build]
diff --git a/docs/docker-stack/docker-examples/customizing/use-uv.sh b/docs/docker-stack/docker-examples/customizing/use-uv.sh
new file mode 100755
index 0000000000000..4c5ffdc1aad57
--- /dev/null
+++ b/docs/docker-stack/docker-examples/customizing/use-uv.sh
@@ -0,0 +1,35 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# This is an example docker build script. It is not intended for PRODUCTION use
+set -euo pipefail
+AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)"
+TEMP_DOCKER_DIR=$(mktemp -d)
+pushd "${TEMP_DOCKER_DIR}"
+
+cp "${AIRFLOW_SOURCES}/Dockerfile" "${TEMP_DOCKER_DIR}"
+# [START build]
+
+export DOCKER_BUILDKIT=1
+docker build . \
+ --build-arg AIRFLOW_USE_UV="true" \
+ --tag "my-custom-use-uv-airflow:0.0.1"
+# [END build]
+docker rmi --force "my-custom-use-uv-airflow:0.0.1"
+popd
+rm -rf "${TEMP_DOCKER_DIR}"
diff --git a/docs/docker-stack/docker-examples/extending/add-pypi-packages-constraints/Dockerfile b/docs/docker-stack/docker-examples/extending/add-pypi-packages-constraints/Dockerfile
new file mode 100644
index 0000000000000..cd7baf973c164
--- /dev/null
+++ b/docs/docker-stack/docker-examples/extending/add-pypi-packages-constraints/Dockerfile
@@ -0,0 +1,20 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Dockerfile. It is not intended for PRODUCTION use
+# [START Dockerfile]
+FROM apache/airflow:2.9.0.dev0
+RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" lxml --constraint "${HOME}/constraints.txt"
+# [END Dockerfile]
diff --git a/docs/docker-stack/docker-examples/extending/add-pypi-packages-uv/Dockerfile b/docs/docker-stack/docker-examples/extending/add-pypi-packages-uv/Dockerfile
new file mode 100644
index 0000000000000..7cf6066842631
--- /dev/null
+++ b/docs/docker-stack/docker-examples/extending/add-pypi-packages-uv/Dockerfile
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This is an example Dockerfile. It is not intended for PRODUCTION use
+# [START Dockerfile]
+FROM apache/airflow:2.9.0.dev0
+
+# The `uv` tool is a Rust-based packaging tool that is much faster than `pip` and other installers.
+# Support for uv as an installation tool is experimental.
+
+RUN uv pip install --no-cache "apache-airflow==${AIRFLOW_VERSION}" lxml
+# [END Dockerfile]
diff --git a/pyproject.toml b/pyproject.toml
index 96fe7fddc121b..b7939145ecad1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -248,6 +248,9 @@ sentry = [
statsd = [
"statsd>=3.3.0",
]
+uv = [
+ "uv>=0.1.13",
+]
virtualenv = [
"virtualenv",
]
@@ -992,6 +995,7 @@ all = [
"apache-airflow[saml]",
"apache-airflow[sentry]",
"apache-airflow[statsd]",
+ "apache-airflow[uv]",
"apache-airflow[virtualenv]",
# Apache no provider extras
"apache-airflow[apache-atlas]",
diff --git a/scripts/ci/pre_commit/pre_commit_update_installers.py b/scripts/ci/pre_commit/pre_commit_update_installers.py
index ca5beb02041dd..1efc899c8046c 100755
--- a/scripts/ci/pre_commit/pre_commit_update_installers.py
+++ b/scripts/ci/pre_commit/pre_commit_update_installers.py
@@ -30,6 +30,7 @@
AIRFLOW_SOURCES_ROOT_PATH / "Dockerfile",
AIRFLOW_SOURCES_ROOT_PATH / "Dockerfile.ci",
AIRFLOW_SOURCES_ROOT_PATH / "scripts" / "docker" / "common.sh",
+ AIRFLOW_SOURCES_ROOT_PATH / "pyproject.toml",
]
@@ -43,6 +44,7 @@ def get_latest_pypi_version(package_name: str) -> str:
PIP_PATTERN = re.compile(r"AIRFLOW_PIP_VERSION=[0-9.]+")
UV_PATTERN = re.compile(r"AIRFLOW_UV_VERSION=[0-9.]+")
+UV_GREATER_PATTERN = re.compile(r'"uv>=[0-9]+[0-9.]+"')
if __name__ == "__main__":
pip_version = get_latest_pypi_version("pip")
@@ -57,6 +59,7 @@ def get_latest_pypi_version(package_name: str) -> str:
new_content = file_content
new_content = re.sub(PIP_PATTERN, f"AIRFLOW_PIP_VERSION={pip_version}", new_content, re.MULTILINE)
new_content = re.sub(UV_PATTERN, f"AIRFLOW_UV_VERSION={uv_version}", new_content, re.MULTILINE)
+        new_content = re.sub(UV_GREATER_PATTERN, f'"uv>={uv_version}"', new_content, flags=re.MULTILINE)
if new_content != file_content:
file.write_text(new_content)
console.print(f"[bright_blue]Updated {file}")
diff --git a/scripts/docker/common.sh b/scripts/docker/common.sh
index 25fd4ef9a60a3..a6b92dee63b1e 100644
--- a/scripts/docker/common.sh
+++ b/scripts/docker/common.sh
@@ -46,10 +46,15 @@ function common::get_packaging_tool() {
echo
export PACKAGING_TOOL="uv"
export PACKAGING_TOOL_CMD="uv pip"
- export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
- export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
- export RESOLUTION_HIGHEST_FLAG="--resolution highest"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--resolution lowest-direct"
+ if [[ -z ${VIRTUAL_ENV=} ]]; then
+ export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
+ export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
+ else
+ export EXTRA_INSTALL_FLAGS=""
+ export EXTRA_UNINSTALL_FLAGS=""
+ fi
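+        # uv expresses the upgrade strategy via --resolution flags
+        # rather than pip's --upgrade-strategy.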
+ export UPGRADE_EAGERLY="--upgrade --resolution highest"
+ export UPGRADE_IF_NEEDED="--upgrade --resolution lowest-direct"
else
echo
echo "${COLOR_BLUE}Using 'pip' to install Airflow${COLOR_RESET}"
@@ -58,8 +63,8 @@ function common::get_packaging_tool() {
export PACKAGING_TOOL_CMD="pip"
export EXTRA_INSTALL_FLAGS="--root-user-action ignore"
export EXTRA_UNINSTALL_FLAGS="--yes"
- export RESOLUTION_HIGHEST_FLAG="--upgrade-strategy eager"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--upgrade --upgrade-strategy only-if-needed"
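+        # pip equivalents of the two upgrade modes defined for uv above.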
+ export UPGRADE_EAGERLY="--upgrade --upgrade-strategy eager"
+ export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed"
fi
}
@@ -71,14 +76,6 @@ function common::get_airflow_version_specification() {
fi
}
-function common::override_pip_version_if_needed() {
- if [[ -n ${AIRFLOW_VERSION} ]]; then
- if [[ ${AIRFLOW_VERSION} =~ ^2\.0.* || ${AIRFLOW_VERSION} =~ ^1\.* ]]; then
- export AIRFLOW_PIP_VERSION=24.0
- fi
- fi
-}
-
function common::get_constraints_location() {
if [[ -f "${HOME}/constraints.txt" ]]; then
# constraints are already downloaded, do not calculate/override again
@@ -125,6 +122,15 @@ function common::show_packaging_tool_version_and_location() {
}
function common::install_packaging_tools() {
+ if [[ "${VIRTUAL_ENV=}" != "" ]]; then
+ echo
+ echo "${COLOR_BLUE}Checking packaging tools in venv: ${VIRTUAL_ENV}${COLOR_RESET}"
+ echo
+ else
+ echo
+ echo "${COLOR_BLUE}Checking packaging tools for system Python installation: $(which python)${COLOR_RESET}"
+ echo
+ fi
if [[ ! ${AIRFLOW_PIP_VERSION} =~ [0-9.]* ]]; then
echo
echo "${COLOR_BLUE}Installing pip version from spec ${AIRFLOW_PIP_VERSION}${COLOR_RESET}"
diff --git a/scripts/docker/create_prod_venv.sh b/scripts/docker/create_prod_venv.sh
new file mode 100644
index 0000000000000..e6ae7f849db21
--- /dev/null
+++ b/scripts/docker/create_prod_venv.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# shellcheck shell=bash disable=SC2086
+# shellcheck source=scripts/docker/common.sh
+. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
+
+function create_prod_venv() {
+ echo
+    echo "${COLOR_BLUE}Removing ${HOME}/.local and re-creating it as a virtual environment.${COLOR_RESET}"
+    rm -rf ~/.local
+    python -m venv ~/.local
+    echo "${COLOR_BLUE}The ${HOME}/.local virtualenv has been created.${COLOR_RESET}"
+}
+
+common::get_colors
+common::get_packaging_tool
+common::show_packaging_tool_version_and_location
+create_prod_venv
+common::install_packaging_tools
diff --git a/scripts/docker/get_package_specs.py b/scripts/docker/get_package_specs.py
new file mode 100755
index 0000000000000..bff99a2c1cbdb
--- /dev/null
+++ b/scripts/docker/get_package_specs.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
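+"""Print installation specs (name[extras] @ file://path) for the dist files passed as arguments."""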
+from __future__ import annotations
+
+import os
+import sys
+from pathlib import Path
+
+from packaging.utils import (
+    InvalidSdistFilename,
+    InvalidWheelFilename,
+    parse_sdist_filename,
+    parse_wheel_filename,
+)
+
+
+def print_package_specs(extras: str = "") -> None:
+    for package_path in sys.argv[1:]:
+        try:
+            package, _, _, _ = parse_wheel_filename(Path(package_path).name)
+        except InvalidWheelFilename:
+            try:
+                package, _ = parse_sdist_filename(Path(package_path).name)
+            except InvalidSdistFilename:
+                print(f"Could not parse package name from {package_path}", file=sys.stderr)
+                continue
+        print(f"{package}{extras} @ file://{package_path}")
+
+
+if __name__ == "__main__":
+    print_package_specs(extras=os.environ.get("EXTRAS", ""))
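+
+# Example usage (hypothetical; EXTRAS is set by the caller, e.g. install_from_docker_context_files.sh):
+#   EXTRAS="[mysql]" python get_package_specs.py /docker-context-files/*.whl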
diff --git a/scripts/docker/install_additional_dependencies.sh b/scripts/docker/install_additional_dependencies.sh
index 91281f62330c4..174d08956cc33 100644
--- a/scripts/docker/install_additional_dependencies.sh
+++ b/scripts/docker/install_additional_dependencies.sh
@@ -31,7 +31,7 @@ function install_additional_dependencies() {
echo "${COLOR_BLUE}Installing additional dependencies while upgrading to newer dependencies${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_HIGHEST_FLAG} \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
${ADDITIONAL_PYTHON_DEPS} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
set +x
@@ -45,7 +45,7 @@ function install_additional_dependencies() {
echo "${COLOR_BLUE}Installing additional dependencies upgrading only if needed${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade "${RESOLUTION_LOWEST_DIRECT_FLAG}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_IF_NEEDED} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
${ADDITIONAL_PYTHON_DEPS}
set +x
@@ -60,7 +60,6 @@ function install_additional_dependencies() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh
index 1e63c4eafc6da..51dc3c9e00cb4 100644
--- a/scripts/docker/install_airflow.sh
+++ b/scripts/docker/install_airflow.sh
@@ -17,9 +17,14 @@
# under the License.
# Install airflow using regular 'pip install' command. This installs airflow depending on the arguments:
+#
# AIRFLOW_INSTALLATION_METHOD - determines where to install airflow from:
# "." - installs airflow from local sources
# "apache-airflow" - installs airflow from PyPI 'apache-airflow' package
+# "apache-airflow @ URL - installs from URL
+#
+# (example GitHub URL https://github.com/apache/airflow/archive/main.tar.gz)
+#
# AIRFLOW_VERSION_SPECIFICATION - optional specification for Airflow version to install (
# might be ==2.0.2 for example or <3.0.0
# UPGRADE_TO_NEWER_DEPENDENCIES - determines whether eager-upgrade should be performed with the
@@ -30,12 +35,21 @@
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
function install_airflow() {
- # Coherence check for editable installation mode.
- if [[ ${AIRFLOW_INSTALLATION_METHOD} != "." && \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} == "--editable" ]]; then
+ # Determine the installation_command_flags based on AIRFLOW_INSTALLATION_METHOD method
+ local installation_command_flags
+ if [[ ${AIRFLOW_INSTALLATION_METHOD} == "." ]]; then
+ # When installing from sources - we always use `--editable` mode
+ installation_command_flags="--editable .[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ elif [[ ${AIRFLOW_INSTALLATION_METHOD} == "apache-airflow" ]]; then
+ installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ elif [[ ${AIRFLOW_INSTALLATION_METHOD} == apache-airflow\ @\ * ]]; then
+ installation_command_flags="apache-airflow[${AIRFLOW_EXTRAS}] @ ${AIRFLOW_VERSION_SPECIFICATION/apache-airflow @//}"
+ else
+ echo
+ echo "${COLOR_RED}The '${INSTALLATION_METHOD}' installation method is not supported${COLOR_RESET}"
+ echo
+ echo "${COLOR_YELLOW}Supported methods are ('.', 'apache-airflow', 'apache-airflow @ URL')${COLOR_RESET}"
echo
- echo "${COLOR_RED}ERROR! You can only use --editable flag when installing airflow from sources!${COLOR_RESET}"
- echo "${COLOR_RED} Current installation method is '${AIRFLOW_INSTALLATION_METHOD} and should be '.'${COLOR_RESET}"
exit 1
fi
# Remove mysql from extras if client is not going to be installed
@@ -56,14 +70,10 @@ function install_airflow() {
${PACKAGING_TOOL_CMD} freeze | grep apache-airflow | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} 2>/dev/null || true
set +x
echo
- echo "${COLOR_BLUE}Installing all packages with eager upgrade with ${AIRFLOW_INSTALL_EDITABLE_FLAG} mode${COLOR_RESET}"
+ echo "${COLOR_BLUE}Installing all packages in eager upgrade mode. Installation method: ${AIRFLOW_INSTALLATION_METHOD}${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_HIGHEST_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
- ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_EAGERLY} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=}
set +x
common::install_packaging_tools
echo
@@ -72,14 +82,11 @@ function install_airflow() {
pip check
else
echo
- echo "${COLOR_BLUE}Installing all packages with constraints${COLOR_RESET}"
+ echo "${COLOR_BLUE}Installing all packages with constraints. Installation method: ${AIRFLOW_INSTALLATION_METHOD}${COLOR_RESET}"
echo
set -x
# Install all packages with constraints
- if ! ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
- --constraint "${HOME}/constraints.txt"; then
+ if ! ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags} --constraint "${HOME}/constraints.txt"; then
set +x
echo
echo "${COLOR_YELLOW}Likely pyproject.toml has new dependencies conflicting with constraints.${COLOR_RESET}"
@@ -87,10 +94,7 @@ function install_airflow() {
echo "${COLOR_BLUE}Falling back to no-constraints, lowest-direct resolution installation.${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade ${RESOLUTION_LOWEST_DIRECT_FLAG} \
- ${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
- "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${UPGRADE_IF_NEEDED} ${ADDITIONAL_PIP_INSTALL_FLAGS} ${installation_command_flags}
fi
set +x
common::install_packaging_tools
@@ -105,7 +109,6 @@ function install_airflow() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
diff --git a/scripts/docker/install_airflow_dependencies_from_branch_tip.sh b/scripts/docker/install_airflow_dependencies_from_branch_tip.sh
index 95f38e21e50ff..f9c76f091a3e7 100644
--- a/scripts/docker/install_airflow_dependencies_from_branch_tip.sh
+++ b/scripts/docker/install_airflow_dependencies_from_branch_tip.sh
@@ -55,23 +55,23 @@ function install_airflow_dependencies_from_branch_tip() {
set +x
common::install_packaging_tools
set -x
+ echo "${COLOR_BLUE}Uninstalling providers. Dependencies remain${COLOR_RESET}"
# Uninstall airflow and providers to keep only the dependencies. In the future when
# planned https://github.com/pypa/pip/issues/11440 is implemented in pip we might be able to use this
# flag and skip the remove step.
- ${PACKAGING_TOOL_CMD} freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} 2>/dev/null || true
+ pip freeze | grep apache-airflow-providers | xargs ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} || true
set +x
echo
echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}"
echo
set +x
- ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow || true
+ ${PACKAGING_TOOL_CMD} uninstall ${EXTRA_UNINSTALL_FLAGS} apache-airflow
set -x
}
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
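
For reference, a standalone sketch of the uninstall pattern above: list installed distributions, keep only the provider packages, and batch-remove them (the hunk itself lists with plain pip while uninstalling with ${PACKAGING_TOOL_CMD}; the xargs flag here is illustrative):

    # `|| true` keeps the build going when nothing matches; `xargs -r` (GNU)
    # skips running the uninstall entirely on empty input.
    pip freeze | grep apache-airflow-providers | xargs -r pip uninstall --yes || true
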
diff --git a/scripts/docker/install_from_docker_context_files.sh b/scripts/docker/install_from_docker_context_files.sh
index 4523ff469e53d..d6fab1e8273ce 100644
--- a/scripts/docker/install_from_docker_context_files.sh
+++ b/scripts/docker/install_from_docker_context_files.sh
@@ -24,6 +24,8 @@
# shellcheck source=scripts/docker/common.sh
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"
+# TODO: rewrite it all in Python (and all other scripts in scripts/docker)
+
function install_airflow_and_providers_from_docker_context_files(){
if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
@@ -39,46 +41,36 @@ function install_airflow_and_providers_from_docker_context_files(){
exit 1
fi
- # shellcheck disable=SC2206
- local packaging_flags=(
- # Don't quote this -- if it is empty we don't want it to create an
- # empty array element
- --find-links="file:///docker-context-files"
- )
-
- # Find Apache Airflow packages in docker-context files
- local reinstalling_apache_airflow_package
- reinstalling_apache_airflow_package=$(ls \
- /docker-context-files/apache?airflow?[0-9]*.{whl,tar.gz} 2>/dev/null || true)
- # Add extras when installing airflow
- if [[ -n "${reinstalling_apache_airflow_package}" ]]; then
- # When a provider depends on a dev version of Airflow, we need to
- # specify `apache-airflow==$VER`, otherwise pip will look for it on
- # PyPI, and fail to find it
+ # This is needed to get package names for local context packages
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} ${ADDITIONAL_PIP_INSTALL_FLAGS} --constraint ${HOME}/constraints.txt packaging
- # This will work as long as the wheel file is correctly named, which it
- # will be if it was built by wheel tooling
- local ver
- ver=$(basename "$reinstalling_apache_airflow_package" | cut -d "-" -f 2)
- reinstalling_apache_airflow_package="apache-airflow[${AIRFLOW_EXTRAS}]==$ver"
+ if [[ -n ${AIRFLOW_EXTRAS=} ]]; then
+ AIRFLOW_EXTRAS_TO_INSTALL="[${AIRFLOW_EXTRAS}]"
+ else
+ AIRFLOW_EXTRAS_TO_INSTALL=""
fi
- if [[ -z "${reinstalling_apache_airflow_package}" && ${AIRFLOW_VERSION=} != "" ]]; then
+ # Find Apache Airflow package in docker-context files
+ readarray -t install_airflow_package < <(EXTRAS="${AIRFLOW_EXTRAS_TO_INSTALL}" \
+ python /scripts/docker/get_package_specs.py /docker-context-files/apache?airflow?[0-9]*.{whl,tar.gz} 2>/dev/null || true)
+ echo
+ echo "${COLOR_BLUE}Found airflow packages in docker-context-files folder: ${install_airflow_package[*]}${COLOR_RESET}"
+ echo
+
+ if [[ -z "${install_airflow_package[*]}" && ${AIRFLOW_VERSION=} != "" ]]; then
# When we install only provider packages from docker-context files, we still need to
# install airflow from PyPI when AIRFLOW_VERSION is set. This handles the case where
# a pre-release dockerhub image of airflow is built, but we want to install some
# providers from docker-context files
- reinstalling_apache_airflow_package="apache-airflow[${AIRFLOW_EXTRAS}]==${AIRFLOW_VERSION}"
- fi
- # Find Apache Airflow packages in docker-context files
- local reinstalling_apache_airflow_providers_packages
- reinstalling_apache_airflow_providers_packages=$(ls \
- /docker-context-files/apache?airflow?providers*.{whl,tar.gz} 2>/dev/null || true)
- if [[ -z "${reinstalling_apache_airflow_package}" && \
- -z "${reinstalling_apache_airflow_providers_packages}" ]]; then
- return
+ install_airflow_package=("apache-airflow[${AIRFLOW_EXTRAS}]==${AIRFLOW_VERSION}")
fi
+ # Find Provider packages in docker-context files
+ readarray -t installing_providers_packages < <(python /scripts/docker/get_package_specs.py /docker-context-files/apache?airflow?providers*.{whl,tar.gz} 2>/dev/null || true)
+ echo
+ echo "${COLOR_BLUE}Found provider packages in docker-context-files folder: ${installing_providers_packages[*]}${COLOR_RESET}"
+ echo
+
if [[ ${USE_CONSTRAINTS_FOR_CONTEXT_PACKAGES=} == "true" ]]; then
local python_version
python_version=$(python -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")')
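
The rewrite above replaces ls globbing into whitespace-split strings with arrays filled via readarray from a helper script. A sketch of that pattern, with get_package_specs.py stubbed out by printf since the helper itself is outside this diff:

    # readarray -t stores one array element per input line; process substitution
    # keeps the assignment in the current shell (a plain pipe would fill the
    # array in a subshell and lose it).
    readarray -t install_airflow_package < <(printf '%s\n' \
        "apache-airflow[celery]==2.9.0.dev0")  # stand-in for get_package_specs.py output

    # Quoted [@] expansion passes each spec as exactly one argument, which is
    # why the arrays replaced the old whitespace-split string variables.
    printf 'spec: %s\n' "${install_airflow_package[@]}"
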
@@ -90,19 +82,19 @@ function install_airflow_and_providers_from_docker_context_files(){
echo
# force reinstall all airflow + provider packages with constraints found in the local constraints file
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "${packaging_flags[@]}" --upgrade \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} --upgrade \
${ADDITIONAL_PIP_INSTALL_FLAGS} --constraint "${local_constraints_file}" \
- ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages}
+ "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
set +x
else
echo
echo "${COLOR_BLUE}Installing docker-context-files packages with constraints from GitHub${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "${packaging_flags[@]}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
--constraint "${HOME}/constraints.txt" \
- ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages}
+ "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
set +x
fi
else
@@ -110,9 +102,9 @@ function install_airflow_and_providers_from_docker_context_files(){
echo "${COLOR_BLUE}Installing docker-context-files packages without constraints${COLOR_RESET}"
echo
set -x
- ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} "${packaging_flags[@]}" \
+ ${PACKAGING_TOOL_CMD} install ${EXTRA_INSTALL_FLAGS} \
${ADDITIONAL_PIP_INSTALL_FLAGS} \
- ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages}
+ "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
set +x
fi
common::install_packaging_tools
@@ -143,7 +135,6 @@ function install_all_other_packages_from_docker_context_files() {
common::get_colors
common::get_packaging_tool
common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::get_constraints_location
common::show_packaging_tool_version_and_location
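
After the rewrite, the same quoted array expansions feed all three install paths, which differ only in their constraint source. A condensed sketch of the branching (install flags trimmed; the existence check for the local constraints file is an assumption from context, since its assignment is not shown in this excerpt):

    if [[ ${USE_CONSTRAINTS_FOR_CONTEXT_PACKAGES=} == "true" && -f "${local_constraints_file=}" ]]; then
        ${PACKAGING_TOOL_CMD} install --constraint "${local_constraints_file}" \
            "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
    elif [[ ${USE_CONSTRAINTS_FOR_CONTEXT_PACKAGES=} == "true" ]]; then
        ${PACKAGING_TOOL_CMD} install --constraint "${HOME}/constraints.txt" \
            "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
    else
        ${PACKAGING_TOOL_CMD} install \
            "${install_airflow_package[@]}" "${installing_providers_packages[@]}"
    fi
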
diff --git a/scripts/docker/install_packaging_tools.sh b/scripts/docker/install_packaging_tools.sh
index 788db69221051..63a69ad4ef239 100644
--- a/scripts/docker/install_packaging_tools.sh
+++ b/scripts/docker/install_packaging_tools.sh
@@ -21,8 +21,5 @@
common::get_colors
common::get_packaging_tool
-common::get_airflow_version_specification
-common::override_pip_version_if_needed
common::show_packaging_tool_version_and_location
-
common::install_packaging_tools
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index 4e3cce5114bfc..7557d2165a149 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -71,10 +71,15 @@ function in_container_get_packaging_tool() {
echo
export PACKAGING_TOOL=""
export PACKAGING_TOOL_CMD="uv pip"
- export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
- export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
- export RESOLUTION_HIGHEST_FLAG="--resolution highest"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--resolution lowest-direct"
+ if [[ -z ${VIRTUAL_ENV=} ]]; then
+ export EXTRA_INSTALL_FLAGS="--python ${PYTHON_BIN}"
+ export EXTRA_UNINSTALL_FLAGS="--python ${PYTHON_BIN}"
+ else
+ export EXTRA_INSTALL_FLAGS=""
+ export EXTRA_UNINSTALL_FLAGS=""
+ fi
+ export UPGRADE_EAGERLY="--upgrade --resolution highest"
+ export UPGRADE_IF_NEEDED="--upgrade --resolution lowest-direct"
else
echo
echo "${COLOR_BLUE}Using 'pip' to install Airflow${COLOR_RESET}"
@@ -83,8 +88,8 @@ function in_container_get_packaging_tool() {
export PACKAGING_TOOL_CMD="pip"
export EXTRA_INSTALL_FLAGS="--root-user-action ignore"
export EXTRA_UNINSTALL_FLAGS="--yes"
- export RESOLUTION_HIGHEST_FLAG="--upgrade-strategy eager"
- export RESOLUTION_LOWEST_DIRECT_FLAG="--upgrade --upgrade-strategy only-if-needed"
+ export UPGRADE_EAGERLY="--upgrade-strategy eager"
+ export UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed"
fi
}
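
The renamed variables give both upgrade strategies tool-neutral names, so call sites like install_airflow.sh no longer branch on the tool, and the new VIRTUAL_ENV check drops the --python flags when uv is already targeting an active virtualenv. A condensed restatement of the mapping (values copied from the hunks above; the USE_UV selector and the install target are illustrative assumptions):

    # Selector condition (USE_UV) is an assumption for this sketch.
    if [[ ${USE_UV:-false} == "true" ]]; then
        PACKAGING_TOOL_CMD="uv pip"
        UPGRADE_EAGERLY="--upgrade --resolution highest"
        UPGRADE_IF_NEEDED="--upgrade --resolution lowest-direct"
    else
        PACKAGING_TOOL_CMD="pip"
        UPGRADE_EAGERLY="--upgrade-strategy eager"
        UPGRADE_IF_NEEDED="--upgrade --upgrade-strategy only-if-needed"
    fi
    # Either tool now accepts the same call site:
    ${PACKAGING_TOOL_CMD} install ${UPGRADE_IF_NEEDED} "requests>=2.31"
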