| # syntax=docker/dockerfile:1.4 |
| # Licensed to the Apache Software Foundation (ASF) under one or more |
| # contributor license agreements. See the NOTICE file distributed with |
| # this work for additional information regarding copyright ownership. |
| # The ASF licenses this file to You under the Apache License, Version 2.0 |
| # (the "License"); you may not use this file except in compliance with |
| # the License. You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| # |
| # WARNING: THIS DOCKERFILE IS NOT INTENDED FOR PRODUCTION USE OR DEPLOYMENT. |
| # |
# Base image is overridable at build time: --build-arg PYTHON_BASE_IMAGE=python:X.Y-slim-<release>
ARG PYTHON_BASE_IMAGE="python:3.7-slim-bullseye"

##############################################################################################
# This is the script image where we keep all inlined bash scripts needed in other segments
# We use PYTHON_BASE_IMAGE to make sure that the scripts are different for different platforms.
##############################################################################################
FROM ${PYTHON_BASE_IMAGE} as scripts
| |
| ############################################################################################## |
| # Please DO NOT modify the inlined scripts manually. The content of those files will be |
| # replaced by pre-commit automatically from the "scripts/docker/" folder. |
| # This is done in order to avoid problems with caching and file permissions and in order to |
| # make the PROD Dockerfile standalone |
| ############################################################################################## |
| |
# The content below is automatically copied from scripts/docker/determine_debian_version_specific_variables.sh
COPY <<"EOF" /determine_debian_version_specific_variables.sh
# Detects the Debian release (via lsb_release -cs) and exports the
# release-specific package names (libenchant/libgcc/selinux/libffi) consumed
# by later apt-get install steps. Fails the build for any release other than
# buster or bullseye.
function determine_debian_version_specific_variables() {
    local color_red
    color_red=$'\e[31m'
    local color_reset
    color_reset=$'\e[0m'

    local debian_version
    debian_version=$(lsb_release -cs)
    if [[ ${debian_version} == "buster" ]]; then
        export DISTRO_LIBENCHANT="libenchant-dev"
        export DISTRO_LIBGCC="libgcc-8-dev"
        export DISTRO_SELINUX="python-selinux"
        export DISTRO_LIBFFI="libffi6"
        # Note missing man directories on debian-buster
        # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=863199
        mkdir -pv /usr/share/man/man1
        mkdir -pv /usr/share/man/man7
    elif [[ ${debian_version} == "bullseye" ]]; then
        export DISTRO_LIBENCHANT="libenchant-2-2"
        export DISTRO_LIBGCC="libgcc-10-dev"
        export DISTRO_SELINUX="python3-selinux"
        export DISTRO_LIBFFI="libffi7"
    else
        echo
        echo "${color_red}Unknown distro version ${debian_version}${color_reset}"
        echo
        exit 1
    fi
}

determine_debian_version_specific_variables
EOF
| |
# The content below is automatically copied from scripts/docker/install_mysql.sh
COPY <<"EOF" /install_mysql.sh
set -euo pipefail
declare -a packages

MYSQL_VERSION="8.0"
readonly MYSQL_VERSION

COLOR_BLUE=$'\e[34m'
readonly COLOR_BLUE
COLOR_RESET=$'\e[0m'
readonly COLOR_RESET

: "${INSTALL_MYSQL_CLIENT:?Should be true or false}"

# Adds the Oracle MySQL apt repository (verifying its signing key first) and
# installs the mysql client packages. "$1" selects the variant: "dev" adds
# libmysqlclient-dev headers, "prod" installs the runtime library only.
install_mysql_client() {
    if [[ "${1}" == "dev" ]]; then
        packages=("libmysqlclient-dev" "mysql-client")
    elif [[ "${1}" == "prod" ]]; then
        packages=("libmysqlclient21" "mysql-client")
    else
        echo
        echo "Specify either prod or dev"
        echo
        exit 1
    fi

    echo
    echo "${COLOR_BLUE}Installing mysql client version ${MYSQL_VERSION}: ${1}${COLOR_RESET}"
    echo

    local key="467B942D3A79BD29"
    readonly key

    # Use a throwaway keyring so nothing leaks into the image's default GNUPGHOME.
    GNUPGHOME="$(mktemp -d)"
    export GNUPGHOME
    # Key retrieval is best-effort per keyserver: try each (in random order) until one succeeds.
    # NOTE(review): the sks-keyservers.net pools have been decommissioned; only the
    # keyserver.ubuntu.com entries are expected to respond - consider dropping the dead ones.
    set +e
    for keyserver in $(shuf -e ha.pool.sks-keyservers.net hkp://p80.pool.sks-keyservers.net:80 \
        keyserver.ubuntu.com hkp://keyserver.ubuntu.com:80)
    do
        gpg --keyserver "${keyserver}" --recv-keys "${key}" 2>&1 && break
    done
    set -e
    gpg --export "${key}" > /etc/apt/trusted.gpg.d/mysql.gpg
    gpgconf --kill all
    rm -rf "${GNUPGHOME}"
    unset GNUPGHOME
    echo "deb http://repo.mysql.com/apt/debian/ $(lsb_release -cs) mysql-${MYSQL_VERSION}" > /etc/apt/sources.list.d/mysql.list
    apt-get update
    apt-get install --no-install-recommends -y "${packages[@]}"
    # Keep the image small: drop auto-installed leftovers and apt caches.
    apt-get autoremove -yqq --purge
    apt-get clean && rm -rf /var/lib/apt/lists/*
}

# MySQL client packages are not published for ARM - force-disable installation there.
if [[ $(uname -m) == "arm64" || $(uname -m) == "aarch64" ]]; then
    # disable MYSQL for ARM64
    INSTALL_MYSQL_CLIENT="false"
fi

if [[ ${INSTALL_MYSQL_CLIENT:="true"} == "true" ]]; then
    install_mysql_client "${@}"
fi
EOF
| |
# The content below is automatically copied from scripts/docker/install_mssql.sh
COPY <<"EOF" /install_mssql.sh
set -euo pipefail

: "${INSTALL_MSSQL_CLIENT:?Should be true or false}"

COLOR_BLUE=$'\e[34m'
readonly COLOR_BLUE
COLOR_YELLOW=$'\e[33m'
readonly COLOR_YELLOW
COLOR_RESET=$'\e[0m'
readonly COLOR_RESET

# Installs the Microsoft ODBC driver (msodbcsql17 on buster, msodbcsql18 on
# bullseye) from the Microsoft apt repository. Skips quietly when disabled via
# INSTALL_MSSQL_CLIENT or when the Debian release is not buster/bullseye.
function install_mssql_client() {
    # Install MsSQL client from Microsoft repositories
    if [[ ${INSTALL_MSSQL_CLIENT:="true"} != "true" ]]; then
        echo
        echo "${COLOR_BLUE}Skip installing mssql client${COLOR_RESET}"
        echo
        return
    fi
    echo
    echo "${COLOR_BLUE}Installing mssql client${COLOR_RESET}"
    echo
    local distro
    local version
    # Declared local like its siblings - previously it leaked into the global scope.
    local version_name
    distro=$(lsb_release -is | tr '[:upper:]' '[:lower:]')
    version_name=$(lsb_release -cs | tr '[:upper:]' '[:lower:]')
    version=$(lsb_release -rs)
    local driver
    if [[ ${version_name} == "buster" ]]; then
        driver=msodbcsql17
    elif [[ ${version_name} == "bullseye" ]]; then
        driver=msodbcsql18
    else
        echo
        echo "${COLOR_YELLOW}Only Buster or Bullseye are supported. Skipping MSSQL installation${COLOR_RESET}"
        echo
        return
    fi
    # NOTE(review): apt-key is deprecated - consider migrating to a signed-by keyring.
    curl --silent https://packages.microsoft.com/keys/microsoft.asc | apt-key add - >/dev/null 2>&1
    curl --silent "https://packages.microsoft.com/config/${distro}/${version}/prod.list" > \
        /etc/apt/sources.list.d/mssql-release.list
    apt-get update -yqq
    apt-get upgrade -yqq
    # "-y" used to be passed twice (-yqq and -y); once is enough. The EULA must be
    # accepted non-interactively for the msodbcsql packages to install.
    ACCEPT_EULA=Y apt-get -yqq install --no-install-recommends "${driver}"
    # Keep the image small: drop auto-installed leftovers and apt caches
    # (the redundant early list cleanup before autoremove was removed).
    apt-get autoremove -yqq --purge
    apt-get clean && rm -rf /var/lib/apt/lists/*
}

# The Microsoft ODBC packages are not published for ARM - force-disable installation there.
if [[ $(uname -m) == "arm64" || $(uname -m) == "aarch64" ]]; then
    # disable MSSQL for ARM64
    INSTALL_MSSQL_CLIENT="false"
fi

install_mssql_client "${@}"
EOF
| |
# The content below is automatically copied from scripts/docker/install_postgres.sh
COPY <<"EOF" /install_postgres.sh
set -euo pipefail
declare -a packages

COLOR_BLUE=$'\e[34m'
readonly COLOR_BLUE
COLOR_RESET=$'\e[0m'
readonly COLOR_RESET

: "${INSTALL_POSTGRES_CLIENT:?Should be true or false}"

# Adds the PGDG apt repository and installs the postgres client packages.
# "$1" selects the variant: "dev" adds libpq-dev headers, "prod" installs
# only postgresql-client.
install_postgres_client() {
    echo
    echo "${COLOR_BLUE}Installing postgres client${COLOR_RESET}"
    echo

    if [[ "${1}" == "dev" ]]; then
        packages=("libpq-dev" "postgresql-client")
    elif [[ "${1}" == "prod" ]]; then
        packages=("postgresql-client")
    else
        echo
        echo "Specify either prod or dev"
        echo
        exit 1
    fi

    # NOTE(review): unlike the mysql/mssql scripts this curl has no --silent flag;
    # a failed download still aborts the build via "set -o pipefail", but consider
    # aligning the flags across the three installer scripts.
    curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
    echo "deb https://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list
    apt-get update
    apt-get install --no-install-recommends -y "${packages[@]}"
    # Keep the image small: drop auto-installed leftovers and apt caches.
    apt-get autoremove -yqq --purge
    apt-get clean && rm -rf /var/lib/apt/lists/*
}

if [[ ${INSTALL_POSTGRES_CLIENT:="true"} == "true" ]]; then
    install_postgres_client "${@}"
fi
EOF
| |
# The content below is automatically copied from scripts/docker/install_pip_version.sh
COPY <<"EOF" /install_pip_version.sh
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"

: "${AIRFLOW_PIP_VERSION:?Should be set}"

# Pins pip to exactly AIRFLOW_PIP_VERSION and makes sure ~/.local/bin exists
# (so user-installed entrypoints land on PATH later in the build).
function install_pip_version() {
    echo
    echo "${COLOR_BLUE}Installing pip version ${AIRFLOW_PIP_VERSION}${COLOR_RESET}"
    echo
    # ${HOME} is now quoted so the path survives unusual HOME values (was unquoted).
    pip install --disable-pip-version-check --no-cache-dir --upgrade "pip==${AIRFLOW_PIP_VERSION}" &&
        mkdir -p "${HOME}/.local/bin"
}

common::get_colors
common::get_airflow_version_specification
common::override_pip_version_if_needed
common::show_pip_version_and_location

install_pip_version
EOF
| |
# The content below is automatically copied from scripts/docker/install_airflow_dependencies_from_branch_tip.sh
COPY <<"EOF" /install_airflow_dependencies_from_branch_tip.sh

. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"

: "${AIRFLOW_REPO:?Should be set}"
: "${AIRFLOW_BRANCH:?Should be set}"
: "${INSTALL_MYSQL_CLIENT:?Should be true or false}"
: "${INSTALL_POSTGRES_CLIENT:?Should be true or false}"
: "${AIRFLOW_PIP_VERSION:?Should be set}"

# Pre-installs the dependencies of airflow from the tip of AIRFLOW_BRANCH so
# this docker layer acts as a dependency cache. Airflow itself (and any
# providers pulled in from the branch tarball) is uninstalled again at the
# end, leaving only its dependencies behind for the real install to reuse.
function install_airflow_dependencies_from_branch_tip() {
    echo
    echo "${COLOR_BLUE}Installing airflow from ${AIRFLOW_BRANCH}. It is used to cache dependencies${COLOR_RESET}"
    echo
    # Strip extras whose database clients are not going to be installed.
    if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
    fi
    if [[ ${INSTALL_POSTGRES_CLIENT} != "true" ]]; then
        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/postgres,}
    fi
    # Install latest set of dependencies using constraints. In case constraints were upgraded and there
    # are conflicts, this might fail, but it should be fixed in the following installation steps
    set -x
    pip install --root-user-action ignore \
        "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \
        --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}" || true
    # make sure correct PIP version is used
    pip install --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" 2>/dev/null
    # Providers bundled with the branch tarball are not wanted in the cache layer.
    pip freeze | grep apache-airflow-providers | xargs pip uninstall --yes 2>/dev/null || true
    set +x
    echo
    echo "${COLOR_BLUE}Uninstalling just airflow. Dependencies remain. Now target airflow can be reinstalled using mostly cached dependencies${COLOR_RESET}"
    echo
    pip uninstall --yes apache-airflow || true
}

common::get_colors
common::get_airflow_version_specification
common::override_pip_version_if_needed
common::get_constraints_location
common::show_pip_version_and_location

install_airflow_dependencies_from_branch_tip
EOF
| |
# The content below is automatically copied from scripts/docker/common.sh
COPY <<"EOF" /common.sh
set -euo pipefail

# Exports the ANSI color codes shared by all the other docker scripts.
function common::get_colors() {
    COLOR_BLUE=$'\e[34m'
    COLOR_GREEN=$'\e[32m'
    COLOR_RED=$'\e[31m'
    COLOR_RESET=$'\e[0m'
    COLOR_YELLOW=$'\e[33m'
    export COLOR_BLUE
    export COLOR_GREEN
    export COLOR_RED
    export COLOR_RESET
    export COLOR_YELLOW
}


# Derives the "==X.Y.Z" pip version specifier from AIRFLOW_VERSION, unless a
# specification is already set or airflow is installed from local sources (".").
function common::get_airflow_version_specification() {
    if [[ -z ${AIRFLOW_VERSION_SPECIFICATION=}
        && -n ${AIRFLOW_VERSION}
        && ${AIRFLOW_INSTALLATION_METHOD} != "." ]]; then
        AIRFLOW_VERSION_SPECIFICATION="==${AIRFLOW_VERSION}"
    fi
}

# Airflow 1.x and 2.0.x require an older pip to install correctly - pin it there.
function common::override_pip_version_if_needed() {
    if [[ -n ${AIRFLOW_VERSION} ]]; then
        if [[ ${AIRFLOW_VERSION} =~ ^2\.0.* || ${AIRFLOW_VERSION} =~ ^1\.* ]]; then
            export AIRFLOW_PIP_VERSION="22.1.2"
        fi
    fi
}

# Computes AIRFLOW_CONSTRAINTS_REFERENCE ("constraints-<version>" for released
# 2.x versions, otherwise the default constraints branch) and from it the full
# constraints-file URL for the current python version - unless already provided.
function common::get_constraints_location() {
    # auto-detect Airflow-constraint reference and location
    if [[ -z "${AIRFLOW_CONSTRAINTS_REFERENCE=}" ]]; then
        if [[ ${AIRFLOW_VERSION} =~ v?2.* && ! ${AIRFLOW_VERSION} =~ .*dev.* ]]; then
            AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION}
        else
            AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}
        fi
    fi

    if [[ -z ${AIRFLOW_CONSTRAINTS_LOCATION=} ]]; then
        local constraints_base="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${AIRFLOW_CONSTRAINTS_REFERENCE}"
        local python_version
        python_version="$(python --version 2>/dev/stdout | cut -d " " -f 2 | cut -d "." -f 1-2)"
        AIRFLOW_CONSTRAINTS_LOCATION="${constraints_base}/${AIRFLOW_CONSTRAINTS_MODE}-${python_version}.txt"
    fi
}

# Prints which pip binary is in use and its version - aids build debugging.
function common::show_pip_version_and_location() {
    echo "PATH=${PATH}"
    echo "pip on path: $(which pip)"
    echo "Using pip: $(pip --version)"
}
EOF
| |
# The content below is automatically copied from scripts/docker/install_pipx_tools.sh
COPY <<"EOF" /install_pipx_tools.sh
. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"

# Installs command-line tools via pipx (each in its own isolated venv) so they
# do not pollute the main python environment. Currently only mssql-cli, which
# is skipped on ARM.
function install_pipx_tools() {
    echo
    echo "${COLOR_BLUE}Installing pipx tools${COLOR_RESET}"
    echo
    # Make sure PIPX is installed in latest version
    pip install --root-user-action ignore --upgrade pipx
    if [[ $(uname -m) != "aarch64" ]]; then
        # Do not install mssql-cli for ARM
        # Install all the tools we need available in command line but without impacting the current environment
        pipx install mssql-cli

        # Unfortunately mssql-cli installed by `pipx` does not work out of the box because it uses
        # its own execution bash script which is not compliant with the auto-activation of
        # pipx venvs - we need to manually patch Python executable in the script to fix it: ¯\_(ツ)_/¯
        sed "s/python /\/root\/\.local\/pipx\/venvs\/mssql-cli\/bin\/python /" -i /root/.local/bin/mssql-cli
    fi
}

common::get_colors

install_pipx_tools
EOF
| |
# The content below is automatically copied from scripts/docker/prepare_node_modules.sh
COPY <<"EOF" /prepare_node_modules.sh
set -euo pipefail

COLOR_BLUE=$'\e[34m'
readonly COLOR_BLUE
COLOR_RESET=$'\e[0m'
readonly COLOR_RESET

# Installs www node_modules with yarn - either inside the airflow sources
# (when building from ".") or in the installed airflow package location.
# On yarn failure the captured stderr is dumped and the build fails.
function prepare_node_modules() {
    echo
    echo "${COLOR_BLUE}Preparing node modules${COLOR_RESET}"
    echo
    local www_dir
    if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." ]]; then
        # In case we are building from sources in production image, we should build the assets
        www_dir="${AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES}}/airflow/www"
    else
        www_dir="$(python -m site --user-site)/airflow/www"
    fi
    # Quote the directory - it may contain spaces (was unquoted).
    pushd "${www_dir}" || exit 1
    # Disable errexit only around the yarn call so we can report its output ourselves.
    set +e
    yarn install --frozen-lockfile --no-cache 2>/tmp/out-yarn-install.txt
    local res=$?
    if [[ ${res} != 0 ]]; then
        >&2 echo
        >&2 echo "Error when running yarn install:"
        >&2 echo
        >&2 cat /tmp/out-yarn-install.txt && rm -f /tmp/out-yarn-install.txt
        exit 1
    fi
    # Restore errexit (was missing - compile_www_assets.sh restores it; this
    # script previously left it disabled for the rest of the run).
    set -e
    rm -f /tmp/out-yarn-install.txt
    popd || exit 1
}

prepare_node_modules
EOF
| |
# The content below is automatically copied from scripts/docker/compile_www_assets.sh
COPY <<"EOF" /compile_www_assets.sh
set -euo pipefail

BUILD_TYPE=${BUILD_TYPE="prod"}
REMOVE_ARTIFACTS=${REMOVE_ARTIFACTS="true"}

COLOR_BLUE=$'\e[34m'
readonly COLOR_BLUE
COLOR_RESET=$'\e[0m'
readonly COLOR_RESET

# Compiles the www assets with "yarn run ${BUILD_TYPE}", writes an md5
# manifest of the inputs the compilation depends on, and - unless
# REMOVE_ARTIFACTS is disabled - removes node_modules and the build configs
# to keep the image small.
function compile_www_assets() {
    echo
    echo "${COLOR_BLUE}Compiling www assets: running yarn ${BUILD_TYPE}${COLOR_RESET}"
    echo
    local www_dir
    if [[ ${AIRFLOW_INSTALLATION_METHOD=} == "." ]]; then
        # In case we are building from sources in production image, we should build the assets
        www_dir="${AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES}}/airflow/www"
    else
        www_dir="$(python -m site --user-site)/airflow/www"
    fi
    # Quote the directory - it may contain spaces (was unquoted).
    pushd "${www_dir}" || exit 1
    # Disable errexit only around the yarn call so we can report its output ourselves.
    set +e
    yarn run "${BUILD_TYPE}" 2>/tmp/out-yarn-run.txt
    res=$?
    if [[ ${res} != 0 ]]; then
        >&2 echo
        >&2 echo "Error when running yarn run:"
        >&2 echo
        # Plain file: "-f" suffices (was "-rf"), consistent with the success path below.
        >&2 cat /tmp/out-yarn-run.txt && rm -f /tmp/out-yarn-run.txt
        exit 1
    fi
    rm -f /tmp/out-yarn-run.txt
    set -e
    local md5sum_file
    md5sum_file="static/dist/sum.md5"
    readonly md5sum_file
    # The manifest lets later builds detect whether assets need recompiling.
    find package.json yarn.lock static/css static/js -type f | sort | xargs md5sum > "${md5sum_file}"
    if [[ ${REMOVE_ARTIFACTS} == "true" ]]; then
        echo
        echo "${COLOR_BLUE}Removing generated node modules${COLOR_RESET}"
        echo
        rm -rf "${www_dir}/node_modules"
        rm -vf "${www_dir}"/{package.json,yarn.lock,.eslintignore,.eslintrc,.stylelintignore,.stylelintrc,compile_assets.sh,webpack.config.js}
    else
        echo
        echo "${COLOR_BLUE}Leaving generated node modules${COLOR_RESET}"
        echo
    fi
    popd || exit 1
}

compile_www_assets
EOF
| |
# The content below is automatically copied from scripts/docker/install_airflow.sh
COPY <<"EOF" /install_airflow.sh

. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"

: "${AIRFLOW_PIP_VERSION:?Should be set}"

# Installs airflow (from PyPI, a URL, or local sources "." depending on
# AIRFLOW_INSTALLATION_METHOD). Two modes: eager upgrade of all dependencies
# (UPGRADE_TO_NEWER_DEPENDENCIES != "false"), or constraint-pinned install
# followed by an only-if-needed upgrade. Each branch re-pins pip afterwards
# (the install may have bumped it) and ends with "pip check".
function install_airflow() {
    # Coherence check for editable installation mode.
    if [[ ${AIRFLOW_INSTALLATION_METHOD} != "." && \
        ${AIRFLOW_INSTALL_EDITABLE_FLAG} == "--editable" ]]; then
        echo
        echo "${COLOR_RED}ERROR! You can only use --editable flag when installing airflow from sources!${COLOR_RESET}"
        echo "${COLOR_RED} Current installation method is '${AIRFLOW_INSTALLATION_METHOD} and should be '.'${COLOR_RESET}"
        exit 1
    fi
    # Remove mysql from extras if client is not going to be installed
    if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then
        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,}
        echo "${COLOR_YELLOW}MYSQL client installation is disabled. Extra 'mysql' installations were therefore omitted.${COLOR_RESET}"
    fi
    # Remove postgres from extras if client is not going to be installed
    if [[ ${INSTALL_POSTGRES_CLIENT} != "true" ]]; then
        AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/postgres,}
        echo "${COLOR_YELLOW}Postgres client installation is disabled. Extra 'postgres' installations were therefore omitted.${COLOR_RESET}"
    fi
    if [[ "${UPGRADE_TO_NEWER_DEPENDENCIES}" != "false" ]]; then
        echo
        echo "${COLOR_BLUE}Installing all packages with eager upgrade${COLOR_RESET}"
        echo
        # eager upgrade
        pip install --root-user-action ignore --upgrade --upgrade-strategy eager \
            "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
            ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS}
        if [[ -n "${AIRFLOW_INSTALL_EDITABLE_FLAG}" ]]; then
            # Remove airflow and reinstall it using editable flag
            # We can only do it when we install airflow from sources
            set -x
            pip uninstall apache-airflow --yes
            pip install --root-user-action ignore ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
                "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
            set +x
        fi

        # make sure correct PIP version is used
        pip install --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" 2>/dev/null
        echo
        echo "${COLOR_BLUE}Running 'pip check'${COLOR_RESET}"
        echo
        pip check
    # NOTE(review): the trailing "\" after "else" below is a no-op line
    # continuation - safe to drop upstream.
    else \
        echo
        echo "${COLOR_BLUE}Installing all packages with constraints and upgrade if needed${COLOR_RESET}"
        echo
        set -x
        pip install --root-user-action ignore ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
            "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \
            --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}"
        # make sure correct PIP version is used
        pip install --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" 2>/dev/null
        # then upgrade if needed without using constraints to account for new limits in setup.py
        pip install --root-user-action ignore --upgrade --upgrade-strategy only-if-needed \
            ${AIRFLOW_INSTALL_EDITABLE_FLAG} \
            "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}"
        # make sure correct PIP version is used
        pip install --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" 2>/dev/null
        set +x
        echo
        echo "${COLOR_BLUE}Running 'pip check'${COLOR_RESET}"
        echo
        pip check
    fi

}

common::get_colors
common::get_airflow_version_specification
common::override_pip_version_if_needed
common::get_constraints_location
common::show_pip_version_and_location

install_airflow
EOF
| |
# The content below is automatically copied from scripts/docker/install_additional_dependencies.sh
COPY <<"EOF" /install_additional_dependencies.sh
set -euo pipefail

: "${UPGRADE_TO_NEWER_DEPENDENCIES:?Should be true or false}"
: "${ADDITIONAL_PYTHON_DEPS:?Should be set}"
: "${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS:?Should be set}"
: "${AIRFLOW_PIP_VERSION:?Should be set}"

. "$( dirname "${BASH_SOURCE[0]}" )/common.sh"

# Installs ADDITIONAL_PYTHON_DEPS on top of airflow - eagerly upgrading
# dependencies when UPGRADE_TO_NEWER_DEPENDENCIES != "false", otherwise
# upgrading only what is strictly needed. Re-pins pip afterwards (the install
# may have bumped it) and verifies the environment with "pip check".
function install_additional_dependencies() {
    if [[ "${UPGRADE_TO_NEWER_DEPENDENCIES}" != "false" ]]; then
        echo
        echo "${COLOR_BLUE}Installing additional dependencies while upgrading to newer dependencies${COLOR_RESET}"
        echo
        set -x
        # Deliberately unquoted: the variables hold space-separated requirement lists.
        pip install --root-user-action ignore --upgrade --upgrade-strategy eager \
            ${ADDITIONAL_PYTHON_DEPS} ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS}
        # make sure correct PIP version is used
        pip install --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" 2>/dev/null
        set +x
        echo
        echo "${COLOR_BLUE}Running 'pip check'${COLOR_RESET}"
        echo
        pip check
    else
        echo
        echo "${COLOR_BLUE}Installing additional dependencies upgrading only if needed${COLOR_RESET}"
        echo
        set -x
        # Deliberately unquoted: the variable holds a space-separated requirement list.
        pip install --root-user-action ignore --upgrade --upgrade-strategy only-if-needed \
            ${ADDITIONAL_PYTHON_DEPS}
        # make sure correct PIP version is used
        pip install --disable-pip-version-check "pip==${AIRFLOW_PIP_VERSION}" 2>/dev/null
        set +x
        echo
        echo "${COLOR_BLUE}Running 'pip check'${COLOR_RESET}"
        echo
        pip check
    fi
}

common::get_colors
common::get_airflow_version_specification
common::override_pip_version_if_needed
common::get_constraints_location
common::show_pip_version_and_location

install_additional_dependencies
EOF
| |
| # The content below is automatically copied from scripts/docker/entrypoint_ci.sh |
| COPY <<"EOF" /entrypoint_ci.sh |
| #!/usr/bin/env bash |
| if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then |
| set -x |
| fi |
| |
| . /opt/airflow/scripts/in_container/_in_container_script_init.sh |
| |
| LD_PRELOAD="/usr/lib/$(uname -m)-linux-gnu/libstdc++.so.6" |
| export LD_PRELOAD |
| |
| chmod 1777 /tmp |
| |
| AIRFLOW_SOURCES=$(cd "${IN_CONTAINER_DIR}/../.." || exit 1; pwd) |
| |
| PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION:=3.7} |
| |
| export AIRFLOW_HOME=${AIRFLOW_HOME:=${HOME}} |
| |
| : "${AIRFLOW_SOURCES:?"ERROR: AIRFLOW_SOURCES not set !!!!"}" |
| |
| if [[ ${SKIP_ENVIRONMENT_INITIALIZATION=} != "true" ]]; then |
| |
| if [[ $(uname -m) == "arm64" || $(uname -m) == "aarch64" ]]; then |
| if [[ ${BACKEND:=} == "mysql" || ${BACKEND} == "mssql" ]]; then |
| echo "${COLOR_RED}ARM platform is not supported for ${BACKEND} backend. Exiting.${COLOR_RESET}" |
| exit 1 |
| fi |
| fi |
| |
| echo |
| echo "${COLOR_BLUE}Running Initialization. Your basic configuration is:${COLOR_RESET}" |
| echo |
| echo " * ${COLOR_BLUE}Airflow home:${COLOR_RESET} ${AIRFLOW_HOME}" |
| echo " * ${COLOR_BLUE}Airflow sources:${COLOR_RESET} ${AIRFLOW_SOURCES}" |
| echo " * ${COLOR_BLUE}Airflow core SQL connection:${COLOR_RESET} ${AIRFLOW__CORE__SQL_ALCHEMY_CONN:=}" |
| echo |
| |
| RUN_TESTS=${RUN_TESTS:="false"} |
| CI=${CI:="false"} |
| USE_AIRFLOW_VERSION="${USE_AIRFLOW_VERSION:=""}" |
| |
| if [[ ${USE_AIRFLOW_VERSION} == "" ]]; then |
| export PYTHONPATH=${AIRFLOW_SOURCES} |
| echo |
| echo "${COLOR_BLUE}Using airflow version from current sources${COLOR_RESET}" |
| echo |
| if [[ -d "${AIRFLOW_SOURCES}/airflow/www/" ]]; then |
| pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null |
| ./ask_for_recompile_assets_if_needed.sh |
| popd >/dev/null |
| fi |
| # Cleanup the logs, tmp when entering the environment |
| sudo rm -rf "${AIRFLOW_SOURCES}"/logs/* |
| sudo rm -rf "${AIRFLOW_SOURCES}"/tmp/* |
| mkdir -p "${AIRFLOW_SOURCES}"/logs/ |
| mkdir -p "${AIRFLOW_SOURCES}"/tmp/ |
| elif [[ ${USE_AIRFLOW_VERSION} == "none" ]]; then |
| echo |
| echo "${COLOR_BLUE}Skip installing airflow - only install wheel/tar.gz packages that are present locally.${COLOR_RESET}" |
| echo |
| echo |
| echo "${COLOR_BLUE}Uninstalling airflow and providers" |
| echo |
| uninstall_airflow_and_providers |
| elif [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then |
| echo |
| echo "${COLOR_BLUE}Uninstalling airflow and providers" |
| echo |
| uninstall_airflow_and_providers |
| echo "${COLOR_BLUE}Install airflow from wheel package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" |
| echo |
| install_airflow_from_wheel "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" |
| uninstall_providers |
| elif [[ ${USE_AIRFLOW_VERSION} == "sdist" ]]; then |
| echo |
| echo "${COLOR_BLUE}Uninstalling airflow and providers" |
| echo |
| uninstall_airflow_and_providers |
| echo |
| echo "${COLOR_BLUE}Install airflow from sdist package with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" |
| echo |
| install_airflow_from_sdist "${AIRFLOW_EXTRAS}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" |
| uninstall_providers |
| else |
| echo |
| echo "${COLOR_BLUE}Uninstalling airflow and providers" |
| echo |
| uninstall_airflow_and_providers |
| echo |
| echo "${COLOR_BLUE}Install released airflow from PyPI with extras: '${AIRFLOW_EXTRAS}' and constraints reference ${AIRFLOW_CONSTRAINTS_REFERENCE}.${COLOR_RESET}" |
| echo |
| install_released_airflow_version "${USE_AIRFLOW_VERSION}" "${AIRFLOW_CONSTRAINTS_REFERENCE}" |
| fi |
| if [[ ${USE_PACKAGES_FROM_DIST=} == "true" ]]; then |
| echo |
| echo "${COLOR_BLUE}Install all packages from dist folder${COLOR_RESET}" |
| if [[ ${USE_AIRFLOW_VERSION} == "wheel" ]]; then |
| echo "${COLOR_BLUE}(except apache-airflow)${COLOR_RESET}" |
| fi |
| if [[ ${PACKAGE_FORMAT} == "both" ]]; then |
| echo |
| echo "${COLOR_RED}ERROR:You can only specify 'wheel' or 'sdist' as PACKAGE_FORMAT not 'both'.${COLOR_RESET}" |
| echo |
| exit 1 |
| fi |
| echo |
| installable_files=() |
| for file in /dist/*.{whl,tar.gz} |
| do |
| if [[ ${USE_AIRFLOW_VERSION} == "wheel" && ${file} == "/dist/apache?airflow-[0-9]"* ]]; then |
| # Skip Apache Airflow package - it's just been installed above with extras |
| echo "Skipping ${file}" |
| continue |
| fi |
| if [[ ${PACKAGE_FORMAT} == "wheel" && ${file} == *".whl" ]]; then |
| echo "Adding ${file} to install" |
| installable_files+=( "${file}" ) |
| fi |
| if [[ ${PACKAGE_FORMAT} == "sdist" && ${file} == *".tar.gz" ]]; then |
| echo "Adding ${file} to install" |
| installable_files+=( "${file}" ) |
| fi |
| done |
| if (( ${#installable_files[@]} )); then |
| pip install --root-user-action ignore "${installable_files[@]}" |
| fi |
| fi |
| |
| # Added to have run-tests on path |
| export PATH=${PATH}:${AIRFLOW_SOURCES} |
| |
| # This is now set in conftest.py - only for pytest tests |
| unset AIRFLOW__CORE__UNIT_TEST_MODE |
| |
| mkdir -pv "${AIRFLOW_HOME}/logs/" |
| cp -f "${IN_CONTAINER_DIR}/airflow_ci.cfg" "${AIRFLOW_HOME}/unittests.cfg" |
| |
| # Change the default worker_concurrency for tests |
| export AIRFLOW__CELERY__WORKER_CONCURRENCY=8 |
| |
| set +e |
| |
| "${IN_CONTAINER_DIR}/check_environment.sh" |
| ENVIRONMENT_EXIT_CODE=$? |
| set -e |
| if [[ ${ENVIRONMENT_EXIT_CODE} != 0 ]]; then |
| echo |
| echo "Error: check_environment returned ${ENVIRONMENT_EXIT_CODE}. Exiting." |
| echo |
| exit ${ENVIRONMENT_EXIT_CODE} |
| fi |
| # Create symbolic link to fix possible issues with kubectl config cmd-path |
| mkdir -p /usr/lib/google-cloud-sdk/bin |
| touch /usr/lib/google-cloud-sdk/bin/gcloud |
| ln -s -f /usr/bin/gcloud /usr/lib/google-cloud-sdk/bin/gcloud |
| |
| if [[ ${SKIP_SSH_SETUP="false"} == "false" ]]; then |
| # Set up ssh keys |
| echo 'yes' | ssh-keygen -t rsa -C your_email@youremail.com -m PEM -P '' -f ~/.ssh/id_rsa \ |
| >"${AIRFLOW_HOME}/logs/ssh-keygen.log" 2>&1 |
| |
| cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys |
| ln -s -f ~/.ssh/authorized_keys ~/.ssh/authorized_keys2 |
| chmod 600 ~/.ssh/* |
| |
| # SSH Service |
| sudo service ssh restart >/dev/null 2>&1 |
| |
| # Sometimes the server is not quick enough to load the keys! |
| while [[ $(ssh-keyscan -H localhost 2>/dev/null | wc -l) != "3" ]] ; do |
| echo "Not all keys yet loaded by the server" |
| sleep 0.05 |
| done |
| |
| ssh-keyscan -H localhost >> ~/.ssh/known_hosts 2>/dev/null |
| fi |
| |
| # shellcheck source=scripts/in_container/configure_environment.sh |
| . "${IN_CONTAINER_DIR}/configure_environment.sh" |
| |
| # shellcheck source=scripts/in_container/run_init_script.sh |
| . "${IN_CONTAINER_DIR}/run_init_script.sh" |
| |
| cd "${AIRFLOW_SOURCES}" |
| |
| if [[ ${START_AIRFLOW:="false"} == "true" || ${START_AIRFLOW} == "True" ]]; then |
| export AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS=${LOAD_DEFAULT_CONNECTIONS} |
| export AIRFLOW__CORE__LOAD_EXAMPLES=${LOAD_EXAMPLES} |
| # shellcheck source=scripts/in_container/bin/run_tmux |
| exec run_tmux |
| fi |
| fi |
| |
| set +u |
| if [[ "${RUN_TESTS}" != "true" ]]; then |
| exec /bin/bash "${@}" |
| fi |
| set -u |
| |
| export RESULT_LOG_FILE="/files/test_result-${TEST_TYPE}-${BACKEND}.xml" |
| |
| EXTRA_PYTEST_ARGS=( |
| "--verbosity=0" |
| "--strict-markers" |
| "--durations=100" |
| "--maxfail=50" |
| "--color=yes" |
| "--junitxml=${RESULT_LOG_FILE}" |
| # timeouts in seconds for individual tests |
| "--timeouts-order" |
| "moi" |
| "--setup-timeout=60" |
| "--execution-timeout=60" |
| "--teardown-timeout=60" |
| # Only display summary for non-expected case |
| # f - failed |
| # E - error |
| # X - xpassed (passed even if expected to fail) |
| # The following cases are not displayed: |
| # s - skipped |
| # x - xfailed (expected to fail and failed) |
| # p - passed |
| # P - passed with output |
| "-rfEX" |
| ) |
| |
| if [[ "${TEST_TYPE}" == "Helm" ]]; then |
| # Enable parallelism |
| EXTRA_PYTEST_ARGS+=( |
| "-n" "auto" |
| ) |
| else |
| EXTRA_PYTEST_ARGS+=( |
| "--with-db-init" |
| ) |
| fi |
| |
| if [[ ${ENABLE_TEST_COVERAGE:="false"} == "true" ]]; then |
| EXTRA_PYTEST_ARGS+=( |
| "--cov=airflow/" |
| "--cov-config=.coveragerc" |
| "--cov-report=xml:/files/coverage-${TEST_TYPE}-${BACKEND}.xml" |
| ) |
| fi |
| |
| declare -a SELECTED_TESTS CLI_TESTS API_TESTS PROVIDERS_TESTS CORE_TESTS WWW_TESTS \ |
| ALL_TESTS ALL_PRESELECTED_TESTS ALL_OTHER_TESTS |
| |
| function find_all_other_tests() { |
| local all_tests_dirs |
| all_tests_dirs=$(find "tests" -type d) |
| all_tests_dirs=$(echo "${all_tests_dirs}" | sed "/tests$/d" ) |
| all_tests_dirs=$(echo "${all_tests_dirs}" | sed "/tests\/dags/d" ) |
| local path |
| for path in "${ALL_PRESELECTED_TESTS[@]}" |
| do |
| escaped_path="${path//\//\\\/}" |
| all_tests_dirs=$(echo "${all_tests_dirs}" | sed "/${escaped_path}/d" ) |
| done |
| for path in ${all_tests_dirs} |
| do |
| ALL_OTHER_TESTS+=("${path}") |
| done |
| } |
| |
# If explicit test paths were passed as script arguments, run exactly those;
# otherwise map TEST_TYPE onto one of the preselected directory buckets.
if [[ ${#@} -gt 0 && -n "$1" ]]; then
SELECTED_TESTS=("${@}")
else
CLI_TESTS=("tests/cli")
API_TESTS=("tests/api" "tests/api_connexion")
PROVIDERS_TESTS=("tests/providers")
ALWAYS_TESTS=("tests/always")
CORE_TESTS=(
"tests/core"
"tests/executors"
"tests/jobs"
"tests/models"
"tests/serialization"
"tests/ti_deps"
"tests/utils"
)
WWW_TESTS=("tests/www")
HELM_CHART_TESTS=("tests/charts")
ALL_TESTS=("tests")
# Union of all named buckets — find_all_other_tests() subtracts these from the
# full tests/ tree to compute the "Other" bucket.
ALL_PRESELECTED_TESTS=(
"${CLI_TESTS[@]}"
"${API_TESTS[@]}"
"${HELM_CHART_TESTS[@]}"
"${PROVIDERS_TESTS[@]}"
"${CORE_TESTS[@]}"
"${ALWAYS_TESTS[@]}"
"${WWW_TESTS[@]}"
)

if [[ ${TEST_TYPE:=""} == "CLI" ]]; then
SELECTED_TESTS=("${CLI_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "API" ]]; then
SELECTED_TESTS=("${API_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "Providers" ]]; then
SELECTED_TESTS=("${PROVIDERS_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "Core" ]]; then
SELECTED_TESTS=("${CORE_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "Always" ]]; then
SELECTED_TESTS=("${ALWAYS_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "WWW" ]]; then
SELECTED_TESTS=("${WWW_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "Helm" ]]; then
SELECTED_TESTS=("${HELM_CHART_TESTS[@]}")
elif [[ ${TEST_TYPE:=""} == "Other" ]]; then
find_all_other_tests
SELECTED_TESTS=("${ALL_OTHER_TESTS[@]}")
# These types all run the whole tree; they are distinguished from each other by
# the extra pytest flags added further below, not by path selection.
# NOTE(review): "Always" here is dead — it already matched the branch above.
elif [[ ${TEST_TYPE:=""} == "All" || ${TEST_TYPE} == "Quarantined" || \
${TEST_TYPE} == "Always" || \
${TEST_TYPE} == "Postgres" || ${TEST_TYPE} == "MySQL" || \
${TEST_TYPE} == "Long" || \
${TEST_TYPE} == "Integration" ]]; then
SELECTED_TESTS=("${ALL_TESTS[@]}")
else
echo
echo "${COLOR_RED}ERROR: Wrong test type ${TEST_TYPE} ${COLOR_RESET}"
echo
exit 1
fi

fi
# NOTE(review): ALWAYS_TESTS and HELM_CHART_TESTS are not marked readonly here —
# verify against the source script whether that is deliberate.
readonly SELECTED_TESTS CLI_TESTS API_TESTS PROVIDERS_TESTS CORE_TESTS WWW_TESTS \
ALL_TESTS ALL_PRESELECTED_TESTS
| |
# Add extra pytest flags depending on the requested integrations / TEST_TYPE.
if [[ -n ${LIST_OF_INTEGRATION_TESTS_TO_RUN=} ]]; then
# Integration tests: one "--integration <name>" pair per requested integration.
for INT in ${LIST_OF_INTEGRATION_TESTS_TO_RUN}
do
EXTRA_PYTEST_ARGS+=("--integration" "${INT}")
done
# The explicit --include-* flags suggest these markers are excluded by default.
elif [[ ${TEST_TYPE:=""} == "Long" ]]; then
EXTRA_PYTEST_ARGS+=(
"-m" "long_running"
"--include-long-running"
)
elif [[ ${TEST_TYPE:=""} == "Postgres" ]]; then
EXTRA_PYTEST_ARGS+=(
"--backend"
"postgres"
)
elif [[ ${TEST_TYPE:=""} == "MySQL" ]]; then
EXTRA_PYTEST_ARGS+=(
"--backend"
"mysql"
)
elif [[ ${TEST_TYPE:=""} == "Quarantined" ]]; then
EXTRA_PYTEST_ARGS+=(
"-m" "quarantined"
"--include-quarantined"
)
fi
| |
echo
echo "Running tests ${SELECTED_TESTS[*]}"
echo

# Final pytest argument list: extra flags first, then the selected test paths.
ARGS=("${EXTRA_PYTEST_ARGS[@]}" "${SELECTED_TESTS[@]}")

# System tests use a dedicated runner script; everything else goes through the
# regular CI test runner.
if [[ ${RUN_SYSTEM_TESTS:="false"} == "true" ]]; then
"${IN_CONTAINER_DIR}/run_system_tests.sh" "${ARGS[@]}"
else
"${IN_CONTAINER_DIR}/run_ci_tests.sh" "${ARGS[@]}"
fi
EOF
| |
# The content below is automatically copied from scripts/docker/entrypoint_exec.sh
COPY <<"EOF" /entrypoint_exec.sh
#!/usr/bin/env bash
# Entrypoint for exec-ing into the CI container: source the in-container init,
# environment configuration and the user-supplied init script, then hand over
# control to an interactive bash with the caller's arguments.
. /opt/airflow/scripts/in_container/_in_container_script_init.sh

. /opt/airflow/scripts/in_container/configure_environment.sh

. /opt/airflow/scripts/in_container/run_init_script.sh

exec /bin/bash "${@}"
EOF
| |
| ############################################################################################## |
| # This is the www image where we keep all inlined files needed to build ui |
| # It is copied separately to volume to speed up building and avoid cache miss on changed |
| # file permissions. |
| # We use PYTHON_BASE_IMAGE to make sure that the scripts are different for different platforms. |
| ############################################################################################## |
| FROM ${PYTHON_BASE_IMAGE} as www |
| COPY airflow/www/package.json airflow/www/yarn.lock airflow/www/webpack.config.js / |
| COPY airflow/www/static/ /static |
| |
| FROM ${PYTHON_BASE_IMAGE} as main |
| |
| # Nolog bash flag is currently ignored - but you can replace it with other flags (for example |
| # xtrace - to show commands executed) |
| SHELL ["/bin/bash", "-o", "pipefail", "-o", "errexit", "-o", "nounset", "-o", "nolog", "-c"] |
| |
| ARG PYTHON_BASE_IMAGE |
| ARG AIRFLOW_IMAGE_REPOSITORY="https://github.com/apache/airflow" |
| |
| # By increasing this number we can do force build of all dependencies |
| ARG DEPENDENCIES_EPOCH_NUMBER="6" |
| |
| # Make sure noninteractive debian install is used and language variables set |
| ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} \ |
| DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ |
| LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 \ |
| DEPENDENCIES_EPOCH_NUMBER=${DEPENDENCIES_EPOCH_NUMBER} \ |
| INSTALL_MYSQL_CLIENT="true" \ |
| INSTALL_MSSQL_CLIENT="true" \ |
| INSTALL_POSTGRES_CLIENT="true" |
| |
| RUN echo "Base image version: ${PYTHON_BASE_IMAGE}" |
| |
ARG ADDITIONAL_DEV_APT_DEPS=""
# Adds the NodeSource (node 14) and Yarn apt repositories needed to build the www assets.
ARG DEV_APT_COMMAND="\
curl --silent --fail --location https://deb.nodesource.com/setup_14.x | bash - \
&& curl --silent --fail https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - >/dev/null 2>&1 \
&& echo 'deb https://dl.yarnpkg.com/debian/ stable main' > /etc/apt/sources.list.d/yarn.list"
ARG ADDITIONAL_DEV_APT_COMMAND=""
ARG ADDITIONAL_DEV_ENV_VARS=""

ENV DEV_APT_COMMAND=${DEV_APT_COMMAND} \
ADDITIONAL_DEV_APT_DEPS=${ADDITIONAL_DEV_APT_DEPS} \
ADDITIONAL_DEV_APT_COMMAND=${ADDITIONAL_DEV_APT_COMMAND}

COPY --from=scripts determine_debian_version_specific_variables.sh /scripts/docker/

# Install basic and additional apt dependencies
# (DISTRO_LIBENCHANT is set by determine_debian_version_specific_variables.sh)
RUN apt-get update \
&& apt-get install --no-install-recommends -yqq apt-utils >/dev/null 2>&1 \
&& apt-get install -y --no-install-recommends curl gnupg2 lsb-release \
# man dirs created up-front — presumably required by some package postinst
# scripts on slim images; verify against the source script.
&& mkdir -pv /usr/share/man/man1 \
&& mkdir -pv /usr/share/man/man7 \
&& export ${ADDITIONAL_DEV_ENV_VARS?} \
&& source /scripts/docker/determine_debian_version_specific_variables.sh \
&& bash -o pipefail -o errexit -o nounset -o nolog -c "${DEV_APT_COMMAND}" \
&& bash -o pipefail -o errexit -o nounset -o nolog -c "${ADDITIONAL_DEV_APT_COMMAND}" \
&& apt-get update \
&& apt-get install -y --no-install-recommends \
apt-utils \
build-essential \
dirmngr \
dumb-init \
freetds-bin \
freetds-dev \
git \
graphviz \
gosu \
libffi-dev \
libldap2-dev \
libkrb5-dev \
libpq-dev \
libsasl2-2 \
libsasl2-dev \
libsasl2-modules \
libssl-dev \
"${DISTRO_LIBENCHANT}" \
locales \
netcat \
nodejs \
rsync \
sasl2-bin \
sudo \
unixodbc \
unixodbc-dev \
yarn \
${ADDITIONAL_DEV_APT_DEPS} \
&& apt-get autoremove -yqq --purge \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
| |
# Only copy mysql/mssql installation scripts for now - so that changing the other
# scripts which are needed much later will not invalidate the docker layer here.
COPY --from=scripts install_mysql.sh install_mssql.sh install_postgres.sh /scripts/docker/

# We run scripts with bash here to make sure we can execute the scripts. Changing to +x might have an
# unexpected result - the cache for Dockerfiles might get invalidated in case the host system
# had different umask set and group x bit was not set. In Azure the bit might be not set at all.
# That also protects against the AUFS Docker backend problem where changing the executable bit required sync.
# Installs DB client libraries and creates the passwordless-sudo "airflow" user.
RUN bash /scripts/docker/install_mysql.sh prod \
&& bash /scripts/docker/install_mysql.sh dev \
&& bash /scripts/docker/install_mssql.sh \
&& bash /scripts/docker/install_postgres.sh dev \
&& adduser --gecos "First Last,RoomNumber,WorkPhone,HomePhone" --disabled-password \
--quiet "airflow" --home "/home/airflow" \
# NOTE(review): passwd usually reads from the terminal rather than stdin -
# confirm this actually sets the password (chpasswd is the stdin-friendly tool).
&& echo -e "airflow\nairflow" | passwd airflow 2>&1 \
&& echo "airflow ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/airflow \
&& chmod 0440 /etc/sudoers.d/airflow
| |
# Runtime apt dependencies for the main stage; extendable at build time via
# ADDITIONAL_RUNTIME_APT_DEPS. (Fixed: krb5-user was listed twice.)
ARG RUNTIME_APT_DEPS="\
apt-transport-https \
bash-completion \
ca-certificates \
software-properties-common \
krb5-user \
ldap-utils \
less \
lsb-release \
net-tools \
openssh-client \
openssh-server \
postgresql-client \
sqlite3 \
tmux \
unzip \
vim \
xxd"
| |
# Install Helm (standalone binary fetched from get.helm.sh for this platform).
ARG HELM_VERSION="v3.6.3"

# --fail makes curl exit non-zero on an HTTP error (and fail the pipeline, since
# the SHELL for this stage sets pipefail) instead of piping an HTML error page
# into tar — consistent with the other curl invocations in this file.
RUN SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]') \
&& PLATFORM=$([ "$(uname -m)" = "aarch64" ] && echo "arm64" || echo "amd64" ) \
&& HELM_URL="https://get.helm.sh/helm-${HELM_VERSION}-${SYSTEM}-${PLATFORM}.tar.gz" \
&& curl --silent --fail --location "${HELM_URL}" | tar -xz -O "${SYSTEM}-${PLATFORM}/helm" > /usr/local/bin/helm \
&& chmod +x /usr/local/bin/helm
| |
ARG ADDITIONAL_RUNTIME_APT_DEPS=""
ARG RUNTIME_APT_COMMAND=""
ARG ADDITIONAL_RUNTIME_APT_COMMAND=""
ARG ADDITIONAL_DEV_APT_ENV=""
ARG ADDITIONAL_RUNTIME_APT_ENV=""

ARG DOCKER_CLI_VERSION=19.03.9
ARG HOME=/root
ARG AIRFLOW_HOME=/root/airflow
ARG AIRFLOW_SOURCES=/opt/airflow

# Persist the build args as image environment variables.
# Fixed typo: this was exported as RUNTIME_APT_DEP (missing "S"), so the
# RUNTIME_APT_DEPS env var was never actually set in the image (the apt RUN
# below only worked because the same-stage ARG was still in scope).
ENV RUNTIME_APT_DEPS=${RUNTIME_APT_DEPS} \
ADDITIONAL_RUNTIME_APT_DEPS=${ADDITIONAL_RUNTIME_APT_DEPS} \
RUNTIME_APT_COMMAND=${RUNTIME_APT_COMMAND} \
ADDITIONAL_RUNTIME_APT_COMMAND=${ADDITIONAL_RUNTIME_APT_COMMAND} \
DOCKER_CLI_VERSION=${DOCKER_CLI_VERSION} \
HOME=${HOME} \
AIRFLOW_HOME=${AIRFLOW_HOME} \
AIRFLOW_SOURCES=${AIRFLOW_SOURCES}
| |
# Install runtime apt dependencies plus the standalone docker CLI (used for
# docker-in-docker operations in CI).
# Fixed: the docker CLI curl lacked --fail, so an HTTP error page would have
# been piped into tar; with pipefail set by SHELL, --fail now aborts the build.
RUN export ${ADDITIONAL_DEV_APT_ENV?} \
&& export ${ADDITIONAL_RUNTIME_APT_ENV?} \
&& source /scripts/docker/determine_debian_version_specific_variables.sh \
&& bash -o pipefail -o errexit -o nounset -o nolog -c "${RUNTIME_APT_COMMAND}" \
&& bash -o pipefail -o errexit -o nounset -o nolog -c "${ADDITIONAL_RUNTIME_APT_COMMAND}" \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
"${DISTRO_LIBGCC}" \
${RUNTIME_APT_DEPS} \
${ADDITIONAL_RUNTIME_APT_DEPS} \
&& apt-get autoremove -yqq --purge \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& curl --silent --fail "https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_CLI_VERSION}.tgz" \
| tar -C /usr/bin --strip-components=1 -xvzf - docker/docker
| |
WORKDIR ${AIRFLOW_SOURCES}

# Pre-create AIRFLOW_HOME with dags/ and logs/ subdirectories.
RUN mkdir -pv ${AIRFLOW_HOME} && \
mkdir -pv ${AIRFLOW_HOME}/dags && \
mkdir -pv ${AIRFLOW_HOME}/logs

ARG AIRFLOW_REPO=apache/airflow
ARG AIRFLOW_BRANCH=main
# Airflow Extras installed
ARG AIRFLOW_EXTRAS="all"
ARG ADDITIONAL_AIRFLOW_EXTRAS=""
# Allows to override constraints source
ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow"
ARG AIRFLOW_CONSTRAINTS_MODE="constraints-source-providers"
ARG AIRFLOW_CONSTRAINTS_REFERENCE=""
ARG AIRFLOW_CONSTRAINTS_LOCATION=""
ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-main"
# By changing the epoch we can force reinstalling Airflow and pip all dependencies
# It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable.
ARG AIRFLOW_CI_BUILD_EPOCH="3"
ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
# By default in the image, we are installing all providers when installing from sources
ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
ARG AIRFLOW_PIP_VERSION=22.1.2
# Setup PIP
# By default PIP install run without cache to make image smaller
ARG PIP_NO_CACHE_DIR="true"
# By default PIP has progress bar but you can disable it.
ARG PIP_PROGRESS_BAR="on"
# Optimizing installation of Cassandra driver (in case there are no prebuilt wheels, which is the
# case as of 20.04.2021 with Python 3.9)
# Speeds up building the image - cassandra driver without CYTHON saves around 10 minutes
ARG CASS_DRIVER_NO_CYTHON="1"
# Build cassandra driver on multiple CPUs
ARG CASS_DRIVER_BUILD_CONCURRENCY="8"

ARG AIRFLOW_VERSION="2.3.0.dev"
| |
# Persist the build args above as environment variables for in-container scripts.
ENV AIRFLOW_REPO=${AIRFLOW_REPO}\
AIRFLOW_BRANCH=${AIRFLOW_BRANCH} \
AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS} \
CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY} \
AIRFLOW_CONSTRAINTS_MODE=${AIRFLOW_CONSTRAINTS_MODE} \
AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE} \
AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION} \
DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH} \
AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH} \
AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} \
INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES} \
AIRFLOW_VERSION=${AIRFLOW_VERSION} \
AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} \
# In the CI image we always:
# * install MySQL, MsSQL
# * install airflow from current sources, not from PyPI package
# * install airflow without `--user` flag
# * install airflow in editable mode
# * install always current version of airflow
INSTALL_MYSQL_CLIENT="true" \
INSTALL_MSSQL_CLIENT="true" \
INSTALL_POSTGRES_CLIENT="true" \
AIRFLOW_INSTALLATION_METHOD="." \
AIRFLOW_INSTALL_EDITABLE_FLAG="--editable" \
AIRFLOW_VERSION_SPECIFICATION="" \
PIP_NO_CACHE_DIR=${PIP_NO_CACHE_DIR} \
PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} \
CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY} \
CASS_DRIVER_NO_CYTHON=${CASS_DRIVER_NO_CYTHON}

RUN echo "Airflow version: ${AIRFLOW_VERSION}"

# Those are additional constraints that are needed for some extras but we do not want to
# force them on the main Airflow package. Those limitations are:
# * dill<0.3.3 required by apache-beam
ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="dill<0.3.3"
ARG UPGRADE_TO_NEWER_DEPENDENCIES="false"
ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \
UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES}

# Copy all scripts required for installation - changing any of those should lead to
# rebuilding from here
COPY --from=scripts install_pip_version.sh install_airflow_dependencies_from_branch_tip.sh \
common.sh /scripts/docker/
| |
# We are first creating a venv where all python packages and .so binaries needed by those are
# installed.
# In case of CI builds we want to pre-install the main version of airflow dependencies so that
# we do not have to always reinstall them from scratch.
# The cache is automatically rebuilt from scratch every time a patch release of python is released.
# Airflow itself (and providers in case INSTALL_PROVIDERS_FROM_SOURCES is "false")
# is uninstalled afterwards - only the dependencies remain.
# The cache is only used when "upgrade to newer dependencies" is not set, to automatically
# account for removed dependencies (we do not install them in the first place)
RUN bash /scripts/docker/install_pip_version.sh; \
if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \
${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \
bash /scripts/docker/install_airflow_dependencies_from_branch_tip.sh; \
fi

# The PATH is needed for PIPX to find the tools installed
ENV PATH="/root/.local/bin:${PATH}"

COPY --from=scripts install_pipx_tools.sh /scripts/docker/

# Install useful command line tools in their own virtualenv so that they do not clash with
# dependencies installed in Airflow
RUN bash /scripts/docker/install_pipx_tools.sh

# Copy package.json and yarn.lock to install node modules
# this way even if other static check files change, node modules will not need to be installed
# we want to keep node_modules so we can do this step separately from compiling assets
COPY --from=www package.json yarn.lock ${AIRFLOW_SOURCES}/airflow/www/
COPY --from=scripts prepare_node_modules.sh /scripts/docker/

# Package JS/css for production
RUN bash /scripts/docker/prepare_node_modules.sh

# Copy all the needed www/ files for assets compilation. Done as two separate COPY
# commands, as otherwise the _contents_ of static/ would be copied into www/
COPY --from=www webpack.config.js ${AIRFLOW_SOURCES}/airflow/www/
COPY --from=www static ${AIRFLOW_SOURCES}/airflow/www/static/
COPY --from=scripts compile_www_assets.sh /scripts/docker/

# Build artifacts without removing temporary artifacts (we will need them for incremental changes)
# in build mode
RUN REMOVE_ARTIFACTS="false" BUILD_TYPE="build" bash /scripts/docker/compile_www_assets.sh
| |
# Airflow sources change frequently but dependency configuration won't change that often
# We copy setup.py and other files needed to perform setup of dependencies
# So in case setup.py changes we can install latest dependencies required.
COPY setup.py ${AIRFLOW_SOURCES}/setup.py
COPY setup.cfg ${AIRFLOW_SOURCES}/setup.cfg

# __init__.py is needed so that the package is importable during dependency installation.
COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/

COPY --from=scripts install_airflow.sh /scripts/docker/

# The goal of this line is to install the dependencies from the most current setup.py from sources
# This will be usually incremental small set of packages in CI optimized build, so it will be very fast
# In non-CI optimized build this will install all dependencies before installing sources.
# Usually we will install versions based on the dependencies in setup.py and upgraded only if needed.
# But in cron job we will install latest versions matching setup.py to see if there is no breaking change
# and push the constraints if everything is successful
RUN bash /scripts/docker/install_airflow.sh

# Install the CI and exec entrypoints generated in the "scripts" stage.
COPY --from=scripts entrypoint_ci.sh /entrypoint
COPY --from=scripts entrypoint_exec.sh /entrypoint-exec
RUN chmod a+x /entrypoint /entrypoint-exec

COPY --from=scripts install_pip_version.sh install_additional_dependencies.sh /scripts/docker/

# Additional python deps to install
ARG ADDITIONAL_PYTHON_DEPS=""

RUN bash /scripts/docker/install_pip_version.sh; \
if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \
bash /scripts/docker/install_additional_dependencies.sh; \
fi

# Install autocomplete for airflow
# NOTE(review): "command -v airflow" prints the resolved path to the build log;
# redirecting to /dev/null would silence it - confirm the output is intentional.
RUN if command -v airflow; then \
register-python-argcomplete airflow >> ~/.bashrc ; \
fi

# Install autocomplete for Kubectl
RUN echo "source /etc/bash_completion" >> ~/.bashrc

# We can copy everything here. The Context is filtered by dockerignore. This makes sure we are not
# copying over stuff that is accidentally generated or that we do not need (such as egg-info)
# if you want to add something that is missing and you expect to see it in the image you can
# add it with ! in .dockerignore next to the airflow, test etc. directories there
COPY . ${AIRFLOW_SOURCES}/

WORKDIR ${AIRFLOW_SOURCES}

ARG BUILD_ID
ARG COMMIT_SHA
ARG AIRFLOW_IMAGE_DATE_CREATED

# /files/bin and the in-container bin dir take precedence over system tools.
ENV PATH="/files/bin/:/opt/airflow/scripts/in_container/bin/:${PATH}" \
GUNICORN_CMD_ARGS="--worker-tmp-dir /dev/shm/" \
BUILD_ID=${BUILD_ID} \
COMMIT_SHA=${COMMIT_SHA}

# Link dumb-init for backwards compatibility (so that older images also work)
RUN ln -sf /usr/bin/dumb-init /usr/local/bin/dumb-init
| |
EXPOSE 8080

# Image metadata labels.
# Fixed: org.opencontainers.image.source was set twice; the later hard-coded
# value silently overrode the configurable ${AIRFLOW_IMAGE_REPOSITORY} one
# (for duplicate LABEL keys the last value wins).
LABEL org.apache.airflow.distro="debian" \
org.apache.airflow.module="airflow" \
org.apache.airflow.component="airflow" \
org.apache.airflow.image="airflow-ci" \
org.apache.airflow.version="${AIRFLOW_VERSION}" \
org.apache.airflow.uid="0" \
org.apache.airflow.gid="0" \
org.apache.airflow.build-id="${BUILD_ID}" \
org.apache.airflow.commit-sha="${COMMIT_SHA}" \
org.opencontainers.image.source="${AIRFLOW_IMAGE_REPOSITORY}" \
org.opencontainers.image.created="${AIRFLOW_IMAGE_DATE_CREATED}" \
org.opencontainers.image.authors="dev@airflow.apache.org" \
org.opencontainers.image.url="https://airflow.apache.org" \
org.opencontainers.image.documentation="https://github.com/apache/airflow/IMAGES.rst" \
org.opencontainers.image.version="${AIRFLOW_VERSION}" \
org.opencontainers.image.revision="${COMMIT_SHA}" \
org.opencontainers.image.vendor="Apache Software Foundation" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.ref.name="airflow-ci-image" \
org.opencontainers.image.title="Continuous Integration Airflow Image" \
org.opencontainers.image.description="Installed Apache Airflow with Continuous Integration dependencies"

# dumb-init forwards signals and reaps zombies; /entrypoint is entrypoint_ci.sh.
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"]
CMD []