#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -euo pipefail
AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ ${BREEZE_REDIRECT=} == "" ]]; then
set +u
mkdir -p "${AIRFLOW_SOURCES}"/logs
export BREEZE_REDIRECT="true"
if [[ "$(uname)" == "Darwin" ]]; then
exec script -q "${AIRFLOW_SOURCES}"/logs/breeze.out "$(command -v bash)" -c "$(printf "%q " "${0}" "${@}")"
else
exec script --return --quiet "${AIRFLOW_SOURCES}"/logs/breeze.out -c "$(printf "%q " "${0}" "${@}")"
fi
set -u
fi
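# Note: the exec above re-runs this very script under 'script' so that everything Breeze prints is
# captured in logs/breeze.out while still being shown on the terminal; BREEZE_REDIRECT guards
# against redirecting recursively on the second pass.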
export AIRFLOW_SOURCES
readonly AIRFLOW_SOURCES
# Bash arrays need to be defined outside of functions, unfortunately :(
# This is because the default Bash 3.x shipped with macOS does not handle arrays defined inside functions
# Array with extra options for Docker compose
declare -a EXTRA_DC_OPTIONS
export EXTRA_DC_OPTIONS
# Array with selected integrations
declare -a INTEGRATIONS
export INTEGRATIONS
# This is where remaining args are passed
declare -a REMAINING_ARGS
export REMAINING_ARGS
# This is where static check options are defined
declare -a EXTRA_STATIC_CHECK_OPTIONS
export EXTRA_STATIC_CHECK_OPTIONS
#######################################################################################################
# Sets up all the default variables for Breeze. They are needed by all other functions.
# All those variables are exported. They are not set to read-only because the
# defaults can be modified later on when command line arguments are parsed
# and variables stored in the .build directory (saved during the previous run) are read
#
# Used globals:
# FORCE_SCREEN_WIDTH
#
# Modified globals (constants or candidates for constants after we override them via appropriate flags):
#
# BREEZE
# SUPPRESS_CHEATSHEET_FILE
# SUPPRESS_ASCIIART_FILE
# MAX_SCREEN_WIDTH
# SCREEN_WIDTH
# MOUNT_SELECTED_LOCAL_SOURCES
# FORCE_PULL_IMAGES
# FORWARD_CREDENTIALS
# DB_RESET
# START_AIRFLOW
# INSTALL_AIRFLOW_VERSION
# INSTALL_AIRFLOW_REFERENCE
# FORCE_BUILD_IMAGES
# PRODUCTION_IMAGE
# PYTHON_MAJOR_MINOR_VERSION
#
# Global variables:
#
# command_to_run
# second_command_to_run
# docker_compose_command
#
# It also sets the variables and globals set by the common initialization functions from
# scripts/ci/libraries/_initialization.sh and by the breeze-complete script (which sets up auto-completion).
#
#######################################################################################################
function breeze::setup_default_breeze_constants() {
# Indicates that we are inside Breeze environment
export BREEZE=true
readonly BREEZE
# If those files are present, the ASCII-art/cheat-sheet are suppressed
SUPPRESS_CHEATSHEET_FILE="${AIRFLOW_SOURCES}/.build/.suppress_cheatsheet"
readonly SUPPRESS_CHEATSHEET_FILE
SUPPRESS_ASCIIART_FILE="${AIRFLOW_SOURCES}/.build/.suppress_asciiart"
readonly SUPPRESS_ASCIIART_FILE
# Maximum screen width used to print lines spanning the whole terminal width
export MAX_SCREEN_WIDTH=100
readonly MAX_SCREEN_WIDTH
# By default we mount selected local Airflow sources
export MOUNT_SELECTED_LOCAL_SOURCES="true"
# By default we do not mount all local Airflow sources
export MOUNT_ALL_LOCAL_SOURCES="false"
# By default we only pull images if we do not have them locally.
# This can be overridden by the '--force-pull-images' flag
export FORCE_PULL_IMAGES="false"
# By default we do not pull the Python base image. We should do that only when we run the upgrade check
# on CI master and when we manually refresh the images to the latest versions
export FORCE_PULL_BASE_PYTHON_IMAGE="false"
# Forward common host credentials to docker (gcloud, aws etc.).
export FORWARD_CREDENTIALS="false"
# If set to true, the database will be reset at entry. Works for Postgres and MySQL
export DB_RESET="false"
# If set to true, the database will be initialized, a user created, and the webserver and scheduler started
export START_AIRFLOW="false"
# If set to true, the default connections will be created
export LOAD_DEFAULT_CONNECTIONS="false"
# If set to true, the example DAGs will be loaded
export LOAD_EXAMPLES="false"
# If set to true, Breeze DB volumes will be preserved when Breeze is stopped and reused the next time,
# which means that you do not have to start from scratch
export PRESERVE_VOLUMES="false"
# If set to true, the RBAC UI will not be used for Airflow 1.10 versions
export DISABLE_RBAC="false"
# By default, Airflow is installed from local sources when using Breeze
AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM:="."}
export AIRFLOW_SOURCES_FROM
# They are copied to /opt/airflow by default in breeze
AIRFLOW_SOURCES_TO=${AIRFLOW_SOURCES_TO:="/opt/airflow"}
export AIRFLOW_SOURCES_TO
# Unlike in the CI scripts, in Breeze the production image is by default installed from sources
export AIRFLOW_INSTALLATION_METHOD="."
# If it is set to a specific version, then the source version of Airflow
# is removed and the specified version of Airflow is installed from PyPI
export INSTALL_AIRFLOW_VERSION=""
# If it is set to a specific reference (tag/branch), then the source version
# of Airflow is removed and the specified version of Airflow is installed from GitHub
export INSTALL_AIRFLOW_REFERENCE=""
# If set to true, the CI image will look for wheel packages in the dist folder and will install them
# when entering the container
export INSTALL_PACKAGES_FROM_DIST="false"
# Determines whether to force building the images without checking if it is needed
# Can be overridden by the '--force-build-images' flag.
export FORCE_BUILD_IMAGES="false"
# Load all the common functions here - those are the functions that are shared between Breeze
# and the CI scripts. The CI scripts do not use Breeze as the driving script - they read all configuration
# from environment variables. That's why we keep all the common initialization in those libs
# shellcheck source=scripts/ci/libraries/_all_libs.sh
. "${AIRFLOW_SOURCES}/scripts/ci/libraries/_all_libs.sh"
# When we generate documentation for README files, we want to force the terminal width so that
# no matter who runs the documentation generation, the output is the same
if [[ ${FORCE_SCREEN_WIDTH:="false"} != "true" ]]; then
# Sets the screen width from the terminal
SCREEN_WIDTH="$(tput cols)"
if [[ -z ${SCREEN_WIDTH=} ]]; then
SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
fi
if ((SCREEN_WIDTH > MAX_SCREEN_WIDTH)); then
SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
fi
else
SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
fi
export SCREEN_WIDTH
readonly SCREEN_WIDTH
# For Breeze, the default test type is "All"
export TEST_TYPE=${TEST_TYPE:="All"}
# Update short and long options in the breeze-complete script
# This way autocomplete will work automatically with all options available
# shellcheck source=breeze-complete
. "${AIRFLOW_SOURCES}/breeze-complete"
# Default command to run - entering breeze environment
command_to_run="enter_breeze"
# In some cases we also want to run two commands in a row (for example when we restart the environment)
second_command_to_run=""
# Determines if help should be run (set to true by --help flag)
run_help="false"
# Holds docker compose command if the `docker-compose` command is used.
docker_compose_command=""
}
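# Example (illustrative): most of the defaults set above can be overridden later by flags, e.g.
#   ./breeze --backend postgres --db-reset
# sets BACKEND and DB_RESET before the docker-compose configuration is assembled.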
#######################################################################################################
#
# Initializes a development-friendly virtualenv if you are already in such an env. It installs all the
# necessary packages from PyPI and, in case of problems, it provides useful hints on what prerequisites
# should be installed. It also removes and resets the existing AIRFLOW_HOME installation to make sure that
# it is synchronized with the version of Airflow installed. It resets Airflow's sqlite database to
# a clean state. You can use this function to clean up a broken virtualenv.
#
# Used globals:
# PYTHON_MAJOR_MINOR_VERSION
# AIRFLOW_HOME_DIR
# AIRFLOW_SOURCES
# DEFAULT_CONSTRAINTS_BRANCH
# OSTYPE
#
#######################################################################################################
function breeze::initialize_virtualenv() {
# Check if we are inside virtualenv
set +e
echo -e "import sys\nif sys.prefix == getattr(sys, 'base_prefix', sys.prefix) and not hasattr(sys, 'real_prefix'):\n sys.exit(1)" |
python"${PYTHON_MAJOR_MINOR_VERSION}"
local res=$?
set -e
if [[ ${res} != "0" ]]; then
echo
echo "${COLOR_RED}ERROR: Initializing the local virtualenv only works when you have a virtualenv activated ${COLOR_RESET}"
echo
echo "Please activate your local virtualenv first (for example using 'pyenv activate' or 'workon')"
echo
exit 1
else
echo
echo "Initializing the virtualenv: $(command -v python)!"
echo
echo "This will wipe out ${AIRFLOW_HOME_DIR} and reset all the databases!"
echo
"${AIRFLOW_SOURCES}/confirm" "Proceeding with the initialization"
echo
pushd "${AIRFLOW_SOURCES}" >/dev/null 2>&1 || exit 1
set +e
# We need to export this one to speed up Cassandra driver installation in virtualenv
CASS_DRIVER_NO_CYTHON="1" pip install -e ".[devel]" \
--constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt"
res=$?
set -e
popd
if [[ ${res} != "0" ]]; then
echo "#######################################################################"
echo " You had some trouble installing the venv !!!!!"
echo " Try running the commands below and then rerun the virtualenv installation"
echo
if [[ ${OSTYPE} == "darwin"* ]]; then
echo " brew install sqlite mysql postgresql openssl"
echo " export LDFLAGS=\"-L/usr/local/opt/openssl/lib\""
echo " export CPPFLAGS=\"-I/usr/local/opt/openssl/include\""
else
echo " sudo apt install build-essential python3.6-dev python3.7-dev python3.8-dev python-dev openssl \\"
echo " sqlite3 libsqlite3-dev default-libmysqlclient-dev libmysqld-dev postgresql"
fi
echo
echo "#######################################################################"
exit ${res}
fi
echo
echo "Wiping and recreating ${AIRFLOW_HOME_DIR}"
echo
rm -rvf "${AIRFLOW_HOME_DIR}"
mkdir -p "${AIRFLOW_HOME_DIR}"
echo
echo "Resetting AIRFLOW sqlite database"
echo
AIRFLOW__CORE__LOAD_EXAMPLES="False" \
AIRFLOW__CORE__UNIT_TEST_MODE="False" \
AIRFLOW__CORE__SQL_ALCHEMY_POOL_ENABLED="False" \
AIRFLOW__CORE__DAGS_FOLDER="${AIRFLOW_SOURCES}/empty" \
AIRFLOW__CORE__PLUGINS_FOLDER="${AIRFLOW_SOURCES}/empty" \
airflow db reset -y
echo
echo "Resetting AIRFLOW sqlite unit test database"
echo
AIRFLOW__CORE__LOAD_EXAMPLES="False" \
AIRFLOW__CORE__UNIT_TEST_MODE="True" \
AIRFLOW__CORE__SQL_ALCHEMY_POOL_ENABLED="False" \
AIRFLOW__CORE__DAGS_FOLDER="${AIRFLOW_SOURCES}/empty" \
AIRFLOW__CORE__PLUGINS_FOLDER="${AIRFLOW_SOURCES}/empty" \
airflow db reset -y
echo
echo "Initialization of virtualenv was successful! Go ahead and develop Airflow!"
echo
exit 0
fi
}
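# Example (illustrative): this function is normally reached via the corresponding subcommand,
# run from within an already activated virtualenv:
#   ./breeze initialize-local-virtualenv --python 3.8
# (the --python value is just an example - use one of the allowed Python versions)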
#######################################################################################################
#
# Sets up autocomplete for Breeze for both bash and zsh
#
# Used globals:
#
# AIRFLOW_SOURCES
# HOME
# OSTYPE
#
#######################################################################################################
function breeze::setup_autocomplete() {
echo "Installing bash/zsh completion for local user"
echo
"${AIRFLOW_SOURCES}/confirm" "This will create ~/.bash_completion.d/ directory and modify ~/.*rc files"
echo
echo
mkdir -pv ~/.bash_completion.d
ln -sf "${AIRFLOW_SOURCES}/breeze-complete" "${HOME}/.bash_completion.d/"
echo
echo "Breeze Bash completion is now linked to: ${AIRFLOW_SOURCES}/breeze-complete"
echo
local breeze_comment="Added by Airflow Breeze autocomplete setup"
if ! grep "${breeze_comment}" "${HOME}/.bashrc" >/dev/null 2>&1; then
touch ~/.bashrc
# shellcheck disable=SC2129
echo "# START: ${breeze_comment}" >>~/.bashrc
cat <<"EOF" >>~/.bashrc
for bcfile in ~/.bash_completion.d/* ; do
. ${bcfile}
done
EOF
echo "# END: ${breeze_comment}" >>~/.bashrc
echo
echo "The ${HOME}/.bashrc has been modified"
echo
else
echo
echo "The ${HOME}/.bashrc was already modified before. Not changing it."
echo
fi
if ! grep "${breeze_comment}" "${HOME}/.zshrc" >/dev/null 2>&1; then
# shellcheck disable=SC2129
echo "# START: ${breeze_comment}" >>~/.zshrc
cat <<"EOF" >>~/.zshrc
autoload compinit && compinit
autoload bashcompinit && bashcompinit
source ~/.bash_completion.d/breeze-complete
EOF
echo "# END: ${breeze_comment}" >>~/.zshrc
echo
echo "The ${HOME}/.zshrc has been modified"
echo
else
echo
echo "The ${HOME}/.zshrc was already modified before. Not changing it."
echo
fi
if [[ "${OSTYPE}" == "darwin"* ]]; then
# For macOS we have to handle the special case where the Terminal app DOES NOT run .bashrc by default
# but runs .bash_profile instead :(
# See https://scriptingosx.com/2017/04/about-bash_profile-and-bashrc-on-macos/
if ! grep "${breeze_comment}" "${HOME}/.bash_profile" >/dev/null 2>&1; then
# shellcheck disable=SC2129
echo "# START: ${breeze_comment}" >>~/.bash_profile
cat <<"EOF" >>~/.bash_profile
if [ -r ~/.bashrc ]; then
source ~/.bashrc
fi
EOF
echo "# END: ${breeze_comment}" >>~/.bash_profile
echo
echo "The ${HOME}/.bash_profile has been modified"
echo
else
echo
echo "The ${HOME}/.bash_profile was already modified before. Not changing it."
echo
fi
fi
echo
echo
echo "Breeze completion is installed to ~/.bash_completion.d/breeze-complete"
echo
echo "Please exit and re-enter your shell or run:"
echo
echo " source ~/.bash_completion.d/breeze-complete"
echo
exit 0
}
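# Example (illustrative):
#   ./breeze setup-autocomplete
# after which a new bash/zsh session picks up completion from ~/.bash_completion.d/breeze-complete.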
#######################################################################################################
#
# Prints information about the current configuration of Breeze - if you enter breeze interactively
# and you did not suppress cheatsheet or asciiart, it also prints those. It also prints values
# of constants set by breeze::read_saved_environment_variables() function and other initialization functions.
#
# Used globals:
#
# BACKEND
# POSTGRES_VERSION
# MYSQL_VERSION
# SUPPRESS_CHEATSHEET_FILE
# SUPPRESS_ASCIIART_FILE
# PRODUCTION_IMAGE
# BRANCH_NAME
# AIRFLOW_CI_IMAGE
# AIRFLOW_PROD_IMAGE
# AIRFLOW_VERSION
# DOCKERHUB_USER
# DOCKERHUB_REPO
# INSTALL_AIRFLOW_VERSION
# INSTALL_AIRFLOW_REFERENCE
#
# Outputs:
# Prints the information about the build to stdout.
#
#######################################################################################################
function breeze::print_badge() {
local backend_version=""
if [[ ${BACKEND} == "postgres" ]]; then
backend_version="${POSTGRES_VERSION}"
elif [[ ${BACKEND} == "mysql" ]]; then
backend_version="${MYSQL_VERSION}"
fi
if [[ ! -f "${SUPPRESS_ASCIIART_FILE}" && ${command_to_run} == "enter_breeze" ]]; then
cat <<EOF
@&&&&&&@
@&&&&&&&&&&&@
&&&&&&&&&&&&&&&&
&&&&&&&&&&
&&&&&&&
&&&&&&&
@@@@@@@@@@@@@@@@ &&&&&&
@&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&
&&&&&&&&&
&&&&&&&&&&&&
@@&&&&&&&&&&&&&&&@
@&&&&&&&&&&&&&&&&&&&&&&&&&&&& &&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&& &&&&&&
&&&&&&&&&&&&&&&&&&&&&&&& &&&&&&
&&&&&&
&&&&&&&
@&&&&&&&&
@&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
@&&&@ && @&&&&&&&&&&& &&&&&&&&&&&& && &&&&&&&&&& &&& &&& &&&
&&& &&& && @&& &&& && && &&& &&&@ &&& &&&&& &&&
&&& &&& && @&&&&&&&&&&&& &&&&&&&&&&& && && &&& &&& &&& &&@ &&&
&&&&&&&&&&& && @&&&&&&&&& && && &&@ &&& &&@&& &&@&&
&&& &&& && @&& &&&@ && &&&&&&&&&&& &&&&&&&&&&&& &&&& &&&&
&&&&&&&&&&&& &&&&&&&&&&&& &&&&&&&&&&&@ &&&&&&&&&&&& &&&&&&&&&&& &&&&&&&&&&&
&&& &&& && &&& && &&& &&&& &&
&&&&&&&&&&&&@ &&&&&&&&&&&& &&&&&&&&&&& &&&&&&&&&&& &&&& &&&&&&&&&&
&&& && && &&&& && &&& &&&& &&
&&&&&&&&&&&&& && &&&&@ &&&&&&&&&&&@ &&&&&&&&&&&& @&&&&&&&&&&& &&&&&&&&&&&
EOF
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
cat <<EOF
Use production image.
Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_PROD_IMAGE}
GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
Airflow source version: $(build_images::get_airflow_version_from_production_image)
EOF
else
cat <<EOF
Use CI image.
Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_CI_IMAGE}
GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
Airflow source version: ${AIRFLOW_VERSION}
EOF
fi
cat <<EOF
Python version: ${PYTHON_MAJOR_MINOR_VERSION}
DockerHub user: ${DOCKERHUB_USER}
DockerHub repo: ${DOCKERHUB_REPO}
Backend: ${BACKEND} ${backend_version}
EOF
if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE=} ]]; then
cat <<EOF
Airflow installed: ${INSTALL_AIRFLOW_VERSION=}${INSTALL_AIRFLOW_REFERENCE=}
EOF
fi
else
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
cat <<EOF
Production image.
Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_PROD_IMAGE}
GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
EOF
else
cat <<EOF
CI image.
Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_CI_IMAGE}
GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix)
EOF
fi
cat <<EOF
Airflow source version: ${AIRFLOW_VERSION}
Python version: ${PYTHON_MAJOR_MINOR_VERSION}
DockerHub user: ${DOCKERHUB_USER}
DockerHub repo: ${DOCKERHUB_REPO}
Backend: ${BACKEND} ${backend_version}
EOF
if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE=} ]]; then
cat <<EOF
Airflow installed from: ${INSTALL_AIRFLOW_VERSION}${INSTALL_AIRFLOW_REFERENCE}
EOF
fi
fi
if [[ ${VERBOSE} == "true" ]]; then
initialization::summarize_build_environment
fi
}
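# Example (illustrative): the ASCII-art header above (and, elsewhere, the cheatsheet) can be toggled with
#   ./breeze toggle-suppress-asciiart
#   ./breeze toggle-suppress-cheatsheet
# which create or remove the SUPPRESS_* marker files.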
#######################################################################################################
#
# Prepares a command file that can be used to easily run the docker commands outside of Breeze.
#
# The command file generated in the .build cache directory is a standalone script that contains
# all the environment variables and docker-compose configuration needed to run the command.
# This is needed because, depending on the configuration of Breeze, different compose files
# might be used and different env variables set.
#
# Those are convenience scripts that you might use to debug command execution, although
# in most cases they are used internally by Breeze.
#
# Used Globals:
# BRANCH_NAME
# PYTHON_MAJOR_MINOR_VERSION
# DOCKERHUB_USER
# DOCKERHUB_REPO
# BACKEND
# AIRFLOW_VERSION
# INSTALL_AIRFLOW_VERSION
# WEBSERVER_HOST_PORT
# POSTGRES_HOST_PORT
# POSTGRES_VERSION
# MYSQL_HOST_PORT
# MYSQL_VERSION
# AIRFLOW_SOURCES
# AIRFLOW_CI_IMAGE
# AIRFLOW_PROD_IMAGE
# AIRFLOW_PROD_IMAGE_KUBERNETES
# AIRFLOW_PROD_BASE_TAG
# SQLITE_URL
#
# Arguments:
#
# file to prepare
# command to run
# compose_file to use
# airflow_image to use
#
# Outputs:
# Creates the convenience command file that can be run to use the docker command.
#
#######################################################################################################
function breeze::prepare_command_file() {
local file="${1}"
local command="${2}"
local compose_file="${3}"
local airflow_image="${4}"
cat <<EOF >"${file}"
#!/usr/bin/env bash
if [[ \${VERBOSE} == "true" ]]; then
echo
echo "Executing script:"
echo
echo "${COLOR_CYAN}${file} \${@}${COLOR_RESET}"
echo
set -x
fi
cd "\$( dirname "\${BASH_SOURCE[0]}" )" || exit
export DOCKERHUB_USER=${DOCKERHUB_USER}
export DOCKERHUB_REPO=${DOCKERHUB_REPO}
export HOST_USER_ID=${HOST_USER_ID}
export HOST_GROUP_ID=${HOST_GROUP_ID}
export COMPOSE_FILE="${compose_file}"
export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}"
export BACKEND="${BACKEND}"
export AIRFLOW_VERSION="${AIRFLOW_VERSION}"
export INSTALL_AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
export WEBSERVER_HOST_PORT="${WEBSERVER_HOST_PORT}"
export FLOWER_HOST_PORT="${FLOWER_HOST_PORT}"
export REDIS_HOST_PORT="${REDIS_HOST_PORT}"
export POSTGRES_HOST_PORT="${POSTGRES_HOST_PORT}"
export POSTGRES_VERSION="${POSTGRES_VERSION}"
export MYSQL_HOST_PORT="${MYSQL_HOST_PORT}"
export MYSQL_VERSION="${MYSQL_VERSION}"
export AIRFLOW_SOURCES="${AIRFLOW_SOURCES}"
export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}"
export AIRFLOW_PROD_IMAGE="${AIRFLOW_PROD_IMAGE}"
export AIRFLOW_PROD_IMAGE_KUBERNETES="${AIRFLOW_PROD_IMAGE_KUBERNETES}"
export AIRFLOW_PROD_BASE_TAG="${AIRFLOW_PROD_BASE_TAG}"
export AIRFLOW_IMAGE="${airflow_image}"
export SQLITE_URL="${SQLITE_URL}"
docker-compose --log-level INFO ${command}
EOF
chmod u+x "${file}"
}
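# Example (illustrative): this is how breeze::prepare_command_files below generates the CI
# convenience script (BUILD_CACHE_DIR is assumed to point at the .build cache directory):
#   breeze::prepare_command_file "${BUILD_CACHE_DIR}/dc_ci" '"${@}"' "${compose_ci_file}" "${AIRFLOW_CI_IMAGE}"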
#######################################################################################################
#
# Prepares all the command files that we are using. Depending on the command to execute, we use two
# convenience scripts:
#
# dc_ci - to run the docker-compose command for the CI image
# dc_prod - to run the docker-compose command for the PROD image
#
# Global constants set:
#
# PYTHON_BASE_IMAGE_VERSION
# PYTHON_BASE_IMAGE
# AIRFLOW_CI_IMAGE
# AIRFLOW_PROD_BASE_TAG
# AIRFLOW_PROD_IMAGE
# AIRFLOW_PROD_IMAGE_KUBERNETES
# BUILT_CI_IMAGE_FLAG_FILE
#
#######################################################################################################
function breeze::prepare_command_files() {
local main_ci_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/base.yml
local main_prod_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/base.yml
local backend_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml
local backend_port_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}-port.yml
local local_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/local.yml
local local_all_sources_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/local-all-sources.yml
local files_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/files.yml
local local_prod_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/local-prod.yml
local remove_sources_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml
local forward_credentials_docker_compose_file=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml
local compose_ci_file=${main_ci_docker_compose_file}:${backend_docker_compose_file}:${files_docker_compose_file}
local compose_prod_file=${main_prod_docker_compose_file}:${backend_docker_compose_file}:${files_docker_compose_file}
if [[ "${MOUNT_SELECTED_LOCAL_SOURCES}" != "false" ]]; then
compose_ci_file=${compose_ci_file}:${local_docker_compose_file}:${backend_port_docker_compose_file}
compose_prod_file=${compose_prod_file}:${local_prod_docker_compose_file}:${backend_port_docker_compose_file}
fi
if [[ "${MOUNT_ALL_LOCAL_SOURCES}" != "false" ]]; then
compose_ci_file=${compose_ci_file}:${local_all_sources_docker_compose_file}:${backend_port_docker_compose_file}
compose_prod_file=${compose_prod_file}:${local_all_sources_docker_compose_file}:${backend_port_docker_compose_file}
fi
if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
compose_ci_file=${compose_ci_file}:${forward_credentials_docker_compose_file}
compose_prod_file=${compose_prod_file}:${forward_credentials_docker_compose_file}
fi
if [[ -n ${INSTALL_AIRFLOW_VERSION=} ]]; then
compose_ci_file=${compose_ci_file}:${remove_sources_docker_compose_file}
fi
set +u
local unique_integrations
# shellcheck disable=SC2207
unique_integrations=($(echo "${INTEGRATIONS[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' '))
local integration
for integration in "${unique_integrations[@]}"; do
compose_ci_file=${compose_ci_file}:${SCRIPTS_CI_DIR}/docker-compose/integration-${integration}.yml
done
set -u
export DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI="dc_ci"
readonly DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI
export DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD="dc_prod"
readonly DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD
# Prepare script for "run docker compose CI command"
breeze::prepare_command_file "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" \
"\"\${@}\"" "${compose_ci_file}" "${AIRFLOW_CI_IMAGE}"
# Prepare script for "run docker compose PROD command"
breeze::prepare_command_file "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD}" \
"\"\${@}\"" "${compose_prod_file}" "${AIRFLOW_PROD_IMAGE}"
}
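# Example (illustrative): once generated, the convenience scripts can be run directly, e.g.
#   "${BUILD_CACHE_DIR}/dc_ci" ps
# which runs 'docker-compose --log-level INFO ps' with the CI COMPOSE_FILE and image variables exported.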
#######################################################################################################
#
# Prints detailed help for all commands and flags. Used to generate documentation added to BREEZE.rst
# automatically.
#
# Used global variables:
# _breeze_all_commands
#
# Outputs:
# Prints detailed help for all commands to stdout.
#
#######################################################################################################
function breeze::do_help_all() {
echo
breeze::print_line
breeze::usage
breeze::print_line
echo
echo
echo "Detailed usage"
echo
breeze::print_line
echo
local subcommand
# shellcheck disable=SC2154
for subcommand in ${_breeze_all_commands}; do
breeze::detailed_usage "${subcommand}"
breeze::print_line
echo
done
echo
breeze::flags
}
#######################################################################################################
#
# Parses all arguments that can be passed to Breeze command - that includes command to run and flags.
#
# Used global variables:
# _breeze_getopt_short_options
# _breeze_getopt_long_options
# _breeze_allowed_integrations
#
# Updated global constants:
# By the end of this function, all the constants from `initialization::make_constants_read_only`
# function are set and they are set as read-only.
#
#######################################################################################################
function breeze::parse_arguments() {
set -u
local params
if ! params=$(getopt \
-o "${_breeze_getopt_short_options:=}" \
-l "${_breeze_getopt_long_options:=}" \
--name "$CMDNAME" -- "$@"); then
breeze::flags
exit 1
fi
eval set -- "${params}"
unset params
# Parse Flags.
# Please update short and long options in the breeze-complete script
# This way autocomplete will work out-of-the-box
while true; do
case "${1}" in
-h | --help)
run_help="true"
shift
;;
-p | --python)
export PYTHON_MAJOR_MINOR_VERSION="${2}"
echo "Python version: ${PYTHON_MAJOR_MINOR_VERSION}"
echo
shift 2
;;
-b | --backend)
export BACKEND="${2}"
echo "Backend: ${BACKEND}"
echo
shift 2
;;
-i | --integration)
local INTEGRATION=${2}
parameters::check_and_save_allowed_param "INTEGRATION" "integration" "--integration"
echo "Integration: ${INTEGRATION}"
if [[ ${INTEGRATION} == "all" ]]; then
# shellcheck disable=SC2154
for INTEGRATION in ${_breeze_allowed_integrations}; do
if [[ ${INTEGRATION} != "all" ]]; then
echo "${INTEGRATION}"
INTEGRATIONS+=("${INTEGRATION}")
fi
done
else
INTEGRATIONS+=("${INTEGRATION}")
fi
if [[ " ${INTEGRATIONS[*]} " =~ " trino " ]]; then
INTEGRATIONS+=("kerberos");
fi
echo
shift 2
;;
-K | --kubernetes-mode)
export KUBERNETES_MODE="${2}"
echo "Kubernetes mode: ${KUBERNETES_MODE}"
echo
shift 2
;;
-V | --kubernetes-version)
export KUBERNETES_VERSION="${2}"
echo "Kubernetes version: ${KUBERNETES_VERSION}"
echo
shift 2
;;
--kind-version)
export KIND_VERSION="${2}"
echo "Kind version: ${KIND_VERSION}"
echo
shift 2
;;
--helm-version)
export HELM_VERSION="${2}"
echo "Helm version: ${HELM_VERSION}"
echo
shift 2
;;
--postgres-version)
export POSTGRES_VERSION="${2}"
echo "Postgres version: ${POSTGRES_VERSION}"
echo
shift 2
;;
--mysql-version)
export MYSQL_VERSION="${2}"
echo "MySQL version: ${MYSQL_VERSION}"
echo
shift 2
;;
-l | --skip-mounting-local-sources)
MOUNT_SELECTED_LOCAL_SOURCES="false"
MOUNT_ALL_LOCAL_SOURCES="false"
echo "Mount selected local sources: ${MOUNT_SELECTED_LOCAL_SOURCES}"
echo "Mount all local sources: ${MOUNT_ALL_LOCAL_SOURCES}"
echo
shift
;;
--mount-all-local-sources)
MOUNT_ALL_LOCAL_SOURCES="true"
MOUNT_SELECTED_LOCAL_SOURCES="false"
echo "Mount selected local sources: ${MOUNT_SELECTED_LOCAL_SOURCES}"
echo "Mount all local sources: ${MOUNT_ALL_LOCAL_SOURCES}"
echo
shift
;;
-a | --install-airflow-version)
INSTALL_AIRFLOW_VERSION="${2}"
# Reference is mutually exclusive with version
INSTALL_AIRFLOW_REFERENCE=""
echo "Installing Airflow version: ${INSTALL_AIRFLOW_VERSION}"
echo
shift 2
;;
-t | --install-airflow-reference)
INSTALL_AIRFLOW_REFERENCE="${2}"
# Reference is mutually exclusive with version
INSTALL_AIRFLOW_VERSION=""
# Skip mounting local sources when airflow is installed from remote
INSTALL_PROVIDERS_FROM_SOURCES="false"
echo "Installing Airflow from reference: ${INSTALL_AIRFLOW_REFERENCE}"
echo
shift 2
;;
-d | --db-reset)
echo "Resetting the DB!"
echo
export DB_RESET="true"
shift
;;
-v | --verbose)
export VERBOSE="true"
echo "Verbose output"
echo
shift
;;
-y | --assume-yes)
export FORCE_ANSWER_TO_QUESTIONS="yes"
echo "Assuming 'yes' answer to all questions."
echo
shift
;;
-n | --assume-no)
export FORCE_ANSWER_TO_QUESTIONS="no"
echo "Assuming 'no' answer to all questions."
echo
shift
;;
-q | --assume-quit)
export FORCE_ANSWER_TO_QUESTIONS="quit"
echo "Assuming 'quit' answer to all questions."
echo
shift
;;
-F | --force-build-images)
echo "Force build images"
echo
export FORCE_BUILD_IMAGES="true"
# if you want to force build an image - assume you want to build it :)
export FORCE_ANSWER_TO_QUESTIONS="yes"
shift
;;
-C | --force-clean-images)
echo "Clean build of images without cache"
echo
export DOCKER_CACHE="disabled"
# if not set here, the docker cache is determined later, depending on the type of image to be built
readonly DOCKER_CACHE
export FORCE_BUILD_IMAGES="true"
shift
;;
-r | --skip-rebuild-check)
echo "Skips checking image for rebuilds"
echo
export CHECK_IMAGE_FOR_REBUILD="false"
export SKIP_BUILDING_PROD_IMAGE="true"
shift
;;
-L | --build-cache-local)
echo "Use local cache to build images"
echo
export DOCKER_CACHE="local"
# if not set here, the docker cache is determined later, depending on the type of image to be built
readonly DOCKER_CACHE
shift
;;
-U | --build-cache-pulled)
echo "Use pulled cache to build images"
echo
export DOCKER_CACHE="pulled"
# if not set here, the docker cache is determined later, depending on the type of image to be built
readonly DOCKER_CACHE
shift
;;
-X | --build-cache-disabled)
echo "Use disabled cache to build images"
echo
export DOCKER_CACHE="disabled"
readonly DOCKER_CACHE
# if not set here, the docker cache is determined later, depending on the type of image to be built
shift
;;
-P | --force-pull-images)
echo "Force pulling images before build. Uses pulled images as cache."
echo
export FORCE_PULL_IMAGES="true"
export FORCE_BUILD_IMAGES="true"
# if you want to force build an image - assume you want to build it :)
export FORCE_ANSWER_TO_QUESTIONS="yes"
shift
;;
--force-pull-base-python-image)
echo "Force pulling base python image. Uses pulled images as cache."
echo
export FORCE_PULL_BASE_PYTHON_IMAGE="true"
export FORCE_BUILD_IMAGES="true"
# if you want to force build an image - assume you want to build it :)
export FORCE_ANSWER_TO_QUESTIONS="yes"
shift
;;
-I | --production-image)
export PRODUCTION_IMAGE="true"
export SQLITE_URL=
echo
echo "*************** PRODUCTION IMAGE *************************"
echo
shift
;;
--disable-pypi-when-building)
export INSTALL_FROM_PYPI="false"
export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
echo "Disable installing Airflow from PyPI"
shift
;;
-E | --extras)
export AIRFLOW_EXTRAS="${2}"
echo "Extras: ${AIRFLOW_EXTRAS}"
shift 2
;;
--skip-installing-airflow-providers-from-sources)
export INSTALL_PROVIDERS_FROM_SOURCES="false"
echo "Install Airflow providers from sources: false"
shift
;;
--additional-extras)
export ADDITIONAL_AIRFLOW_EXTRAS="${2}"
echo "Additional extras: ${ADDITIONAL_AIRFLOW_EXTRAS}"
shift 2
;;
--additional-python-deps)
export ADDITIONAL_PYTHON_DEPS="${2}"
echo "Additional python dependencies: ${ADDITIONAL_PYTHON_DEPS}"
shift 2
;;
--dev-apt-deps)
export DEV_APT_DEPS="${2}"
echo "Apt dev dependencies: ${DEV_APT_DEPS}"
shift 2
;;
--additional-dev-apt-deps)
export ADDITIONAL_DEV_APT_DEPS="${2}"
echo "Additional apt dev dependencies: ${ADDITIONAL_DEV_APT_DEPS}"
shift 2
;;
--dev-apt-command)
export DEV_APT_COMMAND="${2}"
echo "Apt dev command: ${DEV_APT_COMMAND}"
shift 2
;;
--additional-dev-apt-command)
export ADDITIONAL_DEV_APT_COMMAND="${2}"
echo "Additional Apt dev command: ${ADDITIONAL_DEV_APT_COMMAND}"
shift 2
;;
--additional-dev-apt-env)
export ADDITIONAL_DEV_APT_ENV="${2}"
echo "Additional Apt dev environment variables: ${ADDITIONAL_DEV_APT_ENV}"
shift 2
;;
--runtime-apt-deps)
export RUNTIME_APT_DEPS="${2}"
echo "Apt runtime dependencies: ${RUNTIME_APT_DEPS}"
shift 2
;;
--additional-runtime-apt-deps)
export ADDITIONAL_RUNTIME_APT_DEPS="${2}"
echo "Additional apt runtime dependencies: ${ADDITIONAL_RUNTIME_APT_DEPS}"
shift 2
;;
--runtime-apt-command)
export RUNTIME_APT_COMMAND="${2}"
echo "Apt runtime command: ${RUNTIME_APT_COMMAND}"
shift 2
;;
--additional-runtime-apt-command)
export ADDITIONAL_RUNTIME_APT_COMMAND="${2}"
echo "Additional Apt runtime command: ${ADDITIONAL_RUNTIME_APT_COMMAND}"
shift 2
;;
--additional-runtime-apt-env)
export ADDITIONAL_RUNTIME_APT_ENV="${2}"
echo "Additional Apt runtime environment variables: ${ADDITIONAL_RUNTIME_APT_ENV}"
shift 2
;;
--disable-mysql-client-installation)
export INSTALL_MYSQL_CLIENT="false"
echo "Install MySQL client: ${INSTALL_MYSQL_CLIENT}"
shift
;;
--constraints-location)
export AIRFLOW_CONSTRAINTS_LOCATION="${2}"
echo "Constraints location: ${AIRFLOW_CONSTRAINTS_LOCATION}"
shift 2
;;
--disable-pip-cache)
echo "Disable PIP cache during build"
echo
export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"
shift
;;
--install-from-local-files-when-building)
export INSTALL_FROM_DOCKER_CONTEXT_FILES="true"
echo "Install wheels from local docker-context-files when building image"
shift
;;
--image-tag)
export IMAGE_TAG="${2}"
echo "Tag to add to the image: ${IMAGE_TAG}"
shift 2
;;
-D | --dockerhub-user)
export DOCKERHUB_USER="${2}"
echo "DockerHub user: ${DOCKERHUB_USER}"
echo
shift 2
;;
-R | --dockerhub-repo)
export DOCKERHUB_REPO="${2}"
echo "DockerHub repo: ${DOCKERHUB_REPO}"
echo
shift 2
;;
-f | --forward-credentials)
echo "Forwarding credentials. Be careful as your credentials are available in the container!"
echo
export FORWARD_CREDENTIALS="true"
shift
;;
-c | --use-github-registry)
echo
echo "Use GitHub registry"
echo
export USE_GITHUB_REGISTRY="true"
shift
;;
--github-registry)
echo
echo "Using GitHub registry."
echo "GitHub registry used: ${2}"
echo
export GITHUB_REGISTRY="${2}"
export USE_GITHUB_REGISTRY="true"
shift 2
;;
-g | --github-repository)
echo
echo "Using GitHub registry."
echo "GitHub repository: ${2}"
echo
export GITHUB_REPOSITORY="${2}"
export USE_GITHUB_REGISTRY="true"
shift 2
;;
-s | --github-image-id)
echo
echo "GitHub image id: ${2}"
echo
echo "Force pulling the image and using the GitHub registry."
echo "This is in order to get the exact same version as used in the CI environment for the SHA/RUN_ID!"
echo "You can specify --skip-mounting-local-sources to not mount local sources and get the exact"
echo "same behaviour as in the CI environment."
echo
export FORCE_PULL_IMAGES="true"
export USE_GITHUB_REGISTRY="true"
export GITHUB_REGISTRY_PULL_IMAGE_TAG="${2}"
export GITHUB_REGISTRY_PUSH_IMAGE_TAG="${2}"
export CHECK_IMAGE_FOR_REBUILD="false"
export SKIP_BUILDING_PROD_IMAGE="true"
export SKIP_CHECK_REMOTE_IMAGE="true"
export FAIL_ON_GITHUB_DOCKER_PULL_ERROR="true"
shift 2
;;
--init-script)
export INIT_SCRIPT_FILE="${2}"
echo "The initialization file is in ${INIT_SCRIPT_FILE}"
echo
shift 2
;;
-S | --version-suffix-for-pypi)
export VERSION_SUFFIX_FOR_PYPI="${2}"
echo "Version suffix for PyPI: ${VERSION_SUFFIX_FOR_PYPI}"
echo
shift 2
;;
-N | --version-suffix-for-svn)
export VERSION_SUFFIX_FOR_SVN="${2}"
echo "Version suffix for SVN: ${VERSION_SUFFIX_FOR_SVN}"
echo
shift 2
;;
--load-example-dags)
export LOAD_EXAMPLES="true"
echo "Include Airflow sample dags"
echo
shift
;;
--load-default-connections)
export LOAD_DEFAULT_CONNECTIONS="true"
echo "Include Airflow default connections"
echo
shift
;;
--preserve-volumes)
export PRESERVE_VOLUMES="true"
echo "Preserves data volumes when stopping airflow"
echo
shift
;;
--no-rbac-ui)
export DISABLE_RBAC="true"
echo "When installing Airflow 1.10, RBAC UI will be disabled."
echo
shift
;;
--install-packages-from-dist)
export INSTALL_PACKAGES_FROM_DIST="true"
echo "Install packages found in dist folder when entering breeze."
echo
shift
;;
--upgrade-to-newer-dependencies)
export UPGRADE_TO_NEWER_DEPENDENCIES="true"
echo "Upgrade packages to latest versions."
echo
shift
;;
--continue-on-pip-check-failure)
export CONTINUE_ON_PIP_CHECK_FAILURE="true"
echo "Continue despite 'pip check' failure."
echo
shift
;;
--package-format)
export PACKAGE_FORMAT="${2}"
echo "Selected package type: ${PACKAGE_FORMAT}"
echo
shift 2
;;
--installation-method)
export AIRFLOW_INSTALLATION_METHOD="${2}"
echo "Airflow installation method: ${AIRFLOW_INSTALLATION_METHOD}"
echo
shift 2
;;
--generate-constraints-mode)
export GENERATE_CONSTRAINTS_MODE="${2}"
echo "Generate constraints mode: ${GENERATE_CONSTRAINTS_MODE}"
echo
shift 2
;;
--test-type)
export TEST_TYPE="${2}"
echo "Selected test type: ${TEST_TYPE}"
echo
shift 2
;;
--dry-run-docker)
export DRY_RUN_DOCKER="true"
echo "Dry run mode"
echo
shift
;;
--)
shift
break
;;
*)
breeze::flags
echo
echo "${COLOR_RED}ERROR: Unknown flag ${COLOR_RESET}"
echo
exit 1
;;
esac
done
local last_subcommand=""
# Parse commands
if [[ "$#" -ne 0 ]]; then
case "${1}" in
shell)
last_subcommand="${1}"
shift
;;
exec)
last_subcommand="${1}"
command_to_run="run_exec"
shift
;;
build-docs)
last_subcommand="${1}"
command_to_run="build_docs"
shift
;;
build-image)
last_subcommand="${1}"
command_to_run="build_image"
# if you want to build an image - assume you want to build it :)
export FORCE_ANSWER_TO_QUESTIONS="yes"
# and assume you want to build it no matter if it is needed
export FORCE_BUILD_IMAGES="true"
echo "Build image"
echo
shift
;;
cleanup-image)
last_subcommand="${1}"
echo "Cleanup the image"
echo
command_to_run="cleanup_image"
shift
;;
docker-compose)
last_subcommand="${1}"
if [[ $# -lt 2 ]]; then
echo "You should specify the docker-compose command to run"
shift
run_help="true"
else
docker_compose_command="${2}"
shift 2
fi
command_to_run="run_docker_compose"
;;
generate-constraints)
last_subcommand="${1}"
command_to_run="perform_generate_constraints"
export FORCE_ANSWER_TO_QUESTIONS="yes"
export FORCE_BUILD_IMAGES="true"
export SKIP_CHECK_REMOTE_IMAGE="true"
export UPGRADE_TO_NEWER_DEPENDENCIES="true"
shift
;;
prepare-airflow-packages)
last_subcommand="${1}"
command_to_run="perform_prepare_airflow_packages"
export INSTALL_PROVIDERS_FROM_SOURCES="false"
shift
;;
prepare-provider-packages)
last_subcommand="${1}"
command_to_run="perform_prepare_provider_packages"
shift
;;
prepare-provider-documentation)
last_subcommand="${1}"
command_to_run="perform_prepare_provider_documentation"
shift
;;
push-image)
last_subcommand="${1}"
command_to_run="perform_push_image"
export SKIP_CHECK_REMOTE_IMAGE="true"
shift
;;
initialize-local-virtualenv)
last_subcommand="${1}"
echo "Initializing local virtualenv"
echo
command_to_run="perform_initialize_local_virtualenv"
export INSTALL_PROVIDERS_FROM_SOURCES="true"
shift
;;
kind-cluster)
last_subcommand="${1}"
# Switch to production image for all kind operations
export PRODUCTION_IMAGE="true"
command_to_run="manage_kind_cluster"
export KIND_CLUSTER_OPERATION="${2:-}"
if [[ -n ${KIND_CLUSTER_OPERATION=} ]]; then
shift 2
else
shift
fi
;;
setup-autocomplete)
last_subcommand="${1}"
echo "Setting up autocomplete"
echo
command_to_run="perform_setup_autocomplete"
shift
;;
static-check)
last_subcommand="${1}"
command_to_run="perform_static_checks"
if [[ "$#" -lt 2 ]]; then
if [[ ${run_help} != "true" ]]; then
echo "You should specify the static check that you would like to run, or 'all' to run all checks."
echo
echo "One of :"
echo
echo "${_breeze_allowed_static_checks:=}"
echo
echo "For example:"
echo
echo "${CMDNAME} static-check mypy"
echo
exit 1
else
shift
fi
else
export PYTHON_MAJOR_MINOR_VERSION=${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}
static_check="${2:-}"
EXTRA_STATIC_CHECK_OPTIONS+=("--show-diff-on-failure")
shift 2
fi
;;
start-airflow)
last_subcommand="${1}"
export START_AIRFLOW="true"
shift
;;
stop)
last_subcommand="${1}"
command_to_run="run_docker_compose"
docker_compose_command="down"
EXTRA_DC_OPTIONS+=("--remove-orphans")
for INTEGRATION in ${_breeze_allowed_integrations}; do
if [[ ${INTEGRATION} != "all" ]]; then
INTEGRATIONS+=("${INTEGRATION}")
fi
done
shift
;;
restart)
last_subcommand="${1}"
command_to_run="run_docker_compose"
docker_compose_command="down"
EXTRA_DC_OPTIONS+=("--remove-orphans")
second_command_to_run="enter_breeze"
echo "Restarts the environment. Includes emptying the databases."
shift
;;
tests)
last_subcommand="${1}"
command_to_run="run_tests"
shift
;;
toggle-suppress-cheatsheet)
last_subcommand="${1}"
if [[ -f "${SUPPRESS_CHEATSHEET_FILE}" ]]; then
rm -f "${SUPPRESS_CHEATSHEET_FILE}"
else
touch "${SUPPRESS_CHEATSHEET_FILE}"
fi
echo "Toggle suppress cheatsheet"
echo
shift
command_to_run="toggle_suppress_cheatsheet"
;;
toggle-suppress-asciiart)
last_subcommand="${1}"
if [[ -f "${SUPPRESS_ASCIIART_FILE}" ]]; then
rm -f "${SUPPRESS_ASCIIART_FILE}"
else
touch "${SUPPRESS_ASCIIART_FILE}"
fi
echo "Toggle suppress asciiart"
echo
shift
command_to_run="toggle_suppress_asciiart"
;;
flags)
breeze::flags
exit 0
;;
help)
breeze::usage
exit 0
;;
help-all)
breeze::do_help_all
exit 0
;;
*)
breeze::usage
echo
echo "${COLOR_RED}ERROR: Unknown command ${COLOR_RESET}"
echo
exit 1
;;
esac
else
:
# By default, start interactive terminal
fi
if [[ ${run_help} == "true" ]]; then
if [[ ${last_subcommand} == "" ]]; then
breeze::usage
breeze::flag_footer
else
breeze::detailed_usage "${last_subcommand}"
fi
exit 0
fi
if [[ ${PRESERVE_VOLUMES} != "true" ]]; then
EXTRA_DC_OPTIONS+=("--volumes")
fi
# EXTRA_DC_OPTIONS is only used by Breeze. Its value is set here as well.
readonly EXTRA_DC_OPTIONS
# The remaining args are also set here and made read-only - no more changes to them.
REMAINING_ARGS+=("$@")
export REMAINING_ARGS
readonly REMAINING_ARGS
}
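# Example (illustrative) invocations handled by the parsing above (flag values are just examples):
#   ./breeze --backend postgres --db-reset shell
#   ./breeze docker-compose pull -- --ignore-pull-failures
# Everything after '--' lands in REMAINING_ARGS and is passed through to the command being run.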
#######################################################################################################
#
# Prepares nicely formatted versions of the lists of allowed and default values defined in Breeze.
# It is used in the help command to print the lists in a readable format and fold the lists
# so that they fit the screen width.
#
# Used global variables:
# _breeze_allowed_*
#
# Updated global constants:
# FORMATTED_* constant variables that can be used in Breeze Help output
#
#######################################################################################################
function breeze::prepare_formatted_versions() {
local indent=15
local list_prefix
list_prefix=$(printf "%-${indent}s" " ")
local indented_screen_width=$((SCREEN_WIDTH - indent))
FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS=$(echo "${_breeze_allowed_python_major_minor_versions=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS
FORMATTED_BACKENDS=$(echo "${_breeze_allowed_backends=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_BACKENDS
FORMATTED_STATIC_CHECKS=$(echo "${_breeze_allowed_static_checks=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_STATIC_CHECKS
FORMATTED_INTEGRATIONS=$(echo "${_breeze_allowed_integrations=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_INTEGRATIONS
FORMATTED_KUBERNETES_MODES=$(echo "${_breeze_allowed_kubernetes_modes=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_KUBERNETES_MODES
FORMATTED_KUBERNETES_VERSIONS=$(echo "${_breeze_allowed_kubernetes_versions=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_KUBERNETES_VERSIONS
FORMATTED_KIND_VERSIONS=$(echo "${_breeze_allowed_kind_versions=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_KIND_VERSIONS
FORMATTED_HELM_VERSIONS=$(echo "${_breeze_allowed_helm_versions=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_HELM_VERSIONS
FORMATTED_KIND_OPERATIONS=$(echo "${_breeze_allowed_kind_operations=""}" | tr '\n' ' ' |
fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_KIND_OPERATIONS
FORMATTED_INSTALL_AIRFLOW_VERSIONS=$(echo "${_breeze_allowed_install_airflow_versions=""}" |
tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_INSTALL_AIRFLOW_VERSIONS
FORMATTED_INSTALLATION_METHOD=$(echo "${_breeze_allowed_installation_methods=""}" |
tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_INSTALLATION_METHOD
FORMATTED_GENERATE_CONSTRAINTS_MODE=$(echo "${_breeze_allowed_generate_constraints_modes=""}" |
tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_GENERATE_CONSTRAINTS_MODE
FORMATTED_GITHUB_REGISTRY=$(echo "${_breeze_allowed_github_registrys=""}" |
tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_GITHUB_REGISTRY
FORMATTED_POSTGRES_VERSIONS=$(echo "${_breeze_allowed_postgres_versions=""}" |
tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_POSTGRES_VERSIONS
FORMATTED_MYSQL_VERSIONS=$(echo "${_breeze_allowed_mysql_versions=""}" |
tr '\n' ' ' | fold -w "${indented_screen_width}" -s | sed "s/^/${list_prefix}/")
readonly FORMATTED_MYSQL_VERSIONS
FORMATTED_DEFAULT_CI_EXTRAS=$(echo "${DEFAULT_CI_EXTRAS=}" |
tr ',' ' ' | fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
readonly FORMATTED_DEFAULT_CI_EXTRAS
FORMATTED_DEFAULT_PROD_EXTRAS=$(echo "${DEFAULT_PROD_EXTRAS=}" |
tr ',' ' ' | fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
readonly FORMATTED_DEFAULT_PROD_EXTRAS
FORMATTED_TEST_TYPES=$(echo "${_breeze_allowed_test_types=""}" |
fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
readonly FORMATTED_TEST_TYPES
FORMATTED_PACKAGE_FORMATS=$(echo "${_breeze_allowed_package_formats=""}" |
fold -w "${indented_screen_width}" -s | sed "s/ /,/g; s/^/${list_prefix}/")
readonly FORMATTED_PACKAGE_FORMATS
}
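# Example (illustrative): with indent=15 and SCREEN_WIDTH=100, a space-separated list of allowed
# values is folded at 85 columns and every resulting line is prefixed with 15 spaces, so it can be
# embedded verbatim in the indented help output prepared below.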
#######################################################################################################
#
# Prepares usage information for all the commands in Breeze.
# Those usage commands are stored in appropriate environment variables.
#
# Created constants:
# USAGE_*
# DETAILED_USAGE_*
#
#######################################################################################################
# shellcheck disable=SC2034,SC2090,SC2089,SC2155
function breeze::prepare_usage() {
# Note that macOS uses Bash 3.* and we cannot use associative arrays
export USAGE_SHELL="[Default] Enters interactive shell in the container"
readonly USAGE_SHELL
export USAGE_EXEC="Execs into running breeze container in new terminal"
readonly USAGE_EXEC
export USAGE_BUILD_DOCS="Builds documentation in the container"
readonly USAGE_BUILD_DOCS
export USAGE_BUILD_IMAGE="Builds CI or Production docker image"
readonly USAGE_BUILD_IMAGE
export USAGE_CLEANUP_IMAGE="Cleans up the container image created"
readonly USAGE_CLEANUP_IMAGE
export USAGE_DOCKER_COMPOSE="Executes specified docker-compose command"
readonly USAGE_DOCKER_COMPOSE
export USAGE_FLAGS="Shows all breeze's flags"
readonly USAGE_FLAGS
export USAGE_GENERATE_CONSTRAINTS="Generates pinned constraint files"
readonly USAGE_GENERATE_CONSTRAINTS
export USAGE_INITIALIZE_LOCAL_VIRTUALENV="Initializes local virtualenv"
readonly USAGE_INITIALIZE_LOCAL_VIRTUALENV
export USAGE_PREPARE_PROVIDER_DOCUMENTATION="Prepares provider packages documentation"
readonly USAGE_PREPARE_PROVIDER_DOCUMENTATION
export USAGE_PREPARE_AIRFLOW_PACKAGES="Prepares airflow packages"
readonly USAGE_PREPARE_AIRFLOW_PACKAGES
export USAGE_PREPARE_PROVIDER_PACKAGES="Prepares provider packages"
readonly USAGE_PREPARE_PROVIDER_PACKAGES
export USAGE_PUSH_IMAGE="Pushes images to registry"
readonly USAGE_PUSH_IMAGE
export USAGE_KIND_CLUSTER="Manages KinD cluster on the host"
readonly USAGE_KIND_CLUSTER
export USAGE_SETUP_AUTOCOMPLETE="Sets up autocomplete for breeze"
readonly USAGE_SETUP_AUTOCOMPLETE
export USAGE_START_AIRFLOW="Starts Scheduler and Webserver and enters the shell"
readonly USAGE_START_AIRFLOW
export USAGE_STOP="Stops the docker-compose environment"
readonly USAGE_STOP
export USAGE_RESTART="Restarts the docker-compose environment including DB cleanup"
readonly USAGE_RESTART
export USAGE_STATIC_CHECK="Performs selected static check for changed files"
readonly USAGE_STATIC_CHECK
export USAGE_TOGGLE_SUPPRESS_CHEATSHEET="Toggles on/off cheatsheet"
readonly USAGE_TOGGLE_SUPPRESS_CHEATSHEET
export USAGE_TOGGLE_SUPPRESS_ASCIIART="Toggles on/off asciiart"
readonly USAGE_TOGGLE_SUPPRESS_ASCIIART
export USAGE_TESTS="Runs selected tests in the container"
readonly USAGE_TESTS
export USAGE_HELP="Shows this help message"
readonly USAGE_HELP
export USAGE_HELP_ALL="Shows detailed help for all commands and flags"
readonly USAGE_HELP_ALL
export DETAILED_USAGE_SHELL="
${CMDNAME} shell [FLAGS] [-- <EXTRA_ARGS>]
This is the default subcommand if no subcommand is used.
Enters an interactive shell where you can run all tests, start the Airflow webserver, scheduler,
workers, interact with the database, run DAGs etc. The shell is executed in the container and,
in case integrations are chosen, the integrations will be started as separate docker containers
- under docker-compose supervision. Local sources are by default mounted within the container
so you can edit them locally and run tests immediately in the container. Several folders
('files', 'dist') are also mounted so that you can exchange files between the host and container.
The 'files/airflow-breeze-config/variables.env' file can contain additional variables
and setup. This file is automatically sourced when you enter the container. Database
and webserver ports are forwarded to appropriate database/webserver so that you can
connect to it from your host environment.
You can also pass <EXTRA_ARGS> after '--'; they will be passed as bash parameters. This is
especially useful to pass bash options, for example -c to execute a command:
'${CMDNAME} shell -- -c \"ls -la\"'
'${CMDNAME} -- -c \"ls -la\"'
For DockerHub pulls, the --dockerhub-user and --dockerhub-repo flags can be used to specify
the repository to pull from. For a GitHub repository, the --github-repository
flag can be used for the same purpose. You can also use
--github-image-id <COMMIT_SHA>|<RUN_ID> in case you want to pull the image
with specific COMMIT_SHA tag or RUN_ID.
'${CMDNAME} shell \\
--github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
'${CMDNAME} \\
--github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - pull/use image with SHA
'${CMDNAME} shell \\
--github-image-id 209845560' - pull/use image with RUN_ID
'${CMDNAME} \\
--github-image-id 209845560' - pull/use image with RUN_ID
"
readonly DETAILED_USAGE_SHELL
export DETAILED_USAGE_EXEC="
${CMDNAME} exec [-- <EXTRA_ARGS>]
Execs into an interactive shell in an already running container. The container must already
be started by the breeze shell command. If you are not familiar with tmux, this is the best
way to run multiple processes in the same container at the same time, for example the scheduler,
webserver, workers, database console and an interactive terminal.
"
export DETAILED_USAGE_BUILD_DOCS="
${CMDNAME} build-docs [-- <EXTRA_ARGS>]
Builds Airflow documentation. The documentation is built inside a docker container - to
maintain the same build environment for everyone. Appropriate sources are mapped from
the host to the container so that the latest sources are used. The folders where the documentation
is generated ('docs/_build') are also mounted to the container - this way the results of
the documentation build are available on the host.
The possible extra args are: --docs-only, --spellcheck-only, --package-filter, --help
"
readonly DETAILED_USAGE_BUILD_DOCS
export DETAILED_USAGE_BUILD_IMAGE="
${CMDNAME} build-image [FLAGS]
Builds docker image (CI or production) without entering the container. You can pass
additional options to this command, such as:
Choosing python version:
'--python'
Choosing cache option:
'--build-cache-local', '--build-cache-pulled', or '--build-cache-disabled'
Choosing whether to force pull images or force build the image:
'--force-build-images',
'--force-pull-images', '--force-pull-base-python-image'
You can also pass '--production-image' flag to build production image rather than CI image.
For DockerHub pulls, the '--dockerhub-user' and '--dockerhub-repo' flags can be used to specify
the repository to pull from. For GitHub repository, the '--github-repository'
flag can be used for the same purpose. You can also use
'--github-image-id <COMMIT_SHA>|<RUN_ID>' in case you want to pull the image with
specific COMMIT_SHA tag or RUN_ID.
Flags:
$(breeze::flag_airflow_variants)
$(breeze::flag_choose_different_airflow_version)
$(breeze::flag_production_image)
$(breeze::flag_build_docker_images)
$(breeze::flag_pull_push_docker_images)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_BUILD_IMAGE
export DETAILED_USAGE_CLEANUP_IMAGE="
${CMDNAME} cleanup-image [FLAGS]
Removes the breeze-related images created in your local docker image cache. This will
not reclaim space in the docker cache. You need to run 'docker system prune' (optionally
with --all) to reclaim that space.
Flags:
$(breeze::flag_airflow_variants)
$(breeze::flag_production_image)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_CLEANUP_IMAGE
export DETAILED_USAGE_DOCKER_COMPOSE="
${CMDNAME} docker-compose [FLAGS] COMMAND [-- <EXTRA_ARGS>]
Runs the docker-compose command instead of entering the environment. Use 'help' as the command
to see the available commands. The <EXTRA_ARGS> passed after '--' are treated
as additional options passed to docker-compose. For example:
'${CMDNAME} docker-compose pull -- --ignore-pull-failures'
Flags:
$(breeze::flag_airflow_variants)
$(breeze::flag_backend_variants)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_DOCKER_COMPOSE
export DETAILED_USAGE_FLAGS="
Explains in detail all the flags that can be used with breeze.
"
readonly DETAILED_USAGE_FLAGS
export DETAILED_USAGE_PREPARE_PROVIDER_DOCUMENTATION="
${CMDNAME} prepare-provider-documentation [FLAGS] [PACKAGE_ID ...]
Prepares documentation files for provider packages.
The command is optionally followed by the list of packages to generate readme for.
If the first parameter is not formatted as a date, then today is regenerated.
If no packages are specified, readmes for all packages are generated.
If no date is specified, current date + 3 days is used (allowing for PMC votes to pass).
Examples:
'${CMDNAME} prepare-provider-documentation' or
'${CMDNAME} prepare-provider-documentation --version-suffix-for-pypi rc1'
General form:
'${CMDNAME} prepare-provider-documentation <PACKAGE_ID> ...'
* <PACKAGE_ID> is usually a directory in the airflow/providers folder (for example
'google'), but in several cases, it might be one level deeper separated with
'.', for example 'apache.hive'
Flags:
$(breeze::flag_version_suffix)
$(breeze::flag_packages)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_PREPARE_PROVIDER_DOCUMENTATION
export DETAILED_USAGE_GENERATE_CONSTRAINTS="
${CMDNAME} generate-constraints [FLAGS]
Generates pinned constraint files with all extras from setup.py. Those files are generated in
the files folder - separate files for different python versions. When pushed to the orphan
constraints-master, constraints-2-0 and constraints-1-10 branches, those constraint files are used
to generate repeatable CI builds as well as to run repeatable production image builds and
upgrades when you want to include installing or updating some of the providers
released at the time a particular airflow version was released. You can use those
constraints to predictably install released Airflow versions. This is mainly used to test
the constraint generation or to manually fix the constraints - they are pushed to the orphan
branches by a successful scheduled CRON job in CI automatically, but sometimes a manual fix
might be needed.
Flags:
$(breeze::flag_generate_constraints)
$(breeze::flag_airflow_variants)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_GENERATE_CONSTRAINTS
export DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV="
${CMDNAME} initialize-local-virtualenv [FLAGS]
Initializes a locally created virtualenv, installing all dependencies of Airflow
while taking into account the constraints for the specified version.
This local virtualenv can be used to aid auto-completion and IDE support as
well as to run unit tests directly from the IDE. You need to have the virtualenv
activated before running this command.
Flags:
$(breeze::flag_airflow_variants)
"
readonly DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV
export DETAILED_USAGE_PREPARE_AIRFLOW_PACKAGES="
${CMDNAME} prepare-airflow-packages [FLAGS]
Prepares airflow packages (sdist and wheel) in the dist folder. Note that the
prepare-provider-packages command cleans up the dist folder, so if you also want
to generate provider packages, make sure you run prepare-provider-packages first,
and prepare-airflow-packages second.
General form:
'${CMDNAME} prepare-airflow-packages'
Flags:
$(breeze::flag_packages)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_PREPARE_AIRFLOW_PACKAGES
export DETAILED_USAGE_PREPARE_PROVIDER_PACKAGES="
${CMDNAME} prepare-provider-packages [FLAGS] [PACKAGE_ID ...]
Prepares provider packages. You can provide an optional list of packages to prepare.
If no packages are specified, packages for all providers are prepared. You can specify the optional
--version-suffix-for-svn flag to generate rc candidate packages to upload to SVN or the
--version-suffix-for-pypi flag to generate rc candidates for PyPI packages. You can also
provide both suffixes in case you prepare alpha/beta versions. The packages are prepared in
the dist folder. Note that this command also cleans up the dist folder before generating the packages
so that you do not have accidental files there. This will delete the airflow package if it was
prepared there, so make sure you run prepare-provider-packages first,
and prepare-airflow-packages second.
Examples:
'${CMDNAME} prepare-provider-packages' or
'${CMDNAME} prepare-provider-packages google' or
'${CMDNAME} prepare-provider-packages --package-format wheel google' or
'${CMDNAME} prepare-provider-packages --version-suffix-for-svn rc1 http google amazon' or
'${CMDNAME} prepare-provider-packages --version-suffix-for-pypi rc1 http google amazon'
'${CMDNAME} prepare-provider-packages --version-suffix-for-pypi a1
--version-suffix-for-svn a1 http google amazon'
General form:
'${CMDNAME} prepare-provider-packages [--package-format PACKAGE_FORMAT] \\
[--version-suffix-for-svn|--version-suffix-for-pypi] <PACKAGE_ID> ...'
* <PACKAGE_ID> is usually directory in the airflow/providers folder (for example
'google'), but in several cases, it might be one level deeper separated with '.'
for example 'apache.hive'
Flags:
$(breeze::flag_packages)
$(breeze::flag_version_suffix)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_PREPARE_PROVIDER_PACKAGES
export DETAILED_USAGE_PUSH_IMAGE="
${CMDNAME} push-image [FLAGS]
Pushes images to docker registry. You can push the images to DockerHub registry (default)
or to the GitHub registry (if --use-github-registry flag is used).
For DockerHub pushes --dockerhub-user and --dockerhub-repo flags can be used to specify
the repository to push to. For GitHub repository, the --github-repository
flag can be used for the same purpose. You can also add
--github-image-id <COMMIT_SHA>|<RUN_ID> in case you want to push image with specific
SHA tag or run id. In case you specify --github-repository or --github-image-id, you
do not need to specify --use-github-registry flag.
You can also add the --production-image flag to switch to the production image (default is the CI one).
Examples:
'${CMDNAME} push-image' or
'${CMDNAME} push-image --dockerhub-user user' to push to your private registry or
'${CMDNAME} push-image --production-image' - to push production image or
'${CMDNAME} push-image --use-github-registry' - to push to GitHub image registry or
'${CMDNAME} push-image \\
--github-repository user/airflow' - to push to your user's fork
'${CMDNAME} push-image \\
--github-image-id 9a621eaa394c0a0a336f8e1b31b35eff4e4ee86e' - to push with COMMIT_SHA
'${CMDNAME} push-image \\
--github-image-id 209845560' - to push with RUN_ID
Flags:
$(breeze::flag_pull_push_docker_images)
$(breeze::flag_verbosity)
"
readonly DETAILED_USAGE_PUSH_IMAGE
export DETAILED_USAGE_KIND_CLUSTER="
${CMDNAME} kind-cluster [FLAGS] OPERATION
Manages the host-side Kind Kubernetes cluster that is used to run Kubernetes integration tests.
It allows you to start/stop/restart the Kind Kubernetes cluster, check its status and deploy Airflow to it.
This enables you to run tests inside the breeze environment with the latest airflow images.
Note that in case of deploying airflow, the first step is to rebuild the image and load it
into the cluster, so you can also pass appropriate build image flags that will influence
rebuilding the production image. Operation is one of:
${FORMATTED_KIND_OPERATIONS}
The last two operations - shell and k9s - allow you to perform interactive testing with
kubernetes tests. You can enter the shell from which you can run kubernetes tests and in
another terminal you can start the k9s CLI to debug the kubernetes instance. It is an easy
way to debug the kubernetes deployments.
You can read more about k9s at https://k9scli.io/
Flags:
$(breeze::flag_airflow_variants)
$(breeze::flag_build_docker_images)
"
readonly DETAILED_USAGE_KIND_CLUSTER
export DETAILED_USAGE_SETUP_AUTOCOMPLETE="
${CMDNAME} setup-autocomplete
Sets up autocomplete for breeze commands. Once you do it, you need to re-enter the bash
shell; then, when typing a breeze command, <TAB> will provide autocomplete for
parameters and values.
"
readonly DETAILED_USAGE_SETUP_AUTOCOMPLETE
export DETAILED_USAGE_START_AIRFLOW="
${CMDNAME} start-airflow
Like the Shell command, this will enter the interactive shell, but it will also automatically
start the Scheduler and the Webserver. It will leave you in a tmux session where you
can also observe what is happening in your Airflow.
This is a convenient way to set up a development environment. Your dags will be loaded from the
'files/dags' folder on your host machine (it could take some time).
If you want to load default connections and example dags you can use the dedicated flags.
Flags:
$(breeze::flag_start_airflow)
"
readonly DETAILED_USAGE_START_AIRFLOW
export DETAILED_USAGE_STOP="
${CMDNAME} stop
Brings down the running docker compose environment. When you exit the environment, the docker
containers keep running so that the next startup is faster. But they take quite a lot of
memory and CPU. This command stops all running containers from the environment.
Flags:
$(breeze::flag_stop_airflow)
"
readonly DETAILED_USAGE_STOP
export DETAILED_USAGE_RESTART="
${CMDNAME} restart [FLAGS]
Restarts the running docker compose environment. When you restart the environment, the docker
containers will be restarted. That includes cleaning up the databases. This is
especially useful if you switch between different versions of Airflow.
Flags:
$(breeze::flag_stop_airflow)
"
readonly DETAILED_USAGE_RESTART
export DETAILED_USAGE_STATIC_CHECK="
${CMDNAME} static-check [FLAGS] static_check [-- <EXTRA_ARGS>]
Run selected static checks for currently changed files. You should specify the static check that
you would like to run, or 'all' to run all checks. One of:
${FORMATTED_STATIC_CHECKS}
You can pass extra arguments including options to the pre-commit framework as
<EXTRA_ARGS> passed after --. For example:
'${CMDNAME} static-check mypy' or
'${CMDNAME} static-check mypy -- --files tests/core.py'
'${CMDNAME} static-check mypy -- --all-files'
To check all files that differ between your current branch and master run:
'${CMDNAME} static-check all -- --from-ref \$(git merge-base master HEAD) --to-ref HEAD'
To check all files that are in the HEAD commit run:
'${CMDNAME} static-check mypy -- --from-ref HEAD^ --to-ref HEAD'
You can see all the options by adding --help EXTRA_ARG:
'${CMDNAME} static-check mypy -- --help'
"
readonly DETAILED_USAGE_STATIC_CHECK
export DETAILED_USAGE_TESTS="
${CMDNAME} tests [FLAGS] [TEST_TARGET ..] [-- <EXTRA_ARGS>]
Run the specified unit test target. There might be multiple
targets specified, separated with commas. The <EXTRA_ARGS> passed after -- are treated
as additional options passed to pytest. You can pass 'tests' as target to
run all tests. For example:
'${CMDNAME} tests tests/core/test_core.py -- --logging-level=DEBUG'
'${CMDNAME} tests tests'
Flags:
$(breeze::flag_tests)
"
readonly DETAILED_USAGE_TESTS
export DETAILED_USAGE_TOGGLE_SUPPRESS_CHEATSHEET="
${CMDNAME} toggle-suppress-cheatsheet
Toggles on/off cheatsheet displayed before starting bash shell.
"
readonly DETAILED_USAGE_TOGGLE_SUPPRESS_CHEATSHEET
export DETAILED_USAGE_TOGGLE_SUPPRESS_ASCIIART="
${CMDNAME} toggle-suppress-asciiart
Toggles on/off asciiart displayed before starting bash shell.
"
readonly DETAILED_USAGE_TOGGLE_SUPPRESS_ASCIIART
export DETAILED_USAGE_HELP="
${CMDNAME} help
Shows general help message for all commands.
"
readonly DETAILED_USAGE_HELP
export DETAILED_USAGE_HELP_ALL="
${CMDNAME} help-all
Shows detailed help for all commands and flags.
"
readonly DETAILED_USAGE_HELP_ALL
}
# shellcheck enable=all
#######################################################################################################
#
# Gets an environment variable value, converting the lowercase command name into the variable name
# Arguments:
# prefix for the variable to add
# name of the variable
# Outputs:
# Writes the value of the resolved variable to stdout
#######################################################################################################
function breeze::get_variable_from_lowercase_name() {
local prefix="${1}"
local name="${2}"
local suffix
suffix="$(echo "${name}" | tr "[:lower:]-" "[:upper:]_")"
local variable_name="${prefix}_${suffix}"
echo "${!variable_name}"
}
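# Illustrative example of the name mangling above (comment only, not executed; the command
# name "build-image" and the USAGE_BUILD_IMAGE variable are assumptions for illustration):
#   breeze::get_variable_from_lowercase_name "USAGE" "build-image"
# turns "build-image" into "BUILD_IMAGE" via tr and echoes the value of USAGE_BUILD_IMAGE
# through bash indirect expansion ("${!variable_name}").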
#######################################################################################################
#
# Gets usage information from lowercase command
# Arguments:
# lowercase command name
# Outputs:
# usage information for the command.
#######################################################################################################
function breeze::get_usage() {
breeze::get_variable_from_lowercase_name "USAGE" "${1}"
}
#######################################################################################################
#
# Gets detailed usage information from lowercase command
# Arguments:
# lowercase command name
# Outputs:
# Detailed usage information for the command.
#######################################################################################################
function breeze::get_detailed_usage() {
breeze::get_variable_from_lowercase_name "DETAILED_USAGE" "${1}"
}
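# Illustrative example (comment only, not executed): 'breeze::get_detailed_usage "stop"'
# resolves to the DETAILED_USAGE_STOP variable exported above and prints its content.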
#######################################################################################################
#
# Prints general usage information for all commands.
#
# Globals used:
# CMDNAME
# _breeze_commands
# _breeze_extra_arg_commands
# _breeze_help_commands
#
# Outputs:
# General usage information for all commands.
#######################################################################################################
function breeze::usage() {
echo "
usage: ${CMDNAME} [FLAGS] [COMMAND] -- <EXTRA_ARGS>
By default the script enters the CI container and drops you into a bash shell, but you can choose
one of the commands to run specific actions instead.
Add --help after each command to see details:
Commands without arguments:
"
for subcommand in ${_breeze_commands}; do
printf " %-40s %s\n" "${subcommand}" "$(breeze::get_usage "${subcommand}")"
done
echo "
Commands with arguments:
"
# shellcheck disable=SC2154
for subcommand in ${_breeze_extra_arg_commands}; do
printf " %-35s%-10s %s\n" "${subcommand}" "<ARG>" "$(breeze::get_usage "${subcommand}")"
done
echo "
Help commands:
"
# shellcheck disable=SC2154
for subcommand in ${_breeze_help_commands}; do
printf " %-40s %s\n" "${subcommand}" "$(breeze::get_usage "${subcommand}")"
done
echo
}
#######################################################################################################
#
# Prints detailed usage for command specified
#
# Argument:
# Command name.
#
# Outputs:
# Detailed usage information for the command
#######################################################################################################
function breeze::detailed_usage() {
subcommand=${1}
echo "
Detailed usage for command: ${subcommand}
$(breeze::get_detailed_usage "${subcommand}")
"
}
#######################################################################################################
#
# Prints flag footer - common to all commands.
#
# Outputs:
# Footer common for all commands.
#######################################################################################################
function breeze::flag_footer() {
echo "
Run '${CMDNAME} flags' to see all applicable flags.
"
}
#######################################################################################################
#
# Prints flags for different variants of airflow to use
#
# Global constants used:
# FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_airflow_variants() {
echo "
-p, --python PYTHON_MAJOR_MINOR_VERSION
Python version used for the image. This is always major/minor version.
Note that versions 2.7 and 3.5 are only valid when installing Airflow 1.10 with
--install-airflow-version or --install-airflow-reference flags.
One of:
${FORMATTED_PYTHON_MAJOR_MINOR_VERSIONS}
"
}
#######################################################################################################
#
# Prints flags for different backend to use
#
# Global constants used:
# FORMATTED_BACKENDS
# FORMATTED_POSTGRES_VERSIONS
# FORMATTED_MYSQL_VERSIONS
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_backend_variants() {
echo "
-b, --backend BACKEND
Backend to use for tests - it determines which database is used.
One of:
${FORMATTED_BACKENDS}
Default: ${_breeze_default_backend:=}
--postgres-version POSTGRES_VERSION
Postgres version used. One of:
${FORMATTED_POSTGRES_VERSIONS}
--mysql-version MYSQL_VERSION
Mysql version used. One of:
${FORMATTED_MYSQL_VERSIONS}
"
}
#######################################################################################################
#
# Prints production image flags
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_production_image() {
echo "
-I, --production-image
Use production image for entering the environment and builds (not for tests).
"
}
#######################################################################################################
#
# Prints additional breeze action flags
#
# Global constants used:
# FORMATTED_INTEGRATIONS
#
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_breeze_actions() {
echo "
-d, --db-reset
Resets the database at entry to the environment. It will drop all the tables
and data and recreate the DB from scratch even if the 'restart' command was not used.
Combined with the 'restart' command it enters the environment in a state that is
ready to start the Airflow webserver/scheduler/worker. Without the switch, the database
does not have any tables and you need to reset the db manually.
-i, --integration INTEGRATION
Integration to start during tests - it determines which integrations are started
for integration tests. There can be more than one integration started, or 'all' to
start all integrations. Selected integrations are not saved for future execution.
One of:
${FORMATTED_INTEGRATIONS}
--init-script INIT_SCRIPT_FILE
Initialization script name - sourced from files/airflow-breeze-config. Default value:
init.sh. It will be executed after the environment is configured and started.
"
}
#######################################################################################################
#
# Prints Kubernetes action flags
#
# Global constants used:
# FORMATTED_KUBERNETES_MODES
# FORMATTED_KUBERNETES_VERSIONS
# FORMATTED_KIND_VERSIONS
# FORMATTED_HELM_VERSIONS
#
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_kubernetes_configuration() {
echo "
Configuration for the KinD Kubernetes cluster and tests:
-K, --kubernetes-mode KUBERNETES_MODE
Kubernetes mode - only used in case one of kind-cluster commands is used.
One of:
${FORMATTED_KUBERNETES_MODES}
Default: ${_breeze_default_kubernetes_mode:=}
-V, --kubernetes-version KUBERNETES_VERSION
Kubernetes version - only used in case one of kind-cluster commands is used.
One of:
${FORMATTED_KUBERNETES_VERSIONS}
Default: ${_breeze_default_kubernetes_version:=}
--kind-version KIND_VERSION
Kind version - only used in case one of kind-cluster commands is used.
One of:
${FORMATTED_KIND_VERSIONS}
Default: ${_breeze_default_kind_version:=}
--helm-version HELM_VERSION
Helm version - only used in case one of kind-cluster commands is used.
One of:
${FORMATTED_HELM_VERSIONS}
Default: ${_breeze_default_helm_version:=}
"
}
#######################################################################################################
#
# Prints flags that determine what is the source mounting scheme
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_local_file_mounting() {
echo "
-l, --skip-mounting-local-sources
Skips mounting local volume with sources - you get exactly what is in the
docker image rather than your current local sources of Airflow.
"
}
#######################################################################################################
#
# Prints flags that allow choosing a different airflow version to install or run
#
# Global constants used:
# FORMATTED_INSTALL_AIRFLOW_VERSIONS
#
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_choose_different_airflow_version() {
echo "
-a, --install-airflow-version INSTALL_AIRFLOW_VERSION
In CI image, installs Airflow (in entrypoint) from PIP released version or using
the installation method specified (sdist, wheel, none).
In the PROD image, the installation of the selected method or version happens during image building.
For the PROD image, the 'none' option is not valid.
One of:
${FORMATTED_INSTALL_AIRFLOW_VERSIONS}
When 'none' is used, you can install airflow from local packages. When building image,
airflow package should be added to 'docker-context-files' and
--install-from-docker-context-files flag should be used. When running an image, airflow
package should be added to dist folder and --install-packages-from-dist flag should be used.
-t, --install-airflow-reference INSTALL_AIRFLOW_REFERENCE
If specified, installs Airflow directly from reference in GitHub. This happens at
image building time in production image and at container entering time for CI image.
This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1.
--installation-method INSTALLATION_METHOD
Method of installing airflow for production image - either from the sources ('.')
or from package 'apache-airflow' to install from PyPI.
Default in Breeze is to install from sources. One of:
${FORMATTED_INSTALLATION_METHOD}
--no-rbac-ui
Disables RBAC UI when Airflow 1.10.* is installed.
--install-packages-from-dist
If specified, it will look for packages placed in the dist folder and will install the
packages after installing Airflow. This is useful for testing provider
packages.
--upgrade-to-newer-dependencies
Upgrades PIP packages to latest versions available without looking at the constraints.
--continue-on-pip-check-failure
Continue even if 'pip check' fails.
"
}
#######################################################################################################
#
# Prints flags that allow to choose variants of constraint generation
#
# Global constants used:
# GENERATE_CONSTRAINTS_MODES
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_generate_constraints() {
echo "
--generate-constraints-mode GENERATE_CONSTRAINTS_MODE
Mode of generating constraints - determines whether providers are installed when generating
constraints and which versions of them are used (either the ones from sources or the ones
from pypi).
One of:
${FORMATTED_GENERATE_CONSTRAINTS_MODE}
"
}
#######################################################################################################
#
# Prints flags that allow to set assumed answers to questions
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_assume_answers_to_questions() {
echo "
-y, --assume-yes
Assume 'yes' answer to all questions.
-n, --assume-no
Assume 'no' answer to all questions.
-q, --assume-quit
Assume 'quit' answer to all questions.
"
}
#######################################################################################################
#
# Prints flags that are used for credential forwarding
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_credentials() {
echo "
-f, --forward-credentials
Forwards host credentials to docker container. Use with care as it will make
your credentials available to everything you install in Docker.
"
}
#######################################################################################################
#
# Prints flags that control package preparation
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_packages() {
echo "
--package-format PACKAGE_FORMAT
Chooses format of packages to prepare.
One of:
${FORMATTED_PACKAGE_FORMATS}
Default: ${_breeze_default_package_format:=}
"
}
#######################################################################################################
#
# Prints flags that control verbosity
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_verbosity() {
echo "
-v, --verbose
Show verbose information about executed docker, kind, kubectl, helm commands. Useful for
debugging - when you run breeze with --verbose flags you will be able to see the commands
executed under the hood and copy&paste them to your terminal to debug them more easily.
Note that you can further increase verbosity and see all the commands executed by breeze
by running 'export VERBOSE_COMMANDS=\"true\"' before running breeze.
--dry-run-docker
Only show docker commands to execute instead of actually executing them. The docker
commands are printed in yellow color.
"
}
#######################################################################################################
#
# Prints information about help flag
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_help() {
echo "
-h, --help
Shows detailed help message for the command specified.
"
}
#######################################################################################################
#
# Prints flags controlling docker build process
#
# Global constants used:
# FORMATTED_DEFAULT_CI_EXTRAS
# FORMATTED_DEFAULT_PROD_EXTRAS
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_build_docker_images() {
echo "
-F, --force-build-images
Forces building of the local docker images. The images are rebuilt
automatically for the first time or when changes are detected in
package-related files, but you can force it using this flag.
-P, --force-pull-images
Forces pulling of images from DockerHub before building to populate cache. The
images are pulled by default only for the first time you run the
environment, later the locally build images are used as cache.
--force-pull-base-python-image
Forces pulling of Python base image from DockerHub before building to
populate cache. This should only be run in case we need to update to latest available
Python base image. This should be a rare and manually triggered event. Also this flag
is used in the scheduled run in CI when we rebuild all the images from the scratch
and run the tests to see if the latest python images do not fail our tests.
Customization options:
-E, --extras EXTRAS
Extras to pass to build images. The defaults are different for CI and production images:
CI image:
${FORMATTED_DEFAULT_CI_EXTRAS}
Production image:
${FORMATTED_DEFAULT_PROD_EXTRAS}
--image-tag TAG
Additional tag in the image.
--skip-installing-airflow-providers-from-sources
By default 'pip install' in Airflow 2.0 installs only the provider packages that
are needed by the extras. When you build the image during development (which is the
default in Breeze), all providers are installed from sources by default.
You can disable it by adding this flag, but then you have to install providers from
wheel packages via the --install-packages-from-dist flag.
--disable-pypi-when-building
Disable installing Airflow from pypi when building. If you use this flag and want
to install Airflow, you have to install it from packages placed in
'docker-context-files' and use --install-from-local-files-when-building flag.
--additional-extras ADDITIONAL_EXTRAS
Additional extras to pass to build images. The default is no additional extras.
--additional-python-deps ADDITIONAL_PYTHON_DEPS
Additional python dependencies to use when building the images.
--dev-apt-command DEV_APT_COMMAND
The basic command executed before dev apt deps are installed.
--additional-dev-apt-command ADDITIONAL_DEV_APT_COMMAND
Additional command executed before dev apt deps are installed.
--additional-dev-apt-deps ADDITIONAL_DEV_APT_DEPS
Additional apt dev dependencies to use when building the images.
--dev-apt-deps DEV_APT_DEPS
The basic apt dev dependencies to use when building the images.
--additional-dev-apt-envs ADDITIONAL_DEV_APT_ENVS
Additional environment variables set when adding dev dependencies.
--runtime-apt-command RUNTIME_APT_COMMAND
The basic command executed before runtime apt deps are installed.
--additional-runtime-apt-command ADDITIONAL_RUNTIME_APT_COMMAND
Additional command executed before runtime apt deps are installed.
--runtime-apt-deps RUNTIME_APT_DEPS
The basic apt runtime dependencies to use when building the images.
--additional-runtime-apt-deps ADDITIONAL_RUNTIME_APT_DEPS
Additional apt runtime dependencies to use when building the images.
--additional-runtime-apt-envs ADDITIONAL_RUNTIME_APT_ENVS
Additional environment variables set when adding runtime dependencies.
Build options:
--disable-mysql-client-installation
Disables installation of the mysql client, which might be problematic if you are building
the image in a controlled environment. Only valid for the production image.
--constraints-location
URL to the constraints file. In case of the production image it can also be a path to the
constraint file placed in 'docker-context-files' folder, in which case it has to be
in the form of '/docker-context-files/<NAME_OF_THE_FILE>'
--disable-pip-cache
Disables GitHub PIP cache during the build. Useful if GitHub is not reachable during build.
--install-from-local-files-when-building
This flag is used during image building. If it is used in addition to installing
Airflow from PyPI, the packages are installed from the .whl and .tar.gz packages placed
in the 'docker-context-files' folder. The same flag can be used when entering the CI
image - in this case the .whl and .tar.gz files will also be installed automatically.
-C, --force-clean-images
Force building images with cache disabled. This will remove the pulled or built images
and start building images from scratch. This might take a long time.
-r, --skip-rebuild-check
Skips checking image for rebuilds. It will use whatever image is available locally/pulled.
-L, --build-cache-local
Uses local cache to build images. No pulled images will be used, but results of local
builds in the Docker cache are used instead. The first time, this will take longer than
when the pulled cache is used, but subsequent '--build-cache-local' builds will be
faster as they will mostly use the locally built cache.
This is the default strategy used by the Production image builds.
-U, --build-cache-pulled
Uses images pulled from registry (either DockerHub or GitHub depending on
--use-github-registry flag) to build images. The pulled images will be used as cache.
Those builds are usually faster than '--build-cache-local' builds, except when
the registry images are not yet updated. The DockerHub images are updated nightly and the
GitHub images are updated after merges to master, so it might be that the images are still
outdated vs. the latest version of the Dockerfiles you are using. In this case,
'--build-cache-local' might be faster, especially if you iterate and change the
Dockerfiles yourself.
This is the default strategy used by the CI image builds.
-X, --build-cache-disabled
Disables cache during docker builds. This is useful if you want to make sure you
rebuild everything from scratch.
This strategy is used by default for both Production and CI images for the scheduled
(nightly) builds in CI.
"
}
#######################################################################################################
#
# Prints flags controlling docker pull and push process
#
# Global constants used:
# _breeze_default_dockerhub_user
# _breeze_default_dockerhub_repo
# _breeze_default_github_repository
# _breeze_default_github_image_id
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_pull_push_docker_images() {
echo "
-D, --dockerhub-user DOCKERHUB_USER
DockerHub user used to pull, push and build images. Default: ${_breeze_default_dockerhub_user:=}.
-H, --dockerhub-repo DOCKERHUB_REPO
DockerHub repository used to pull, push, build images. Default: ${_breeze_default_dockerhub_repo:=}.
-c, --use-github-registry
If GitHub registry is enabled, pulls and pushes are done from the GitHub registry, not
DockerHub. You need to be logged in to the registry in order to be able to pull/push from it,
and you need to be a committer to push to Apache Airflow's GitHub registry.
--github-registry GITHUB_REGISTRY
GitHub registry used. GitHub has legacy Packages registry and Public Beta Container
registry.
Default: ${_breeze_default_github_registry:=}.
If you use this flag, the --use-github-registry flag is enabled automatically.
${FORMATTED_GITHUB_REGISTRY}
-g, --github-repository GITHUB_REPOSITORY
GitHub repository used to pull, push images when cache is used.
Default: ${_breeze_default_github_repository:=}.
If you use this flag, the --use-github-registry flag is enabled automatically.
-s, --github-image-id COMMIT_SHA|RUN_ID
<RUN_ID> or <COMMIT_SHA> of the image. Images in the GitHub registry are stored with those
so that the image for a particular CI run can easily be found. Once you know the
<RUN_ID> or <COMMIT_SHA>, you can specify it in the github-image-id flag and Breeze will
automatically pull and use that image so that you can easily reproduce a problem
that occurred in CI.
If you use this flag, the --use-github-registry flag is enabled automatically.
Default: ${_breeze_default_github_image_id:=}.
"
}
#######################################################################################################
#
# Prints flags that control version of generated packages
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_version_suffix() {
echo "
-S, --version-suffix-for-pypi SUFFIX
Adds optional suffix to the version in the generated provider package. It can be used
to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI.
-N, --version-suffix-for-svn SUFFIX
Adds optional suffix to the generated package names. It can be used to generate
rc1/rc2 ... versions of the packages to be uploaded to SVN.
"
}
#####################################################################################################
#
# Prints flags that control how Airflow should be populated with the command start-airflow
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_start_airflow() {
echo "
--load-example-dags
Include Airflow example dags.
--load-default-connections
Include Airflow Default Connections.
"
}
#####################################################################################################
#
# Prints flags that control cleanup when stopping the environment with the stop/restart commands
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_stop_airflow() {
echo "
--preserve-volumes
Use this flag if you would like to preserve data volumes from the databases used
by the integrations. By default, those volumes are deleted, so when you run 'stop'
or 'restart' commands you start from scratch, but by using this flag you can
preserve them. If you want to delete those volumes after stopping Breeze, just
run the 'breeze stop' again without this flag.
"
}
#####################################################################################################
#
# Prints flags that control tests
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flag_tests() {
echo "
--test-type TEST_TYPE
Type of the test to run. One of:
${FORMATTED_TEST_TYPES}
Default: ${_breeze_default_test_type:=}
"
}
#######################################################################################################
#
# Prints all flags
#
# Outputs:
# Flag information.
#######################################################################################################
function breeze::flags() {
echo "
$(breeze::print_line)
Summary of all flags supported by Breeze:
$(breeze::print_star_line)
Choose Airflow variant
$(breeze::flag_airflow_variants)
$(breeze::print_star_line)
Choose backend to run for Airflow
$(breeze::flag_backend_variants)
$(breeze::print_star_line)
Enable production image
$(breeze::flag_production_image)
$(breeze::print_star_line)
Additional actions executed while entering breeze
$(breeze::flag_breeze_actions)
$(breeze::print_star_line)
Additional actions executed while starting Airflow
$(breeze::flag_start_airflow)
$(breeze::print_star_line)
Cleanup options when stopping Airflow
$(breeze::flag_stop_airflow)
$(breeze::print_star_line)
Kind kubernetes and Kubernetes tests configuration (optional)
$(breeze::flag_kubernetes_configuration)
$(breeze::print_star_line)
Manage mounting local files
$(breeze::flag_local_file_mounting)
$(breeze::print_star_line)
Assume answers to questions
$(breeze::flag_assume_answers_to_questions)
$(breeze::print_star_line)
Choose different Airflow version to install or run
$(breeze::flag_choose_different_airflow_version)
$(breeze::print_star_line)
Credentials
$(breeze::flag_credentials)
$(breeze::print_star_line)
Flags for building Docker images (both CI and production)
$(breeze::flag_build_docker_images)
$(breeze::print_star_line)
Flags for pulling/pushing Docker images (both CI and production)
$(breeze::flag_pull_push_docker_images)
$(breeze::print_star_line)
Flags for running tests
$(breeze::flag_tests)
$(breeze::print_star_line)
Flags for generation of the provider packages
$(breeze::flag_version_suffix)
$(breeze::print_star_line)
Increase verbosity of the scripts
$(breeze::flag_verbosity)
$(breeze::print_star_line)
Print detailed help message
$(breeze::flag_help)
"
}
#######################################################################################################
#
# Prints header line filling the screen width - only when VERBOSE is set
#
# Outputs:
# Prints header line.
#######################################################################################################
function breeze::print_header_line() {
if [[ ${VERBOSE:="false"} == "true" ]]; then
echo
printf '=%.0s' $(seq "${SCREEN_WIDTH}")
echo
fi
}
#######################################################################################################
#
# Prints separation line filling the screen width
#
# Outputs:
# Prints line.
#######################################################################################################
function breeze::print_line() {
printf '#%.0s' $(seq "${SCREEN_WIDTH}")
}
# Prints star line filling the screen width
function breeze::print_star_line() {
printf '*%.0s' $(seq "${SCREEN_WIDTH}")
}
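# Illustrative sketch of the line-drawing idiom used above (comment only, not executed):
#   printf '*%.0s' $(seq 10)    # prints '**********' - printf reuses the format for every
#                               # word produced by seq, and '%.0s' prints zero characters of
#                               # each argument, so only the literal '*' remains.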
#######################################################################################################
#
# Reads saved environment variables. Some of the variables are stored across sessions so that once
# you use them you do not have to specify them next time. That makes those flags persistent.
#
# An example of it is `--backend` or '--kubernetes-mode' flags.
#
# Note that PYTHON_MAJOR_MINOR_VERSION is not read here - it is read at the
# `setup_default_breeze_variables` method because it is needed
# to determine the right images to use and set several variables that depend on the Python version
#
# Uses:
# _breeze_default_* corresponding defaults for variables it reads
#
# Modified and used global constants:
#
# BACKEND
# KUBERNETES_MODE
# KUBERNETES_VERSION
# KIND_VERSION
# HELM_VERSION
# POSTGRES_VERSION
# MYSQL_VERSION
# DOCKERHUB_USER
# DOCKERHUB_REPO
#
#######################################################################################################
function breeze::read_saved_environment_variables() {
BACKEND="${BACKEND:=$(parameters::read_from_file BACKEND)}"
BACKEND=${BACKEND:-${_breeze_default_backend}}
KUBERNETES_MODE="${KUBERNETES_MODE:=$(parameters::read_from_file KUBERNETES_MODE)}"
KUBERNETES_MODE=${KUBERNETES_MODE:=${_breeze_default_kubernetes_mode}}
KUBERNETES_VERSION="${KUBERNETES_VERSION:=$(parameters::read_from_file KUBERNETES_VERSION)}"
KUBERNETES_VERSION=${KUBERNETES_VERSION:=${_breeze_default_kubernetes_version}}
KIND_VERSION="${KIND_VERSION:=$(parameters::read_from_file KIND_VERSION)}"
KIND_VERSION=${KIND_VERSION:=${_breeze_default_kind_version}}
HELM_VERSION="${HELM_VERSION:=$(parameters::read_from_file HELM_VERSION)}"
HELM_VERSION=${HELM_VERSION:=${_breeze_default_helm_version}}
POSTGRES_VERSION="${POSTGRES_VERSION:=$(parameters::read_from_file POSTGRES_VERSION)}"
POSTGRES_VERSION=${POSTGRES_VERSION:=${_breeze_default_postgres_version}}
MYSQL_VERSION="${MYSQL_VERSION:=$(parameters::read_from_file MYSQL_VERSION)}"
MYSQL_VERSION=${MYSQL_VERSION:=${_breeze_default_mysql_version}}
# Here you read DockerHub user/account that you use
# You can populate your own images in DockerHub this way and work with them.
# You can override it with "--dockerhub-user" option and it will be stored in .build directory
DOCKERHUB_USER="${DOCKERHUB_USER:=$(parameters::read_from_file DOCKERHUB_USER)}"
DOCKERHUB_USER="${DOCKERHUB_USER:=${_breeze_default_dockerhub_user}}"
# Here you read DockerHub repo that you use
# You can populate your own images in DockerHub this way and work with them
# You can override it with "--dockerhub-repo" option and it will be stored in .build directory
DOCKERHUB_REPO="${DOCKERHUB_REPO:=$(parameters::read_from_file DOCKERHUB_REPO)}"
DOCKERHUB_REPO="${DOCKERHUB_REPO:=${_breeze_default_dockerhub_repo}}"
}
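# Illustrative sketch of the precedence applied above (comment only, not executed):
#   BACKEND="${BACKEND:=$(parameters::read_from_file BACKEND)}"  # 1. explicit env var wins,
#                                                                 # 2. else the value saved in .build
#   BACKEND=${BACKEND:-${_breeze_default_backend}}                # 3. else the breeze default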
#######################################################################################################
#
# Checks if variables are correctly set and if they are - saves them so that they can be used across
# sessions. In case we are installing Airflow 1.10, the constants are set to match the 1.10 line.
#
# In case the variables are matching expected values they are saved in ".build/VARIABLE_NAME" for
# later reuse. If not, an error is printed and the saved file is cleaned, so that next time
# default value can be used.
#
# Used Global constants:
# INSTALL_AIRFLOW_REFERENCE
# INSTALL_AIRFLOW_VERSION
# BACKEND
# KUBERNETES_MODE
# KUBERNETES_VERSION
# KIND_VERSION
# HELM_VERSION
# POSTGRES_VERSION
# MYSQL_VERSION
# DOCKERHUB_USER
# DOCKERHUB_REPO
#
# Updated Global constants:
# BRANCH_NAME
#
# Output: saved variable files in .build,
#######################################################################################################
function breeze::check_and_save_all_params() {
parameters::check_and_save_allowed_param "PYTHON_MAJOR_MINOR_VERSION" "Python version" "--python"
if [[ -n "${INSTALL_AIRFLOW_REFERENCE=}" ]]; then
if [[ ${INSTALL_AIRFLOW_REFERENCE=} == *1_10* ]]; then
export BRANCH_NAME="v1-10-test"
elif [[ ${INSTALL_AIRFLOW_REFERENCE=} == *2_0* ]]; then
export BRANCH_NAME="v2-0-test"
fi
elif [[ -n "${INSTALL_AIRFLOW_VERSION=}" ]]; then
if [[ ${INSTALL_AIRFLOW_VERSION=} == *1.10* ]]; then
export BRANCH_NAME="v1-10-test"
elif [[ ${INSTALL_AIRFLOW_VERSION=} == *2.0* ]]; then
export BRANCH_NAME="v2-0-test"
fi
fi
if [[ ${PYTHON_MAJOR_MINOR_VERSION} == "2.7" || ${PYTHON_MAJOR_MINOR_VERSION} == "3.5" ]]; then
if [[ ${BRANCH_NAME} == "master" || ${BRANCH_NAME} == "v2-0-test" ]]; then
echo
echo "${COLOR_RED}ERROR: The ${PYTHON_MAJOR_MINOR_VERSION} can only be used when installing Airflow 1.10.* ${COLOR_RESET}"
echo
echo "You can use it only when you specify 1.10 Airflow via --install-airflow-version"
echo "or --install-airflow-reference and they point to 1.10 version of Airflow"
echo
exit 1
fi
fi
parameters::check_and_save_allowed_param "BACKEND" "backend" "--backend"
parameters::check_and_save_allowed_param "KUBERNETES_MODE" "Kubernetes mode" "--kubernetes-mode"
parameters::check_and_save_allowed_param "KUBERNETES_VERSION" "Kubernetes version" "--kubernetes-version"
parameters::check_and_save_allowed_param "KIND_VERSION" "KinD version" "--kind-version"
parameters::check_and_save_allowed_param "HELM_VERSION" "Helm version" "--helm-version"
parameters::check_and_save_allowed_param "POSTGRES_VERSION" "Postgres version" "--postgres-version"
parameters::check_and_save_allowed_param "MYSQL_VERSION" "Mysql version" "--mysql-version"
parameters::check_and_save_allowed_param "GITHUB_REGISTRY" "GitHub Registry" "--github-registry"
parameters::check_allowed_param TEST_TYPE "Type of tests" "--test-type"
parameters::check_allowed_param PACKAGE_FORMAT "Format of packages to build" "--package-format"
# Can't verify those - they can be anything, so let's just save them
parameters::save_to_file DOCKERHUB_USER
parameters::save_to_file DOCKERHUB_REPO
}
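# Illustrative example of the branch selection above (comment only, not executed; the version
# values are hypothetical): '--install-airflow-version 1.10.14' matches the *1.10* pattern and
# sets BRANCH_NAME="v1-10-test", while '--install-airflow-version 2.0.1' matches *2.0* and
# sets BRANCH_NAME="v2-0-test".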
#######################################################################################################
#
# Prints cheatsheet if it is not suppressed
#
# Used global constants:
#
# AIRFLOW_SOURCES
# WEBSERVER_HOST_PORT
# POSTGRES_HOST_PORT
# MYSQL_HOST_PORT
#
#######################################################################################################
function breeze::print_cheatsheet() {
if [[ ! -f ${SUPPRESS_CHEATSHEET_FILE} && ${command_to_run} == "enter_breeze" ]]; then
echo
breeze::print_line
echo
echo " Airflow Breeze CHEATSHEET"
echo
set +e
if ! command -v breeze; then
breeze::print_line
echo
echo " Adding breeze to your path:"
echo " When you exit the environment, you can add sources of Airflow to the path - you can"
echo " run breeze or the scripts above from any directory by calling 'breeze' commands directly"
echo
echo " export PATH=\${PATH}:\"${AIRFLOW_SOURCES}\""
echo
fi
set -e
breeze::print_line
echo
echo " Port forwarding:"
echo
echo " Ports are forwarded to the running docker containers for webserver and database"
echo " * ${WEBSERVER_HOST_PORT} -> forwarded to Airflow webserver -> airflow:8080"
echo " * ${FLOWER_HOST_PORT} -> forwarded to Flower dashboard -> airflow:5555"
echo " * ${POSTGRES_HOST_PORT} -> forwarded to Postgres database -> postgres:5432"
echo " * ${MYSQL_HOST_PORT} -> forwarded to MySQL database -> mysql:3306"
echo " * ${REDIS_HOST_PORT} -> forwarded to Redis broker -> redis:6379"
echo
echo " Here are links to those services that you can use on host:"
echo " * Webserver: http://127.0.0.1:${WEBSERVER_HOST_PORT}"
echo " * Flower: http://127.0.0.1:${FLOWER_HOST_PORT}"
echo " * Postgres: jdbc:postgresql://127.0.0.1:${POSTGRES_HOST_PORT}/airflow?user=postgres&password=airflow"
echo " * Mysql: jdbc:mysql://127.0.0.1:${MYSQL_HOST_PORT}/airflow?user=root"
echo " * Redis: redis://127.0.0.1:${REDIS_HOST_PORT}/0"
else
echo
fi
}
#######################################################################################################
#
# Prints setup instruction in case we find that autocomplete is not set
# also prints how to toggle asciiart/cheatsheet
#
# Used global constants:
# CMDNAME
#######################################################################################################
function breeze::print_setup_instructions() {
if [[ ${command_to_run} == "enter_breeze" ]]; then
# shellcheck disable=SC2034 # Unused variables left for comp_breeze usage
if ! typeset -f "_comp_breeze" >/dev/null; then
breeze::print_line
echo
echo " You can setup autocomplete by running '${CMDNAME} setup-autocomplete'"
echo
echo
fi
breeze::print_line
echo
echo " You can toggle ascii/cheatsheet by running:"
echo " * ${CMDNAME} toggle-suppress-cheatsheet"
echo " * ${CMDNAME} toggle-suppress-asciiart"
echo
breeze::print_line
echo
echo
echo
echo
fi
}
#######################################################################################################
#
# Checks that pre-commit is installed and upgrades it if needed.
# This is used in case the static check command is used.
#
#######################################################################################################
function breeze::make_sure_precommit_is_installed() {
echo
echo "Making sure pre-commit is installed"
echo
local pip_binary
if command -v pip3 >/dev/null; then
pip_binary=pip3
elif command -v pip >/dev/null; then
pip_binary=pip
else
echo
echo "${COLOR_RED}ERROR: You need to have pip or pip3 in your PATH ${COLOR_RESET}"
echo
exit 1
fi
"${pip_binary}" install --upgrade pre-commit >/dev/null 2>&1
# Add ~/.local/bin to the path in case pip is run outside of virtualenv
export PATH="${PATH}":~/.local/bin
}
#######################################################################################################
#
# Removes CI and PROD images and cleans up the flag that indicates that the image was already built
#
# Used global constants:
# PYTHON_BASE_IMAGE
# AIRFLOW_CI_IMAGE
# AIRFLOW_PROD_IMAGE
# BUILT_CI_IMAGE_FLAG_FILE
#
#######################################################################################################
function breeze::remove_images() {
# shellcheck disable=SC2086
docker rmi --force ${PYTHON_BASE_IMAGE} \
${GITHUB_REGISTRY_PYTHON_BASE_IMAGE} \
${AIRFLOW_PYTHON_BASE_IMAGE} \
${AIRFLOW_CI_IMAGE} \
${DEFAULT_CI_IMAGE} \
${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE} \
${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE} \
${AIRFLOW_PROD_IMAGE} \
${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE} \
${AIRFLOW_PROD_BUILD_IMAGE} \
${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE} \
2>/dev/null >/dev/null && true
echo
echo "###################################################################"
echo "NOTE!! Removed Airflow images for Python version ${PYTHON_MAJOR_MINOR_VERSION}."
echo " But the disk space in docker will be reclaimed only after"
echo " running 'docker system prune' command."
echo "###################################################################"
echo
rm -f "${BUILT_CI_IMAGE_FLAG_FILE}"
}
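# Note on the '&& true' idiom above (comment only, not executed; 'some-missing-image' is a
# hypothetical name used only for illustration): under 'set -e' a failing command that is part
# of an '&&' list - and is not the command after the final '&&' - does not abort the script, so
#   docker rmi --force some-missing-image 2>/dev/null >/dev/null && true
# keeps Breeze running even when some of the listed images were never built or pulled.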
#######################################################################################################
#
# Runs chosen static checks
#
# Uses variables:
# static_check
#
#######################################################################################################
function breeze::run_static_checks() {
if [[ ${static_check} == "all" ]]; then
echo
echo "Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
echo
pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
elif [[ ${static_check} == "all-but-pylint" ]]; then
echo
echo "Setting SKIP=pylint. Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
echo
echo
SKIP=pylint pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
else
if [[ ${static_check} == *"pylint"* || ${static_check} == *"mypy"* || ${static_check} == *"flake8"* ]]; then
echo
echo "Running build pre-commit before running the requested static check"
echo
pre-commit run "build"
fi
echo
echo "Running: pre-commit run" "${static_check}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
echo
pre-commit run "${static_check}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
fi
}
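# Illustrative mapping of the branches above to pre-commit invocations (comment only,
# not executed; 'mypy' and the extra arguments are example values):
#   static_check="all"            ->  pre-commit run <EXTRA_STATIC_CHECK_OPTIONS> <EXTRA_ARGS>
#   static_check="all-but-pylint" ->  SKIP=pylint pre-commit run <EXTRA_STATIC_CHECK_OPTIONS> <EXTRA_ARGS>
#   static_check="mypy"           ->  pre-commit run "build"; pre-commit run mypy <EXTRA_STATIC_CHECK_OPTIONS> <EXTRA_ARGS>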
#######################################################################################################
#
# Runs Build before a command if needed - the right build command will be determined and used
# depending on which command you are trying to run
#
# Used global constants:
# PRODUCTION_IMAGE
# KIND_CLUSTER_OPERATION
# FORMATTED_KIND_OPERATIONS
#
# Used variables:
# command_to_run
#
#######################################################################################################
function breeze::run_build_command() {
case "${command_to_run}" in
run_tests | run_docker_compose)
build_images::prepare_ci_build
build_images::rebuild_ci_image_if_needed
;;
enter_breeze)
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
build_images::prepare_prod_build
else
build_images::prepare_ci_build
build_images::rebuild_ci_image_if_needed
fi
;;
build_docs | perform_static_checks | perform_generate_constraints | \
perform_prepare_provider_documentation | perform_prepare_provider_packages | \
perform_prepare_airflow_packages)
build_images::prepare_ci_build
build_images::rebuild_ci_image_if_needed
;;
perform_push_image)
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
build_images::prepare_prod_build
else
build_images::prepare_ci_build
build_images::rebuild_ci_image_if_needed
fi
;;
build_image)
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
build_images::prepare_prod_build
build_images::build_prod_images
else
build_images::prepare_ci_build
build_images::rebuild_ci_image_if_needed
fi
;;
cleanup_image | run_exec)
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
build_images::prepare_prod_build
else
build_images::prepare_ci_build
fi
;;
perform_initialize_local_virtualenv | perform_setup_autocomplete | \
toggle_suppress_cheatsheet | toggle_suppress_asciiart )
;;
manage_kind_cluster)
if [[ ${KIND_CLUSTER_OPERATION} == "start" ]]; then
echo "Starts KinD cluster"
elif [[ ${KIND_CLUSTER_OPERATION} == "stop" ]]; then
echo "Stops KinD cluster"
elif [[ ${KIND_CLUSTER_OPERATION} == "restart" ]]; then
echo "Restarts KinD cluster"
elif [[ ${KIND_CLUSTER_OPERATION} == "recreate" ]]; then
echo "Recreates KinD cluster"
elif [[ ${KIND_CLUSTER_OPERATION} == "status" ]]; then
echo "Checks status of KinD cluster"
elif [[ ${KIND_CLUSTER_OPERATION} == "deploy" ]]; then
echo "Deploys Airflow to KinD cluster"
build_images::prepare_prod_build
build_images::build_prod_images
elif [[ ${KIND_CLUSTER_OPERATION} == "test" ]]; then
echo "Run Kubernetes tests with the KinD cluster "
elif [[ ${KIND_CLUSTER_OPERATION} == "shell" ]]; then
echo "Enter an interactive shell for kubernetes testing"
elif [[ ${KIND_CLUSTER_OPERATION} == "k9s" ]]; then
echo "Run k9s cli to debug in style"
elif [[ -z ${KIND_CLUSTER_OPERATION=} ]]; then
echo
echo "Please provide an operation to run"
echo
echo "Should be one of:"
echo "${FORMATTED_KIND_OPERATIONS}"
echo
exit 1
else
echo
echo "ERROR: Unknown Kind Kubernetes cluster operation: '${KIND_CLUSTER_OPERATION}'"
echo
echo "Should be one of:"
echo "${FORMATTED_KIND_OPERATIONS}"
echo
exit 1
fi
;;
*)
echo
echo "${COLOR_RED}ERROR: Unknown command to run ${command_to_run} ${COLOR_RESET}"
echo
exit 1
;;
esac
}
# executes command
function breeze::run_command() {
"${@}"
}
# print command instead of executing
function breeze::print_command() {
echo
echo "${COLOR_YELLOW}" "${@}" "${COLOR_RESET}"
echo
}
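# Illustrative sketch of the dry-run dispatch used below (comment only, not executed;
# the docker-compose arguments are just an example taken from the 'enter_breeze' branch):
#   run_command="breeze::run_command"                                 # default: execute the command
#   [[ ${DRY_RUN_DOCKER=} != "false" ]] && run_command="breeze::print_command"
#   ${run_command} "${dc_run_file}" run --service-ports --rm airflow  # either runs it or prints it in yellow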
#######################################################################################################
#
# Runs the actual command - depending on the command chosen it will use the right
# convenience script and run the right command for the script.
#
# Used variables:
# command_to_run
#
# Used global constants:
# PRODUCTION_IMAGE
# SCRIPTS_CI_DIR
# BUILD_CACHE_DIR
# KIND_CLUSTER_OPERATION
# EXTRA_DC_OPTIONS
#
# Set Global variables:
# RUN_TESTS
#######################################################################################################
function breeze::run_breeze_command() {
set +u
local dc_run_file
local run_command="breeze::run_command"
if [[ ${DRY_RUN_DOCKER=} != "false" ]]; then
run_command="breeze::print_command"
fi
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
dc_run_file="${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD}"
else
dc_run_file="${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}"
fi
case "${command_to_run}" in
enter_breeze)
docker_engine_resources::check_all_resources
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh"
else
${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}"
fi
;;
run_exec)
docker_engine_resources::check_all_resources
# Unfortunately `docker-compose exec` does not support exec'ing into containers started with run :(
# so we have to find it manually
set +e
local airflow_testing_container
airflow_testing_container=$("${dc_run_file}" ps | grep airflow | awk '{print $1}' 2>/dev/null)
: "${airflow_testing_container:?"ERROR! Breeze must be running in order to exec into running container"}"
set -e
docker exec -it "${airflow_testing_container}" \
"/opt/airflow/scripts/in_container/entrypoint_exec.sh" "${@}"
;;
run_tests)
docker_engine_resources::check_all_resources
export RUN_TESTS="true"
readonly RUN_TESTS
${run_command} "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" run --service-ports --rm airflow "$@"
;;
run_docker_compose)
docker_engine_resources::check_all_resources
set +u
${run_command} "${dc_run_file}" "${docker_compose_command}" "${EXTRA_DC_OPTIONS[@]}" "$@"
set -u
;;
perform_static_checks)
docker_engine_resources::check_all_resources
breeze::make_sure_precommit_is_installed
breeze::run_static_checks "${@}"
;;
build_image) ;;
cleanup_image)
breeze::remove_images
;;
perform_generate_constraints)
docker_engine_resources::check_all_resources
runs::run_generate_constraints
;;
perform_prepare_airflow_packages)
docker_engine_resources::check_all_resources
build_airflow_packages::build_airflow_packages
;;
perform_prepare_provider_packages)
docker_engine_resources::check_all_resources
runs::run_prepare_provider_packages "${@}"
;;
perform_prepare_provider_documentation)
docker_engine_resources::check_all_resources
runs::run_prepare_provider_documentation "${@}"
;;
perform_push_image)
if [[ ${PRODUCTION_IMAGE} == "true" ]]; then
push_pull_remove_images::push_prod_images
else
push_pull_remove_images::push_ci_images
fi
;;
perform_initialize_local_virtualenv)
breeze::initialize_virtualenv
;;
perform_setup_autocomplete)
breeze::setup_autocomplete
;;
manage_kind_cluster)
docker_engine_resources::check_all_resources
kind::make_sure_kubernetes_tools_are_installed
kind::get_kind_cluster_name
kind::perform_kind_cluster_operation "${KIND_CLUSTER_OPERATION}"
;;
build_docs)
docker_engine_resources::check_all_resources
runs::run_docs "${@}"
;;
toggle_suppress_cheatsheet)
if [[ -f "${SUPPRESS_CHEATSHEET_FILE}" ]]; then
echo
echo "Cheatsheet disabled"
echo
else
echo
echo "Cheatsheet enabled"
echo
fi
;;
toggle_suppress_asciiart)
if [[ -f "${SUPPRESS_ASCIIART_FILE}" ]]; then
echo
echo "ASCIIart disabled"
echo
else
echo
echo "ASCIIart enabled"
echo
fi
;;
*)
echo
echo "${COLOR_RED}ERROR: Unknown command to run ${command_to_run} ${COLOR_RESET}"
echo
;;
esac
set -u
}
#######################################################################################################
#
# We have different versions of images depending on the python version used. We keep up with the
# latest patch-level changes in Python (this is done automatically during CI builds) so we have
# to only take into account the MAJOR and MINOR version of python. This variable keeps the major/minor
# version of python in X.Y format (3.6, 3.7, 3.8 etc).
#
# In Breeze the precedence of setting the version is as follows:
# 1. --python flag (if set, it will explicitly override it in the next step)
# 2. PYTHON_MAJOR_MINOR_VERSION exported from outside
# 3. last used version stored in ./build/PYTHON_MAJOR_MINOR_VERSION
# 4. DEFAULT_PYTHON_MAJOR_MINOR_VERSION from scripts/ci/libraries/_initialization.sh
#
# Here points 2. and 3. are realized. If the result is an empty string, then 4. will be set in
# the next step (sanity_checks::basic_sanity_checks() is called and the version is still not set by then).
# Finally, if the --python flag is specified, it will override whatever is set above.
#
# We need to run after initialization::initialize_common_environment (so that parameters::read_from_file function is present)
# But before we set the default value for Python
#
# Used and modified global constants:
# PYTHON_MAJOR_MINOR_VERSION
#######################################################################################################
function breeze::determine_python_version_to_use_in_breeze() {
PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION:=$(parameters::read_from_file PYTHON_MAJOR_MINOR_VERSION)}"
export PYTHON_MAJOR_MINOR_VERSION
}
breeze::setup_default_breeze_constants
initialization::initialize_common_environment
initialization::get_environment_for_builds_on_ci
breeze::determine_python_version_to_use_in_breeze
sanity_checks::basic_sanity_checks
start_end::script_start
traps::add_trap start_end::script_end EXIT
breeze::prepare_formatted_versions
breeze::prepare_usage
set +u
breeze::parse_arguments "${@}"
breeze::print_header_line
build_images::forget_last_answer
breeze::read_saved_environment_variables
breeze::check_and_save_all_params
build_images::determine_docker_cache_strategy
build_images::get_docker_image_names
initialization::make_constants_read_only
sanity_checks::sanitize_mounted_files
breeze::prepare_command_files
breeze::run_build_command
breeze::print_header_line
breeze::print_badge
breeze::print_cheatsheet
breeze::print_setup_instructions
set +u # Account for an empty array
breeze::run_breeze_command "${REMAINING_ARGS[@]}"
set +u # Account for an empty array
if [[ -n ${second_command_to_run} ]]; then
command_to_run=${second_command_to_run}
breeze::run_breeze_command "${REMAINING_ARGS[@]}"
fi