| #!/usr/bin/env bash |
| |
| # Licensed to the Apache Software Foundation (ASF) under one |
| # or more contributor license agreements. See the NOTICE file |
| # distributed with this work for additional information |
| # regarding copyright ownership. The ASF licenses this file |
| # to you under the Apache License, Version 2.0 (the |
| # "License"); you may not use this file except in compliance |
| # with the License. You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, |
| # software distributed under the License is distributed on an |
| # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY |
| # KIND, either express or implied. See the License for the |
| # specific language governing permissions and limitations |
| # under the License. |
# Fail fast: abort on any error, on use of unset variables, and on failures
# anywhere in a pipeline.
set -euo pipefail

# Absolute path of the directory containing this script (resolved by cd-ing to
# the script's dirname and reading pwd, so it works regardless of the caller's
# current working directory).
MY_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Bash arrays need to be defined outside of functions unfortunately :(
# Array with extra options for Docker compose (e.g. --remove-orphans for 'stop')
declare -a EXTRA_DC_OPTIONS
# Array with selected integrations (filled by -i/--integration flags)
declare -a INTEGRATIONS
# This is where remaining args (everything after the parsed command) are passed
declare -a REMAINING_ARGS
# This is where static check options (e.g. --all-files) are accumulated
declare -a EXTRA_STATIC_CHECK_OPTIONS
| |
| |
# Initialises all default global variables used by breeze: directories, screen
# width, image/version settings and default flag values. Sources _utils.sh and
# breeze-complete as part of the initialisation.
function setup_default_breeze_variables() {
    # Whether to actually run docker compose with the command set given
    COMMAND_TO_RUN="enter_breeze"
    SECOND_COMMAND_TO_RUN=""
    export BREEZE=true
    export MAX_SCREEN_WIDTH=100

    export AIRFLOW_SOURCES="${MY_DIR}"

    # Directory where all CI scripts are located
    SCRIPTS_CI_DIR="${MY_DIR}/scripts/ci"

    # Working directories created on demand below
    BUILD_CACHE_DIR="${MY_DIR}/.build"
    FILES_DIR="${MY_DIR}/files"
    TMP_DIR="${MY_DIR}/tmp"

    mkdir -pv "${BUILD_CACHE_DIR}"
    mkdir -pv "${TMP_DIR}"
    mkdir -pv "${FILES_DIR}"

    # Note - we do not use __script_init.sh here because it can only be used from within
    # the CI directory and we need to override PYTHON_MAJOR_MINOR_VERSION based on what we store
    # in the .build directory

    # Beginning of the initialisation here

    # shellcheck source=scripts/ci/_utils.sh
    . "${SCRIPTS_CI_DIR}/_utils.sh"

    # Use explicitly-set version if present, otherwise fall back to the value
    # cached in the .build directory by a previous run.
    export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION:=$(read_from_file PYTHON_MAJOR_MINOR_VERSION)}"

    # Determine terminal width, clamped to MAX_SCREEN_WIDTH (used for wrapping
    # help output). FORCE_SCREEN_WIDTH=true skips the tput probe entirely.
    if [[ ${FORCE_SCREEN_WIDTH:="false"} != "true" ]]; then
        # Sets width of the screen from terminal
        SCREEN_WIDTH="$(tput cols)"
        if [[ -z ${SCREEN_WIDTH} ]]; then
            SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
        fi
        if (( SCREEN_WIDTH > MAX_SCREEN_WIDTH )); then
            SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
        fi
    else
        SCREEN_WIDTH=${MAX_SCREEN_WIDTH}
    fi

    # Name of the script
    CMDNAME="$(basename -- "$0")"

    # Update short and long options in the breeze-complete script
    # This way autocomplete will work automatically with all options
    # shellcheck source=breeze-complete
    . "${MY_DIR}/breeze-complete"

    # Skips mounting local Airflow sources
    MOUNT_LOCAL_SOURCES="true"

    # Holds last subcommand used
    LAST_SUBCOMMAND=""

    # Determines if help should be run (set to true by --help flag)
    RUN_HELP="false"

    # Holds chosen command if the -x flag is used.
    # NOTE(review): the parser maps -x to --kind-cluster-stop, not to running a
    # command - confirm which flag (if any) actually sets RUN_COMMAND.
    RUN_COMMAND=""

    # Holds the test target if the -t flag is used.
    TEST_TARGET=""

    # Holds docker compose command if the -d flag is used.
    DOCKER_COMPOSE_COMMAND=""

    # If true, the docker images are rebuilt locally.
    export NEEDS_DOCKER_BUILD="false"

    # By default we only pull images if we do not have them locally.
    # This can be overridden by -p flag
    export FORCE_PULL_IMAGES="false"

    # Runtime is empty initially (might be set to kubernetes in case kubernetes is chosen)
    RUNTIME=""

    # Do not enable Kind Kubernetes cluster by default
    export ENABLE_KIND_CLUSTER="false"

    # By default we do not push images. This can be overridden by -u flag.
    export PUSH_IMAGES=${PUSH_IMAGES:="false"}

    # Forward credentials to docker
    export FORWARD_CREDENTIALS="false"

    # Reset DB at entry
    export DB_RESET="false"

    # If install released airflow is set to specified version, then the source version of airflow
    # is removed and the specified version of airflow is installed from pypi
    export INSTALL_AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION:=""}

    # Determine version of the Airflow from version.py
    AIRFLOW_VERSION=$(grep version "airflow/version.py" | awk '{print $3}' | sed "s/['+]//g")
    export AIRFLOW_VERSION

    # Whether to force build without checking if it is needed
    export FORCE_BUILD_IMAGES=${FORCE_BUILD_IMAGES:="false"}

    # Files determining whether asciiart/cheatsheet are suppressed
    # (toggled by the toggle-suppress-* commands)
    SUPPRESS_CHEATSHEET_FILE="${MY_DIR}/.suppress_cheatsheet"
    SUPPRESS_ASCIIART_FILE="${MY_DIR}/.suppress_asciiart"

    # Default values for flags

    _BREEZE_DEFAULT_BACKEND="sqlite"
    _BREEZE_DEFAULT_KUBERNETES_MODE="git_mode"
    _BREEZE_DEFAULT_KUBERNETES_VERSION="v1.15.3"
    _BREEZE_DEFAULT_POSTGRES_VERSION="9.6"
    _BREEZE_DEFAULT_MYSQL_VERSION="5.7"

    # Static checks always run under this fixed python version
    STATIC_CHECK_PYTHON_MAJOR_MINOR_VERSION=3.6
}
| # End of initialisation here |
| |
# Initialises the currently-activated local virtualenv: installs airflow in
# editable mode with pinned requirements, then wipes AIRFLOW_HOME and resets
# both the regular and the unit-test sqlite databases.
# Exits with an error if no virtualenv is active.
function initialize_virtualenv() {
    # Check if we are in virtualenv (sys.base_prefix only exists inside a venv)
    set +e
    echo -e "import sys\nif not hasattr(sys,'base_prefix'):\n sys.exit(1)" | "python${PYTHON_MAJOR_MINOR_VERSION}"
    RES=$?
    set -e
    if [[ ${RES} != "0" ]]; then
        echo >&2
        echo >&2 "ERROR: Initializing local virtualenv only works when you have virtualenv activated"
        echo >&2
        echo >&2 "Please enter your local virtualenv before (for example using 'workon') "
        echo >&2
        exit 1
    else
        # If no Airflow Home defined - fallback to ${HOME}/airflow
        AIRFLOW_HOME_DIR=${AIRFLOW_HOME:=${HOME}/airflow}
        export CASS_DRIVER_NO_CYTHON="1"
        echo
        echo "Initializing the virtualenv: $(command -v python)!"
        echo
        echo "This will wipe out ${AIRFLOW_HOME_DIR} and reset all the databases!"
        echo
        "${MY_DIR}/confirm" "Proceeding with the initialization"
        echo
        pushd "${MY_DIR}"
        set +e
        pip install -e ".[devel]" --constraint "requirements/requirements-python${PYTHON_MAJOR_MINOR_VERSION}.txt"
        RES=$?
        set -e
        popd
        if [[ ${RES} != "0" ]]; then
            echo "#######################################################################"
            echo "  You had some troubles installing the venv !!!!!"
            # Fixed typo ("runnning") and the apt package name: the Debian/Ubuntu
            # package is 'build-essential', not 'build-essentials'.
            echo "  Try running the command below and rerun virtualenv installation"
            echo
            SYSTEM=$(uname -s)
            if [[ ${SYSTEM} == "Darwin" ]]; then
                echo "  brew install sqlite mysql postgresql"
            else
                echo "  sudo apt install build-essential python3.6-dev python3.7-dev python-dev openssl \\"
                echo "                   sqlite sqlite-dev default-libmysqlclient-dev libmysqld-dev postgresql"
            fi
            echo
            echo "#######################################################################"
            exit ${RES}
        fi
        echo
        echo "Wiping and recreating ${AIRFLOW_HOME_DIR}"
        echo
        rm -rvf "${AIRFLOW_HOME_DIR}"
        mkdir -p "${AIRFLOW_HOME_DIR}"
        echo
        echo "Resetting AIRFLOW sqlite database"
        echo
        # Make sure the reset applies to the real database, not the test one
        unset AIRFLOW__CORE__UNIT_TEST_MODE
        airflow db reset -y
        echo
        echo "Resetting AIRFLOW sqlite unit test database"
        echo
        AIRFLOW__CORE__UNIT_TEST_MODE=True airflow db reset -y
        exit 0
    fi
}
| |
# Installs bash (and limited zsh) completion for breeze into the current user's
# home directory: symlinks breeze-complete into ~/.bash_completion.d and wires
# it up from ~/.bashrc / ~/.zshrc (and ~/.bash_profile on macOS).
# Exits 1 if the completion hook is already present in ~/.bashrc.
function setup_autocomplete() {
    echo "Installing bash/zsh completion for local user"
    echo "Note that completion for zsh is just limited to flags - without their values"
    echo
    echo
    # Detect a previous installation; grep exits non-zero when not found, so
    # temporarily disable errexit to capture the status.
    set +e
    grep ".bash_completion.d" "${HOME}/.bashrc" >/dev/null 2>&1
    RES=$?
    set -e
    if [[ "${RES}" == "0" ]]; then
        echo >&2
        echo >&2 "ERROR: Bash completion already setup before."
        echo >&2
        exit 1
    fi
    "${MY_DIR}/confirm" "This will create ~/.bash_completion.d/ directory and modify ~/.*rc files"
    echo
    echo
    mkdir -pv ~/.bash_completion.d
    # Symlink (force) so that repeated checkouts keep pointing at the live file
    ln -sf "${MY_DIR}/breeze-complete" "${HOME}/.bash_completion.d/"
    touch ~/.bashrc
    # Quoted "EOF" delimiters: heredoc contents are written literally, without
    # expansion, into the rc files.
    cat >>~/.bashrc <<"EOF"
for BCFILE in ~/.bash_completion.d/* ; do
. ${BCFILE}
done
EOF
    cat >>~/.zshrc <<"EOF"
autoload compinit && compinit
autoload bashcompinit && bashcompinit
source ~/.bash_completion.d/breeze-complete
EOF
    if [[ "${OSTYPE}" == "darwin"* ]]; then
        # For MacOS we have to handle the special case where terminal app DOES NOT run .bashrc by default
        # But re-runs .bash_profile :(
        # See https://scriptingosx.com/2017/04/about-bash_profile-and-bashrc-on-macos/
        # NOTE(review): this grep is not silenced (no >/dev/null), so the matched
        # line is printed to stdout - confirm whether that is intentional.
        set +e
        grep ".bashrc" "${HOME}/.bash_profile"
        RES=$?
        set -e
        if [[ "${RES}" == "0" ]]; then
            echo "  Seems you already source .bashrc in your .bash_profile so not adding it."
        else
            "${MY_DIR}/confirm" "This will modify ~/.bash_profile and source .bashrc from it"
            echo
            echo
            cat >>~/.bash_profile <<"EOF"
if [ -r ~/.bashrc ]; then
source ~/.bashrc
fi
EOF
        fi
    fi
    echo
    echo
    echo "Breeze bash completion installed to ~/.bash_completion.d/breeze-complete"
    echo
    echo
    echo "Please re-enter bash or run '. ~/.bash_completion.d/breeze-complete'"
    echo
    exit 0
}
| |
# Prints the breeze badge: optional ASCII-art logo followed by a summary of the
# current environment (branch, image, versions, backend and - when the
# kubernetes runtime is selected - the kubernetes settings).
# Reads globals set during initialisation and argument parsing.
function print_badge {
    # Resolve the version string matching the chosen backend (empty for sqlite)
    if [[ ${BACKEND} == "postgres" ]]; then
        BACKEND_VERSION="${POSTGRES_VERSION}"
    elif [[ ${BACKEND} == "mysql" ]]; then
        BACKEND_VERSION="${MYSQL_VERSION}"
    else
        BACKEND_VERSION=""
    fi
    # The ASCII-art is shown only when entering breeze interactively and not
    # suppressed via the toggle-suppress-asciiart marker file.
    if [[ ! -f "${SUPPRESS_ASCIIART_FILE}" && ${COMMAND_TO_RUN} == "enter_breeze" ]]; then
        cat <<EOF




@&&&&&&@
@&&&&&&&&&&&@
&&&&&&&&&&&&&&&&
&&&&&&&&&&
&&&&&&&
&&&&&&&
@@@@@@@@@@@@@@@@ &&&&&&
@&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&
&&&&&&&&&
&&&&&&&&&&&&
@@&&&&&&&&&&&&&&&@
@&&&&&&&&&&&&&&&&&&&&&&&&&&&& &&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&& &&&&&&
&&&&&&&&&&&&&&&&&&&&&&&& &&&&&&
&&&&&&
&&&&&&&
@&&&&&&&&
@&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&
&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&



@&&&@ && @&&&&&&&&&&& &&&&&&&&&&&& && &&&&&&&&&& &&& &&& &&&
&&& &&& && @&& &&& && && &&& &&&@ &&& &&&&& &&&
&&& &&& && @&&&&&&&&&&&& &&&&&&&&&&& && && &&& &&& &&& &&@ &&&
&&&&&&&&&&& && @&&&&&&&&& && && &&@ &&& &&@&& &&@&&
&&& &&& && @&& &&&@ && &&&&&&&&&&& &&&&&&&&&&&& &&&& &&&&

&&&&&&&&&&&& &&&&&&&&&&&& &&&&&&&&&&&@ &&&&&&&&&&&& &&&&&&&&&&& &&&&&&&&&&&
&&& &&& && &&& && &&& &&&& &&
&&&&&&&&&&&&@ &&&&&&&&&&&& &&&&&&&&&&& &&&&&&&&&&& &&&& &&&&&&&&&&
&&& && && &&&& && &&& &&&& &&
&&&&&&&&&&&&& && &&&&@ &&&&&&&&&&&@ &&&&&&&&&&&& @&&&&&&&&&&& &&&&&&&&&&&

EOF
    fi
    # Environment summary - previously duplicated verbatim in both branches of
    # the asciiart check; factored out since the text was identical.
    cat <<EOF

Branch name: ${BRANCH_NAME}
Docker image: ${AIRFLOW_CI_IMAGE}
Airflow source version: ${AIRFLOW_VERSION}
Airflow installed: ${INSTALL_AIRFLOW_VERSION}
Python version: ${PYTHON_MAJOR_MINOR_VERSION}
DockerHub user: ${DOCKERHUB_USER}
DockerHub repo: ${DOCKERHUB_REPO}
Backend: ${BACKEND} ${BACKEND_VERSION}
EOF
    if [[ ${RUNTIME} == "kubernetes" ]]; then
        cat <<EOF

Kubernetes RUNTIME

Kubernetes mode: ${KUBERNETES_MODE}
Kubernetes version: ${KUBERNETES_VERSION}

Enable kind: ${ENABLE_KIND_CLUSTER}
Cluster operation: ${KIND_CLUSTER_OPERATION}
EOF
    fi
}
| |
# Generates a small executable wrapper script (FILE) that re-exports the
# current environment configuration and then runs docker-compose with the
# given command, forwarding the wrapper's own arguments.
#
# Arguments:
#   $1 - path of the wrapper script to write
#   $2 - docker-compose command fragment (may carry an opening escaped quote)
#   $3 - value for RUN_TESTS inside the wrapper ("true"/"false")
#   $4 - expansion used to forward the wrapper's args: '@' (default) -> $@,
#        '*' -> $* in the generated file
function prepare_command_file() {
    local FILE="${1}"
    local CMD="${2}"
    local TESTS="${3}"
    local EXPANSION="${4-@}"
    # NOTE(review): the heredoc is unquoted, so all ${...} below are expanded
    # NOW and written as literals into the wrapper, except the escaped \$ which
    # survives as a runtime expansion. The trailing '"' on the docker-compose
    # line closes the quote carried inside ${CMD} (see DC_RUN_COMMAND) -
    # confirm against prepare_command_files before changing the quoting.
    cat <<EOF > "${FILE}"
#!/usr/bin/env bash
cd "\$(pwd)" || exit
export DOCKERHUB_USER=${DOCKERHUB_USER}
export DOCKERHUB_REPO=${DOCKERHUB_REPO}
export COMPOSE_FILE="${COMPOSE_FILE}"
export PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}"
export BACKEND="${BACKEND}"
export RUNTIME="${RUNTIME}"
export ENABLE_KIND_CLUSTER="${ENABLE_KIND_CLUSTER}"
export KUBERNETES_MODE="${KUBERNETES_MODE}"
export KUBERNETES_VERSION="${KUBERNETES_VERSION}"
export AIRFLOW_VERSION="${AIRFLOW_VERSION}"
export INSTALL_AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}"
export RUN_TESTS="${TESTS}"
export WEBSERVER_HOST_PORT="${WEBSERVER_HOST_PORT}"
export POSTGRES_HOST_PORT="${POSTGRES_HOST_PORT}"
export POSTGRES_VERSION="${POSTGRES_VERSION}"
export MYSQL_HOST_PORT="${MYSQL_HOST_PORT}"
export MYSQL_VERSION="${MYSQL_VERSION}"
export AIRFLOW_CI_IMAGE="${AIRFLOW_CI_IMAGE}"
docker-compose --log-level INFO ${CMD}\$${EXPANSION}"
EOF
    chmod u+x "${FILE}"
}
| |
# Assembles the colon-separated COMPOSE_FILE list from the selected backend,
# runtime, integrations and flags, then writes the three wrapper scripts
# (cmd_run, test_run, dc) into the build cache via prepare_command_file.
function prepare_command_files() {
    MAIN_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/base.yml
    BACKEND_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml
    LOCAL_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/local.yml
    KUBERNETES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/runtime-kubernetes.yml
    REMOVE_SOURCES_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml
    FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE=${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml

    # COMPOSE_FILE is docker-compose's standard colon-separated file list
    COMPOSE_FILE=${MAIN_DOCKER_COMPOSE_FILE}:${BACKEND_DOCKER_COMPOSE_FILE}

    if [[ "${MOUNT_LOCAL_SOURCES}" != "false" ]]; then
        COMPOSE_FILE=${COMPOSE_FILE}:${LOCAL_DOCKER_COMPOSE_FILE}
    fi

    if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then
        COMPOSE_FILE=${COMPOSE_FILE}:${FORWARD_CREDENTIALS_DOCKER_COMPOSE_FILE}
    fi

    if [[ ${INSTALL_AIRFLOW_VERSION} != "" ]]; then
        COMPOSE_FILE=${COMPOSE_FILE}:${REMOVE_SOURCES_DOCKER_COMPOSE_FILE}
    fi

    if [[ ${RUNTIME} == "kubernetes" ]]; then
        COMPOSE_FILE=${COMPOSE_FILE}:${KUBERNETES_DOCKER_COMPOSE_FILE}
    fi

    # Temporarily disable nounset: expanding the possibly-empty INTEGRATIONS
    # array errors out under 'set -u' on bash < 4.4.
    set +u
    # shellcheck disable=SC2207
    # Word-splitting is intentional here - deduplicate the integration names.
    UNIQUE_INTEGRATIONS=($(echo "${INTEGRATIONS[@]}" | tr ' ' '\n' | sort -u | tr '\n' ' '))

    for _INT in "${UNIQUE_INTEGRATIONS[@]}"
    do
        COMPOSE_FILE=${COMPOSE_FILE}:${SCRIPTS_CI_DIR}/docker-compose/integration-${_INT}.yml
    done
    set -u

    export COMPOSE_FILE

    # Entrypoint path as seen from INSIDE the container
    CI_ENTRYPOINT_FILE="/opt/airflow/scripts/ci/in_container/entrypoint_ci.sh"

    # Base python image for the build
    export PYTHON_BASE_IMAGE=python:${PYTHON_MAJOR_MINOR_VERSION}-slim-buster
    export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci"
    export BUILT_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}"

    # NOTE(review): the escaped quote opened here is closed by the trailing '"'
    # that prepare_command_file writes after the args expansion - the two
    # functions depend on each other's quoting.
    DC_RUN_COMMAND="run --service-ports --rm airflow-testing \"${CI_ENTRYPOINT_FILE} "

    LAST_DC_RUN_FILE="cmd_run"
    LAST_DC_TEST_FILE="test_run"
    LAST_DC_FILE="dc"

    # Prepare script for "run command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}" "${DC_RUN_COMMAND}" "false" '*'

    # Prepare script for "run test"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_TEST_FILE}" "${DC_RUN_COMMAND}" "true" '*'

    # Prepare script for "run docker compose command"
    prepare_command_file "${BUILD_CACHE_DIR}/${LAST_DC_FILE}" '"' "false"
}
| |
# Prints the full help: short usage, then a detailed section for every breeze
# command, then the description of all flags.
function do_help_all() {
    printf '\n'
    print_line
    usage
    print_line
    printf '\n\n'
    echo "Detailed usage"
    printf '\n'
    print_line
    printf '\n'
    # One detailed section per known command (word-splitting intentional)
    local HELP_COMMAND
    for HELP_COMMAND in ${ALL_BREEZE_COMMANDS}
    do
        detailed_usage "${HELP_COMMAND}"
        print_star_line
    done
    printf '\n'
    print_line
    printf '\n\n'
    echo "Flags"
    printf '\n'
    print_line
    printf '\n'
    flags
}
| |
# Parses breeze flags and at most one subcommand using enhanced getopt.
# Sets globals such as COMMAND_TO_RUN, LAST_SUBCOMMAND, DOCKER_COMPOSE_COMMAND
# and exports the relevant environment variables; everything left after the
# command is appended to the REMAINING_ARGS array.
#
# Fixes: the 'test-target' arm used to compare the (always empty at this
# point) TEST_TARGET variable against "." instead of the user-supplied
# argument ${2}, so 'breeze test-target .' never reset the target. Also fixes
# typos in the --forward-credentials message.
function parse_arguments() {
    set -u
    if ! PARAMS=$(getopt \
        -o "${_BREEZE_GETOPT_SHORT_OPTIONS:=}" \
        -l "${_BREEZE_GETOPT_LONG_OPTIONS:=}" \
        --name "$CMDNAME" -- "$@")
    then
        flags
        exit 1
    fi

    eval set -- "${PARAMS}"
    unset PARAMS

    # Parse Flags.
    # Please update short and long options in the breeze-complete script
    # This way autocomplete will work out-of-the-box
    while true
    do
        case "${1}" in
            -h|--help)
                RUN_HELP="true"
                shift ;;
            -p|--python)
                export PYTHON_MAJOR_MINOR_VERSION="${2}";
                echo "Python version: ${PYTHON_MAJOR_MINOR_VERSION}"
                echo
                shift 2 ;;
            -b|--backend)
                export BACKEND="${2}";
                echo "Backend: ${BACKEND}"
                echo
                shift 2 ;;
            -i|--integration)
                INTEGRATION=${2}
                check_and_save_allowed_param "INTEGRATION" "integration" "--integration"
                echo "Integration: ${INTEGRATION}"
                # "all" expands to every allowed integration except itself
                if [[ ${INTEGRATION} == "all" ]]; then
                    for _INT in ${_BREEZE_ALLOWED_INTEGRATIONS}
                    do
                        if [[ ${_INT} != "all" ]]; then
                            echo "${_INT}"
                            INTEGRATIONS+=("${_INT}")
                        fi
                    done
                else
                    INTEGRATIONS+=("${INTEGRATION}");
                fi
                echo
                shift 2 ;;
            -K|--kubernetes-mode)
                export KUBERNETES_MODE="${2}";
                echo "Kubernetes mode: ${KUBERNETES_MODE}"
                echo
                shift 2 ;;
            -V|--kubernetes-version)
                export KUBERNETES_VERSION="${2}";
                echo "Kubernetes version: ${KUBERNETES_VERSION}"
                echo
                shift 2 ;;
            --postgres-version)
                export POSTGRES_VERSION="${2}";
                echo "Postgres version: ${POSTGRES_VERSION}"
                echo
                shift 2 ;;
            --mysql-version)
                export MYSQL_VERSION="${2}";
                echo "MySQL version: ${MYSQL_VERSION}"
                echo
                shift 2 ;;
            -l|--skip-mounting-local-sources)
                MOUNT_LOCAL_SOURCES="false"
                echo "Mount local sources: ${MOUNT_LOCAL_SOURCES}"
                echo
                shift ;;
            -a|--install-airflow-version)
                INSTALL_AIRFLOW_VERSION="${2}"
                echo "Installs version of Airflow: ${INSTALL_AIRFLOW_VERSION}"
                echo
                shift 2 ;;
            -d|--db-reset)
                echo "Resetting the DB!"
                echo
                export DB_RESET="true"
                shift 1 ;;
            -v|--verbose)
                export VERBOSE="true"
                echo "Verbose output"
                echo
                shift ;;
            -y|--assume-yes)
                export FORCE_ANSWER_TO_QUESTIONS="yes"
                echo "Assuming 'yes' answer to all questions."
                echo
                shift ;;
            -n|--assume-no)
                export FORCE_ANSWER_TO_QUESTIONS="no"
                echo "Assuming 'no' answer to all questions."
                echo
                shift ;;
            -q|--assume-quit)
                export FORCE_ANSWER_TO_QUESTIONS="quit"
                echo "Assuming 'quit' answer to all questions."
                echo
                shift ;;
            -F|--force-build-images)
                echo "Force build images"
                echo
                export FORCE_BUILD_IMAGES="true"
                # if you want to force build an image - assume you want to build it :)
                export FORCE_ANSWER_TO_QUESTIONS="yes"
                shift ;;
            -C|--force-clean-images)
                echo "Clean build of images without cache"
                echo
                export DOCKER_CACHE="no-cache"
                export FORCE_BUILD_IMAGES="true"
                shift ;;
            -s|--kind-cluster-start)
                export RUNTIME=kubernetes
                export ENABLE_KIND_CLUSTER="true"
                export KIND_CLUSTER_OPERATION="start"
                echo "Starting kubernetes cluster"
                echo
                shift ;;
            -r|--kind-cluster-recreate)
                export RUNTIME=kubernetes
                export ENABLE_KIND_CLUSTER="true"
                export KIND_CLUSTER_OPERATION="recreate"
                echo "Recreating kind cluster"
                echo
                shift ;;
            -x|--kind-cluster-stop)
                export RUNTIME=kubernetes
                export ENABLE_KIND_CLUSTER="true"
                export KIND_CLUSTER_OPERATION="stop"
                echo "Stop kind cluster"
                echo
                shift ;;
            -L|--use-local-cache)
                echo "Use local cache to build images"
                echo
                export DOCKER_CACHE="local"
                shift ;;
            -P|--force-pull-images)
                echo "Force pulling images before build. Uses pulled images as cache."
                echo
                export FORCE_PULL_IMAGES="true"
                export FORCE_BUILD_IMAGES="true"
                # if you want to force build an image - assume you want to build it :)
                export FORCE_ANSWER_TO_QUESTIONS="yes"
                shift ;;
            -D|--dockerhub-user)
                export DOCKERHUB_USER="${2}"
                echo "Dockerhub user ${DOCKERHUB_USER}"
                echo
                shift 2 ;;
            -R|--dockerhub-repo)
                export DOCKERHUB_REPO="${2}"
                echo "Dockerhub repo ${DOCKERHUB_REPO}"
                echo
                shift 2 ;;
            -f|--forward-credentials)
                echo "Forwarding credentials. Be careful as your credentials are available in the container!"
                echo
                export FORWARD_CREDENTIALS="true"
                shift 1 ;;
            -u|--push-images)
                echo
                echo "Pushing images to DockerHub"
                echo
                export PUSH_IMAGES="true"
                export FORCE_BUILD_IMAGES="true"
                shift ;;
            --)
                shift ;
                break ;;
            *)
                flags
                echo >&2
                echo >&2 "ERROR: Unknown flag ${1}"
                echo >&2
                exit 1
                ;;
        esac
    done
    # Parse commands
    if [[ "$#" -ne 0 ]]; then
        case "${1}" in
            shell)
                LAST_SUBCOMMAND="${1}"
                shift ;;
            exec)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="run_exec"
                shift ;;
            build-docs)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="build_docs"
                shift 1 ;;
            build-only)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="build_ci_images_only"
                # if you want to build an image - assume you want to build it :)
                export FORCE_ANSWER_TO_QUESTIONS="yes"
                # and assume you want to build it no matter if it is needed
                export FORCE_BUILD_IMAGES="true"
                echo "Only build. Do not enter airflow-testing container"
                echo
                shift ;;
            cleanup-images)
                LAST_SUBCOMMAND="${1}"
                echo "Cleanup the image"
                echo
                COMMAND_TO_RUN="cleanup_images"
                shift ;;
            docker-compose)
                if [[ "$#" -lt 2 ]]; then
                    echo "You should specify docker compose command to run"
                    exit 1
                fi
                LAST_SUBCOMMAND="${1}"
                DOCKER_COMPOSE_COMMAND="${2}"
                COMMAND_TO_RUN="run_docker_compose"
                shift 2 ;;
            execute-command)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="run_in_bash"
                shift ;;
            generate-requirements)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="perform_generate_requirements"
                shift ;;
            initialize-local-virtualenv)
                LAST_SUBCOMMAND="${1}"
                echo "Initializing local virtualenv"
                echo
                COMMAND_TO_RUN="perform_initialize_local_virtualenv"
                shift ;;
            setup-autocomplete)
                LAST_SUBCOMMAND="${1}"
                echo "Setting up autocomplete"
                echo
                COMMAND_TO_RUN="perform_setup_autocomplete"
                shift ;;
            static-check)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="perform_static_checks"
                if [[ "$#" -lt 2 ]]; then
                    echo "You should specify static check that you would like to run or 'all' to run all checks."
                    echo "One of [${_BREEZE_ALLOWED_STATIC_CHECKS:=}]."
                    echo
                    echo "For example:"
                    echo "${CMDNAME} static-check mypy"
                    exit 1
                fi
                export PYTHON_MAJOR_MINOR_VERSION=${STATIC_CHECK_PYTHON_MAJOR_MINOR_VERSION}
                export STATIC_CHECK="${2}"
                export STATIC_CHECK_ALL_FILES="false"
                EXTRA_STATIC_CHECK_OPTIONS+=("--show-diff-on-failure")
                shift 2 ;;
            static-check-all-files)
                LAST_SUBCOMMAND="${1}"
                if [[ "$#" -lt 2 ]]; then
                    echo "You should specify static check that you would like to run or 'all' to run all checks."
                    echo "One of [${_BREEZE_ALLOWED_STATIC_CHECKS:=}]."
                    echo
                    echo "For example:"
                    echo "${CMDNAME} static-check-all-files mypy"
                    exit 1
                fi
                COMMAND_TO_RUN="perform_static_checks"
                export PYTHON_MAJOR_MINOR_VERSION=${STATIC_CHECK_PYTHON_MAJOR_MINOR_VERSION}
                export STATIC_CHECK="${2}"
                export STATIC_CHECK_ALL_FILES="true"
                EXTRA_STATIC_CHECK_OPTIONS+=("--all-files" "--show-diff-on-failure")
                shift 2 ;;
            stop)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="run_docker_compose"
                DOCKER_COMPOSE_COMMAND="down"
                EXTRA_DC_OPTIONS+=("--remove-orphans")
                shift ;;
            restart)
                LAST_SUBCOMMAND="${1}"
                COMMAND_TO_RUN="run_docker_compose"
                DOCKER_COMPOSE_COMMAND="down"
                EXTRA_DC_OPTIONS+=("--remove-orphans")
                SECOND_COMMAND_TO_RUN="enter_breeze"
                echo "Restarts the environment. Includes emptying the databases."
                shift ;;
            test-target)
                LAST_SUBCOMMAND="${1}"
                # BUGFIX: compare the user-supplied target (${2}), not the
                # TEST_TARGET variable which is always empty here. "." means
                # "run all tests" and maps to an empty target.
                if [[ "${2}" == "." ]]; then
                    export TEST_TARGET=""
                else
                    export TEST_TARGET="${2}"
                fi
                COMMAND_TO_RUN="run_tests"
                shift 2 ;;
            toggle-suppress-cheatsheet)
                LAST_SUBCOMMAND="${1}"
                if [[ -f "${SUPPRESS_CHEATSHEET_FILE}" ]]; then
                    rm -f "${SUPPRESS_CHEATSHEET_FILE}"
                else
                    touch "${SUPPRESS_CHEATSHEET_FILE}"
                fi
                echo "Toggle suppress cheatsheet"
                echo
                shift ;;
            toggle-suppress-asciiart)
                LAST_SUBCOMMAND="${1}"
                if [[ -f "${SUPPRESS_ASCIIART_FILE}" ]]; then
                    rm -f "${SUPPRESS_ASCIIART_FILE}"
                else
                    touch "${SUPPRESS_ASCIIART_FILE}"
                fi
                echo "Toggle suppress asciiart"
                echo
                shift ;;
            flags)
                flags
                exit 0 ;;
            help)
                usage
                flag_footer
                exit 0 ;;
            help-all)
                do_help_all
                exit 0 ;;
            *)
                usage
                echo >&2
                echo >&2 "ERROR: Unknown command ${1}"
                echo >&2
                exit 1
                ;;
        esac
    else
        :
        # By default, start interactive terminal
    fi

    # --help after a subcommand shows that subcommand's detailed help
    if [[ ${RUN_HELP} == "true" ]]; then
        if [[ ${LAST_SUBCOMMAND} == "" ]]; then
            usage
            flag_footer
        else
            detailed_usage "${LAST_SUBCOMMAND}"
            flag_footer
        fi
        exit 0
    fi
    REMAINING_ARGS+=("$@")
}
| |
# Helper for prepare_allowed_versions: turns a (possibly multi-line) list of
# allowed values into a single space-separated string wrapped to the usable
# screen width, with every line indented by LIST_PREFIX.
function fold_allowed_list() {
    echo "${1}" | tr '\n' ' ' | fold -w "${WIDTH}" -s | sed "s/^/${LIST_PREFIX}/"
}

# Formats the ALLOWED_* display strings used by the help output from the
# _BREEZE_ALLOWED_* lists sourced from breeze-complete.
# The nine identical copy-pasted pipelines were factored into fold_allowed_list.
# Note: the ${VAR=""} expansions deliberately assign an empty default to any
# unset _BREEZE_ALLOWED_* variable (relevant under 'set -u').
prepare_allowed_versions() {
    INDENT=15
    LIST_PREFIX=$(printf "%-${INDENT}s" " ")
    WIDTH=$((SCREEN_WIDTH - INDENT))
    ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS=$(fold_allowed_list "${_BREEZE_ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS=""}")
    ALLOWED_BACKENDS=$(fold_allowed_list "${_BREEZE_ALLOWED_BACKENDS=""}")
    ALLOWED_STATIC_CHECKS=$(fold_allowed_list "${_BREEZE_ALLOWED_STATIC_CHECKS=""}")
    ALLOWED_INTEGRATIONS=$(fold_allowed_list "${_BREEZE_ALLOWED_INTEGRATIONS=""}")
    ALLOWED_KUBERNETES_MODES=$(fold_allowed_list "${_BREEZE_ALLOWED_KUBERNETES_MODES=""}")
    ALLOWED_KUBERNETES_VERSIONS=$(fold_allowed_list "${_BREEZE_ALLOWED_KUBERNETES_VERSIONS=""}")
    ALLOWED_INSTALL_AIRFLOW_VERSIONS=$(fold_allowed_list "${_BREEZE_ALLOWED_INSTALL_AIRFLOW_VERSIONS=""}")
    ALLOWED_POSTGRES_VERSIONS=$(fold_allowed_list "${_BREEZE_ALLOWED_POSTGRES_VERSIONS=""}")
    ALLOWED_MYSQL_VERSIONS=$(fold_allowed_list "${_BREEZE_ALLOWED_MYSQL_VERSIONS=""}")
}
| |
| prepare_usage() { |
| # Note that MacOS uses Bash 3.* and we cannot use associative arrays |
| export USAGE_SHELL="[Default] Enters interactive shell in the container" |
| export USAGE_EXEC="Execs into running breeze container in new terminal" |
| export USAGE_BUILD_DOCS="Builds documentation in the container" |
| export USAGE_BUILD_ONLY="Only builds docker images without entering container" |
| export USAGE_CLEANUP_IMAGES="Cleans up the container images created" |
| export USAGE_DOCKER_COMPOSE="Executes specified docker-compose command" |
| export USAGE_EXECUTE_COMMAND="Executes specified command in the container" |
| export USAGE_FLAGS="Shows all breeze's flags" |
| export USAGE_GENERATE_REQUIREMENTS="Generates pinned requirements for pip dependencies" |
| export USAGE_INITIALIZE_LOCAL_VIRTUALENV="Initializes local virtualenv" |
| export USAGE_SETUP_AUTOCOMPLETE="Sets up autocomplete for breeze" |
| export USAGE_STOP="Stops the docker-compose evironment" |
| export USAGE_RESTART="Stops the docker-compose evironment including DB cleanup" |
| export USAGE_STATIC_CHECK="Performs selected static check for changed files" |
| export USAGE_STATIC_CHECK_ALL_FILES="Performs selected static check for all files" |
| export USAGE_TOGGLE_SUPPRESS_CHEATSHEET="Toggles on/off cheatsheet" |
| export USAGE_TOGGLE_SUPPRESS_ASCIIART="Toggles on/off asciiart" |
| export USAGE_TEST_TARGET="Runs selected test target in the container" |
| export USAGE_HELP="Shows this help message" |
| export USAGE_HELP_ALL="Shows detailed help for all commands and flags" |
| |
| |
| export DETAILED_USAGE_SHELL=" |
| This is default subcommand if no subcommand is used. |
| |
| Enters interactive shell where you can run all tests, start airflow webserver, scheduler, |
| workers, interact with the database, run DAGs etc. It is the default command if no command |
| is selected. The shell is executed in the container and in case integrations are chosen, |
| the integrations will be started as separated docker containers - under the docker-compose |
| supervision. Local sources are by default mounted to within the container so you can edit |
| them locally and run tests immediately in the container. Several folders ('files', 'dist') |
| are also mounted so that you can exchange files between the host and container. |
| |
| The 'files/airflow-breeze-config/variables.env' file can contain additional variables |
| and setup. This file is automatically sourced when you enter the container. Database |
| and webserver ports are forwarded to appropriate database/webserver so that you can |
| connect to it from your host environment. |
| " |
| export DETAILED_USAGE_EXEC=" |
| Execs into interactive shell to an already running container. The container mus be started |
| already by breeze shell command. If you are not familiar with tmux, this is the best |
| way to run multiple processes in the same container at the same time for example scheduler, |
| webserver, workers, database console and interactive terminal. |
| " |
| export DETAILED_USAGE_BUILD_DOCS=" |
| Builds airflow documentation. The documentation is build inside docker container - to |
| maintain the same build environment for everyone. Appropriate sources are mapped from |
| the host to the container so that latest sources are used. The folders where documentation |
| is generated ('docs/build') are also mounted to the container - this way results of |
| the documentation build is available in the host. |
| " |
| export DETAILED_USAGE_BUILD_ONLY=" |
| Do not enter docker container - just build the docker images needed. You can (similarly as |
| with other commands) pass additional options to this command, such as '--force-build-image', |
| '--force-pull-image' in order to force latest images to be built/pulled. |
| " |
| export DETAILED_USAGE_CLEANUP_IMAGES=" |
| Removes the breeze-related images created in your local docker image cache. This will |
| not reclaim space in docker cache. You need to 'docker system prune' (optionally |
| with --all) to reclaim that space. |
| " |
| export DETAILED_USAGE_DOCKER_COMPOSE=" |
| Run docker-compose command instead of entering the environment. Use 'help' as command |
| to see available commands. The <EXTRA_ARGS> passed after -- are treated |
| as additional options passed to docker-compose. For example |
| |
| '${CMDNAME} docker-compose pull -- --ignore-pull-failures' |
| " |
| export DETAILED_USAGE_EXECUTE_COMMAND=" |
| Run chosen command instead of entering the environment. The command is run using |
| 'bash -c \"<command with args>\" if you need to pass arguments to your command, you need |
| to pass them together with command surrounded with \" or '. Alternatively you can |
| pass arguments as <EXTRA_ARGS> passed after --. For example: |
| |
| '${CMDNAME} execute-command \"ls -la\"' or |
| '${CMDNAME} execute-command ls -- --la' |
| " |
| export DETAILED_USAGE_FLAGS=" |
| Explains in detail all the flags that can be used with breeze. |
| " |
| export DETAILED_USAGE_GENERATE_REQUIREMENTS=" |
| Generates pinned requirements from setup.py. Those requirements are generated in requirements |
| directory - separately for different python version. Those requirements are used to run |
| CI builds as well as run repeatable production image builds. You can use those requirements |
| to predictably install released airflow versions. You should run it always after you update |
| setup.py. |
| " |
| export DETAILED_USAGE_INITIALIZE_LOCAL_VIRTUALENV=" |
| Initializes locally created virtualenv installing all dependencies of Airflow |
| taking into account the frozen requirements from requirements folder. |
| This local virtualenv can be used to aid autocompletion and IDE support as |
| well as run unit tests directly from the IDE. You need to have virtualenv |
| activated before running this command. |
| " |
| export DETAILED_USAGE_SETUP_AUTOCOMPLETE=" |
| Sets up autocomplete for breeze commands. Once you do it you need to re-enter the bash |
| shell and when typing breeze command <TAB> will provide autocomplete for |
| parameters and values. |
| " |
| export DETAILED_USAGE_STOP=" |
| Brings down running docker compose environment. When you start the environment, the docker |
| containers will continue running so that startup time is shorter. But they take quite a lot of |
| memory and CPU. This command stops all running containers from the environment. |
| " |
| export DETAILED_USAGE_RESTART=" |
| Restarts running docker compose environment. When you restart the environment, the docker |
| containers will be restarted. That includes cleaning up the databases. This is |
| especially useful if you switch between different versions of airflow. |
| " |
| export DETAILED_USAGE_STATIC_CHECK=" |
| Run selected static checks for currently changed files. You should specify static check that |
| you would like to run or 'all' to run all checks. One of: |
| |
| ${ALLOWED_STATIC_CHECKS} |
| |
| You can pass extra arguments including options to to the pre-commit framework as |
| <EXTRA_ARGS> passed after --. For example: |
| |
| '${CMDNAME} static-check mypy' or |
| '${CMDNAME} static-check mypy -- --files tests/core.py' |
| |
| You can see all the options by adding --help EXTRA_ARG: |
| |
| '${CMDNAME} static-check mypy -- --help' |
| " |
| export DETAILED_USAGE_STATIC_CHECK_ALL_FILES=" |
| Run selected static checks for all applicable files. You should specify static check that |
| you would like to run or 'all' to run all checks. One of: |
| |
| ${ALLOWED_STATIC_CHECKS} |
| |
| You can pass extra arguments including options to the pre-commit framework as |
| <EXTRA_ARGS> passed after --. For example: |
| |
| '${CMDNAME} static-check-all-files mypy' or |
| '${CMDNAME} static-check-all-files mypy -- --verbose' |
| |
| You can see all the options by adding --help EXTRA_ARG: |
| |
| '${CMDNAME} static-check-all-files mypy -- --help' |
| " |
| export DETAILED_USAGE_TEST_TARGET=" |
| Run the specified unit test target. There might be multiple |
| targets specified separated with comas. The <EXTRA_ARGS> passed after -- are treated |
| as additional options passed to pytest. For example: |
| |
| '${CMDNAME} test-target tests/test_core.py -- --logging-level=DEBUG' |
| " |
| export DETAILED_USAGE_TOGGLE_SUPPRESS_CHEATSHEET=" |
| Toggles on/off cheatsheet displayed before starting bash shell. |
| " |
| export DETAILED_USAGE_TOGGLE_SUPPRESS_ASCIIART=" |
| Toggles on/off asciiart displayed before starting bash shell. |
| " |
| export DETAILED_USAGE_HELP=" |
| Shows this help message. |
| " |
| export DETAILED_USAGE_HELP_ALL=" |
| Shows detailed help for all commands and flags. |
| " |
| } |
| |
get_variable_from_lowercase_name() {
    # Echoes the value of the variable "<PREFIX>_<NAME>", where NAME is
    # converted from lowercase-with-dashes to UPPERCASE_WITH_UNDERSCORES.
    # Example: 'get_variable_from_lowercase_name USAGE build-docs' prints
    # the value of USAGE_BUILD_DOCS.
    #
    # All working variables are local so the helper does not leak
    # PREFIX/NAME/SUFFIX/VARIABLE_NAME into the global namespace.
    local prefix="${1}"
    local name="${2}"
    local suffix variable_name
    suffix="$(echo "${name}" | tr "[:lower:]-" "[:upper:]_")"
    variable_name="${prefix}_${suffix}"
    # Indirect expansion: look up the composed variable name
    echo "${!variable_name}"
}
| |
get_usage() {
    # Short one-line usage text for the given command: USAGE_<COMMAND>.
    get_variable_from_lowercase_name "USAGE" "${1}"
}
| |
get_detailed_usage() {
    # Multi-line detailed help for the given command: DETAILED_USAGE_<COMMAND>.
    get_variable_from_lowercase_name "DETAILED_USAGE" "${1}"
}
| |
| |
usage() {
    # Top-level help: prints the general usage banner followed by each breeze
    # command with its one-line summary, grouped into "no argument",
    # "with argument" and "help" commands. The BREEZE_* lists are
    # whitespace-separated strings, hence the deliberately unquoted loops.
    echo "

Usage: ${CMDNAME} [FLAGS] [COMMAND] -- <EXTRA_ARGS>

By default the script enters IT environment and drops you to bash shell, but you can choose one
of the commands to run specific actions instead. Add --help after each command to see details:

Commands without arguments:
"
    for SUBCOMMAND in ${BREEZE_COMMANDS}; do
        printf " %-40s %s\n" "${SUBCOMMAND}" "$(get_usage "${SUBCOMMAND}")"
    done
    echo "
Commands with arguments:
"
    for SUBCOMMAND in ${BREEZE_EXTRA_ARG_COMMANDS}; do
        printf " %-30s%-10s %s\n" "${SUBCOMMAND}" "<ARG>" "$(get_usage "${SUBCOMMAND}")"
    done
    echo "
Help commands:
"
    for SUBCOMMAND in ${BREEZE_HELP_COMMANDS}; do
        printf " %-40s %s\n" "${SUBCOMMAND}" "$(get_usage "${SUBCOMMAND}")"
    done
    echo
}
| |
detailed_usage() {
    # Prints the detailed help for one breeze subcommand.
    SUBCOMMAND=${1}
    printf '%s\n' "
${CMDNAME} [FLAGS] ${SUBCOMMAND} -- <EXTRA_ARGS>
$(get_detailed_usage "${SUBCOMMAND}")"
}
| |
flag_footer() {
    # Reminds the user how to list all available flags.
    printf '%s\n' "
Run '${CMDNAME} flags' to see available flags
"
}
| |
# Prints the full description of every flag supported by breeze.
# NOTE(review): the help text lists '-p' for both --python and
# --force-pull-images - confirm which short flag parse_arguments really
# accepts for each.
flags() {
    echo "
$(print_star_line)

List of flags supported by breeze:

$(print_star_line)
Choose Airflow variant
$(print_star_line)

-p, --python <PYTHON_MAJOR_MINOR_VERSION>
Python version used for the image. This is always major/minor version.
One of:

${ALLOWED_PYTHON_MAJOR_MINOR_VERSIONS}

-b, --backend <BACKEND>
Backend to use for tests - it determines which database is used.
One of:

${ALLOWED_BACKENDS}

Default: ${_BREEZE_DEFAULT_BACKEND:=}

-d, --db-reset
Resets the database at entry to the environment. It will drop all the tables
and data and recreate the DB from scratch even if 'restart' command was not used.
Combined with 'restart' command it enters the environment in the state that is
ready to start airflow webserver/scheduler/worker. Without the switch, the database
does not have any tables and you need to run reset db manually.

-i, --integration <INTEGRATION>
Integration to start during tests - it determines which integrations are started
for integration tests. There can be more than one integration started, or all to
start all integrations. Selected integrations are not saved for future execution.
One of:

${ALLOWED_INTEGRATIONS}

$(print_star_line)
Manage Kind kubernetes cluster (optional)
$(print_star_line)


Action for the cluster: only one of the --kind-cluster-* flags can be used at a time:

-s, --kind-cluster-start
Starts kind Kubernetes cluster after entering the environment. The cluster is started using
Kubernetes Mode selected and Kubernetes version specified via --kubernetes-mode and
--kubernetes-version flags.

-x, --kind-cluster-stop
Stops kind Kubernetes cluster if one has already been created. By default, if you do not
stop environment, the Kubernetes cluster created for testing is continuously running and
when you start Kubernetes testing again it will be reused. You can force deletion and
recreation of such cluster with this flag.

-r, --kind-cluster-recreate

Recreates kind Kubernetes cluster if one has already been created. By default, if you do
not stop environment, the Kubernetes cluster created for testing is continuously running
and when you start Kubernetes testing again it will be reused. You can force deletion and
recreation of such cluster with this flag.

Kubernetes mode/version flags:

-K, --kubernetes-mode <KUBERNETES_MODE>
Kubernetes mode - only used in case one of --kind-cluster-* commands is used.
One of:

${ALLOWED_KUBERNETES_MODES}

Default: ${_BREEZE_DEFAULT_KUBERNETES_MODE:=}

-V, --kubernetes-version <KUBERNETES_VERSION>
Kubernetes version - only used in case one of --kind-cluster-* commands is used.
One of:

${ALLOWED_KUBERNETES_VERSIONS}

Default: ${_BREEZE_DEFAULT_KUBERNETES_VERSION:=}

$(print_star_line)
Manage mounting local files
$(print_star_line)

-l, --skip-mounting-local-sources
Skips mounting local volume with sources - you get exactly what is in the
docker image rather than your current local sources of airflow.

$(print_star_line)
Install Airflow if different than current
$(print_star_line)

-a, --install-airflow-version <INSTALL_AIRFLOW_VERSION>
If specified, removes the source-installed airflow and installs a
released version of Airflow instead. One of:

${ALLOWED_INSTALL_AIRFLOW_VERSIONS}

$(print_star_line)
Database versions
$(print_star_line)

--postgres-version <POSTGRES_VERSION>
Postgres version used. One of:

${ALLOWED_POSTGRES_VERSIONS}


--mysql-version <MYSQL_VERSION>
Mysql version used. One of:

${ALLOWED_MYSQL_VERSIONS}


$(print_star_line)
Assume answers to questions
$(print_star_line)

-y, --assume-yes
Assume 'yes' answer to all questions.

-n, --assume-no
Assume 'no' answer to all questions.

-q, --assume-quit
Assume 'quit' answer to all questions.

$(print_star_line)
Credentials
$(print_star_line)

-f, --forward-credentials
Forwards host credentials to docker container. Use with care as it will make
your credentials available to everything you install in Docker.

$(print_star_line)
Increase verbosity of the script
$(print_star_line)

-v, --verbose
Show verbose information about executed commands (enabled by default for running test).
Note that you can further increase verbosity and see all the commands executed by breeze
by running 'export VERBOSE_COMMANDS=\"true\"' before running breeze.

$(print_star_line)
Flags for building the docker images
$(print_star_line)

-F, --force-build-images
Forces building of the local docker images. The images are rebuilt
automatically for the first time or when changes are detected in
package-related files, but you can force it using this flag.

-p, --force-pull-images
Forces pulling of images from DockerHub before building to populate cache. The
images are pulled by default only for the first time you run the
environment, later the locally build images are used as cache.

-C, --force-clean-images
Force build images with cache disabled. This will remove the pulled or build images
and start building images from scratch. This might take a long time.

-L, --use-local-cache
Uses local cache to build images. No pulled images will be used, but results of local
builds in the Docker cache are used instead.

$(print_star_line)
Flags for pushing the docker images
$(print_star_line)

-u, --push-images
After building - uploads the images to DockerHub
It is useful in case you use your own DockerHub user to store images and you want
to build them locally. Note that you need to use 'docker login' before you upload images.

$(print_star_line)
User and repo used to login to github registry
$(print_star_line)

-D, --dockerhub-user
DockerHub user used to pull, push and build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_USER:=}.

-H, --dockerhub-repo
DockerHub repository used to pull, push, build images. Default: ${_BREEZE_DEFAULT_DOCKERHUB_REPO:=}.

$(print_star_line)
"
}
| |
function print_header_line() {
    # Prints a full-screen-width '=' separator, only when VERBOSE is "true".
    # Side effect: ${VERBOSE:="false"} assigns VERBOSE if it was unset.
    # Use [[ ]] instead of [ ] - '==' inside single brackets is not portable
    # and the unquoted expansion is unsafe there.
    if [[ ${VERBOSE:="false"} == "true" ]]; then
        echo
        # shellcheck disable=SC2046 # word-splitting of seq output is intended
        printf '=%.0s' $(seq "${SCREEN_WIDTH}")
        echo
    fi
}
| |
| |
function print_line {
    # Prints SCREEN_WIDTH '#' characters without a trailing newline.
    printf '%*s' "${SCREEN_WIDTH}" '' | tr ' ' '#'
}
| |
function print_star_line {
    # Prints SCREEN_WIDTH '*' characters without a trailing newline.
    printf '%*s' "${SCREEN_WIDTH}" '' | tr ' ' '*'
}
| |
# Restores settings saved by previous breeze runs from the .build cache.
# Precedence for every variable: current environment > saved file > breeze
# default. The ':=' expansions also assign the resolved value back to the
# variable before it is exported.
# NOTE(review): BACKEND uses ':-' on its second line while all other
# variables use ':=' - the end result is the same here because the value is
# exported on that same line, but confirm the inconsistency is harmless.
function read_saved_environment_variables {
    export BACKEND="${BACKEND:=$(read_from_file BACKEND)}"
    export BACKEND=${BACKEND:-${_BREEZE_DEFAULT_BACKEND}}

    export KUBERNETES_MODE="${KUBERNETES_MODE:=$(read_from_file KUBERNETES_MODE)}"
    export KUBERNETES_MODE=${KUBERNETES_MODE:=${_BREEZE_DEFAULT_KUBERNETES_MODE}}

    export KUBERNETES_VERSION="${KUBERNETES_VERSION:=$(read_from_file KUBERNETES_VERSION)}"
    export KUBERNETES_VERSION=${KUBERNETES_VERSION:=${_BREEZE_DEFAULT_KUBERNETES_VERSION}}

    export POSTGRES_VERSION="${POSTGRES_VERSION:=$(read_from_file POSTGRES_VERSION)}"
    export POSTGRES_VERSION=${POSTGRES_VERSION:=${_BREEZE_DEFAULT_POSTGRES_VERSION}}

    export MYSQL_VERSION="${MYSQL_VERSION:=$(read_from_file MYSQL_VERSION)}"
    export MYSQL_VERSION=${MYSQL_VERSION:=${_BREEZE_DEFAULT_MYSQL_VERSION}}

    # Here you read DockerHub user/account that you use
    # You can populate your own images in DockerHub this way and work with them
    # You can override it with the "-D" option and it will be stored in .build directory
    export DOCKERHUB_USER="${DOCKERHUB_USER:=$(read_from_file DOCKERHUB_USER)}"
    export DOCKERHUB_USER="${DOCKERHUB_USER:=${_BREEZE_DEFAULT_DOCKERHUB_USER}}"

    # Here you read DockerHub repo that you use
    # You can populate your own images in DockerHub this way and work with them
    # You can override it with the "-H" option and it will be stored in .build directory
    export DOCKERHUB_REPO="${DOCKERHUB_REPO:=$(read_from_file DOCKERHUB_REPO)}"
    export DOCKERHUB_REPO="${DOCKERHUB_REPO:=${_BREEZE_DEFAULT_DOCKERHUB_REPO}}"
}
| |
function check_and_save_all_params() {
    # Validates every user-selectable parameter against its list of allowed
    # values and persists it to the .build cache for the next breeze run.
    # Each spec is "VARIABLE;human description;--flag".
    local parameter_spec variable_name description flag
    for parameter_spec in \
        "PYTHON_MAJOR_MINOR_VERSION;Python version;--python" \
        "BACKEND;backend;--backend" \
        "KUBERNETES_MODE;Kubernetes mode;--kubernetes-mode" \
        "KUBERNETES_VERSION;Kubernetes version;--kubernetes-version" \
        "POSTGRES_VERSION;Postgres version;--postgres-version" \
        "MYSQL_VERSION;Mysql version;--mysql-version"
    do
        IFS=';' read -r variable_name description flag <<< "${parameter_spec}"
        check_and_save_allowed_param "${variable_name}" "${description}" "${flag}"
    done

    # Can't verify those - any DockerHub user/repo is acceptable
    save_to_file DOCKERHUB_USER
    save_to_file DOCKERHUB_REPO
}
| |
function fix_local_file {
    # Makes sure "${MY_DIR}/${1}" exists as a regular (empty) file. A
    # directory may be left behind in its place (Docker creates one when
    # mounting a missing file), so remove it first. The ':?' guard aborts
    # rather than run 'rm -rf' with an empty MY_DIR.
    local relative_name="${1}"
    if [[ -d "${MY_DIR}/${relative_name}" ]]; then
        rm -rf "${MY_DIR:?}/${relative_name}"
    fi
    touch "${MY_DIR}/${relative_name}"
}
| |
function touch_local_files {
    # Dotfiles mounted into the container for local runs: .bash_history is
    # preserved between runs, .bash_aliases and .inputrc can be modified
    # according to your liking.
    local mounted_file
    for mounted_file in .bash_history .bash_aliases .inputrc; do
        fix_local_file "${mounted_file}"
    done
    # When kind cluster is created, the folder keeps authentication
    # information across sessions
    mkdir -pv "${MY_DIR}/.kube"
}
| |
# Prints the breeze cheatsheet (quick re-run scripts, PATH hint, forwarded
# ports) before dropping the user into the interactive shell. It is skipped
# when the suppress-marker file exists or when a non-interactive command runs.
function print_cheatsheet() {
    if [[ ! -f ${SUPPRESS_CHEATSHEET_FILE} && ${COMMAND_TO_RUN} == "enter_breeze" ]]; then
        echo
        echo
        print_line
        echo
        echo " Airflow Breeze CHEATSHEET"
        echo
        print_line
        echo
        echo
        print_line
        echo
        echo " Quick scripts:"
        echo " * Enter the environm : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}"
        echo " * Run command in the environment : ${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE} "\
            "[command with args] [bash options]"
        echo " * Run tests in the environment : ${BUILD_CACHE_DIR}/${LAST_DC_TEST_FILE} "\
            "[test target] [nosetest options]"
        echo " * Run Docker compose command : ${BUILD_CACHE_DIR}/${LAST_DC_FILE} "\
            "[docker compose command] [docker-compose options]"
        echo

        # Best-effort check - don't abort the script when 'breeze' is absent.
        # NOTE(review): 'command -v' prints the resolved path to stdout when
        # breeze IS found - confirm that output is intended in the cheatsheet.
        set +e
        if ! command -v breeze; then
            print_line
            echo
            echo " Adding breeze to your path:"
            echo " When you exit the environment, you can add sources of airflow to the path - you can"
            echo " run breeze or the scripts above from any directory by calling 'breeze' commands directly"
            echo
            echo " export PATH=\${PATH}:\"${MY_DIR}\""
            echo
        fi
        set -e
        print_line

        echo
        echo " Port forwarding:"
        echo
        echo " Ports are forwarded to the running docker containers for webserver and database"
        echo " * ${WEBSERVER_HOST_PORT} -> forwarded to airflow webserver -> airflow-testing:8080"
        echo " * ${POSTGRES_HOST_PORT} -> forwarded to postgres database -> postgres:5432"
        echo " * ${MYSQL_HOST_PORT} -> forwarded to mysql database -> mysql:3306"
        echo
        echo " Here are links to those services that you can use on host:"
        echo " * Webserver: http://127.0.0.1:28080"
        echo " * Postgres: jdbc:postgresql://127.0.0.1:25433/airflow?user=postgres&password=airflow"
        echo " * Mysql: jdbc:mysql://localhost:23306/airflow?user=root"
        echo
    else
        # Cheatsheet suppressed - keep vertical spacing consistent
        echo
    fi
}
| |
# Prints hints about enabling autocomplete and toggling the cheatsheet /
# ASCII art. Only shown when entering the interactive breeze environment.
function print_setup_instructions {
    if [[ ${COMMAND_TO_RUN} == "enter_breeze" ]] ; then
        # Only suggest setting up autocomplete when the completion function
        # is not already defined in this shell.
        # shellcheck disable=SC2034 # Unused variables left for comp_breeze usage
        if ! typeset -f "_comp_breeze" > /dev/null; then
            print_line
            echo
            echo " You can setup autocomplete by running '${CMDNAME} setup-autocomplete'"
            echo
            echo
        fi
        print_line
        echo
        echo " You can toggle ascii/cheatsheet by running:"
        echo " * ${CMDNAME} toggle-suppress-cheatsheet"
        echo " * ${CMDNAME} toggle-suppress-asciiart"
        echo
        print_line
        echo
        echo
        echo
        echo
    fi
}
| |
# Installs/upgrades the pre-commit framework, preferring pip3 over pip.
# Exits with an error when neither pip nor pip3 is on the PATH.
# Fix: removed a stray 'S' line that ran a nonexistent command ("S: command
# not found") just before the 'exit 1' in the error branch.
function make_sure_precommit_is_installed {
    echo
    echo "Making sure pre-commit is installed"
    echo
    if command -v pip3 >/dev/null; then
        PIP_BIN=pip3
    elif command -v pip >/dev/null; then
        PIP_BIN=pip
    else
        echo >&2
        echo >&2 "ERROR: You need to have pip or pip3 in your PATH"
        echo >&2
        exit 1
    fi
    # Install quietly - the output is not interesting on success
    "${PIP_BIN}" install --upgrade pre-commit >/dev/null 2>&1
    # Add ~/.local/bin to the path in case pip is run outside of virtualenv
    export PATH="${PATH}":~/.local/bin
}
| |
function remove_images {
    # Removes the base python image and the CI image from the local docker
    # cache and deletes the "image was built" marker so the next run builds
    # from scratch. Removal failures are tolerated - the images may simply
    # not exist.
    local image
    for image in "${PYTHON_BASE_IMAGE}" "${AIRFLOW_CI_IMAGE}"; do
        docker rmi "${image}" || true
    done
    rm -f "${BUILT_IMAGE_FLAG_FILE}"
}
| |
function run_static_checks {
    # Runs the requested pre-commit check(s). STATIC_CHECK selects a single
    # check, 'all' runs every configured check and 'all-but-pylint' runs
    # everything with pylint skipped. Remaining args go straight to pre-commit.
    case "${STATIC_CHECK}" in
        all)
            echo
            echo "Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            echo
            pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            ;;
        all-but-pylint)
            echo
            echo "Setting SKIP=pylint. Running: pre-commit run" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            echo
            echo
            SKIP=pylint pre-commit run "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            ;;
        *)
            echo
            echo "Running: pre-commit run" "${STATIC_CHECK}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            echo
            pre-commit run "${STATIC_CHECK}" "${EXTRA_STATIC_CHECK_OPTIONS[@]}" "$@"
            ;;
    esac
}
| |
function run_build_command {
    # Prepares the build environment and rebuilds the CI image for commands
    # that need it. Cleanup, exec, virtualenv and autocomplete commands do
    # not touch the images at all.
    prepare_build
    case "${COMMAND_TO_RUN}" in
        enter_breeze|build_docs|run_tests|run_docker_compose|run_in_bash|perform_static_checks|build_ci_images_only|perform_generate_requirements)
            rebuild_ci_image_if_needed
            ;;
        cleanup_images|run_exec|perform_initialize_local_virtualenv|perform_setup_autocomplete)
            # These commands need no CI image
            ;;
        *)
            echo >&2
            echo >&2 "ERROR: Unknown command to run ${COMMAND_TO_RUN}"
            echo >&2
            ;;
    esac
}
| |
# Dispatches to the action selected during argument parsing. Remaining
# command-line arguments (REMAINING_ARGS) are forwarded where relevant.
function run_breeze_command {
    set -u
    case "${COMMAND_TO_RUN}" in
        enter_breeze)
            "${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}"
            ;;
        run_exec)
            # Unfortunately docker-compose exec does not support execing into
            # containers started with run :( - find the already-running
            # airflow-testing container first and 'docker exec' into it.
            # NOTE(review): the 2>/dev/null applies only to awk, not to the
            # ps/grep stages - confirm that is intended.
            set +e
            AIRFLOW_TESTING_CONTAINER=$("${BUILD_CACHE_DIR}/${LAST_DC_FILE}" ps | \
                grep airflow-testing | awk '{print $1}' 2>/dev/null)
            if [[ -z ${AIRFLOW_TESTING_CONTAINER} ]]; then
                echo
                echo "ERROR! Breeze must be running in order to exec into running container"
                echo
                exit 1
            fi
            set -e
            docker exec -it "${AIRFLOW_TESTING_CONTAINER}" \
                "/opt/airflow/scripts/ci/in_container/entrypoint_exec.sh"
            ;;
        run_tests)
            "${BUILD_CACHE_DIR}/${LAST_DC_TEST_FILE}" "\"${TEST_TARGET}\"" "$@"
            ;;
        run_docker_compose)
            # EXTRA_DC_OPTIONS may be an empty array - relax 'nounset' around it
            set +u
            "${BUILD_CACHE_DIR}/${LAST_DC_FILE}" "${DOCKER_COMPOSE_COMMAND}" "${EXTRA_DC_OPTIONS[@]}" "$@"
            set -u
            ;;
        run_in_bash)
            "${BUILD_CACHE_DIR}/${LAST_DC_RUN_FILE}" "${RUN_COMMAND}" "$@"
            ;;
        perform_static_checks)
            make_sure_precommit_is_installed
            run_static_checks "$@"
            ;;
        build_ci_images_only)
            # Image was already built in run_build_command - optionally push it
            if [[ ${PUSH_IMAGES} == "true" ]]; then
                push_image
            fi
            ;;
        cleanup_images)
            remove_images
            ;;
        perform_generate_requirements)
            run_generate_requirements
            ;;
        perform_initialize_local_virtualenv)
            initialize_virtualenv
            ;;
        perform_setup_autocomplete)
            setup_autocomplete
            ;;
        build_docs)
            run_docs
            ;;
        *)
            # NOTE(review): an unknown command only prints an error and falls
            # through with exit status 0 - confirm this is intentional.
            echo >&2
            echo >&2 "ERROR: Unknown command to run ${COMMAND_TO_RUN}"
            echo >&2
            ;;
    esac
}
| |
# Main flow: prepare defaults and the environment, parse arguments, restore
# previously-saved settings, build images when needed, print the banner and
# cheatsheet, then execute the selected command (and an optional second one).
setup_default_breeze_variables

initialize_common_environment

basic_sanity_checks

script_start

# Make sure script_end (timing/cleanup) runs on every exit path
trap script_end EXIT

prepare_allowed_versions

prepare_usage

# Arrays such as REMAINING_ARGS may stay empty - relax 'nounset' for parsing
set +u
parse_arguments "${@}"

print_header_line

forget_last_answer

read_saved_environment_variables

check_and_save_all_params

touch_local_files

prepare_command_files

run_build_command

print_header_line

print_badge

print_cheatsheet

print_setup_instructions

set +u # Account for an empty array
run_breeze_command "${REMAINING_ARGS[@]}"

# Some commands (e.g. restart) queue a follow-up command to run afterwards
set +u # Account for an empty array
if [[ -n ${SECOND_COMMAND_TO_RUN} ]]; then
    COMMAND_TO_RUN=${SECOND_COMMAND_TO_RUN}
    run_breeze_command "${REMAINING_ARGS[@]}"
fi