name: core
on:
push:
pull_request:
branches:
- master
- branch-*
types: [opened, synchronize]
env:
  # Disable HTTP keepAlive and connection pooling to avoid flaky Maven downloads on hosted runners
# https://github.com/actions/virtual-environments/issues/1499#issuecomment-689467080
MAVEN_OPTS: >-
-Xms1024M -Xmx2048M -XX:MaxMetaspaceSize=1024m -XX:-UseGCOverheadLimit -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn
-Dhttp.keepAlive=false
-Dmaven.wagon.http.pool=false
-Dmaven.wagon.http.retryHandler.count=3
ZEPPELIN_HELIUM_REGISTRY: helium
SPARK_PRINT_LAUNCH_COMMAND: "true"
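  # Bind Spark and Zeppelin to the loopback interface so tests do not depend
  # on the runner's network configuration.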
SPARK_LOCAL_IP: 127.0.0.1
ZEPPELIN_LOCAL_IP: 127.0.0.1
# Use a bash login shell, because we are using miniconda: a login shell sources
# the conda profile scripts, so the activated environment is available in every
# run step.
defaults:
run:
shell: bash -l {0}
jobs:
  # test the core modules (zeppelin-interpreter, zeppelin-zengine, zeppelin-server);
  # some interpreters are included because the zeppelin-server tests depend on them: spark, shell & markdown
core-modules:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
hadoop: [hadoop2, hadoop3]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
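        # The "!" entry excludes Zeppelin's own artifacts so stale snapshots
        # are never restored; the key changes whenever any pom.xml changes,
        # and restore-keys falls back to the newest cache with the same prefix.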
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
      - name: install application with some interpreters
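        # -pl builds only the listed modules, -am also builds their
        # dependencies, and -B keeps Maven output non-interactive for CI logs.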
run: ./mvnw install -Pbuild-distr -DskipRat -DskipTests -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -B
- name: install and test plugins
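        # -amd (also-make-dependents) also builds and tests the modules that
        # depend on zeppelin-plugins.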
run: ./mvnw package -DskipRat -pl zeppelin-plugins -amd -B
- name: Setup conda environment with python 3.7 and R
uses: conda-incubator/setup-miniconda@v2
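        # mamba is used as the solver (use-mamba: true) because it resolves
        # these environments much faster than the default conda solver.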
with:
activate-environment: python_3_with_R
environment-file: testing/env_python_3.7_with_R.yml
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
- name: Make IRkernel available to Jupyter
run: |
R -e "IRkernel::installspec()"
conda list
conda info
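      # The Spark interpreter tests are excluded here (-Dtests.to.exclude)
      # because the dedicated spark-test and spark-integration-test jobs below
      # run them; -DfailIfNoTests=false keeps modules without matching tests
      # from failing the build.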
      - name: run tests with ${{ matrix.hadoop }}
run: ./mvnw verify -Pusing-packaged-distr -DskipRat -pl zeppelin-server,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,angular,shell -am -Phelium-dev -Pexamples -P${{ matrix.hadoop }} -Dtests.to.exclude=**/org/apache/zeppelin/spark/* -DfailIfNoTests=false
# test interpreter modules except spark, flink, python, rlang, jupyter
interpreter-test-non-core:
runs-on: ubuntu-20.04
env:
INTERPRETERS: 'hbase,jdbc,file,flink-cmd,cassandra,elasticsearch,bigquery,alluxio,livy,groovy,java,neo4j,submarine,sparql,mongodb,influxdb,ksql'
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
- name: install environment
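        # "." keeps the root POM in the reactor; ${INTERPRETERS} expands to the
        # module list defined in env above.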
run: ./mvnw install -DskipTests -DskipRat -am -pl .,zeppelin-interpreter,zeppelin-interpreter-shaded,${INTERPRETERS} -Pscala-2.10 -B
- name: Setup conda environment with python 3.7 and R
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_R_and_tensorflow
environment-file: testing/env_python_3_with_R_and_tensorflow.yml
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
      - name: verify interpreters
run: ./mvnw verify -DskipRat -pl ${INTERPRETERS} -Pscala-2.10 -B
# test interpreter modules for jupyter, python, rlang
interpreter-test-jupyter-python-rlang:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
python: [ 3.7, 3.8 ]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
- name: Setup conda environment with python ${{ matrix.python }} and R
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_R
environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
python-version: ${{ matrix.python }}
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
- name: Make IRkernel available to Jupyter
run: |
R -e "IRkernel::installspec()"
- name: install environment
run: |
./mvnw install -DskipTests -DskipRat -pl python,rlang,zeppelin-jupyter-interpreter -am -Phadoop2 -B
- name: run tests with ${{ matrix.python }}
run: |
./mvnw test -DskipRat -pl python,rlang,zeppelin-jupyter-interpreter -DfailIfNoTests=false -B
  # zeppelin integration tests, excluding Spark & Flink (each has its own job)
zeppelin-integration-test:
runs-on: ubuntu-20.04
steps:
# user/password => root/root
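      # The hosted Ubuntu runner ships with MySQL preinstalled but stopped;
      # JdbcIntegrationTest below needs a running server.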
- name: Start mysql
run: sudo systemctl start mysql.service
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
- name: install environment
run: |
./mvnw install -DskipTests -DskipRat -Phadoop2 -Pintegration -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown,flink-cmd,flink/flink-scala-2.11,flink/flink-scala-2.12,jdbc,shell -am -Pflink-114
./mvnw package -DskipRat -pl zeppelin-plugins -amd -DskipTests -B
- name: Setup conda environment with python 3.7 and R
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_R
environment-file: testing/env_python_3_with_R.yml
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
- name: Make IRkernel available to Jupyter
run: |
R -e "IRkernel::installspec()"
- name: run tests
run: ./mvnw test -DskipRat -pl zeppelin-interpreter-integration -Phadoop2 -Pintegration -DfailIfNoTests=false -Dtest=ZeppelinClientIntegrationTest,ZeppelinClientWithAuthIntegrationTest,ZSessionIntegrationTest,ShellIntegrationTest,JdbcIntegrationTest
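      # Dump the Zeppelin logs even when the tests above failed (if: always()),
      # to aid debugging.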
- name: Print zeppelin logs
if: always()
run: if [ -d "logs" ]; then cat logs/*; fi
flink-test-and-flink-integration-test:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
flink: [112, 113, 114, 115]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
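      # Flink 1.15 dropped Scala 2.11 support, so the flink-scala-2.11 module
      # is only built and tested for the older Flink profiles.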
      - name: install environment for flink before 1.15
if: matrix.flink != '115'
run: |
./mvnw install -DskipTests -DskipRat -am -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -B
./mvnw clean package -pl zeppelin-plugins -amd -DskipTests -B
      - name: install environment for flink 1.15 and later
if: matrix.flink == '115'
run: |
./mvnw install -DskipTests -DskipRat -am -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -B
./mvnw clean package -pl zeppelin-plugins -amd -DskipTests -B
      - name: Setup conda environment with python 3.7 and flink
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_flink
environment-file: testing/env_python_3_with_flink_${{ matrix.flink }}.yml
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
      - name: run tests for flink before 1.15
if: matrix.flink != '115'
run: ./mvnw test -DskipRat -pl flink/flink-scala-2.11,flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }}
      - name: run tests for flink 1.15 and later
if: matrix.flink == '115'
run: ./mvnw test -DskipRat -pl flink/flink-scala-2.12,flink-cmd,zeppelin-interpreter-integration -Pflink-${{ matrix.flink }} -Phadoop2 -Pintegration -DfailIfNoTests=false -B -Dtest=org.apache.zeppelin.flink.*Test,FlinkIntegrationTest${{ matrix.flink }}
- name: Print zeppelin logs
if: always()
run: if [ -d "logs" ]; then cat logs/*; fi
spark-integration-test:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
hadoop: [ 2, 3 ]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
- name: install environment
run: |
./mvnw install -DskipTests -DskipRat -pl zeppelin-interpreter-integration,zeppelin-web,spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13,markdown -am -Phadoop2 -Pintegration -B
./mvnw clean package -pl zeppelin-plugins -amd -DskipTests -B
- name: Setup conda environment with python 3.7 and R
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_R
environment-file: testing/env_python_3_with_R.yml
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
- name: Make IRkernel available to Jupyter
run: |
R -e "IRkernel::installspec()"
- name: run tests on hadoop${{ matrix.hadoop }}
run: ./mvnw test -DskipRat -pl zeppelin-interpreter-integration -Phadoop${{ matrix.hadoop }} -Pintegration -B -Dtest=SparkSubmitIntegrationTest,ZeppelinSparkClusterTest24,SparkIntegrationTest24,ZeppelinSparkClusterTest30,SparkIntegrationTest30,ZeppelinSparkClusterTest31,SparkIntegrationTest31,ZeppelinSparkClusterTest32,SparkIntegrationTest32,ZeppelinSparkClusterTest33,SparkIntegrationTest33 -DfailIfNoTests=false
  # test the spark interpreter against each spark & scala version, with each python version
spark-test:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
python: [ 3.7, 3.8 ]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
- name: install environment
run: ./mvnw install -DskipTests -DskipRat -pl spark-submit,spark/scala-2.11,spark/scala-2.12,spark/scala-2.13 -am -Phadoop2 -B
- name: Setup conda environment with python ${{ matrix.python }} and R
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_R
environment-file: testing/env_python_${{ matrix.python }}_with_R.yml
python-version: ${{ matrix.python }}
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
- name: Make IRkernel available to Jupyter
run: |
R -e "IRkernel::installspec()"
- name: run spark-2.4 tests with scala-2.11 and python-${{ matrix.python }}
if: matrix.python == '3.7' # Spark 2.4 doesn't support python 3.8
run: ./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-2.4 -Pspark-scala-2.11 -DfailIfNoTests=false -B
- name: run spark-2.4 tests with scala-2.12 and python-${{ matrix.python }}
if: matrix.python == '3.7' # Spark 2.4 doesn't support python 3.8
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-2.4 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
- name: run spark-3.0 tests with scala-2.12 and python-${{ matrix.python }}
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.0 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
- name: run spark-3.1 tests with scala-2.12 and python-${{ matrix.python }}
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.1 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
- name: run spark-3.2 tests with scala-2.12 and python-${{ matrix.python }}
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.2 -Pspark-scala-2.12 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
- name: run spark-3.2 tests with scala-2.13 and python-${{ matrix.python }}
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.2 -Pspark-scala-2.13 -Phadoop2 -Pintegration -B -DfailIfNoTests=false
- name: run spark-3.3 tests with scala-2.12 and python-${{ matrix.python }}
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.3 -Pspark-scala-2.12 -Phadoop3 -Pintegration -B -DfailIfNoTests=false
- name: run spark-3.3 tests with scala-2.13 and python-${{ matrix.python }}
run: |
rm -rf spark/interpreter/metastore_db
./mvnw test -DskipRat -pl spark-submit,spark/interpreter -Pspark-3.3 -Pspark-scala-2.13 -Phadoop3 -Pintegration -B -DfailIfNoTests=false
livy-0-7-with-spark-2-2-0-under-python3:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
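      # The download scripts below fetch the Spark 2.2.0 and Livy
      # 0.7.1-incubating distributions that the Livy interpreter tests run
      # against.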
- name: install environment
run: |
./mvnw install -DskipTests -DskipRat -pl livy -am -B
./testing/downloadSpark.sh "2.2.0" "2.6"
./testing/downloadLivy.sh "0.7.1-incubating"
- name: Setup conda environment with python 3.7 and R
uses: conda-incubator/setup-miniconda@v2
with:
activate-environment: python_3_with_R
environment-file: testing/env_python_3_with_R.yml
python-version: 3.7
mamba-version: "*"
channels: conda-forge,defaults
channel-priority: true
auto-activate-base: false
use-mamba: true
- name: Make IRkernel available to Jupyter
run: |
R -e "IRkernel::installspec()"
- name: run tests
run: ./mvnw verify -DskipRat -pl livy -am -B
default-build:
runs-on: ubuntu-20.04
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Tune Runner VM
uses: ./.github/actions/tune-runner-vm
- name: Set up JDK 8
uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: 8
- name: Cache local Maven repository
uses: actions/cache@v2
with:
path: |
~/.m2/repository
!~/.m2/repository/org/apache/zeppelin/
~/.spark-dist
~/.cache
key: ${{ runner.os }}-zeppelin-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-zeppelin-
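      # Smoke test: make sure a plain build with no profiles activated still
      # compiles and packages.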
- name: build without any profiles
run: ./mvnw clean package -DskipTests