# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

version: 2.1
commands:
  install_common:
    steps:
      - run: sudo apt-get update
      # The certs already installed in the Java 11 image are sometimes laid out strangely:
      # there is a directory called `cacerts` containing a symlink named `cacerts`
      # (/etc/ssl/certs/java/cacerts/cacerts) rather than a single `cacerts` symlink
      # directly in the /etc/ssl/certs/java directory. If that is the case, move the symlink
      # down a directory so that ant installs cleanly.
      - run: |
          if [ -f /etc/ssl/certs/java/cacerts/cacerts ]; then
            sudo mv /etc/ssl/certs/java/cacerts/ /etc/ssl/certs/java/cacerts-old
            sudo mv /etc/ssl/certs/java/cacerts-old/cacerts /etc/ssl/certs/java/
            sudo rmdir /etc/ssl/certs/java/cacerts-old
          fi
      # The image already has Java installed, but `apt-get install ant` would reinstall it.
      # Therefore, we download just the deb packages for ant and install them with dpkg.
      - run: |
          apt-get download ant ant-optional
          sudo dpkg --force-all -i ant*.deb
          rm ant*.deb
      # We need aliases for localhost2, localhost3, ..., localhost20 in /etc/hosts
      - run: |
          sudo bash -c 'for i in {2..20}; do echo 127.0.0.${i} localhost${i} >> /etc/hosts; done'
          cat /etc/hosts
  run_build:
    parameters:
      spark:
        type: string
      scala:
        type: string
      jdk:
        type: string
      use_jdk11:
        type: string
        default: "false"
    description: Build and test against Spark <<parameters.spark>>, Scala <<parameters.scala>>, JDK <<parameters.jdk>>
    steps:
      - run:
          environment:
            SPARK_VERSION: "<<parameters.spark>>"
            SCALA_VERSION: "<<parameters.scala>>"
            JDK_VERSION: "<<parameters.jdk>>"
            INTEGRATION_MAX_PARALLEL_FORKS: 1
            INTEGRATION_MAX_HEAP_SIZE: "1500M"
            CASSANDRA_USE_JDK11: <<parameters.use_jdk11>>
          command: |
            # Run compile/unit tests, skipping integration tests
            ./gradlew --stacktrace clean assemble check -x cassandra-analytics-integration-tests:test

  run_integration:
    parameters:
      spark:
        type: string
      scala:
        type: string
      jdk:
        type: string
      use_jdk11:
        type: string
        default: "false"
    description: Build and integration test against Spark <<parameters.spark>>, Scala <<parameters.scala>>, JDK <<parameters.jdk>>
    steps:
      - run:
          environment:
            SPARK_VERSION: "<<parameters.spark>>"
            SCALA_VERSION: "<<parameters.scala>>"
            JDK_VERSION: "<<parameters.jdk>>"
            INTEGRATION_MAX_PARALLEL_FORKS: 1
            INTEGRATION_MAX_HEAP_SIZE: "2500M"
            CASSANDRA_USE_JDK11: <<parameters.use_jdk11>>
          command: |
            # Compile, but skip the unit tests (those run in the run_build jobs)
            ./gradlew --stacktrace clean assemble
            # Run integration tests in parallel
            cd cassandra-analytics-integration-tests/src/test/java
            # Get the list of class names of tests that should run on this node.
            # NOTE: this assumes every actual test suite (and no other file) ends in
            # "Test.java", which is why "BaseTest" classes were renamed to "TestBase".
            CLASSNAMES=$(circleci tests glob "**/*Test.java" \
              | cut -c 1- | sed 's@/@.@g' \
              | sed 's/.\{5\}$//' \
              | circleci tests split --split-by=timings --timings-type=classname)
            cd ../../../..
            # Alternatively, the class names could be formatted as "--tests" arguments for a single gradle invocation:
            # GRADLE_ARGS=$(echo $CLASSNAMES | awk '{for (i=1; i<=NF; i++) print "--tests",$i}')
| echo "CircleCI assigned the following classes for testing: $CLASSNAMES" |
| # collect up exit statuses for all of the test classes and exit with that result at the end. |
| # If no gradle processes exit with non-zero status, it will still be 0 |
| EXIT_STATUS=0 |
| for TEST_NAME in $CLASSNAMES; do |
| ./gradlew cassandra-analytics-integration-tests:test --tests $TEST_NAME --no-daemon || EXIT_STATUS=$?; |
| done; |
| exit $EXIT_STATUS |
| no_output_timeout: 30m |
| |
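# Job naming: <spark major version>-<scala version>-<jdk>. Jobs prefixed with "int-" run the
# integration tests; the corresponding unprefixed jobs run the compile/unit-test build.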
jobs:
  build-deps-jdk8:
    docker:
      - image: cimg/openjdk:8.0
    resource_class: large
    steps:
      - install_common
      - checkout
      - run:
          name: Build dependencies for jdk8 builds
          command: |
            ./scripts/build-dependencies.sh
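      # Persist the built dependency artifacts so downstream build and integration jobs
      # can attach them from the workspace instead of rebuilding them.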
      - persist_to_workspace:
          root: dependencies
          paths:
            - "*.jar"
            - "org/**/*"

  build-deps-jdk11:
    docker:
      - image: cimg/openjdk:11.0
    resource_class: large
    steps:
      - install_common
      - checkout
      - run:
          name: Build dependencies for jdk11 builds
          command: |
            CASSANDRA_USE_JDK11=true ./scripts/build-dependencies.sh
      - persist_to_workspace:
          root: dependencies
          paths:
            - "*.jar"
            - "org/**/*"

  spark2-2_11-jdk8:
    docker:
      - image: cimg/openjdk:8.0
    resource_class: large
    steps:
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_build:
          spark: "2"
          scala: "2.11"
          jdk: "1.8"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  int-spark2-2_11-jdk8:
    parallelism: 8
    docker:
      - image: cimg/openjdk:8.0
    resource_class: large
    steps:
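      # Only the integration jobs provision a remote Docker engine for their tests.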
      - setup_remote_docker
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_integration:
          spark: "2"
          scala: "2.11"
          jdk: "1.8"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  spark2-2_12-jdk8:
    docker:
      - image: cimg/openjdk:8.0
    resource_class: large
    steps:
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_build:
          spark: "2"
          scala: "2.12"
          jdk: "1.8"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  int-spark2-2_12-jdk8:
    parallelism: 8
    docker:
      - image: cimg/openjdk:8.0
    resource_class: large
    steps:
      - setup_remote_docker
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_integration:
          spark: "2"
          scala: "2.12"
          jdk: "1.8"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  spark3-2_12-jdk11:
    docker:
      - image: cimg/openjdk:11.0
    resource_class: large
    steps:
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_build:
          spark: "3"
          scala: "2.12"
          jdk: "11"
          use_jdk11: "true"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  int-spark3-2_12-jdk11:
    parallelism: 8
    docker:
      - image: cimg/openjdk:11.0
    resource_class: large
    steps:
      - setup_remote_docker
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_integration:
          spark: "3"
          scala: "2.12"
          jdk: "11"
          use_jdk11: "true"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  spark3-2_13-jdk11:
    docker:
      - image: cimg/openjdk:11.0
    resource_class: large
    steps:
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_build:
          spark: "3"
          scala: "2.13"
          jdk: "11"
          use_jdk11: "true"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

  int-spark3-2_13-jdk11:
    parallelism: 8
    docker:
      - image: cimg/openjdk:11.0
    resource_class: large
    steps:
      - setup_remote_docker
      - install_common
      - checkout
      - attach_workspace:
          at: dependencies
      - run_integration:
          spark: "3"
          scala: "2.13"
          jdk: "11"
          use_jdk11: "true"

      - store_artifacts:
          path: build/test-reports
          destination: test-reports

      - store_artifacts:
          path: build/reports
          destination: reports

      - store_test_results:
          path: build/test-reports

workflows:
  version: 2
  build-and-test:
    jobs:
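      # Each build and integration job waits for the dependency-build job of its JDK.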
      - build-deps-jdk8
      - build-deps-jdk11
      - spark2-2_11-jdk8:
          requires:
            - build-deps-jdk8
      - spark2-2_12-jdk8:
          requires:
            - build-deps-jdk8
      - spark3-2_12-jdk11:
          requires:
            - build-deps-jdk11
      - spark3-2_13-jdk11:
          requires:
            - build-deps-jdk11
      - int-spark2-2_11-jdk8:
          requires:
            - build-deps-jdk8
      - int-spark2-2_12-jdk8:
          requires:
            - build-deps-jdk8
      - int-spark3-2_12-jdk11:
          requires:
            - build-deps-jdk11
      - int-spark3-2_13-jdk11:
          requires:
            - build-deps-jdk11