#!/usr/bin/env bash

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

if ! [ -x "$(command -v docker)" ]; then
  echo "Error: Docker is not installed." >&2
  exit 1
fi
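
# Optional guard: the image build below uses `docker buildx build`, so fail
# fast if the Buildx plugin is not available.
if ! docker buildx version > /dev/null 2>&1; then
  echo "Error: Docker Buildx plugin is not available." >&2
  exit 1
fi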

DOCKER_CACHE_IMG="ghcr.io/apache/spark/apache-spark-github-action-image-docs-cache:master"
REPO_OWNER="apache/spark"
REPOSITORY="apache-spark-ci-image-docs"
IMG_TAG=$(date +%s)
IMG_NAME="${REPOSITORY}:${IMG_TAG}"
IMG_URL="$REPO_OWNER/$IMG_NAME"
DOCKER_MOUNT_SPARK_HOME="/__w/spark/spark"
BUILD_DOCS_SCRIPT_PATH="${DOCKER_MOUNT_SPARK_HOME}/dev/spark-test-image-util/docs/run-in-container"

FWDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SPARK_HOME="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)"
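
# Run everything below from the Spark repository root so that the relative
# paths used later (`build/sbt`, `cd docs`) resolve regardless of the
# directory this script is invoked from.
cd "${SPARK_HOME}" || exit 1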

# 1. Compile Spark outside the container to prepare for generating the
#    documentation inside the container.
build/sbt -Phive -Pkinesis-asl clean unidoc package

# 2. Build the container image.
docker buildx build \
  --cache-from type=registry,ref="${DOCKER_CACHE_IMG}" \
  --tag "${IMG_URL}" "${FWDIR}" \
  --file "${SPARK_HOME}/dev/spark-test-image/docs/Dockerfile"
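
# Optional guard: make sure the image was actually built before trying to run it.
if ! docker image inspect "${IMG_URL}" > /dev/null 2>&1; then
  echo "Error: failed to build the container image ${IMG_URL}." >&2
  exit 1
fi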

# 3. Build docs inside the container: `error docs`, `scala doc`, `python doc`, `sql doc`.
docker run \
  --mount type=bind,source="${SPARK_HOME}",target="${DOCKER_MOUNT_SPARK_HOME}" \
  --interactive --tty "${IMG_URL}" \
  /bin/bash -c "sh ${BUILD_DOCS_SCRIPT_PATH}"
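
# Optional guard: stop early if the in-container docs build failed; `docker run`
# exits with the status of the command executed inside the container.
container_status=$?
if [ "${container_status}" -ne 0 ]; then
  echo "Error: building docs inside the container failed (exit code: ${container_status})." >&2
  exit "${container_status}"
fi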

if [[ "$SKIP_RDOC" != "1" ]]; then
  # 4. Build docs on the host: `r doc`.
  #
  # Why do the R docs need to be built outside the container?
  # When they are built inside the container, the directory
  # `/__w/spark/spark/R/pkg/docs` automatically created by `Rscript` gets the
  # permissions `dr-xr--r-x`, so subsequent file writes fail with an error like:
  # `! [EACCES] Failed to copy '/usr/local/lib/R/site-library/pkgdown/BS5/assets/katex-auto.js'
  #   to '/__w/spark/spark/R/pkg/docs/katex-auto.js': permission denied`
  export SKIP_ERRORDOC=1
  export SKIP_SCALADOC=1
  export SKIP_PYTHONDOC=1
  export SKIP_SQLDOC=1
  cd docs
  bundle exec jekyll build
fi

# 5. Remove the container image.
IMG_ID=$(docker images --filter "reference=${IMG_URL}" --format "{{.ID}}")
docker image rm --force "${IMG_ID}"

echo "Build docs done."