#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Use the Spark Connect JVM client to connect to a Spark Connect server.
#
# Start a local server:
# A local Spark Connect server with default settings can be started with:
# `sql/connect/bin/spark-connect`
# The client can connect to this server directly with the default client settings.
#
# Connect to a remote server:
# To connect to a remote server, set the environment variable `SPARK_REMOTE` to the
# client connection string, e.g.
# `export SPARK_REMOTE="sc://<URL>:<port>/;token=<auth token>;<param1>=<value1>"`
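# For example (hypothetical host and token; 15002 is the default Spark Connect port):
#   export SPARK_REMOTE="sc://spark.example.com:15002/;token=abc123;use_ssl=true"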
#
# Set SCBUILD=0 to skip rebuilding the Spark Connect client jars.
# Set SCCLASSPATH to the client classpath to skip resolving it with sbt.
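# Example invocation (hypothetical jar paths), skipping both the rebuild and the
# sbt classpath resolution:
#   SCBUILD=0 SCCLASSPATH="/path/to/client.jar:/path/to/deps/*" \
#     sql/connect/bin/spark-connect-scala-client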
# Go to the Spark project root directory
FWDIR="$(cd "$(dirname "$0")"/../../.. && pwd)"
cd "$FWDIR"
export SPARK_HOME="$FWDIR"
# Determine the Scala version used in Spark
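# The awk field separator '[<>]' splits each XML tag, so $3 is the value between
# the tags; e.g. <scala.binary.version>2.13</scala.binary.version> yields "2.13".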
SCALA_BINARY_VER=$(grep "scala.binary.version" "${SPARK_HOME}/pom.xml" | head -n1 | awk -F '[<>]' '{print $3}')
SCALA_VER=$(grep "scala.version" "${SPARK_HOME}/pom.xml" | grep "${SCALA_BINARY_VER}" | head -n1 | awk -F '[<>]' '{print $3}')
SCALA_ARG="-Pscala-${SCALA_BINARY_VER}"
SCBUILD="${SCBUILD:-1}"
if [ "$SCBUILD" -eq "1" ]; then
# Build the jars needed for spark connect JVM client
build/sbt "${SCALA_ARG}" "connect-client-jvm/package" || exit 1
fi
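# Resolve the client classpath with the helper script unless the caller already
# provided one via SCCLASSPATH.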
if [ -z "$SCCLASSPATH" ]; then
  SCCLASSPATH=$(sql/connect/bin/spark-connect-scala-client-classpath)
fi
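# JVM flags: the --add-opens flags open JDK-internal packages to reflective access,
# which Spark and its dependencies (e.g. Netty via sun.nio.ch) rely on under the
# Java module system; -XX:+IgnoreUnrecognizedVMOptions makes the JVM ignore -XX
# options it does not recognize.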
JVM_ARGS="-XX:+IgnoreUnrecognizedVMOptions \
--add-opens=java.base/java.lang=ALL-UNNAMED \
--add-opens=java.base/java.lang.invoke=ALL-UNNAMED \
--add-opens=java.base/java.lang.reflect=ALL-UNNAMED \
--add-opens=java.base/java.io=ALL-UNNAMED \
--add-opens=java.base/java.net=ALL-UNNAMED \
--add-opens=java.base/java.nio=ALL-UNNAMED \
--add-opens=java.base/java.util=ALL-UNNAMED \
--add-opens=java.base/java.util.concurrent=ALL-UNNAMED \
--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED \
--add-opens=java.base/jdk.internal.ref=ALL-UNNAMED \
--add-opens=java.base/sun.nio.ch=ALL-UNNAMED \
--add-opens=java.base/sun.nio.cs=ALL-UNNAMED \
--add-opens=java.base/sun.security.action=ALL-UNNAMED \
--add-opens=java.base/sun.util.calendar=ALL-UNNAMED \
--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED \
-Djdk.reflect.useDirectMethodHandle=false \
-Dio.netty.tryReflectionSetAccessible=true \
--enable-native-access=ALL-UNNAMED"
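# exec replaces this shell with the JVM so signals reach the REPL process directly;
# $JVM_ARGS is intentionally unquoted so it word-splits into individual flags.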
exec java $JVM_ARGS -cp "$SCCLASSPATH" org.apache.spark.sql.application.ConnectRepl "$@"