# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Spark + Paimon for integration tests: writes tables to a local volume
# so paimon-rust can read them (no MinIO/S3).
#
# Usage:
#   1. Generate test data (run once):
#        docker compose -f dev/docker-compose.yaml run --rm spark-paimon
#   2. Run read integration tests on the host:
#        PAIMON_TEST_WAREHOUSE=/tmp/paimon-warehouse cargo test -p paimon-integration-tests read_from_spark
#
# To re-provision: remove /tmp/paimon-warehouse and run step 1 again.
services:
  spark-paimon:
    build:
      context: ./spark
      dockerfile: Dockerfile
    volumes:
      # Bind warehouse to host so paimon-rust can read it
      - /tmp/paimon-warehouse:/tmp/paimon-warehouse
    # Run as root to avoid permission issues with mounted volumes
    user: root
    # Run the provision script and exit (data remains in /tmp/paimon-warehouse).
    # Use the full path: the entrypoint's exec environment does not have
    # $SPARK_HOME/bin on PATH.
    command: ["/opt/spark/bin/spark-submit", "--master", "local[*]", "/opt/provision.py"]
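
# For reference, a minimal sketch of what a provision script like
# /opt/provision.py can do (the real script is baked into the image; the
# catalog name, database, table name, and sample rows below are assumptions,
# not the script's actual contents):
#
#   from pyspark.sql import SparkSession
#
#   # Register a Paimon catalog backed by the bind-mounted local warehouse.
#   spark = (
#       SparkSession.builder
#       .config("spark.sql.catalog.paimon", "org.apache.paimon.spark.SparkCatalog")
#       .config("spark.sql.catalog.paimon.warehouse", "file:/tmp/paimon-warehouse")
#       .getOrCreate()
#   )
#
#   # Create a table and write a few rows for the host-side read tests.
#   spark.sql("CREATE TABLE IF NOT EXISTS paimon.default.t (a INT, b STRING)")
#   spark.sql("INSERT INTO paimon.default.t VALUES (1, 'x'), (2, 'y')")
#
# After a successful run, the table's data and metadata (typically snapshot/,
# manifest/, schema/, and bucket-*/ directories) appear under
# /tmp/paimon-warehouse on the host, where the Rust read tests pick them up.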