# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Development containers for Arrow
version: '3.5'
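# The x- prefixed entries below are Compose extension fields (available since
# file format 3.4); the &ubuntu-volumes / &alpine-volumes YAML anchors define
# volume lists that services share via *ubuntu-volumes and *alpine-volumes.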
x-ubuntu-volumes:
&ubuntu-volumes
- .:/arrow:delegated
- ${ARROW_DOCKER_CACHE_DIR:-./docker_cache}/ubuntu:/build:delegated
x-alpine-volumes:
&alpine-volumes
- .:/arrow:delegated
- ${ARROW_DOCKER_CACHE_DIR:-./docker_cache}/alpine:/build:delegated
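# The build cache location can be overridden before building; a hypothetical
# example:
#   export ARROW_DOCKER_CACHE_DIR=$HOME/.cache/arrow-docker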
services:
######################### Language Containers ###############################
c_glib:
# Usage:
# docker-compose build cpp
# docker-compose build c_glib
# docker-compose run c_glib
image: arrow:c_glib
build:
context: .
dockerfile: c_glib/Dockerfile
volumes: *ubuntu-volumes
cpp:
# Usage:
# docker-compose build cpp
# docker-compose run cpp
image: arrow:cpp
shm_size: 2G
build:
context: .
dockerfile: cpp/Dockerfile
environment:
PARQUET_TEST_DATA: /arrow/cpp/submodules/parquet-testing/data
volumes: *ubuntu-volumes
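  # PARQUET_TEST_DATA above points at the parquet-testing git submodule, so
  # initialize submodules in the checkout first, e.g.:
  #   git submodule update --init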
cpp-cmake32:
# Usage:
# docker-compose build cpp-cmake32
# docker-compose run cpp-cmake32
image: arrow:cpp-cmake32
shm_size: 2G
build:
context: .
dockerfile: cpp/Dockerfile
      args:
        # install an older CMake from conda to exercise the minimum
        # supported CMake version
        EXTRA_CONDA_PKGS: cmake=3.2
environment:
ARROW_ORC: "OFF"
PARQUET_TEST_DATA: /arrow/cpp/submodules/parquet-testing/data
volumes: *ubuntu-volumes
cpp-alpine:
# Usage:
# docker-compose build cpp-alpine
# docker-compose run cpp-alpine
image: arrow:cpp-alpine
shm_size: 2G
build:
context: .
dockerfile: cpp/Dockerfile.alpine
environment:
PARQUET_TEST_DATA: /arrow/cpp/submodules/parquet-testing/data
volumes: *alpine-volumes
go:
# Usage:
# docker-compose build go
# docker-compose run go
image: arrow:go
build:
context: .
dockerfile: go/Dockerfile
volumes: *ubuntu-volumes
java:
# Usage:
# docker-compose build java
# docker-compose run java
image: arrow:java
build:
context: .
dockerfile: java/Dockerfile
    volumes:
      - .:/arrow:delegated
      # reuse the host's local Maven repository so dependencies are not
      # re-downloaded on every run
      - $HOME/.m2:/root/.m2:delegated
  js:
    # Usage:
    #   docker-compose build js
    #   docker-compose run js
image: arrow:js
build:
context: .
dockerfile: js/Dockerfile
python:
# Usage:
# export PYTHON_VERSION=2.7|3.6|3.7
# docker-compose build cpp
# docker-compose build python
# docker-compose run python
image: arrow:python-${PYTHON_VERSION:-3.6}
shm_size: 2G
build:
context: .
dockerfile: python/Dockerfile
args:
PYTHON_VERSION: ${PYTHON_VERSION:-3.6}
volumes: *ubuntu-volumes
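  # The version can also be supplied inline for a single command, e.g.:
  #   PYTHON_VERSION=3.7 docker-compose build python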
python-alpine:
# Usage:
# export PYTHON_VERSION=2.7|3.6 (minor version is ignored)
# docker-compose build cpp-alpine
# docker-compose build python-alpine
# docker-compose run python-alpine
image: arrow:python-${PYTHON_VERSION:-3.6}-alpine
shm_size: 2G
build:
context: .
dockerfile: python/Dockerfile.alpine
args:
PYTHON_VERSION: ${PYTHON_VERSION:-3.6}
volumes: *alpine-volumes
rust:
# Usage:
# docker-compose build rust
# docker-compose run rust
image: arrow:rust
build:
context: .
dockerfile: rust/Dockerfile
environment:
PARQUET_TEST_DATA: /arrow/cpp/submodules/parquet-testing/data
volumes: *ubuntu-volumes
r:
# Usage:
# docker-compose build cpp
# docker-compose build r
# docker-compose run r
image: arrow:r
build:
context: .
dockerfile: r/Dockerfile
volumes: *ubuntu-volumes
######################### Tools and Linters #################################
# TODO(kszucs): site
# TODO(kszucs): {cpp,java,glib,js}-apidoc
lint:
# Usage:
# docker-compose build lint
# docker-compose run lint
image: arrow:lint
build:
context: .
dockerfile: dev/lint/Dockerfile
command: arrow/dev/lint/run_linters.sh
volumes: *ubuntu-volumes
iwyu:
# Usage:
# docker-compose build lint
# docker-compose run iwyu
image: arrow:lint
    environment:
      # include-what-you-use is a clang-based tool, so compile with clang
      CC: clang
      CXX: clang++
command: arrow/dev/lint/run_iwyu.sh
volumes: *ubuntu-volumes
clang-format:
# Usage:
# docker-compose build cpp
# docker-compose build python
# docker-compose build lint
# docker-compose run clang-format
image: arrow:lint
command: arrow/dev/lint/run_clang_format.sh
volumes: *ubuntu-volumes
docs:
# Usage:
# docker-compose build cpp
# docker-compose build python
# docker-compose build docs
# docker-compose run docs
image: arrow:docs
build:
context: .
dockerfile: docs/Dockerfile
volumes: *ubuntu-volumes
######################### Integration Tests #################################
# impala:
# image: cpcloud86/impala:java8-1
# ports:
# - "21050"
# hostname: impala
pandas-master:
# Usage:
# export PYTHON_VERSION=3.6
# docker-compose build cpp
# docker-compose build python
# docker-compose build --no-cache pandas-master
# docker-compose run pandas-master
image: arrow:pandas-master
build:
context: .
dockerfile: integration/pandas/Dockerfile
shm_size: 2G
volumes: *ubuntu-volumes
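  # --no-cache above forces a full rebuild so the image picks up the latest
  # pandas master instead of a cached layer (assuming the Dockerfile installs
  # pandas from its development branch during build).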
hdfs-namenode:
image: gelog/hadoop
shm_size: 2G
    ports:
      # NameNode RPC (fs.defaultFS) and NameNode web UI, respectively
      - "9000:9000"
      - "50070:50070"
command: hdfs namenode
hostname: hdfs-namenode
hdfs-datanode-1:
image: gelog/hadoop
command: hdfs datanode
ports:
# The host port is randomly assigned by Docker, to allow scaling
# to multiple DataNodes on the same host
- "50075"
links:
- hdfs-namenode:hdfs-namenode
hdfs-datanode-2:
image: gelog/hadoop
command: hdfs datanode
    ports:
      # The host port is randomly assigned by Docker, to allow scaling
      # to multiple DataNodes on the same host (see the port lookup
      # example after this service)
      - "50075"
links:
- hdfs-namenode:hdfs-namenode
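  # To find the host port Docker assigned to a DataNode's web UI (container
  # port 50075), query it by service name, e.g.:
  #   docker-compose port hdfs-datanode-1 50075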
# TODO(kszucs): pass hdfs client version explicitly as a build argument
hdfs-integration:
# Usage:
# export PYTHON_VERSION=3.6
# docker-compose build cpp
# docker-compose build python
# docker-compose build hdfs-integration
# docker-compose run hdfs-integration
links:
- hdfs-namenode:hdfs-namenode
- hdfs-datanode-1:hdfs-datanode-1
- hdfs-datanode-2:hdfs-datanode-2
environment:
- ARROW_HDFS_TEST_HOST=hdfs-namenode
- ARROW_HDFS_TEST_PORT=9000
- ARROW_HDFS_TEST_USER=root
build:
context: .
dockerfile: integration/hdfs/Dockerfile
volumes: *ubuntu-volumes
# TODO(kszucs): pass dask version explicitly as a build argument
dask-integration:
# Usage:
# export PYTHON_VERSION=3.6
# docker-compose build cpp
# docker-compose build python
# docker-compose build dask-integration
# docker-compose run dask-integration
build:
context: .
dockerfile: integration/dask/Dockerfile
volumes: *ubuntu-volumes
# TODO(kszucs): hive-integration
# TODO(kszucs): spark-integration