;
; Licensed to the Apache Software Foundation (ASF) under one or more
; contributor license agreements. See the NOTICE file distributed with
; this work for additional information regarding copyright ownership.
; The ASF licenses this file to You under the Apache License, Version 2.0
; (the "License"); you may not use this file except in compliance with
; the License. You may obtain a copy of the License at
;
; http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
;
[tox]
# New environments are excluded by default unless explicitly added to envlist.
envlist = py37,py38,py39,py37-{cloud,cython,lint,mypy},py38-{cloud,cython,docs,cloudcoverage},py39-{cloud,cython},whitespacelint
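# For example, a hypothetical new environment named py39-myfeature would only be
# run by a bare `tox` invocation after being appended here; it can still be run
# explicitly with `tox -e py39-myfeature` as long as a matching [testenv:...]
# section exists below.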
toxworkdir = {toxinidir}/target/{env:ENV_NAME:.tox}
[pycodestyle]
# Disable all errors and warnings except for the ones related to blank lines.
# pylint does not check the number of blank lines.
select = E3
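# (E3xx are the blank-line checks, e.g. E301 "expected 1 blank line" and
# E303 "too many blank lines".)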
# Shared environment options.
[testenv]
# allow apps that support color to use it.
passenv=TERM
# Set the extras (the bracketed [] options) used for pip installation of the apache-beam tarball.
extras = test,dataframe
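# This is roughly equivalent to installing `apache-beam[test,dataframe]` from
# the locally built tarball.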
# Don't warn that these commands aren't installed.
whitelist_externals =
false
time
bash
rm
deps =
cython: cython==0.29.27
-r build-requirements.txt
setenv =
RUN_SKIPPED_PY3_TESTS=0
# Use an isolated tmp dir for tests that get slowed down by scanning /tmp.
TMPDIR={envtmpdir}
# Silence warning about ignoring PYTHONPATH.
PYTHONPATH=
# These two command overrides are required for Jenkins builds; without them we
# get "OSError: [Errno 2] No such file or directory" errors.
# Source:
# https://github.com/tox-dev/tox/issues/123#issuecomment-284714629
install_command = {envbindir}/python {envbindir}/pip install --retries 10 {opts} {packages}
list_dependencies_command = {envbindir}/python {envbindir}/pip freeze
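# commands_pre and commands_post run before and after `commands`, and are
# inherited by the environments below even when they override only `commands`.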
commands_pre =
python --version
pip --version
pip check
bash {toxinidir}/scripts/run_tox_cleanup.sh
commands_post =
bash {toxinidir}/scripts/run_tox_cleanup.sh
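# Fail fast for any environment that does not override `commands` below:
# `false` always exits non-zero, so a misconfigured environment cannot pass
# silently.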
commands = false {envname} is misconfigured
[testenv:py{37,38,39}]
commands =
python apache_beam/examples/complete/autocomplete_test.py
{toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
[testenv:py{37,38,39}-win]
commands =
python apache_beam/examples/complete/autocomplete_test.py
bash {toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
install_command = {envbindir}/python.exe {envbindir}/pip.exe install --retries 10 {opts} {packages}
list_dependencies_command = {envbindir}/python.exe {envbindir}/pip.exe freeze
[testenv:py{37,38,39}-cython]
# Cython tests are only expected to work on Linux (Python 2.x and 3.x).
# If we want to add other platforms in the future, it should be:
# `platform = linux2|darwin|...`
# See https://docs.python.org/2/library/sys.html#sys.platform for platform codes
platform = linux
commands =
# TODO(https://github.com/apache/beam/issues/20051): Remove this build_ext invocation once local source no longer
# shadows the installed apache_beam.
python setup.py build_ext --inplace
python apache_beam/examples/complete/autocomplete_test.py
{toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
[testenv:py{37,38,39}-cloud]
extras = test,gcp,interactive,dataframe,aws,azure
commands =
{toxinidir}/scripts/run_pytest.sh {envname} "{posargs}"
[testenv:py38-cloudcoverage]
# More recent versions of pytest-cov do not support pytest 4.4.0
deps =
codecov
pytest-cov==2.9.0
passenv = GIT_* BUILD_* ghprb* CHANGE_ID BRANCH_NAME JENKINS_* CODECOV_*
extras = test,gcp,interactive,dataframe,aws
commands =
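# The leading '-' tells tox to ignore a non-zero exit code, e.g. when no
# .coverage file exists yet.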
-rm .coverage
{toxinidir}/scripts/run_pytest.sh {envname} "{posargs}" "--cov-report=xml --cov=. --cov-append"
codecov -F python
[testenv:py37-lint]
# Leave TMPDIR unset to avoid "AF_UNIX path too long" errors in pylint.
setenv =
# keep the version of pylint in sync with the 'rev' in .pre-commit-config.yaml
deps =
-r build-requirements.txt
astroid<2.9,>=2.8.0
pycodestyle==2.8.0
pylint==2.11.1
isort==4.2.15
flake8==4.0.1
commands =
pylint --version
time {toxinidir}/scripts/run_pylint.sh
[testenv:whitespacelint]
setenv =
deps =
whitespacelint==1.1.0
commands =
time {toxinidir}/scripts/run_whitespacelint.sh
[testenv:py37-mypy]
deps =
-r build-requirements.txt
mypy==0.782
# make extras available in case any of these libs are typed
extras =
gcp
commands =
mypy --version
python setup.py mypy
[testenv:py38-docs]
extras = test,gcp,docs,interactive,dataframe
deps =
Sphinx==1.8.5
sphinx_rtd_theme==0.4.3
docutils<0.18
Jinja2==3.0.3 # TODO(https://github.com/apache/beam/issues/21587): Sphinx version is too old.
torch
commands =
time {toxinidir}/scripts/generate_pydoc.sh
[testenv:hdfs_integration_test]
# Used by hdfs_integration_test.sh. Do not run this directly, as it depends on
# nodes defined in hdfs_integration_test/docker-compose.yml.
deps =
-r build-requirements.txt
holdup==1.8.0
extras =
gcp
whitelist_externals =
echo
sleep
passenv = HDFSCLI_CONFIG
commands =
holdup -t 45 http://namenode:50070 http://datanode:50075
echo "Waiting for safe mode to end."
sleep 45
wget storage.googleapis.com/dataflow-samples/shakespeare/kinglear.txt
hdfscli -v -v -v upload -f kinglear.txt /
python -m apache_beam.examples.wordcount \
--input hdfs://kinglear* \
--output hdfs://py-wordcount-integration \
--hdfs_host namenode --hdfs_port 50070 --hdfs_user root
python -m apache_beam.examples.wordcount \
--input hdfs://unused_server/kinglear* \
--output hdfs://unused_server/py-wordcount-integration \
--hdfs_host namenode --hdfs_port 50070 --hdfs_user root --hdfs_full_urls
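# Override the shared commands_pre from [testenv]; only `pip check` is run
# before the commands above.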
commands_pre =
pip check
[testenv:py3-yapf]
# keep the version of yapf in sync with the 'rev' in .pre-commit-config.yaml
deps =
yapf==0.29.0
commands =
yapf --version
time yapf --in-place --parallel --recursive apache_beam
[testenv:py3-yapf-check]
# keep the version of yapf in sync with the 'rev' in .pre-commit-config.yaml
deps =
yapf==0.29.0
commands =
yapf --version
time yapf --diff --parallel --recursive apache_beam
[testenv:py3-dependency-check]
# TODO(https://github.com/apache/beam/issues/20337): botocore, a part of [aws], wants docutils<0.16, but Sphinx
# pulls in the latest docutils. Uncomment this line once botocore does not
# conflict with Sphinx:
# extras = docs,test,gcp,aws,interactive,interactive_test
extras = test,gcp,aws,dataframe,interactive,interactive_test
passenv = WORKSPACE
commands =
time {toxinidir}/scripts/run_dependency_check.sh
[testenv:jest]
setenv =
deps =
jupyterlab==3.1.18
commands =
time {toxinidir}/scripts/setup_nodejs.sh
time {toxinidir}/scripts/run_jest.sh
[testenv:eslint]
setenv =
deps =
jupyterlab==3.1.18
commands =
time {toxinidir}/scripts/setup_nodejs.sh
time {toxinidir}/scripts/run_eslint.sh
[testenv:flink-runner-test]
extras = test
commands =
{toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/flink_runner_test.py {posargs}
[testenv:samza-runner-test]
extras = test
commands =
{toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/samza_runner_test.py {posargs}
[testenv:spark-runner-test]
extras = test
commands =
{toxinidir}/scripts/pytest_validates_runner.sh {envname} {toxinidir}/apache_beam/runners/portability/spark_runner_test.py {posargs}
[testenv:py{37,38,39}-pyarrow-{0,1,2,3,4,5,6,7}]
deps =
0: pyarrow>=0.15.1,<0.18.0
1: pyarrow>=1,<2
2: pyarrow>=2,<3
# ARROW-11450,BEAM-11731
# pyarrow<3 doesn't work with numpy 1.20.0, but doesn't restrict the numpy bounds.
{0,1,2}: numpy<1.20.0
3: pyarrow>=3,<4
4: pyarrow>=4,<5
5: pyarrow>=5,<6
6: pyarrow>=6,<7
7: pyarrow>=7,<8
commands =
# Log pyarrow and numpy versions for debugging.
/bin/sh -c "pip freeze | grep -E '(pyarrow|numpy)'"
# Run pytest directly rather than via run_pytest.sh, which doesn't handle
# selecting tests with -m (BEAM-12985).
pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pyarrow {posargs}
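# These pyarrow environments are not part of the default envlist; a specific
# combination can be run explicitly, e.g. `tox -e py38-pyarrow-5`.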
[testenv:py{37,38,39}-pandas-{11,12,13,14}]
deps =
11: pandas>=1.1.0,<1.2.0
12: pandas>=1.2.0,<1.3.0
13: pandas>=1.3.0,<1.4.0
14: pandas>=1.4.0,<1.5.0
commands =
# Log pandas and numpy versions for debugging.
/bin/sh -c "pip freeze | grep -E '(pandas|numpy)'"
# Run all DataFrame API unit tests
{toxinidir}/scripts/run_pytest.sh {envname} 'apache_beam/dataframe'
[testenv:py{37,38,39}-pytorch-{19,110}]
deps =
-r build-requirements.txt
19: torch>=1.9.0,<1.10.0
110: torch>=1.10.0,<1.11.0
extras = test,gcp
commands =
# Log torch version for debugging
/bin/sh -c "pip freeze | grep -E torch"
# Run all PyTorch unit tests
pytest -o junit_suite_name={envname} --junitxml=pytest_{envname}.xml -n 6 -m uses_pytorch {posargs}