ARROW-6920: [Packaging] Build python 3.8 wheels

adds python3.8 wheels

as far as I can tell python3.8 isn't available for Conda yet (https://github.com/conda-forge/python-feedstock/pull/274), so that will have to be added later

Closes #5685 from sjhewitt/arrow-6920 and squashes the following commits:

feac49470 <Sutou Kouhei> Add a TODO comment
908a39648 <Sutou Kouhei> Check Docker version for debug
43833a3da <Sutou Kouhei> Add Python 3.8 support to package metadata
f3f9bfad9 <Krisztián Szűcs> Install tensorflow for 32 bit unicode width py27
cd9ae83b1 <Sutou Kouhei> Install TensorFlow for Python 2.7 manually
5293f2bf7 <Sutou Kouhei> Pin base image and remove needless virtualenvs
344eecd71 <Sutou Kouhei> Use apache/arrow-dev again
99cad413d <Sutou Kouhei> Use the uploaded image
f0c8e6927 <Sutou Kouhei> Link to libpython only on Windows
260fa7930 <Sutou Kouhei> Add a missing empty line
52761b413 <Sutou Kouhei> libpython.so may not exist
5e5977eb3 <Sutou Kouhei> Don't link to other Python libraries on non Windows
e047d54d6 <Sutou Kouhei> Don't use local function
10dac6741 <Terence D. Honles> move plasma subprocesses to top level functions
da300ac20 <Sutou Kouhei> Don't use TTY
c4526d87f <Sutou Kouhei> Use more recent numpy
7ace0cb03 <Sutou Kouhei> Ensure using the latest image
655f88c45 <Sutou Kouhei> Use Azure Pipelines instead of Travis CI
2ef6213fc <Sutou Kouhei> Use "call conda.bat activate/deactivate"
1c8427ea4 <Sutou Kouhei> Use conda without call
99251eaba <Sutou Kouhei> Use "conda activate/deactivate"
f403b6fe3 <Sutou Kouhei> Use the latest multibuild
c81884017 <Sutou Kouhei> Remove m suffix
e9f14087f <Sutou Kouhei> Set up Python 3.8
2ab67322b <Simon Hewitt> ARROW-6920:  Build python 3.8 wheels

Lead-authored-by: Sutou Kouhei <kou@clear-code.com>
Co-authored-by: Terence D. Honles <terence@honles.com>
Co-authored-by: Krisztián Szűcs <szucs.krisztian@gmail.com>
Co-authored-by: Simon Hewitt <si@sjhewitt.co.uk>
Signed-off-by: Sutou Kouhei <kou@clear-code.com>
diff --git a/cpp/src/arrow/python/CMakeLists.txt b/cpp/src/arrow/python/CMakeLists.txt
index 5db5a93..b1c35b3 100644
--- a/cpp/src/arrow/python/CMakeLists.txt
+++ b/cpp/src/arrow/python/CMakeLists.txt
@@ -52,10 +52,9 @@
   set_property(SOURCE pyarrow.cc APPEND_STRING PROPERTY COMPILE_FLAGS " -Wno-cast-qual ")
 endif()
 
-set(ARROW_PYTHON_SHARED_LINK_LIBS arrow_shared ${PYTHON_OTHER_LIBS})
-
+set(ARROW_PYTHON_SHARED_LINK_LIBS arrow_shared)
 if(WIN32)
-  set(ARROW_PYTHON_SHARED_LINK_LIBS ${ARROW_PYTHON_SHARED_LINK_LIBS} ${PYTHON_LIBRARIES})
+  list(APPEND ARROW_PYTHON_SHARED_LINK_LIBS ${PYTHON_LIBRARIES} ${PYTHON_OTHER_LIBS})
 endif()
 
 set(ARROW_PYTHON_INCLUDES ${NUMPY_INCLUDE_DIRS} ${PYTHON_INCLUDE_DIRS})
diff --git a/dev/tasks/python-wheels/azure.linux.yml b/dev/tasks/python-wheels/azure.linux.yml
new file mode 100644
index 0000000..e2ad88f
--- /dev/null
+++ b/dev/tasks/python-wheels/azure.linux.yml
@@ -0,0 +1,92 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+jobs:
+- job: linux
+  pool:
+    vmImage: ubuntu-latest
+  timeoutInMinutes: 360
+  steps:
+    - task: CondaEnvironment@1
+      inputs:
+        packageSpecs: 'click github3.py jinja2 jira pygit2 ruamel.yaml setuptools_scm toolz'
+        installOptions: '-c conda-forge'
+        updateConda: false
+
+    - script: |
+        set -ex
+        git clone --no-checkout {{ arrow.remote }} arrow
+        git -C arrow fetch -t {{ arrow.remote }} {{ arrow.branch }}
+        git -C arrow checkout FETCH_HEAD
+        git -C arrow submodule update --init --recursive
+      displayName: Clone arrow
+
+    - script: |
+        set -ex
+        docker -v
+        docker-compose -v
+        cd arrow
+        BUILD_IMAGE=centos-python-{{ wheel_tag }}
+        docker-compose pull $BUILD_IMAGE
+        # TODO(kou): Uncomment this when we resolve "ADD never use cache" problem.
+        # docker-compose build $BUILD_IMAGE
+        docker-compose run \
+          -e SETUPTOOLS_SCM_PRETEND_VERSION={{ arrow.no_rc_version }} \
+          -e PYTHON_VERSION="{{ python_version }}" \
+          -e UNICODE_WIDTH="{{ unicode_width }}" \
+          $BUILD_IMAGE
+      displayName: Build wheel
+
+    # auditwheel does always exit with 0 so it is mostly for debugging
+    # purposes
+    - script: |
+        set -ex
+        cd arrow
+        docker run \
+          -v $(pwd):/arrow quay.io/pypa/{{ wheel_tag }}_x86_64 \
+          /bin/bash -c \
+            "auditwheel show /arrow/python/{{ wheel_tag }}/dist/*.whl"
+      displayName: Audit wheel
+
+    - script: |
+        set -ex
+        cd arrow
+        test_args=""
+        {%- if test_remove_system_libs %}
+        test_args="${test_args} --remove-system-libs"
+        {%- endif %}
+        {%- for image in test_docker_images %}
+        docker run \
+          --shm-size 2G \
+          -v $(pwd):/arrow \
+          -e WHEEL_TAG="{{ wheel_tag }}" \
+          {{ image }} \
+          /arrow/dev/tasks/python-wheels/manylinux-test.sh ${test_args}
+        {%- endfor %}
+      displayName: Test wheel
+
+    - script: |
+        set -ex
+        python arrow/dev/tasks/crossbow.py \
+          --queue-path . \
+          --queue-remote {{ queue.remote_url }} \
+          upload-artifacts \
+          --pattern "arrow/python/{{ wheel_tag }}/dist/*" \
+          --sha {{ task.branch }} \
+          --tag {{ task.tag }}
+      env:
+        CROSSBOW_GITHUB_TOKEN: $(CROSSBOW_GITHUB_TOKEN)
+      displayName: Upload packages as a GitHub release
diff --git a/dev/tasks/python-wheels/travis.linux.yml b/dev/tasks/python-wheels/travis.linux.yml
deleted file mode 100644
index d8d9ef3..0000000
--- a/dev/tasks/python-wheels/travis.linux.yml
+++ /dev/null
@@ -1,88 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-os: linux
-
-dist: bionic
-
-language: python
-
-services:
-  - docker
-
-# don't build twice
-if: tag IS blank
-
-env:
-  global:
-    - PLAT=x86_64
-    - TRAVIS_TAG={{ task.tag }}
-
-script:
-  - git clone --no-checkout {{ arrow.remote }} arrow
-  - git -C arrow fetch -t {{ arrow.remote }} {{ arrow.branch }}
-  - git -C arrow checkout FETCH_HEAD
-  - mkdir -p dist
-
-  # build wheel
-  - pushd arrow
-
-  # Pull testing resources
-  - git submodule init
-  - git submodule update
-
-  # don't attempt to build it, because the base image by pypa is updated
-  # regularly and would cause undeterministic builds
-  - BUILD_IMAGE=centos-python-{{ wheel_tag }}
-  - docker-compose pull $BUILD_IMAGE
-  - docker-compose run
-      -e SETUPTOOLS_SCM_PRETEND_VERSION={{ arrow.no_rc_version }}
-      -e PYTHON_VERSION="{{ python_version }}"
-      -e UNICODE_WIDTH="{{ unicode_width }}"
-      $BUILD_IMAGE
-
-  # run auditwheel, it does always exit with 0 so it is mostly for debugging
-  # purposes
-  - docker run -v `pwd`:/arrow quay.io/pypa/{{ wheel_tag }}_x86_64 /bin/bash -c
-      "auditwheel show /arrow/python/{{ wheel_tag }}/dist/*.whl"
-
-  # test on multiple distributions
-  {%- for image in test_docker_images %}
-  - docker run -it --shm-size 2G -v `pwd`:/arrow -e WHEEL_TAG="{{ wheel_tag }}"
-    {%- if test_remove_system_libs %}
-      {{ image }} /arrow/dev/tasks/python-wheels/manylinux-test.sh --remove-system-libs
-    {%- else %}
-      {{ image }} /arrow/dev/tasks/python-wheels/manylinux-test.sh
-    {%- endif %}
-  {%- endfor %}
-
-  - popd
-
-  # prepare for deployment
-  - sudo mv arrow/python/{{ wheel_tag }}/dist/* dist/
-
-deploy:
-  provider: releases
-  api_key: $CROSSBOW_GITHUB_TOKEN
-  file_glob: true
-  file: dist/*.whl
-  skip_cleanup: true
-  on:
-    tags: true
-
-notifications:
-  email:
-    - {{ job.email }}
diff --git a/dev/tasks/python-wheels/travis.osx.yml b/dev/tasks/python-wheels/travis.osx.yml
index 6b161b7..1e08263 100644
--- a/dev/tasks/python-wheels/travis.osx.yml
+++ b/dev/tasks/python-wheels/travis.osx.yml
@@ -31,8 +31,8 @@
     - MACOSX_DEPLOYMENT_TARGET="10.9"
 
 before_install:
-  - git clone https://github.com/matthew-brett/multibuild # TODO pin it
-  - git -C multibuild checkout 4e7a9396e9a50731bb83fc0d16bb98fb0c4032d7
+  - git clone https://github.com/matthew-brett/multibuild
+  - git -C multibuild checkout 68a4af043e2adb0d9353d4a0e1f3d871203237aa
 
   - git clone --no-checkout {{ arrow.remote }} arrow
   - git -C arrow fetch -t {{ arrow.remote }} {{ arrow.branch }}
diff --git a/dev/tasks/python-wheels/win-build.bat b/dev/tasks/python-wheels/win-build.bat
index 345a8f0..b46c59d 100644
--- a/dev/tasks/python-wheels/win-build.bat
+++ b/dev/tasks/python-wheels/win-build.bat
@@ -20,14 +20,14 @@
 @rem create conda environment for compiling
 conda update --yes --quiet conda
 
-call conda create -n wheel-build -q -y -c conda-forge ^
+conda create -n wheel-build -q -y -c conda-forge ^
     --file=%ARROW_SRC%\ci\conda_env_cpp.yml ^
     --file=%ARROW_SRC%\ci\conda_env_gandiva.yml ^
     python=%PYTHON_VERSION% ^
     numpy=%NUMPY_VERSION% ^
     || exit /B
 
-call activate wheel-build
+call conda.bat activate wheel-build
 
 @rem Cannot use conda_env_python.yml here because conda-forge has
 @rem ceased providing up-to-date packages for Python 3.5
@@ -84,17 +84,17 @@
 python setup.py bdist_wheel || exit /B
 popd
 
-call deactivate
+call conda.bat deactivate
 
 set ARROW_TEST_DATA=%ARROW_SRC%\testing\data
 
 @rem test the wheel
 @rem TODO For maximum reliability, we should test in a plain virtualenv instead.
-call conda create -n wheel-test -c conda-forge -q -y ^
+conda create -n wheel-test -c conda-forge -q -y ^
     --file %ARROW_SRC%\ci\conda_env_python.yml ^
     python=%PYTHON_VERSION% ^
     numpy=%NUMPY_VERSION% || exit /B
-call activate wheel-test
+call conda.bat activate wheel-test
 
 @rem install the built wheel
 pip install -vv --no-index --find-links=%ARROW_SRC%\python\dist\ pyarrow || exit /B
diff --git a/dev/tasks/tasks.yml b/dev/tasks/tasks.yml
index 9be55bd..e656e7e 100644
--- a/dev/tasks/tasks.yml
+++ b/dev/tasks/tasks.yml
@@ -37,17 +37,21 @@
     - wheel-manylinux1-cp35m
     - wheel-manylinux1-cp36m
     - wheel-manylinux1-cp37m
+    - wheel-manylinux1-cp38
     - wheel-manylinux2010-cp27m
     - wheel-manylinux2010-cp27mu
     - wheel-manylinux2010-cp35m
     - wheel-manylinux2010-cp36m
     - wheel-manylinux2010-cp37m
+    - wheel-manylinux2010-cp38
     - wheel-osx-cp27m
     - wheel-osx-cp35m
     - wheel-osx-cp36m
     - wheel-osx-cp37m
+    - wheel-osx-cp38
     - wheel-win-cp36m
     - wheel-win-cp37m
+    - wheel-win-cp38
 
   linux:
     - debian-stretch
@@ -208,17 +212,21 @@
     - wheel-manylinux1-cp35m
     - wheel-manylinux1-cp36m
     - wheel-manylinux1-cp37m
+    - wheel-manylinux1-cp38
     - wheel-manylinux2010-cp27m
     - wheel-manylinux2010-cp27mu
     - wheel-manylinux2010-cp35m
     - wheel-manylinux2010-cp36m
     - wheel-manylinux2010-cp37m
+    - wheel-manylinux2010-cp38
     - wheel-osx-cp27m
     - wheel-osx-cp35m
     - wheel-osx-cp36m
     - wheel-osx-cp37m
+    - wheel-osx-cp38
     - wheel-win-cp36m
     - wheel-win-cp37m
+    - wheel-win-cp38
     - debian-stretch
     - debian-buster
     - ubuntu-xenial
@@ -366,9 +374,9 @@
   ############################## Wheel Linux ##################################
 
   wheel-manylinux1-cp27m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 2.7
       unicode_width: 16
@@ -379,9 +387,9 @@
       - pyarrow-{no_rc_version}-cp27-cp27m-manylinux1_x86_64.whl
 
   wheel-manylinux1-cp27mu:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 2.7
       unicode_width: 32
@@ -393,9 +401,9 @@
       - pyarrow-{no_rc_version}-cp27-cp27mu-manylinux1_x86_64.whl
 
   wheel-manylinux1-cp35m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 3.5
       unicode_width: 16
@@ -407,9 +415,9 @@
       - pyarrow-{no_rc_version}-cp35-cp35m-manylinux1_x86_64.whl
 
   wheel-manylinux1-cp36m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 3.6
       unicode_width: 16
@@ -421,9 +429,9 @@
       - pyarrow-{no_rc_version}-cp36-cp36m-manylinux1_x86_64.whl
 
   wheel-manylinux1-cp37m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 3.7
       unicode_width: 16
@@ -434,10 +442,24 @@
     artifacts:
       - pyarrow-{no_rc_version}-cp37-cp37m-manylinux1_x86_64.whl
 
-  wheel-manylinux2010-cp27m:
-    ci: travis
+  wheel-manylinux1-cp38:
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
+    params:
+      python_version: 3.8
+      unicode_width: 16
+      wheel_tag: manylinux1
+      test_docker_images:
+        - python:3.8
+      test_remove_system_libs: true
+    artifacts:
+      - pyarrow-{no_rc_version}-cp38-cp38-manylinux1_x86_64.whl
+
+  wheel-manylinux2010-cp27m:
+    ci: azure
+    platform: linux
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 2.7
       unicode_width: 16
@@ -448,9 +470,9 @@
       - pyarrow-{no_rc_version}-cp27-cp27m-manylinux2010_x86_64.whl
 
   wheel-manylinux2010-cp27mu:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 2.7
       unicode_width: 32
@@ -462,9 +484,9 @@
       - pyarrow-{no_rc_version}-cp27-cp27mu-manylinux2010_x86_64.whl
 
   wheel-manylinux2010-cp35m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 3.5
       unicode_width: 16
@@ -476,9 +498,9 @@
       - pyarrow-{no_rc_version}-cp35-cp35m-manylinux2010_x86_64.whl
 
   wheel-manylinux2010-cp36m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 3.6
       unicode_width: 16
@@ -490,9 +512,9 @@
       - pyarrow-{no_rc_version}-cp36-cp36m-manylinux2010_x86_64.whl
 
   wheel-manylinux2010-cp37m:
-    ci: travis
+    ci: azure
     platform: linux
-    template: python-wheels/travis.linux.yml
+    template: python-wheels/azure.linux.yml
     params:
       python_version: 3.7
       unicode_width: 16
@@ -503,6 +525,20 @@
     artifacts:
       - pyarrow-{no_rc_version}-cp37-cp37m-manylinux2010_x86_64.whl
 
+  wheel-manylinux2010-cp38:
+    ci: azure
+    platform: linux
+    template: python-wheels/azure.linux.yml
+    params:
+      python_version: 3.8
+      unicode_width: 16
+      wheel_tag: manylinux2010
+      test_docker_images:
+        - python:3.8
+      test_remove_system_libs: true
+    artifacts:
+      - pyarrow-{no_rc_version}-cp38-cp38-manylinux2010_x86_64.whl
+
   ############################## Wheel OSX ####################################
 
   wheel-osx-cp27m:
@@ -550,6 +586,15 @@
     artifacts:
       - pyarrow-{no_rc_version}-cp37-cp37m-macosx_10_6_intel.whl
 
+  wheel-osx-cp38:
+    ci: travis
+    platform: osx
+    template: python-wheels/travis.osx.yml
+    params:
+      python_version: 3.8
+    artifacts:
+      - pyarrow-{no_rc_version}-cp38-cp38-macosx_10_6_intel.whl
+
   ############################## Wheel Windows ################################
 
   wheel-win-cp36m:
@@ -570,6 +615,15 @@
     artifacts:
       - pyarrow-{no_rc_version}-cp37-cp37m-win_amd64.whl
 
+  wheel-win-cp38:
+    ci: appveyor
+    platform: win
+    template: python-wheels/appveyor.yml
+    params:
+      python_version: 3.8
+    artifacts:
+      - pyarrow-{no_rc_version}-cp38-cp38-win_amd64.whl
+
   ############################## Linux PKGS ####################################
 
   debian-stretch:
diff --git a/docker-compose.yml b/docker-compose.yml
index 1f4229e..4ebfacd 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -560,9 +560,7 @@
   ########################## Python Wheels ####################################
 
   centos-python-manylinux1:
-    # TODO(kszucs): change to ${REPO}:tag once
-    # https://issues.apache.org/jira/browse/INFRA-19499 is resolved
-    image: arrowdev/amd64-centos-5.11-python-manylinux1:latest
+    image: ${REPO}:amd64-centos-5.11-python-manylinux1
     build:
       context: python/manylinux1
       dockerfile: Dockerfile-x86_64_base
@@ -579,9 +577,7 @@
     command: &manylinux-command /io/build_arrow.sh
 
   centos-python-manylinux2010:
-    # TODO(kszucs): change to ${REPO}:tag once
-    # https://issues.apache.org/jira/browse/INFRA-19499 is resolved
-    image: arrowdev/amd64-centos-6.10-python-manylinux2010:latest
+    image: ${REPO}:amd64-centos-6.10-python-manylinux2010
     build:
       context: python/manylinux2010
       dockerfile: Dockerfile-x86_64_base
diff --git a/python/manylinux1/Dockerfile-x86_64_base b/python/manylinux1/Dockerfile-x86_64_base
index e0b5de4..9be6a79 100644
--- a/python/manylinux1/Dockerfile-x86_64_base
+++ b/python/manylinux1/Dockerfile-x86_64_base
@@ -14,7 +14,10 @@
 # KIND, either express or implied.  See the License for the
 # specific language governing permissions and limitations
 # under the License.
-FROM quay.io/pypa/manylinux1_x86_64:latest
+
+# See https://quay.io/repository/pypa/manylinux1_x86_64?tab=history
+# to update base image.
+FROM quay.io/pypa/manylinux1_x86_64@sha256:e7ed385da2363ea796385bb106a98364f7a8a658a5e8dbf1b7e4c71f2f299251
 
 # Install dependencies
 RUN yum install -y xz ccache flex wget && yum clean all
@@ -84,10 +87,7 @@
 ENV GLOG_HOME /usr
 
 WORKDIR /
-RUN git clone https://github.com/matthew-brett/multibuild.git && cd multibuild && git checkout ffe59955ad8690c2f8bb74766cb7e9b0d0ee3963
-
-ADD scripts/build_virtualenvs.sh /
-RUN /build_virtualenvs.sh
+RUN git clone https://github.com/matthew-brett/multibuild.git && cd multibuild && git checkout 68a4af043e2adb0d9353d4a0e1f3d871203237aa
 
 ADD scripts/build_llvm.sh /
 RUN /build_llvm.sh
diff --git a/python/manylinux1/Dockerfile-x86_64_ubuntu b/python/manylinux1/Dockerfile-x86_64_ubuntu
index c2b5247..eb9caec 100644
--- a/python/manylinux1/Dockerfile-x86_64_ubuntu
+++ b/python/manylinux1/Dockerfile-x86_64_ubuntu
@@ -34,10 +34,7 @@
 RUN /install_cmake.sh
 
 WORKDIR /
-RUN git clone https://github.com/matthew-brett/multibuild.git && cd multibuild && git checkout ffe59955ad8690c2f8bb74766cb7e9b0d0ee3963
-
-ADD scripts/build_virtualenvs.sh /
-RUN /build_virtualenvs.sh
+RUN git clone https://github.com/matthew-brett/multibuild.git && cd multibuild && git checkout 68a4af043e2adb0d9353d4a0e1f3d871203237aa
 
 ADD scripts/build_openssl.sh /
 RUN /build_openssl.sh
diff --git a/python/manylinux1/build_arrow.sh b/python/manylinux1/build_arrow.sh
index bf12da5..0009244 100755
--- a/python/manylinux1/build_arrow.sh
+++ b/python/manylinux1/build_arrow.sh
@@ -58,7 +58,7 @@
 mkdir -p /io/dist
 
 # Must pass PYTHON_VERSION and UNICODE_WIDTH env variables
-# possible values are: 2.7,16 2.7,32 3.5,16 3.6,16 3.7,16
+# possible values are: 2.7,16 2.7,32 3.5,16 3.6,16 3.7,16 3.8,16
 
 CPYTHON_PATH="$(cpython_path ${PYTHON_VERSION} ${UNICODE_WIDTH})"
 PYTHON_INTERPRETER="${CPYTHON_PATH}/bin/python"
@@ -66,6 +66,13 @@
 # Put our Python first to avoid picking up an antiquated Python from CMake
 PATH="${CPYTHON_PATH}/bin:${PATH}"
 
+echo "=== (${PYTHON_VERSION}) Install the wheel build dependencies ==="
+$PIP install -r requirements-wheel.txt
+if [ "${PYTHON_VERSION}" = "2.7" -a "${UNICODE_WIDTH}" = "32" ]; then
+  # Can't use UNICODE_WIDTH in requirements.txt
+  $PIP install tensorflow
+fi
+
 if [ "${PYTHON_VERSION}" != "2.7" ]; then
   export PYARROW_WITH_FLIGHT=1
   export PYARROW_WITH_GANDIVA=1
@@ -123,9 +130,6 @@
 # Check that we don't expose any unwanted symbols
 /io/scripts/check_arrow_visibility.sh
 
-echo "=== (${PYTHON_VERSION}) Install the wheel build dependencies ==="
-$PIP install -r requirements-wheel.txt
-
 # Clear output directories and leftovers
 rm -rf dist/
 rm -rf build/
diff --git a/python/manylinux1/scripts/build_virtualenvs.sh b/python/manylinux1/scripts/build_virtualenvs.sh
deleted file mode 100755
index 4ae4b66..0000000
--- a/python/manylinux1/scripts/build_virtualenvs.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/bash -e
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Build upon the scripts in https://github.com/matthew-brett/manylinux-builds
-# * Copyright (c) 2013-2016, Matt Terry and Matthew Brett (BSD 2-clause)
-
-PYTHON_VERSIONS="${PYTHON_VERSIONS:-2.7,16 2.7,32 3.5,16 3.6,16, 3.7,16}"
-
-source /multibuild/manylinux_utils.sh
-
-for PYTHON_TUPLE in ${PYTHON_VERSIONS}; do
-    IFS=","
-    set -- $PYTHON_TUPLE;
-    PYTHON=$1
-    U_WIDTH=$2
-    PYTHON_INTERPRETER="$(cpython_path $PYTHON ${U_WIDTH})/bin/python"
-    PIP="$(cpython_path $PYTHON ${U_WIDTH})/bin/pip"
-    PATH="$PATH:$(cpython_path $PYTHON ${U_WIDTH})"
-
-    echo "=== (${PYTHON}, ${U_WIDTH}) Installing build dependencies ==="
-    $PIP install "numpy==1.14.5" "cython==0.29.8" "virtualenv==16.3.0"
-    # Pandas requires numpy and cython
-    $PIP install "pandas==0.24.0"
-
-    # TensorFlow is not supported for Python 2.7 with unicode width 16 or with Python 3.7
-    if [ $PYTHON != "2.7" ] || [ $U_WIDTH = "32" ]; then
-      if [ $PYTHON != "3.7" ]; then
-        $PIP install "tensorflow==1.11.0" "Keras-Preprocessing==1.0.5"
-      fi
-    fi
-
-    echo "=== (${PYTHON}, ${U_WIDTH}) Preparing virtualenv for tests ==="
-    "$(cpython_path $PYTHON ${U_WIDTH})/bin/virtualenv" -p ${PYTHON_INTERPRETER} --no-download /venv-test-${PYTHON}-${U_WIDTH}
-    source /venv-test-${PYTHON}-${U_WIDTH}/bin/activate
-    pip install pytest pytest-faulthandler hypothesis 'numpy==1.14.5' 'pandas==0.24.0'
-    deactivate
-done
-
-# Remove debug symbols from libraries that were installed via wheel.
-find /venv-test-*/lib/*/site-packages/pandas -name '*.so' -exec strip '{}' ';'
-find /venv-test-*/lib/*/site-packages/numpy -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/cpython-*/lib/*/site-packages/pandas -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/cpython-*/lib/*/site-packages/tensorflow -name '*.so' -exec strip '{}' ';'
-# Only Python 3.6+ packages are stripable as they are built inside of the image
-find /opt/_internal/cpython-3.6.*/lib/python3.6/site-packages/numpy -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/cpython-3.7.*/lib/python3.7/site-packages/numpy -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/*/lib/*/site-packages/Cython -name '*.so' -exec strip '{}' ';'
-
-# Remove pip cache again. It's useful during the virtualenv creation but we
-# don't want it persisted in the docker layer, ~264MiB
-rm -rf /root/.cache
-# Remove pandas' tests module as it includes a lot of data, ~27MiB per Python
-# venv, i.e. 216MiB in total
-rm -rf /opt/_internal/*/lib/*/site-packages/pandas/tests
-rm -rf /venv-test-*/lib/*/site-packages/pandas/tests
-# Remove unused Python versions
-rm -rf /opt/_internal/cpython-3.4*
diff --git a/python/manylinux2010/Dockerfile-x86_64_base b/python/manylinux2010/Dockerfile-x86_64_base
index 2d7a984..a2c8237 100644
--- a/python/manylinux2010/Dockerfile-x86_64_base
+++ b/python/manylinux2010/Dockerfile-x86_64_base
@@ -15,7 +15,9 @@
 # specific language governing permissions and limitations
 # under the License.
 
-FROM quay.io/pypa/manylinux2010_x86_64:latest
+# See https://quay.io/repository/pypa/manylinux2010_x86_64?tab=history
+# to update base image.
+FROM quay.io/pypa/manylinux2010_x86_64@sha256:d41631c7360a0028876755aebb7036db639c24e7dcdaf3a9e6dcc0e74a681541
 
 # Install build dependencies
 RUN yum install -y xz bison ccache flex wget
@@ -30,7 +32,7 @@
 RUN /build_zlib.sh
 
 WORKDIR /
-RUN git clone https://github.com/matthew-brett/multibuild.git && cd multibuild && git checkout 1a7f31be677185f2dface2643284846e14130c3f
+RUN git clone https://github.com/matthew-brett/multibuild.git && cd multibuild && git checkout 68a4af043e2adb0d9353d4a0e1f3d871203237aa
 
 ADD scripts/build_llvm.sh /
 RUN /build_llvm.sh
@@ -94,6 +96,3 @@
 
 ADD scripts/build_bz2.sh /
 RUN /build_bz2.sh
-
-ADD scripts/build_virtualenvs.sh /
-RUN /build_virtualenvs.sh
diff --git a/python/manylinux2010/build_arrow.sh b/python/manylinux2010/build_arrow.sh
index a6ce49a..d8e41eb 100755
--- a/python/manylinux2010/build_arrow.sh
+++ b/python/manylinux2010/build_arrow.sh
@@ -58,13 +58,20 @@
 mkdir -p /io/dist
 
 # Must pass PYTHON_VERSION and UNICODE_WIDTH env variables
-# possible values are: 2.7,16 2.7,32 3.5,16 3.6,16 3.7,16
+# possible values are: 2.7,16 2.7,32 3.5,16 3.6,16 3.7,16 3.8,16
 
 CPYTHON_PATH="$(cpython_path ${PYTHON_VERSION} ${UNICODE_WIDTH})"
 PYTHON_INTERPRETER="${CPYTHON_PATH}/bin/python"
 PIP="${CPYTHON_PATH}/bin/pip"
 PATH="${PATH}:${CPYTHON_PATH}"
 
+echo "=== (${PYTHON_VERSION}) Install the wheel build dependencies ==="
+$PIP install -r requirements-wheel.txt
+if [ "${PYTHON_VERSION}" = "2.7" -a "${UNICODE_WIDTH}" = "32" ]; then
+  # Can't use UNICODE_WIDTH in requirements.txt
+  $PIP install tensorflow
+fi
+
 if [ "${PYTHON_VERSION}" != "2.7" ]; then
   export PYARROW_WITH_FLIGHT=1
   export PYARROW_WITH_GANDIVA=1
@@ -123,9 +130,6 @@
 # Check that we don't expose any unwanted symbols
 /io/scripts/check_arrow_visibility.sh
 
-echo "=== (${PYTHON_VERSION}) Install the wheel build dependencies ==="
-$PIP install -r requirements-wheel.txt
-
 # Clear output directories and leftovers
 rm -rf dist/
 rm -rf build/
diff --git a/python/manylinux2010/scripts/build_virtualenvs.sh b/python/manylinux2010/scripts/build_virtualenvs.sh
deleted file mode 100755
index 1e8b11e..0000000
--- a/python/manylinux2010/scripts/build_virtualenvs.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/bash -e
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Build upon the scripts in https://github.com/matthew-brett/manylinux-builds
-# * Copyright (c) 2013-2016, Matt Terry and Matthew Brett (BSD 2-clause)
-
-PYTHON_VERSIONS="${PYTHON_VERSIONS:-2.7,16 2.7,32 3.5,16 3.6,16, 3.7,16}"
-
-source /multibuild/manylinux_utils.sh
-
-for PYTHON_TUPLE in ${PYTHON_VERSIONS}; do
-    IFS=","
-    set -- $PYTHON_TUPLE;
-    PYTHON=$1
-    U_WIDTH=$2
-    PYTHON_INTERPRETER="$(cpython_path $PYTHON ${U_WIDTH})/bin/python"
-    PIP="$(cpython_path $PYTHON ${U_WIDTH})/bin/pip"
-    PATH="$PATH:$(cpython_path $PYTHON ${U_WIDTH})"
-
-    echo "=== (${PYTHON}, ${U_WIDTH}) Installing build dependencies ==="
-    $PIP install "numpy==1.14.*" "cython==0.29.8" "virtualenv==16.3.0"
-    # Pandas requires numpy and cython
-    $PIP install "pandas==0.24.0"
-
-    # TensorFlow is not supported for Python 2.7 with unicode width 16 or with Python 3.7
-    if [ $PYTHON != "2.7" ] || [ $U_WIDTH = "32" ]; then
-      if [ $PYTHON != "3.7" ]; then
-        $PIP install "tensorflow==1.11.0" "Keras-Preprocessing==1.0.5"
-      fi
-    fi
-
-    echo "=== (${PYTHON}, ${U_WIDTH}) Preparing virtualenv for tests ==="
-    "$(cpython_path $PYTHON ${U_WIDTH})/bin/virtualenv" -p ${PYTHON_INTERPRETER} --no-download /venv-test-${PYTHON}-${U_WIDTH}
-    source /venv-test-${PYTHON}-${U_WIDTH}/bin/activate
-    pip install pytest pytest-faulthandler hypothesis "numpy==1.14.*" "pandas==0.24.0"
-    deactivate
-done
-
-# Remove debug symbols from libraries that were installed via wheel.
-find /venv-test-*/lib/*/site-packages/pandas -name '*.so' -exec strip '{}' ';'
-find /venv-test-*/lib/*/site-packages/numpy -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/cpython-*/lib/*/site-packages/pandas -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/cpython-*/lib/*/site-packages/tensorflow -name '*.so' -exec strip '{}' ';'
-# Only Python 3.6+ packages are stripable as they are built inside of the image
-find /opt/_internal/cpython-3.6.*/lib/python3.6/site-packages/numpy -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/cpython-3.7.*/lib/python3.7/site-packages/numpy -name '*.so' -exec strip '{}' ';'
-find /opt/_internal/*/lib/*/site-packages/Cython -name '*.so' -exec strip '{}' ';'
-
-# Remove pip cache again. It's useful during the virtualenv creation but we
-# don't want it persisted in the docker layer, ~264MiB
-rm -rf /root/.cache
-# Remove pandas' tests module as it includes a lot of data, ~27MiB per Python
-# venv, i.e. 216MiB in total
-rm -rf /opt/_internal/*/lib/*/site-packages/pandas/tests
-rm -rf /venv-test-*/lib/*/site-packages/pandas/tests
-# Remove unused Python versions
-rm -rf /opt/_internal/cpython-3.4*
diff --git a/python/pyarrow/tests/test_plasma.py b/python/pyarrow/tests/test_plasma.py
index f41adbd..658d043 100644
--- a/python/pyarrow/tests/test_plasma.py
+++ b/python/pyarrow/tests/test_plasma.py
@@ -860,18 +860,18 @@
                 self.plasma_client2.create(
                     object_id, DEFAULT_PLASMA_STORE_MEMORY + SMALL_OBJECT_SIZE)
 
-    def test_client_death_during_get(self):
+    @staticmethod
+    def _client_blocked_in_get(plasma_store_name, object_id):
         import pyarrow.plasma as plasma
+        client = plasma.connect(plasma_store_name)
+        # Try to get an object ID that doesn't exist. This should block.
+        client.get([object_id])
 
+    def test_client_death_during_get(self):
         object_id = random_object_id()
 
-        def client_blocked_in_get(plasma_store_name):
-            client = plasma.connect(self.plasma_store_name)
-            # Try to get an object ID that doesn't exist. This should block.
-            client.get([object_id])
-
-        p = multiprocessing.Process(target=client_blocked_in_get,
-                                    args=(self.plasma_store_name, ))
+        p = multiprocessing.Process(target=self._client_blocked_in_get,
+                                    args=(self.plasma_store_name, object_id))
         p.start()
         # Make sure the process is running.
         time.sleep(0.2)
@@ -889,18 +889,18 @@
         # the store is dead.
         self.plasma_client.contains(random_object_id())
 
-    def test_client_getting_multiple_objects(self):
+    @staticmethod
+    def _client_get_multiple(plasma_store_name, object_ids):
         import pyarrow.plasma as plasma
+        client = plasma.connect(plasma_store_name)
+        # Try to get object IDs that don't exist. This should block.
+        client.get(object_ids)
 
+    def test_client_getting_multiple_objects(self):
         object_ids = [random_object_id() for _ in range(10)]
 
-        def client_get_multiple(plasma_store_name):
-            client = plasma.connect(self.plasma_store_name)
-            # Try to get an object ID that doesn't exist. This should block.
-            client.get(object_ids)
-
-        p = multiprocessing.Process(target=client_get_multiple,
-                                    args=(self.plasma_store_name, ))
+        p = multiprocessing.Process(target=self._client_get_multiple,
+                                    args=(self.plasma_store_name, object_ids))
         p.start()
         # Make sure the process is running.
         time.sleep(0.2)
diff --git a/python/pyarrow/tests/test_serialization.py b/python/pyarrow/tests/test_serialization.py
index a0269ba..fa78ae1 100644
--- a/python/pyarrow/tests/test_serialization.py
+++ b/python/pyarrow/tests/test_serialization.py
@@ -932,7 +932,11 @@
     pa.read_serialized(f).deserialize()
 
 
-@pytest.mark.skipif(os.name == 'nt', reason="deserialize_regex not pickleable")
+def deserialize_regex(serialized, q):
+    import pyarrow as pa
+    q.put(pa.deserialize(serialized))
+
+
 def test_deserialize_in_different_process():
     from multiprocessing import Process, Queue
     import re
@@ -945,10 +949,6 @@
     serialized = pa.serialize(regex, serialization_context)
     serialized_bytes = serialized.to_buffer().to_pybytes()
 
-    def deserialize_regex(serialized, q):
-        import pyarrow as pa
-        q.put(pa.deserialize(serialized))
-
     q = Queue()
     p = Process(target=deserialize_regex, args=(serialized_bytes, q))
     p.start()
diff --git a/python/requirements-wheel.txt b/python/requirements-wheel.txt
index 4b27ade..99a2666 100644
--- a/python/requirements-wheel.txt
+++ b/python/requirements-wheel.txt
@@ -1,6 +1,12 @@
+Keras-Preprocessing
 cython
-wheel==0.31.1
+futures; python_version < "3.2"
+numpy>=1.16
+pandas
 setuptools_scm==3.2.0
 six>=1.0.0
-numpy==1.14.5
-futures; python_version < "3.2"
+# TODO: TensorFlow doesn't support Python 3.8 yet.
+tensorflow; python_version >= "3.0" and python_version < "3.8"
+# Pin wheel, because auditwheel is not compatible with wheel>=0.32
+# TODO(kszucs): remove after auditwheel properly supports wheel>0.31
+wheel==0.31.1
diff --git a/python/setup.py b/python/setup.py
index 84888e2..4d3e7c4 100755
--- a/python/setup.py
+++ b/python/setup.py
@@ -592,6 +592,7 @@
         'Programming Language :: Python :: 3.5',
         'Programming Language :: Python :: 3.6',
         'Programming Language :: Python :: 3.7',
+        'Programming Language :: Python :: 3.8',
     ],
     license='Apache License, Version 2.0',
     maintainer='Apache Arrow Developers',