Merge pull request #10568 [BEAM-9061] Add version guards to requirements file for integration tests.

diff --git a/.asf.yaml b/.asf.yaml
new file mode 100644
index 0000000..3da2ec1
--- /dev/null
+++ b/.asf.yaml
@@ -0,0 +1,177 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+github:
+  description: "Apache Beam is a unified programming model for Batch and Streaming"
+  homepage: https://beam.apache.org/
+
+jenkins:
+  github_whitelist:
+    - 11moon11
+    - aaltay
+    - acrites
+    - ageron
+    - aijamalnk
+    - akedin
+    - alanmyrvold
+    - alexvanboxel
+    - amaliujia
+    - andrefaraujo
+    - angoenka
+    - anguillanneuf
+    - angulartist
+    - apilloud
+    - appaquet
+    - Ardagan
+    - aromanenko-dev
+    - aryann
+    - blackml
+    - bmv126
+    - boyuanzz
+    - bradgwest
+    - brunocvcunha
+    - bumblebee-coming
+    - chadrik
+    - chamikaramj
+    - charithe
+    - ChethanUK
+    - chunyang
+    - ClaireMcGinty
+    - cmachgodaddy
+    - cmm08
+    - coheigea
+    - CraigChambersG
+    - DAAC
+    - davidcavazos
+    - davidyan74
+    - dependabot[bot]
+    - derekunimarket
+    - dmvk
+    - dpcollins-google
+    - drobert
+    - dsdinter
+    - dsnet
+    - ecanzonieri
+    - echauchot
+    - elharo
+    - gxercavins
+    - Hannah-Jiang
+    - happygiraffe
+    - henryken
+    - htyleo
+    - HuangLED
+    - ianlancetaylor
+    - ibzib
+    - iemejia
+    - ihji
+    - iht
+    - jackwhelpton
+    - jagthebeetle
+    - jbartok
+    - je-ik
+    - jesusrv1103
+    - jhalaria
+    - jklukas
+    - Jofre
+    - kamilwu
+    - KangZhiDong
+    - kanterov
+    - kennknowles
+    - KevinGG
+    - kkucharc
+    - kmjung
+    - kyle-winkelman
+    - lazylynx
+    - leonardoam
+    - lgajowy
+    - lhaiesp
+    - liumomo315
+    - lloigor
+    - lostluck
+    - Luis-MX
+    - lukecwik
+    - markflyhigh
+    - MattMorgis
+    - milantracy
+    - mrociorg
+    - mwalenia
+    - mxm
+    - nahuellofeudo
+    - nielm
+    - olegbonar
+    - ostrokach
+    - ozturkberkay
+    - pabloem
+    - pawelpasterz
+    - Pehat
+    - pgudlani
+    - piter75
+    - Primevenn
+    - ra1861
+    - rahul8383
+    - rainwoodman
+    - rakeshcusat
+    - rehmanmuradali
+    - reuvenlax
+    - riazela
+    - robertwb
+    - robinyqiu
+    - RochesterinNYC
+    - rohdesamuel
+    - rosetn
+    - RusOr10n
+    - RyanBerti
+    - RyanSkraba
+    - sadovnychyi
+    - salmanVD
+    - samaitra
+    - sambvfx
+    - saulchavez93
+    - scwhittle
+    - sgrj
+    - shusso
+    - snallapa
+    - sorensenjs
+    - soyrice
+    - stefanondisponibile
+    - stephydx
+    - stevekoonce
+    - steveniemitz
+    - sunjincheng121
+    - suztomo
+    - tamanobi
+    - tchiarato
+    - the1plummie
+    - TheNeuralBit
+    - TimvdLippe
+    - ttanay
+    - tudorm
+    - tvalentyn
+    - tweise
+    - tysonjh
+    - udim
+    - vectorijk
+    - violalyu
+    - wcn3
+    - wintermelons
+    - xubii
+    - y1chi
+    - yifanzou
+    - yirutang
+    - youngoli
+    - ziel
+
diff --git a/.test-infra/jenkins/CommonTestProperties.groovy b/.test-infra/jenkins/CommonTestProperties.groovy
index 7bf585e..39d6695 100644
--- a/.test-infra/jenkins/CommonTestProperties.groovy
+++ b/.test-infra/jenkins/CommonTestProperties.groovy
@@ -28,6 +28,7 @@
     enum Runner {
         DATAFLOW("DataflowRunner"),
         SPARK("SparkRunner"),
+        SPARK_STRUCTURED_STREAMING("SparkStructuredStreamingRunner"),
         FLINK("TestFlinkRunner"),
         DIRECT("DirectRunner"),
         PORTABLE("PortableRunner")
@@ -36,6 +37,7 @@
                 JAVA: [
                         DATAFLOW: ":runners:google-cloud-dataflow-java",
                         SPARK: ":runners:spark",
+                        SPARK_STRUCTURED_STREAMING: ":runners:spark",
                         FLINK: ":runners:flink:1.9",
                         DIRECT: ":runners:direct-java"
                 ],
diff --git a/.test-infra/jenkins/NexmarkBuilder.groovy b/.test-infra/jenkins/NexmarkBuilder.groovy
index 9cdef21..9da7537 100644
--- a/.test-infra/jenkins/NexmarkBuilder.groovy
+++ b/.test-infra/jenkins/NexmarkBuilder.groovy
@@ -59,9 +59,19 @@
     options.put('streaming', false)
 
     suite(context, "NEXMARK IN BATCH MODE USING ${runner} RUNNER", runner, sdk, options)
-
     options.put('queryLanguage', 'sql')
     suite(context, "NEXMARK IN SQL BATCH MODE USING ${runner} RUNNER", runner, sdk, options)
+
+    runner = Runner.SPARK_STRUCTURED_STREAMING
+    options = getFullOptions(jobSpecificOptions, runner, triggeringContext)
+    options.put('streaming', false)
+
+    // Skip query 3 (SparkStructuredStreamingRunner does not support State/Timers yet)
+    options.put('skipQueries', '3')
+    suite(context, "NEXMARK IN BATCH MODE USING ${runner} RUNNER", runner, sdk, options)
+    options.put('queryLanguage', 'sql')
+    options.put('skipQueries', '')
+    suite(context, "NEXMARK IN SQL BATCH MODE USING ${runner} RUNNER", runner, sdk, options)
   }
 
   private
diff --git a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy
index 5fb7d24..f41eae5 100644
--- a/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy
+++ b/.test-infra/jenkins/job_PostCommit_Java_Nexmark_Spark.groovy
@@ -64,6 +64,39 @@
               '--manageResources=false',
               '--monitorJobs=true'].join(' '))
     }
+    shell('echo *** RUN NEXMARK IN BATCH MODE USING SPARK STRUCTURED STREAMING RUNNER ***')
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(':sdks:java:testing:nexmark:run')
+      commonJobProperties.setGradleSwitches(delegate)
+      switches('-Pnexmark.runner=":runners:spark"' +
+              ' -Pnexmark.args="' +
+              [NexmarkBigqueryProperties.nexmarkBigQueryArgs,
+              '--runner=SparkStructuredStreamingRunner',
+              '--streaming=false',
+              '--suite=SMOKE',
+               // Skip query 3 (SparkStructuredStreamingRunner does not support State/Timers yet)
+              '--skipQueries=3',
+              '--streamTimeout=60' ,
+              '--manageResources=false',
+              '--monitorJobs=true'].join(' '))
+    }
+    shell('echo *** RUN NEXMARK SQL IN BATCH MODE USING SPARK STRUCTURED STREAMING RUNNER ***')
+    gradle {
+      rootBuildScriptDir(commonJobProperties.checkoutDir)
+      tasks(':sdks:java:testing:nexmark:run')
+      commonJobProperties.setGradleSwitches(delegate)
+      switches('-Pnexmark.runner=":runners:spark"' +
+              ' -Pnexmark.args="' +
+              [NexmarkBigqueryProperties.nexmarkBigQueryArgs,
+              '--runner=SparkStructuredStreamingRunner',
+              '--queryLanguage=sql',
+              '--streaming=false',
+              '--suite=SMOKE',
+              '--streamTimeout=60' ,
+              '--manageResources=false',
+              '--monitorJobs=true'].join(' '))
+    }
   }
 }
 
diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
index 7e1cf0f..29de08a 100644
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
+++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
@@ -297,7 +297,7 @@
 
     // Automatically use the official release version if we are performing a release
     // otherwise append '-SNAPSHOT'
-    project.version = '2.19.0'
+    project.version = '2.20.0'
     if (!isRelease(project)) {
       project.version += '-SNAPSHOT'
     }
@@ -364,6 +364,7 @@
     def aws_java_sdk2_version = "2.5.71"
     def cassandra_driver_version = "3.8.0"
     def classgraph_version = "4.8.56"
+    def gax_version = "1.52.0"
     def generated_grpc_beta_version = "0.44.0"
     def generated_grpc_ga_version = "1.83.0"
     def generated_grpc_dc_beta_version = "0.27.0-alpha"
@@ -372,7 +373,7 @@
     def google_cloud_bigdataoss_version = "1.9.16"
     def google_cloud_core_version = "1.61.0"
     def google_cloud_spanner_version = "1.6.0"
-    def grpc_version = "1.17.1"
+    def grpc_version = "1.25.0"
     def guava_version = "25.1-jre"
     def hadoop_version = "2.8.5"
     def hamcrest_version = "2.1"
@@ -387,7 +388,6 @@
     def protobuf_version = "3.11.1"
     def quickcheck_version = "0.8"
     def spark_version = "2.4.4"
-    def spark_structured_streaming_version = "2.4.0"
 
     // A map of maps containing common libraries used per language. To use:
     // dependencies {
@@ -435,7 +435,8 @@
         commons_lang3                               : "org.apache.commons:commons-lang3:3.9",
         commons_math3                               : "org.apache.commons:commons-math3:3.6.1",
         error_prone_annotations                     : "com.google.errorprone:error_prone_annotations:2.0.15",
-        gax_grpc                                    : "com.google.api:gax-grpc:1.38.0",
+        gax                                         : "com.google.api:gax:$gax_version",
+        gax_grpc                                    : "com.google.api:gax-grpc:$gax_version",
         google_api_client                           : "com.google.api-client:google-api-client:$google_clients_version",
         google_api_client_jackson2                  : "com.google.api-client:google-api-client-jackson2:$google_clients_version",
         google_api_client_java6                     : "com.google.api-client:google-api-client-java6:$google_clients_version",
@@ -465,6 +466,7 @@
         grpc_all                                    : "io.grpc:grpc-all:$grpc_version",
         grpc_auth                                   : "io.grpc:grpc-auth:$grpc_version",
         grpc_core                                   : "io.grpc:grpc-core:$grpc_version",
+        grpc_context                                : "io.grpc:grpc-context:$grpc_version",
         grpc_google_cloud_datacatalog_v1beta1       : "com.google.api.grpc:grpc-google-cloud-datacatalog-v1beta1:$generated_grpc_dc_beta_version",
         grpc_google_cloud_pubsub_v1                 : "com.google.api.grpc:grpc-google-cloud-pubsub-v1:$generated_grpc_ga_version",
         grpc_protobuf                               : "io.grpc:grpc-protobuf:$grpc_version",
@@ -525,7 +527,7 @@
         spark_streaming                             : "org.apache.spark:spark-streaming_2.11:$spark_version",
         stax2_api                                   : "org.codehaus.woodstox:stax2-api:3.1.4",
         vendored_bytebuddy_1_9_3                    : "org.apache.beam:beam-vendor-bytebuddy-1_9_3:0.1",
-        vendored_grpc_1_21_0                        : "org.apache.beam:beam-vendor-grpc-1_21_0:0.1",
+        vendored_grpc_1_26_0                        : "org.apache.beam:beam-vendor-grpc-1_26_0:0.1",
         vendored_guava_26_0_jre                     : "org.apache.beam:beam-vendor-guava-26_0-jre:0.1",
         vendored_calcite_1_20_0                     : "org.apache.beam:beam-vendor-calcite-1_20_0:0.1",
         woodstox_core_asl                           : "org.codehaus.woodstox:woodstox-core-asl:4.4.1",
@@ -1433,19 +1435,21 @@
 
     /** ***********************************************************************************************/
 
+    // applyGrpcNature should only be applied to projects that wish to use
+    // unvendored gRPC / protobuf dependencies.
     project.ext.applyGrpcNature = {
       project.apply plugin: "com.google.protobuf"
       project.protobuf {
         protoc {
           // The artifact spec for the Protobuf Compiler
-          artifact = "com.google.protobuf:protoc:3.6.0" }
+          artifact = "com.google.protobuf:protoc:$protobuf_version" }
 
         // Configure the codegen plugins
         plugins {
           // An artifact spec for a protoc plugin, with "grpc" as
           // the identifier, which can be referred to in the "plugins"
           // container of the "generateProtoTasks" closure.
-          grpc { artifact = "io.grpc:protoc-gen-grpc-java:1.13.1" }
+          grpc { artifact = "io.grpc:protoc-gen-grpc-java:$grpc_version" }
         }
 
         generateProtoTasks {
@@ -1482,6 +1486,8 @@
 
     /** ***********************************************************************************************/
 
+    // applyPortabilityNature should only be applied to projects that want to use
+    // vendored gRPC / protobuf dependencies.
     project.ext.applyPortabilityNature = {
       PortabilityNatureConfiguration configuration = it ? it as PortabilityNatureConfiguration : new PortabilityNatureConfiguration()
 
@@ -1496,10 +1502,10 @@
               archivesBaseName: configuration.archivesBaseName,
               automaticModuleName: configuration.automaticModuleName,
               shadowJarValidationExcludes: it.shadowJarValidationExcludes,
-              shadowClosure: GrpcVendoring_1_21_0.shadowClosure() << {
+              shadowClosure: GrpcVendoring_1_26_0.shadowClosure() << {
                 // We perform all the code relocations but don't include
                 // any of the actual dependencies since they will be supplied
-                // by org.apache.beam:beam-vendor-grpc-v1p21p0:0.1
+                // by org.apache.beam:beam-vendor-grpc-v1p26p0:0.1
                 dependencies {
                   include(dependency { return false })
                 }
@@ -1516,14 +1522,14 @@
       project.protobuf {
         protoc {
           // The artifact spec for the Protobuf Compiler
-          artifact = "com.google.protobuf:protoc:3.7.1" }
+          artifact = "com.google.protobuf:protoc:${GrpcVendoring_1_26_0.protobuf_version}" }
 
         // Configure the codegen plugins
         plugins {
           // An artifact spec for a protoc plugin, with "grpc" as
           // the identifier, which can be referred to in the "plugins"
           // container of the "generateProtoTasks" closure.
-          grpc { artifact = "io.grpc:protoc-gen-grpc-java:1.21.0" }
+          grpc { artifact = "io.grpc:protoc-gen-grpc-java:${GrpcVendoring_1_26_0.grpc_version}" }
         }
 
         generateProtoTasks {
@@ -1537,7 +1543,7 @@
         }
       }
 
-      project.dependencies GrpcVendoring_1_21_0.dependenciesClosure() << { shadow project.ext.library.java.vendored_grpc_1_21_0 }
+      project.dependencies GrpcVendoring_1_26_0.dependenciesClosure() << { shadow project.ext.library.java.vendored_grpc_1_26_0 }
     }
 
     /** ***********************************************************************************************/
diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_21_0.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_21_0.groovy
deleted file mode 100644
index 3c34a6d..0000000
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_21_0.groovy
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.beam.gradle
-
-import org.gradle.api.Project
-
-/**
- * Utilities for working with our vendored version of gRPC.
- */
-class GrpcVendoring_1_21_0 {
-  /** Returns the list of compile time dependencies. */
-  static List<String> dependencies() {
-    return [
-      'com.google.guava:guava:26.0-jre',
-      'com.google.protobuf:protobuf-java:3.7.1',
-      'com.google.protobuf:protobuf-java-util:3.7.1',
-      'com.google.code.gson:gson:2.7',
-      'io.grpc:grpc-auth:1.21.0',
-      'io.grpc:grpc-core:1.21.0',
-      'io.grpc:grpc-context:1.21.0',
-      'io.grpc:grpc-netty:1.21.0',
-      'io.grpc:grpc-protobuf:1.21.0',
-      'io.grpc:grpc-stub:1.21.0',
-      'io.netty:netty-transport-native-epoll:4.1.34.Final',
-      // tcnative version from https://github.com/grpc/grpc-java/blob/master/SECURITY.md#netty
-      'io.netty:netty-tcnative-boringssl-static:2.0.22.Final',
-      'com.google.auth:google-auth-library-credentials:0.13.0',
-      'io.grpc:grpc-testing:1.21.0',
-      'com.google.api.grpc:proto-google-common-protos:1.12.0',
-      'io.opencensus:opencensus-api:0.21.0',
-      'io.opencensus:opencensus-contrib-grpc-metrics:0.21.0',
-    ]
-  }
-
-  /**
-   * Returns the list of runtime dependencies that should be exported as runtime
-   * dependencies within the vendored jar.
-   */
-  static List<String> runtimeDependencies() {
-    return [
-      'com.google.errorprone:error_prone_annotations:2.3.2',
-    ]
-  }
-
-  static Map<String, String> relocations() {
-    // The relocation paths below specifically use gRPC and the full version string as
-    // the code relocation prefix. See https://lists.apache.org/thread.html/4c12db35b40a6d56e170cd6fc8bb0ac4c43a99aa3cb7dbae54176815@%3Cdev.beam.apache.org%3E
-    // for further details.
-
-    // To produce the list of necessary relocations, one needs to start with a set of target
-    // packages that one wants to vendor, find all necessary transitive dependencies of that
-    // set and provide relocations for each such that all necessary packages and their
-    // dependencies are relocated. Any optional dependency that doesn't need relocation
-    // must be excluded via an 'exclude' rule. There is additional complexity of libraries that use
-    // JNI or reflection and have to be handled on case by case basis by learning whether
-    // they support relocation and how would one go about doing it by reading any documentation
-    // those libraries may provide. The 'validateShadedJarDoesntLeakNonOrgApacheBeamClasses'
-    // ensures that there are no classes outside of the 'org.apache.beam' namespace.
-
-    String version = "v1p21p0";
-    String prefix = "org.apache.beam.vendor.grpc.${version}";
-    List<String> packagesToRelocate = [
-      // guava uses the com.google.common and com.google.thirdparty package namespaces
-      "com.google.common",
-      "com.google.thirdparty",
-      "com.google.protobuf",
-      "com.google.gson",
-      "io.grpc",
-      "com.google.auth",
-      "com.google.api",
-      "com.google.cloud",
-      "com.google.logging",
-      "com.google.longrunning",
-      "com.google.rpc",
-      "com.google.type",
-      "io.opencensus",
-      "io.netty"
-    ]
-
-    return packagesToRelocate.collectEntries {
-      [ (it): "${prefix}.${it}" ]
-    } + [
-      // Adapted from https://github.com/grpc/grpc-java/blob/e283f70ad91f99c7fee8b31b605ef12a4f9b1690/netty/shaded/build.gradle#L41
-      // We have to be careful with these replacements as they must not match any
-      // string in NativeLibraryLoader, else they cause corruption. Note that
-      // this includes concatenation of string literals and constants.
-      'META-INF/native/libnetty': "META-INF/native/liborg_apache_beam_vendor_grpc_${version}_netty",
-      'META-INF/native/netty': "META-INF/native/org_apache_beam_vendor_grpc_${version}_netty",
-    ]
-  }
-
-  /** Returns the list of shading exclusions. */
-  static List<String> exclusions() {
-    return [
-      // Don't include android annotations, errorprone, checkerframework, JDK8 annotations, objenesis, junit, and mockito in the vendored jar
-      "android/annotation/**/",
-      "com/google/errorprone/**",
-      "com/google/instrumentation/**",
-      "com/google/j2objc/annotations/**",
-      "javax/annotation/**",
-      "junit/**",
-      "org/checkerframework/**",
-      "org/codehaus/mojo/animal_sniffer/**",
-      "org/hamcrest/**",
-      "org/junit/**",
-      "org/mockito/**",
-      "org/objenesis/**",
-    ]
-  }
-
-  /**
-   * Returns a closure containing the dependencies map used for shading gRPC within the main
-   * Apache Beam project.
-   */
-  static Object dependenciesClosure() {
-    return {
-      dependencies().each { compile it }
-      runtimeDependencies().each { shadow it }
-    }
-  }
-
-  /**
-   * Returns a closure with the code relocation configuration for shading gRPC within the main
-   * Apache Beam project.
-   */
-  static Object shadowClosure() {
-    return {
-      relocations().each { srcNamespace, destNamespace ->
-        relocate srcNamespace, destNamespace
-      }
-      exclusions().each { exclude it }
-    }
-  }
-}
diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_26_0.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_26_0.groovy
index 8c70aa2..de87bdf 100644
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_26_0.groovy
+++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/GrpcVendoring_1_26_0.groovy
@@ -24,40 +24,62 @@
  * Utilities for working with our vendored version of gRPC.
  */
 class GrpcVendoring_1_26_0 {
+
+  static def guava_version = "26.0-jre"
+  static def protobuf_version = "3.11.0"
+  static def grpc_version = "1.26.0"
+  static def gson_version = "2.8.6"
+  static def netty_version = "4.1.42.Final"
+  static def google_auth_version = "0.18.0"
+  static def proto_google_common_protos_version = "1.12.0"
+  static def opencensus_version = "0.24.0"
+  static def perfmark_version = "0.19.0"
+  static def lzma_java_version = "1.3"
+  static def protobuf_javanano_version = "3.0.0-alpha-5"
+  static def jzlib_version = "1.1.3"
+  static def compress_lzf_version = "1.0.3"
+  static def lz4_version = "1.3.0"
+  static def bouncycastle_version = "1.54"
+  static def conscrypt_version = "1.3.0"
+  static def alpn_api_version = "1.1.2.v20150522"
+  static def npn_api_version = "1.1.1.v20141010"
+  static def jboss_marshalling_version = "1.4.11.Final"
+  static def jboss_modules_version = "1.1.0.Beta1"
+
   /** Returns the list of compile time dependencies. */
   static List<String> dependencies() {
     return [
-      'com.google.guava:guava:26.0-jre',
-      'com.google.protobuf:protobuf-java:3.11.0',
-      'com.google.protobuf:protobuf-java-util:3.11.0',
-      'com.google.code.gson:gson:2.8.6',
-      'io.grpc:grpc-auth:1.26.0',
-      'io.grpc:grpc-core:1.26.0',
-      'io.grpc:grpc-context:1.26.0',
-      'io.grpc:grpc-netty:1.26.0',
-      'io.grpc:grpc-protobuf:1.26.0',
-      'io.grpc:grpc-stub:1.26.0',
-      'io.netty:netty-transport-native-epoll:4.1.42.Final',
+      "com.google.guava:guava:$guava_version",
+      "com.google.protobuf:protobuf-java:$protobuf_version",
+      "com.google.protobuf:protobuf-java-util:$protobuf_version",
+      "com.google.code.gson:gson:$gson_version",
+      "io.grpc:grpc-auth:$grpc_version",
+      "io.grpc:grpc-core:$grpc_version",
+      "io.grpc:grpc-context:$grpc_version",
+      "io.grpc:grpc-netty:$grpc_version",
+      "io.grpc:grpc-protobuf:$grpc_version",
+      "io.grpc:grpc-stub:$grpc_version",
+      "io.netty:netty-transport-native-epoll:$netty_version",
       // tcnative version from https://github.com/grpc/grpc-java/blob/master/SECURITY.md#netty
-      'io.netty:netty-tcnative-boringssl-static:2.0.26.Final',
-      'com.google.auth:google-auth-library-credentials:0.18.0',
-      'io.grpc:grpc-testing:1.26.0',
-      'com.google.api.grpc:proto-google-common-protos:1.12.0',
-      'io.opencensus:opencensus-api:0.24.0',
-      'io.opencensus:opencensus-contrib-grpc-metrics:0.24.0',
-      'io.perfmark:perfmark-api:0.19.0',
-      'com.github.jponge:lzma-java:1.3',
-      'com.google.protobuf.nano:protobuf-javanano:3.0.0-alpha-5',
-      'com.jcraft:jzlib:1.1.3',
-      'com.ning:compress-lzf:1.0.3',
-      'net.jpountz.lz4:lz4:1.3.0',
-      'org.bouncycastle:bcpkix-jdk15on:1.54',
-      'org.bouncycastle:bcprov-jdk15on:1.54',
-      'org.conscrypt:conscrypt-openjdk-uber:1.3.0',
-      'org.eclipse.jetty.alpn:alpn-api:1.1.2.v20150522',
-      'org.eclipse.jetty.npn:npn-api:1.1.1.v20141010',
-      'org.jboss.marshalling:jboss-marshalling:1.4.11.Final',
-      'org.jboss.modules:jboss-modules:1.1.0.Beta1'
+      "io.netty:netty-tcnative-boringssl-static:2.0.26.Final",
+      "com.google.auth:google-auth-library-credentials:$google_auth_version",
+      "io.grpc:grpc-testing:$grpc_version",
+      "com.google.api.grpc:proto-google-common-protos:$proto_google_common_protos_version",
+      "io.opencensus:opencensus-api:$opencensus_version",
+      "io.opencensus:opencensus-contrib-grpc-metrics:$opencensus_version",
+      "io.perfmark:perfmark-api:$perfmark_version",
+      "com.github.jponge:lzma-java:$lzma_java_version",
+      "com.google.protobuf.nano:protobuf-javanano:$protobuf_javanano_version",
+      "com.jcraft:jzlib:$jzlib_version",
+      "com.ning:compress-lzf:$compress_lzf_version",
+      "net.jpountz.lz4:lz4:$lz4_version",
+      "org.bouncycastle:bcpkix-jdk15on:$bouncycastle_version",
+      "org.bouncycastle:bcprov-jdk15on:$bouncycastle_version",
+      "org.conscrypt:conscrypt-openjdk-uber:$conscrypt_version",
+      "org.eclipse.jetty.alpn:alpn-api:$alpn_api_version",
+      "org.eclipse.jetty.npn:npn-api:$npn_api_version",
+      "org.jboss.marshalling:jboss-marshalling:$jboss_marshalling_version",
+      "org.jboss.modules:jboss-modules:$jboss_modules_version"
     ]
   }
 
diff --git a/examples/notebooks/get-started/try-apache-beam-java.ipynb b/examples/notebooks/get-started/try-apache-beam-java.ipynb
index 40d648a..101df82 100644
--- a/examples/notebooks/get-started/try-apache-beam-java.ipynb
+++ b/examples/notebooks/get-started/try-apache-beam-java.ipynb
@@ -593,8 +593,8 @@
             "\n", 
             "> Task :runShadow\n", 
             "WARNING: An illegal reflective access operation has occurred\n", 
-            "WARNING: Illegal reflective access by org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.UnsafeUtil (file:/content/build/install/content-shadow/lib/WordCount.jar) to field java.nio.Buffer.address\n", 
-            "WARNING: Please consider reporting this to the maintainers of org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.UnsafeUtil\n", 
+            "WARNING: Illegal reflective access by org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.UnsafeUtil (file:/content/build/install/content-shadow/lib/WordCount.jar) to field java.nio.Buffer.address\n",
+            "WARNING: Please consider reporting this to the maintainers of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.UnsafeUtil\n",
             "WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations\n", 
             "WARNING: All illegal access operations will be denied in a future release\n", 
             "Mar 04, 2019 11:00:24 PM org.apache.beam.sdk.io.FileBasedSource getEstimatedSizeBytes\n", 
@@ -735,8 +735,8 @@
             "\n", 
             ">> java -jar WordCount.jar\n", 
             "WARNING: An illegal reflective access operation has occurred\n", 
-            "WARNING: Illegal reflective access by org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.UnsafeUtil (file:/content/WordCount.jar) to field java.nio.Buffer.address\n", 
-            "WARNING: Please consider reporting this to the maintainers of org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.UnsafeUtil\n", 
+            "WARNING: Illegal reflective access by org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.UnsafeUtil (file:/content/WordCount.jar) to field java.nio.Buffer.address\n", 
+            "WARNING: Please consider reporting this to the maintainers of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.UnsafeUtil\n", 
             "WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations\n", 
             "WARNING: All illegal access operations will be denied in a future release\n", 
             "Mar 04, 2019 11:00:49 PM org.apache.beam.sdk.io.FileBasedSource getEstimatedSizeBytes\n", 
@@ -981,8 +981,8 @@
             "\n", 
             "> Task :runShadow\n", 
             "WARNING: An illegal reflective access operation has occurred\n", 
-            "WARNING: Illegal reflective access by org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.UnsafeUtil (file:/content/build/install/content-shadow/lib/WordCount.jar) to field java.nio.Buffer.address\n", 
-            "WARNING: Please consider reporting this to the maintainers of org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.UnsafeUtil\n", 
+            "WARNING: Illegal reflective access by org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.UnsafeUtil (file:/content/build/install/content-shadow/lib/WordCount.jar) to field java.nio.Buffer.address\n", 
+            "WARNING: Please consider reporting this to the maintainers of org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.UnsafeUtil\n", 
             "WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations\n", 
             "WARNING: All illegal access operations will be denied in a future release\n", 
             "Mar 04, 2019 11:01:26 PM org.apache.beam.sdk.io.FileBasedSource getEstimatedSizeBytes\n", 
@@ -1096,4 +1096,4 @@
       ]
     }
   ]
-}
\ No newline at end of file
+}
diff --git a/gradle.properties b/gradle.properties
index d758ae4..2b9eb79 100644
--- a/gradle.properties
+++ b/gradle.properties
@@ -23,7 +23,7 @@
 signing.gnupg.executable=gpg
 signing.gnupg.useLegacyGpg=true
 
-version=2.19.0-SNAPSHOT
-python_sdk_version=2.19.0.dev
+version=2.20.0-SNAPSHOT
+sdk_version=2.20.0.dev
 
 javaVersion=1.8
diff --git a/release/src/main/scripts/set_version.sh b/release/src/main/scripts/set_version.sh
index 5844b73..b52dfc9 100755
--- a/release/src/main/scripts/set_version.sh
+++ b/release/src/main/scripts/set_version.sh
@@ -67,7 +67,7 @@
   sed -i -e "s/version=.*/version=$TARGET_VERSION/" gradle.properties
   sed -i -e "s/project.version = .*/project.version = '$TARGET_VERSION'/" buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
   sed -i -e "s/^__version__ = .*/__version__ = '${TARGET_VERSION}'/" sdks/python/apache_beam/version.py
-  sed -i -e "s/python_sdk_version=.*/python_sdk_version=$TARGET_VERSION/" gradle.properties
+  sed -i -e "s/sdk_version=.*/sdk_version=$TARGET_VERSION/" gradle.properties
   # TODO: [BEAM-4767]
   sed -i -e "s/'dataflow.container_version' : .*/'dataflow.container_version' : 'beam-${RELEASE}'/" runners/google-cloud-dataflow-java/build.gradle
 else
@@ -79,7 +79,7 @@
   sed -i -e "s/version=.*/version=$TARGET_VERSION-SNAPSHOT/" gradle.properties
   sed -i -e "s/project.version = .*/project.version = '$TARGET_VERSION'/" buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
   sed -i -e "s/^__version__ = .*/__version__ = '${TARGET_VERSION}.dev'/" sdks/python/apache_beam/version.py
-  sed -i -e "s/python_sdk_version=.*/python_sdk_version=$TARGET_VERSION.dev/" gradle.properties
+  sed -i -e "s/sdk_version=.*/sdk_version=$TARGET_VERSION.dev/" gradle.properties
   sed -i -e "s/'dataflow.container_version' : .*/'dataflow.container_version' : 'beam-master-.*'/" runners/google-cloud-dataflow-java/build.gradle
 fi
 
diff --git a/runners/core-construction-java/build.gradle b/runners/core-construction-java/build.gradle
index b9c842f..3de798a 100644
--- a/runners/core-construction-java/build.gradle
+++ b/runners/core-construction-java/build.gradle
@@ -36,7 +36,7 @@
   compile project(path: ":model:pipeline", configuration: "shadow")
   compile project(path: ":model:job-management", configuration: "shadow")
   compile project(path: ":sdks:java:core", configuration: "shadow")
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   compile library.java.vendored_guava_26_0_jre
   compile library.java.classgraph
   compile library.java.jackson_core
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ArtifactServiceStager.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ArtifactServiceStager.java
index 4212916..29e47b8 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ArtifactServiceStager.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ArtifactServiceStager.java
@@ -50,9 +50,9 @@
 import org.apache.beam.model.jobmanagement.v1.ArtifactStagingServiceGrpc.ArtifactStagingServiceStub;
 import org.apache.beam.sdk.util.MoreFutures;
 import org.apache.beam.sdk.util.ThrowingSupplier;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Channel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Channel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hasher;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/BeamUrns.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/BeamUrns.java
index f1f30dc..e4fc6d7 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/BeamUrns.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/BeamUrns.java
@@ -18,7 +18,7 @@
 package org.apache.beam.runners.core.construction;
 
 import org.apache.beam.model.pipeline.v1.RunnerApi;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ProtocolMessageEnum;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ProtocolMessageEnum;
 
 /** Returns the standard URN of a given enum annotated with [(standard_urn)]. */
 public class BeamUrns {
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslation.java
index 8e1021d..86f0178 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslation.java
@@ -28,7 +28,7 @@
 import org.apache.beam.model.pipeline.v1.RunnerApi.FunctionSpec;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.util.SerializableUtils;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.BiMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableBiMap;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslators.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslators.java
index 6f156dd..81b7922 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslators.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CoderTranslators.java
@@ -33,7 +33,7 @@
 import org.apache.beam.sdk.util.InstanceBuilder;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 
 /** {@link CoderTranslator} implementations for known coder types. */
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CombineTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CombineTranslation.java
index e5edc35..5ea9ef7 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CombineTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CombineTranslation.java
@@ -39,7 +39,7 @@
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CreatePCollectionViewTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CreatePCollectionViewTranslation.java
index b89c5b6..5027bb4 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CreatePCollectionViewTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/CreatePCollectionViewTranslation.java
@@ -34,7 +34,7 @@
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 
 /**
  * Utility methods for translating a {@link View} transforms to and from {@link RunnerApi}
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DefaultExpansionServiceClientFactory.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DefaultExpansionServiceClientFactory.java
index 1586be8..a25007e 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DefaultExpansionServiceClientFactory.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DefaultExpansionServiceClientFactory.java
@@ -23,7 +23,7 @@
 import org.apache.beam.model.expansion.v1.ExpansionApi;
 import org.apache.beam.model.expansion.v1.ExpansionServiceGrpc;
 import org.apache.beam.model.pipeline.v1.Endpoints;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
 
 /** Default factory for ExpansionServiceClient used by External transform. */
 public class DefaultExpansionServiceClientFactory implements ExpansionServiceClientFactory {
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DisplayDataTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DisplayDataTranslation.java
index c7cd235..57836b9 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DisplayDataTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/DisplayDataTranslation.java
@@ -19,8 +19,8 @@
 
 import org.apache.beam.model.pipeline.v1.RunnerApi;
 import org.apache.beam.sdk.transforms.display.DisplayData;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Any;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.BoolValue;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Any;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.BoolValue;
 
 /** Utilities for going to/from DisplayData protos. */
 public class DisplayDataTranslation {
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/Environments.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/Environments.java
index ed94642..73bf534 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/Environments.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/Environments.java
@@ -32,7 +32,7 @@
 import org.apache.beam.model.pipeline.v1.RunnerApi.StandardEnvironments;
 import org.apache.beam.sdk.util.ReleaseInfo;
 import org.apache.beam.sdk.util.common.ReflectHelpers;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 
@@ -55,7 +55,7 @@
    * container.
    */
   private static final String JAVA_SDK_HARNESS_CONTAINER_URL =
-      "apachebeam/java_sdk:" + ReleaseInfo.getReleaseInfo().getVersion();
+      "apachebeam/java_sdk:" + ReleaseInfo.getReleaseInfo().getSdkVersion();
   public static final Environment JAVA_SDK_HARNESS_ENVIRONMENT =
       createDockerEnvironment(JAVA_SDK_HARNESS_CONTAINER_URL);
 
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java
index d58346b..45665fd 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/External.java
@@ -38,8 +38,8 @@
 import org.apache.beam.sdk.values.POutput;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannelBuilder;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ModelCoders.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ModelCoders.java
index 3d6d4dd..929d7a8 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ModelCoders.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ModelCoders.java
@@ -25,7 +25,7 @@
 import org.apache.beam.model.pipeline.v1.RunnerApi.Coder;
 import org.apache.beam.model.pipeline.v1.RunnerApi.FunctionSpec;
 import org.apache.beam.model.pipeline.v1.RunnerApi.StandardCoders;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
 
 /** Utilities and constants to interact with coders that are part of the Beam Model. */
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PCollectionViewTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PCollectionViewTranslation.java
index d9ddb93..402e7d6 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PCollectionViewTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PCollectionViewTranslation.java
@@ -30,7 +30,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 
 /** Utilities for interacting with PCollection view protos. */
 public class PCollectionViewTranslation {
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java
index 8a191e5..cc3ada9 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ParDoTranslation.java
@@ -79,8 +79,8 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.TupleTagList;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Sets;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslation.java
index baf7c36..56e5d06 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslation.java
@@ -27,9 +27,9 @@
 import java.util.Map;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.util.common.ReflectHelpers;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.JsonFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.JsonFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.CaseFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ReadTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ReadTranslation.java
index 81bafab..94288e5 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ReadTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/ReadTranslation.java
@@ -38,8 +38,8 @@
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.values.PBegin;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
 /**
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/TestStreamTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/TestStreamTranslation.java
index 1b747c1..abbc328 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/TestStreamTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/TestStreamTranslation.java
@@ -36,7 +36,7 @@
 import org.apache.beam.sdk.values.PBegin;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.TimestampedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowIntoTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowIntoTranslation.java
index 0d72861..d7fafcd 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowIntoTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowIntoTranslation.java
@@ -33,7 +33,7 @@
 import org.apache.beam.sdk.transforms.windowing.Window;
 import org.apache.beam.sdk.transforms.windowing.Window.Assign;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 
 /**
  * Utility methods for translating a {@link Window.Assign} to and from {@link RunnerApi}
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowingStrategyTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowingStrategyTranslation.java
index 63f662f..bbb31a7 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowingStrategyTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WindowingStrategyTranslation.java
@@ -41,10 +41,10 @@
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.values.WindowingStrategy;
 import org.apache.beam.sdk.values.WindowingStrategy.AccumulationMode;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.Durations;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.Timestamps;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.Durations;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.Timestamps;
 import org.joda.time.Duration;
 
 /** Utilities for working with {@link WindowingStrategy WindowingStrategies}. */
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WriteFilesTranslation.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WriteFilesTranslation.java
index 0df16a2..986a585 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WriteFilesTranslation.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/WriteFilesTranslation.java
@@ -45,7 +45,7 @@
 import org.apache.beam.sdk.values.POutput;
 import org.apache.beam.sdk.values.PValue;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionServer.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionServer.java
index 12b52f4..5859d69 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionServer.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionServer.java
@@ -20,8 +20,8 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.concurrent.TimeUnit;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.netty.NettyServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NettyServerBuilder;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 
 /** A {@link Server gRPC Server} for an ExpansionService. */
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionService.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionService.java
index aab393b..45559bb 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionService.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/expansion/ExpansionService.java
@@ -54,9 +54,9 @@
 import org.apache.beam.sdk.values.PInput;
 import org.apache.beam.sdk.values.POutput;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.CaseFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Converter;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/GreedyPCollectionFusers.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/GreedyPCollectionFusers.java
index cecbee9..3d7d414 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/GreedyPCollectionFusers.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/GreedyPCollectionFusers.java
@@ -31,7 +31,7 @@
 import org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode;
 import org.apache.beam.runners.core.construction.graph.PipelineNode.PTransformNode;
 import org.apache.beam.sdk.transforms.Flatten;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
 import org.slf4j.Logger;
diff --git a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/QueryablePipeline.java b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/QueryablePipeline.java
index 4ed19da..099294d 100644
--- a/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/QueryablePipeline.java
+++ b/runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/QueryablePipeline.java
@@ -62,7 +62,7 @@
 import org.apache.beam.runners.core.construction.PTransformTranslation;
 import org.apache.beam.runners.core.construction.graph.PipelineNode.PCollectionNode;
 import org.apache.beam.runners.core.construction.graph.PipelineNode.PTransformNode;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ArtifactServiceStagerTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ArtifactServiceStagerTest.java
index 99c14e5..4cc2b68 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ArtifactServiceStagerTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ArtifactServiceStagerTest.java
@@ -33,9 +33,9 @@
 import java.util.Set;
 import org.apache.beam.model.jobmanagement.v1.ArtifactApi.ArtifactMetadata;
 import org.apache.beam.runners.core.construction.ArtifactServiceStager.StagedFile;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/CommonCoderTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/CommonCoderTest.java
index 56fae2f..4ec1a7d 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/CommonCoderTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/CommonCoderTest.java
@@ -71,7 +71,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.Row;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Splitter;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ExternalTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ExternalTest.java
index b399472..1cedfc5 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ExternalTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/ExternalTest.java
@@ -38,11 +38,11 @@
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.TupleTagList;
 import org.apache.beam.sdk.values.TypeDescriptors;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ConnectivityState;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ConnectivityState;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerBuilder;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/InMemoryArtifactStagerService.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/InMemoryArtifactStagerService.java
index cb850bd..34431b6 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/InMemoryArtifactStagerService.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/InMemoryArtifactStagerService.java
@@ -35,7 +35,7 @@
 import org.apache.beam.model.jobmanagement.v1.ArtifactApi.PutArtifactRequest.ContentCase;
 import org.apache.beam.model.jobmanagement.v1.ArtifactApi.PutArtifactResponse;
 import org.apache.beam.model.jobmanagement.v1.ArtifactStagingServiceGrpc.ArtifactStagingServiceImplBase;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing;
 
 /**
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslationTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslationTest.java
index a482d02..14d8c1c 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslationTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/PipelineOptionsTranslationTest.java
@@ -30,9 +30,9 @@
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.common.ReflectHelpers;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.NullValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Value;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.NullValue;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Value;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/WindowIntoTranslationTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/WindowIntoTranslationTest.java
index 2db4d70..4b3f7aa 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/WindowIntoTranslationTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/WindowIntoTranslationTest.java
@@ -38,7 +38,7 @@
 import org.apache.beam.sdk.transforms.windowing.Window;
 import org.apache.beam.sdk.transforms.windowing.Window.Assign;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.joda.time.Duration;
 import org.joda.time.Instant;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/expansion/ExpansionServiceTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/expansion/ExpansionServiceTest.java
index 6024c10..b78f1da 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/expansion/ExpansionServiceTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/expansion/ExpansionServiceTest.java
@@ -46,7 +46,7 @@
 import org.apache.beam.sdk.transforms.Count;
 import org.apache.beam.sdk.transforms.Impulse;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProtoOverridesTest.java b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProtoOverridesTest.java
index 5215d72..f27f38d 100644
--- a/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProtoOverridesTest.java
+++ b/runners/core-construction-java/src/test/java/org/apache/beam/runners/core/construction/graph/ProtoOverridesTest.java
@@ -36,7 +36,7 @@
 import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
 import org.apache.beam.model.pipeline.v1.RunnerApi.WindowingStrategy;
 import org.apache.beam.runners.core.construction.graph.ProtoOverrides.TransformReplacement;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/runners/direct-java/build.gradle b/runners/direct-java/build.gradle
index b8836a8..6d46528 100644
--- a/runners/direct-java/build.gradle
+++ b/runners/direct-java/build.gradle
@@ -65,7 +65,7 @@
     compile project(it)
   }
   shadow project(path: ":sdks:java:core", configuration: "shadow")
-  shadow library.java.vendored_grpc_1_21_0
+  shadow library.java.vendored_grpc_1_26_0
   shadow library.java.joda_time
   shadow library.java.slf4j_api
   shadow library.java.args4j
diff --git a/runners/flink/flink_runner.gradle b/runners/flink/flink_runner.gradle
index ef6c82f..43e37af 100644
--- a/runners/flink/flink_runner.gradle
+++ b/runners/flink/flink_runner.gradle
@@ -139,7 +139,7 @@
   compile project(":runners:core-construction-java")
   compile project(":runners:java-fn-execution")
   compile project(":sdks:java:extensions:google-cloud-platform-core")
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   compile library.java.jackson_annotations
   compile library.java.slf4j_api
   compile library.java.joda_time
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java
index ee40fb6..84a2e05 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java
@@ -75,7 +75,7 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.BiMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobInvoker.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobInvoker.java
index 40257b9..67f664e 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobInvoker.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkJobInvoker.java
@@ -30,7 +30,7 @@
 import org.apache.beam.runners.fnexecution.jobsubmission.PortablePipelineRunner;
 import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
 import org.slf4j.Logger;
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineRunner.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineRunner.java
index 137aa00..f46bf7b 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineRunner.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkPipelineRunner.java
@@ -45,7 +45,7 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
 import org.apache.beam.sdk.options.PortablePipelineOptions.RetrievalServiceType;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.flink.api.common.JobExecutionResult;
 import org.apache.flink.client.program.DetachedEnvironment;
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java
index 92b07a4..cbc437b 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java
@@ -90,7 +90,7 @@
 import org.apache.beam.sdk.values.TypeDescriptors;
 import org.apache.beam.sdk.values.ValueWithRecordId;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.BiMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.HashMultiset;
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java
index 45fc2a1..23a2a4b 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperator.java
@@ -87,7 +87,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.sdk.v2.sdk.extensions.protobuf.ByteStringCoder;
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtils.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtils.java
index ccd10d4..3e27bb1 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtils.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtils.java
@@ -32,7 +32,7 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.StructuredCoder;
 import org.apache.beam.sdk.util.CoderUtils;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 
 /**
  * Utility functions for dealing with key encoding. Beam requires keys to be compared in binary
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkJobInvokerTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkJobInvokerTest.java
index 28c2cde..6397d4a 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkJobInvokerTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/FlinkJobInvokerTest.java
@@ -23,7 +23,7 @@
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.io.GenerateSequence;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.flink.client.program.OptimizerPlanEnvironment;
 import org.apache.flink.client.program.PackagedProgram;
 import org.apache.flink.client.program.ProgramInvocationException;
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunctionTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunctionTest.java
index 61d8906..fb79841 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunctionTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/functions/FlinkExecutableStageFunctionTest.java
@@ -45,7 +45,7 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.join.RawUnionValue;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.flink.api.common.cache.DistributedCache;
 import org.apache.flink.api.common.functions.RuntimeContext;
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java
index e3b46e6..3c5f44b 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/ExecutableStageDoFnOperatorTest.java
@@ -96,8 +96,8 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtilsTest.java b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtilsTest.java
index 274b2bf..817b5e8 100644
--- a/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtilsTest.java
+++ b/runners/flink/src/test/java/org/apache/beam/runners/flink/translation/wrappers/streaming/FlinkKeyUtilsTest.java
@@ -26,7 +26,7 @@
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VoidCoder;
 import org.apache.beam.sdk.util.CoderUtils;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
 import org.junit.Test;
 
diff --git a/runners/google-cloud-dataflow-java/build.gradle b/runners/google-cloud-dataflow-java/build.gradle
index d8ddffa..f1a1394 100644
--- a/runners/google-cloud-dataflow-java/build.gradle
+++ b/runners/google-cloud-dataflow-java/build.gradle
@@ -81,7 +81,7 @@
   compile library.java.jackson_databind
   compile library.java.joda_time
   compile library.java.slf4j_api
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   testCompile library.java.hamcrest_core
   testCompile library.java.junit
   testCompile project(path: ":sdks:java:io:google-cloud-platform", configuration: "testRuntime")
diff --git a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
index fa45b7f..0d100ad 100644
--- a/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
+++ b/runners/google-cloud-dataflow-java/src/main/java/org/apache/beam/runners/dataflow/DataflowPipelineTranslator.java
@@ -108,8 +108,8 @@
 import org.apache.beam.sdk.values.TimestampedValue;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/google-cloud-dataflow-java/worker/build.gradle b/runners/google-cloud-dataflow-java/worker/build.gradle
index 6b866d7..a0e3356 100644
--- a/runners/google-cloud-dataflow-java/worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/build.gradle
@@ -76,7 +76,7 @@
   compile project(":runners:java-fn-execution")
   compile project(":sdks:java:fn-execution")
   compile project(path: ":runners:google-cloud-dataflow-java:worker:windmill", configuration: "shadow")
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   compile google_api_services_dataflow
   compile library.java.avro
   compile library.java.google_api_client
diff --git a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
index 5ee2d63..d2912e4 100644
--- a/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
+++ b/runners/google-cloud-dataflow-java/worker/legacy-worker/build.gradle
@@ -53,7 +53,7 @@
         library.java.jackson_databind,
         library.java.joda_time,
         library.java.slf4j_api,
-        library.java.vendored_grpc_1_21_0,
+        library.java.vendored_grpc_1_26_0,
 ]
 
 def sdk_provided_shaded_project_dependencies = [
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ByteStringCoder.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ByteStringCoder.java
index 3d9a2c4..eab920f 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ByteStringCoder.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ByteStringCoder.java
@@ -23,7 +23,7 @@
 import org.apache.beam.sdk.coders.AtomicCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.util.VarInt;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;
 
 /**
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowRunnerHarness.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowRunnerHarness.java
index eda6b03..e636ba8 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowRunnerHarness.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowRunnerHarness.java
@@ -38,7 +38,7 @@
 import org.apache.beam.runners.fnexecution.control.FnApiControlClient;
 import org.apache.beam.runners.fnexecution.state.GrpcStateService;
 import org.apache.beam.sdk.io.FileSystems;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelper.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelper.java
index 5449462..36454db 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelper.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelper.java
@@ -31,7 +31,7 @@
 import org.apache.beam.runners.dataflow.worker.ExperimentContext.Experiment;
 import org.apache.beam.runners.dataflow.worker.logging.DataflowWorkerLoggingInitializer;
 import org.apache.beam.runners.dataflow.worker.logging.DataflowWorkerLoggingMDC;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.conscrypt.OpenSSLProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowParDoFnFactory.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowParDoFnFactory.java
index b12f889..bef1466 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowParDoFnFactory.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/GroupAlsoByWindowParDoFnFactory.java
@@ -58,7 +58,7 @@
 import org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricTrackingWindmillServerStub.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricTrackingWindmillServerStub.java
index 734f49e..f08bc80 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricTrackingWindmillServerStub.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/MetricTrackingWindmillServerStub.java
@@ -30,7 +30,7 @@
 import org.apache.beam.runners.dataflow.worker.windmill.Windmill.KeyedGetDataRequest;
 import org.apache.beam.runners.dataflow.worker.windmill.WindmillServerStub;
 import org.apache.beam.runners.dataflow.worker.windmill.WindmillServerStub.GetDataStream;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.SettableFuture;
 import org.joda.time.Duration;
 
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubSink.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubSink.java
index 147fd76..4cc73a1 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubSink.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/PubsubSink.java
@@ -36,7 +36,7 @@
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
 /**
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ReaderCache.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ReaderCache.java
index 6b00560..8b4439d 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ReaderCache.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/ReaderCache.java
@@ -22,7 +22,7 @@
 import javax.annotation.concurrent.ThreadSafe;
 import org.apache.beam.sdk.io.UnboundedSource;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.Cache;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheBuilder;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StateFetcher.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StateFetcher.java
index 3c804db..1dbbd6c 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StateFetcher.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StateFetcher.java
@@ -38,7 +38,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.Cache;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
index d6d017a..81faed0 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorker.java
@@ -128,8 +128,8 @@
 import org.apache.beam.sdk.util.Sleeper;
 import org.apache.beam.sdk.util.UserCodeException;
 import org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java
index 76aa8b0..a761ae4 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContext.java
@@ -54,7 +54,7 @@
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcher.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcher.java
index 2c00c99..eb95afb 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcher.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcher.java
@@ -48,8 +48,8 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Parser;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Parser;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillNamespacePrefix.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillNamespacePrefix.java
index eba5c5d..2dd7006 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillNamespacePrefix.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillNamespacePrefix.java
@@ -17,7 +17,7 @@
  */
 package org.apache.beam.runners.dataflow.worker;
 
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 
 /**
  * A prefix for a Windmill state or timer tag to separate user state and timers from system state
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillSink.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillSink.java
index 60ddce5..cd35038 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillSink.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillSink.java
@@ -40,7 +40,7 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.ValueWithRecordId;
 import org.apache.beam.sdk.values.ValueWithRecordId.ValueWithRecordIdCoder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
 class WindmillSink<T> extends Sink<WindowedValue<T>> {
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateCache.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateCache.java
index b419a38..eb18ef8 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateCache.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateCache.java
@@ -32,7 +32,7 @@
 import org.apache.beam.runners.dataflow.worker.status.StatusDataProvider;
 import org.apache.beam.sdk.state.State;
 import org.apache.beam.sdk.util.Weighted;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Equivalence;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.Cache;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheBuilder;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternals.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternals.java
index 9c9779e..a3619ab 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternals.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternals.java
@@ -55,7 +55,7 @@
 import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
 import org.apache.beam.sdk.util.CombineFnUtil;
 import org.apache.beam.sdk.util.Weighted;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateReader.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateReader.java
index 0050602..75ee1cb 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateReader.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillStateReader.java
@@ -40,7 +40,7 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.Weighted;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Function;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Objects;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillTimerInternals.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillTimerInternals.java
index fb33ed3..f444c92 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillTimerInternals.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WindmillTimerInternals.java
@@ -29,7 +29,7 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.state.TimeDomain;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.HashBasedTable;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Table;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSources.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSources.java
index 4964a89..344798e 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSources.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSources.java
@@ -61,7 +61,7 @@
 import org.apache.beam.sdk.util.FluentBackoff;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.ValueWithRecordId;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlService.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlService.java
index d701083..865d033 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlService.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlService.java
@@ -26,7 +26,7 @@
 import org.apache.beam.runners.dataflow.worker.fn.grpc.BeamFnService;
 import org.apache.beam.runners.fnexecution.HeaderAccessor;
 import org.apache.beam.runners.fnexecution.control.FnApiControlClient;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperation.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperation.java
index bf42c4d..2879c18 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperation.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperation.java
@@ -73,8 +73,8 @@
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.util.MoreFutures;
 import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcService.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcService.java
index dcde104..cad6a8e 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcService.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcService.java
@@ -40,7 +40,7 @@
 import org.apache.beam.sdk.fn.data.LogicalEndpoint;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingService.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingService.java
index d1b62d1..2045317 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingService.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingService.java
@@ -28,8 +28,8 @@
 import org.apache.beam.runners.dataflow.worker.fn.grpc.BeamFnService;
 import org.apache.beam.runners.dataflow.worker.logging.DataflowWorkerLoggingMDC;
 import org.apache.beam.runners.fnexecution.HeaderAccessor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactory.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactory.java
index 1fabd6d..e51c0ee 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactory.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactory.java
@@ -27,9 +27,9 @@
 import org.apache.beam.sdk.fn.stream.ForwardingClientResponseObserver;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ServerCallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ServerCallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A {@link StreamObserver} factory that wraps provided {@link CallStreamObserver}s making them flow
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java
index fbd10c0..32c1df9 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/CreateExecutableStageNodeFunction.java
@@ -78,8 +78,8 @@
 import org.apache.beam.sdk.util.WindowedValue.WindowedValueCoder;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodes.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodes.java
index 0d96981..83ed105 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodes.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodes.java
@@ -37,7 +37,7 @@
 import org.apache.beam.sdk.transforms.ParDo;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/RegisterNodeFunction.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/RegisterNodeFunction.java
index 7bd66e9..e1c3614 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/RegisterNodeFunction.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/graph/RegisterNodeFunction.java
@@ -78,8 +78,8 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java
index a54733d..7565ba2 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/DirectStreamObserver.java
@@ -19,8 +19,8 @@
 
 import java.util.concurrent.Phaser;
 import javax.annotation.concurrent.ThreadSafe;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A {@link StreamObserver} which uses synchronization on the underlying {@link CallStreamObserver}
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/ForwardingClientResponseObserver.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/ForwardingClientResponseObserver.java
index 74d8e4d..d7eba1f 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/ForwardingClientResponseObserver.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/ForwardingClientResponseObserver.java
@@ -17,9 +17,9 @@
  */
 package org.apache.beam.runners.dataflow.worker.windmill;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientCallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientResponseObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientCallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientResponseObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A {@link ClientResponseObserver} which delegates all {@link StreamObserver} calls.
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java
index c64803d..632ab07 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServer.java
@@ -84,17 +84,17 @@
 import org.apache.beam.sdk.util.BackOffUtils;
 import org.apache.beam.sdk.util.FluentBackoff;
 import org.apache.beam.sdk.util.Sleeper;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.CallCredentials;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Channel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.auth.MoreCallCredentials;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.netty.GrpcSslContexts;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.netty.NegotiationType;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.netty.NettyChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.CallCredentials;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Channel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.auth.MoreCallCredentials;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.GrpcSslContexts;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NegotiationType;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NettyChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Splitter;
@@ -232,11 +232,11 @@
    */
   private static class VendoredRequestMetadataCallbackAdapter
       implements com.google.auth.RequestMetadataCallback {
-    private final org.apache.beam.vendor.grpc.v1p21p0.com.google.auth.RequestMetadataCallback
+    private final org.apache.beam.vendor.grpc.v1p26p0.com.google.auth.RequestMetadataCallback
         callback;
 
     private VendoredRequestMetadataCallbackAdapter(
-        org.apache.beam.vendor.grpc.v1p21p0.com.google.auth.RequestMetadataCallback callback) {
+        org.apache.beam.vendor.grpc.v1p26p0.com.google.auth.RequestMetadataCallback callback) {
       this.callback = callback;
     }
 
@@ -260,7 +260,7 @@
    * delegate to reduce maintenance burden.
    */
   private static class VendoredCredentialsAdapter
-      extends org.apache.beam.vendor.grpc.v1p21p0.com.google.auth.Credentials {
+      extends org.apache.beam.vendor.grpc.v1p26p0.com.google.auth.Credentials {
     private final com.google.auth.Credentials credentials;
 
     private VendoredCredentialsAdapter(com.google.auth.Credentials credentials) {
@@ -281,7 +281,7 @@
     public void getRequestMetadata(
         final URI uri,
         Executor executor,
-        final org.apache.beam.vendor.grpc.v1p21p0.com.google.auth.RequestMetadataCallback
+        final org.apache.beam.vendor.grpc.v1p26p0.com.google.auth.RequestMetadataCallback
             callback) {
       credentials.getRequestMetadata(
           uri, executor, new VendoredRequestMetadataCallbackAdapter(callback));
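The hunks above are representative of the bulk of this change: a mechanical rename of Beam's vendored gRPC namespace from org.apache.beam.vendor.grpc.v1p21p0 to org.apache.beam.vendor.grpc.v1p26p0, with no behavioral edits to the surrounding worker code. As a minimal sketch of what code against the renamed package looks like (not part of the patch; the class name DiscardingObserver is hypothetical):

import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;

/** Trivial observer that ignores everything it receives; only the vendored import path changes. */
final class DiscardingObserver<T> implements StreamObserver<T> {
  @Override
  public void onNext(T value) {
    // Intentionally drop the value.
  }

  @Override
  public void onError(Throwable t) {
    // Ignore the error in this sketch.
  }

  @Override
  public void onCompleted() {
    // Nothing to clean up.
  }
}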
diff --git a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java
index 6731951..0216766 100644
--- a/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java
+++ b/runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/windmill/StreamObserverFactory.java
@@ -20,8 +20,8 @@
 import java.util.function.Function;
 import org.apache.beam.sdk.fn.stream.AdvancingPhaser;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * Uses {@link PipelineOptions} to configure which underlying {@link StreamObserver} implementation
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/harness/test/TestStreams.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/harness/test/TestStreams.java
index 07ccdb1..e88054c 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/harness/test/TestStreams.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/harness/test/TestStreams.java
@@ -19,8 +19,8 @@
 
 import java.util.function.Consumer;
 import java.util.function.Supplier;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /** Utility methods which enable testing of {@link StreamObserver}s. */
 public class TestStreams {
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowMatchers.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowMatchers.java
index 469add3..4b69f07 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowMatchers.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowMatchers.java
@@ -18,7 +18,7 @@
 package org.apache.beam.runners.dataflow.worker;
 
 import java.io.Serializable;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.hamcrest.Description;
 import org.hamcrest.TypeSafeMatcher;
 
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelperTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelperTest.java
index 66be62d..453dbea 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelperTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/DataflowWorkerHarnessHelperTest.java
@@ -32,7 +32,7 @@
 import org.apache.beam.runners.dataflow.worker.testing.RestoreDataflowLoggingMDC;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.testing.RestoreSystemProperties;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubReaderTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubReaderTest.java
index fddfdc5..9cae865 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubReaderTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubReaderTest.java
@@ -31,7 +31,7 @@
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.joda.time.Instant;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubSinkTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubSinkTest.java
index 9f45286..a01356e 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubSinkTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/PubsubSinkTest.java
@@ -29,7 +29,7 @@
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.joda.time.Instant;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderCacheTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderCacheTest.java
index 51f20a4..1820ef1 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderCacheTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/ReaderCacheTest.java
@@ -26,7 +26,7 @@
 import java.io.IOException;
 import java.util.concurrent.TimeUnit;
 import org.apache.beam.sdk.io.UnboundedSource;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Stopwatch;
 import org.joda.time.Duration;
 import org.junit.Before;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StateFetcherTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StateFetcherTest.java
index 5d90a85..6dc4f7b 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StateFetcherTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StateFetcherTest.java
@@ -43,7 +43,7 @@
 import org.apache.beam.sdk.transforms.View;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.Cache;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheBuilder;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
index c954bdb..7ce23a4 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingDataflowWorkerTest.java
@@ -137,9 +137,9 @@
 import org.apache.beam.sdk.values.ValueWithRecordId;
 import org.apache.beam.sdk.values.WindowingStrategy;
 import org.apache.beam.sdk.values.WindowingStrategy.AccumulationMode;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString.Output;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString.Output;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowFnsTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowFnsTest.java
index 0acb1dc..bf9e875 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowFnsTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowFnsTest.java
@@ -75,7 +75,7 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.joda.time.Duration;
 import org.joda.time.Instant;
 import org.junit.Before;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowsReshuffleDoFnTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowsReshuffleDoFnTest.java
index 11322a9..81a2bf5 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowsReshuffleDoFnTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingGroupAlsoByWindowsReshuffleDoFnTest.java
@@ -48,7 +48,7 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.hamcrest.Matchers;
 import org.joda.time.Duration;
 import org.joda.time.Instant;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java
index 03d4376..b0577e4 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingModeExecutionContextTest.java
@@ -65,7 +65,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
 import org.hamcrest.Matchers;
 import org.joda.time.Instant;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputDoFnRunnerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputDoFnRunnerTest.java
index 24d17ff..8d87a10 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputDoFnRunnerTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputDoFnRunnerTest.java
@@ -63,7 +63,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.hamcrest.Matchers;
 import org.joda.time.Duration;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcherTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcherTest.java
index d70d7f6..c1a9945 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcherTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/StreamingSideInputFetcherTest.java
@@ -49,7 +49,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Sets;
 import org.hamcrest.Matchers;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillKeyedWorkItemTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillKeyedWorkItemTest.java
index 4441f35..de62567 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillKeyedWorkItemTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillKeyedWorkItemTest.java
@@ -40,7 +40,7 @@
 import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.transforms.windowing.PaneInfo.Timing;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.hamcrest.Matchers;
 import org.joda.time.Instant;
 import org.junit.Before;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillReaderIteratorBaseTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillReaderIteratorBaseTest.java
index bff116f..0b89c99 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillReaderIteratorBaseTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillReaderIteratorBaseTest.java
@@ -26,7 +26,7 @@
 import java.util.List;
 import org.apache.beam.runners.dataflow.worker.windmill.Windmill;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateCacheTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateCacheTest.java
index 64f425a..23fe22f 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateCacheTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateCacheTest.java
@@ -28,7 +28,7 @@
 import org.apache.beam.sdk.state.State;
 import org.apache.beam.sdk.state.StateSpec;
 import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.joda.time.Instant;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternalsTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternalsTest.java
index f708500..e693672 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternalsTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateInternalsTest.java
@@ -50,7 +50,7 @@
 import org.apache.beam.sdk.transforms.Sum;
 import org.apache.beam.sdk.transforms.windowing.TimestampCombiner;
 import org.apache.beam.sdk.util.CoderUtils;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Futures;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateReaderTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateReaderTest.java
index ef529df..f2628ff 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateReaderTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WindmillStateReaderTest.java
@@ -29,8 +29,8 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.VarIntCoder;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString.Output;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString.Output;
 import org.hamcrest.Matchers;
 import org.joda.time.Instant;
 import org.junit.Before;
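The vendored protobuf classes move together with gRPC, so ByteString and ByteString.Output are now imported from the v1p26p0 namespace as well. A small usage sketch under that assumption (illustrative only, not taken from the patch):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;

final class ByteStringExample {
  static ByteString roundTrip() throws IOException {
    // Build a ByteString incrementally via its Output stream, as the tests above do.
    ByteString.Output out = ByteString.newOutput();
    out.write("hello".getBytes(StandardCharsets.UTF_8));
    return out.toByteString();
  }
}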
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java
index 181183d..15ff67f 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/WorkerCustomSourcesTest.java
@@ -102,7 +102,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.ValueWithRecordId;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlServiceTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlServiceTest.java
index 0cac04e..f986783 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlServiceTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/BeamFnControlServiceTest.java
@@ -35,9 +35,9 @@
 import org.apache.beam.runners.fnexecution.control.FnApiControlClient;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.net.HostAndPort;
 import org.junit.Before;
@@ -88,7 +88,7 @@
     Server server = ServerFactory.createDefault().create(ImmutableList.of(service), descriptor);
     String url = service.getApiServiceDescriptor().getUrl();
     BeamFnControlGrpc.BeamFnControlStub clientStub =
-        BeamFnControlGrpc.newStub(ManagedChannelBuilder.forTarget(url).usePlaintext(true).build());
+        BeamFnControlGrpc.newStub(ManagedChannelBuilder.forTarget(url).usePlaintext().build());
 
     // Connect from the client.
     clientStub.control(requestObserver);
@@ -134,9 +134,9 @@
 
     String url = service.getApiServiceDescriptor().getUrl();
     BeamFnControlGrpc.BeamFnControlStub clientStub =
-        BeamFnControlGrpc.newStub(ManagedChannelBuilder.forTarget(url).usePlaintext(true).build());
+        BeamFnControlGrpc.newStub(ManagedChannelBuilder.forTarget(url).usePlaintext().build());
     BeamFnControlGrpc.BeamFnControlStub anotherClientStub =
-        BeamFnControlGrpc.newStub(ManagedChannelBuilder.forTarget(url).usePlaintext(true).build());
+        BeamFnControlGrpc.newStub(ManagedChannelBuilder.forTarget(url).usePlaintext().build());
 
     // Connect from the client.
     clientStub.control(requestObserver);
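Besides the package rename, the test above also drops the boolean argument from usePlaintext: the usePlaintext(boolean) overload was deprecated and later removed upstream, so call sites move to the no-arg usePlaintext(). A minimal sketch of building a plaintext channel against the vendored 1.26 API (the class and method names here are illustrative, not from the patch):

import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannelBuilder;

final class PlaintextChannelExample {
  static ManagedChannel open(String target) {
    // The no-arg usePlaintext() disables TLS; the older usePlaintext(true) spelling is no longer available.
    return ManagedChannelBuilder.forTarget(target).usePlaintext().build();
  }
}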
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperationTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperationTest.java
index eb3d21d..a89dac6 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperationTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/RegisterAndProcessBundleOperationTest.java
@@ -80,7 +80,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.ValueInSingleWindow.Coder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableTable;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/TimerReceiverTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/TimerReceiverTest.java
index 2067a3a..68a54d5 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/TimerReceiverTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/control/TimerReceiverTest.java
@@ -70,7 +70,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcServiceTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcServiceTest.java
index 9c2b57a..ad514a8 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcServiceTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/data/BeamFnDataGrpcServiceTest.java
@@ -51,22 +51,22 @@
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.BindableService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.CallOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Channel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ClientCall;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ClientInterceptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ForwardingClientCall.SimpleForwardingClientCall;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata.Key;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.MethodDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerInterceptors;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.BindableService;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.CallOptions;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Channel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ClientCall;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ClientInterceptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ForwardingClientCall.SimpleForwardingClientCall;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata.Key;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.MethodDescriptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerInterceptors;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingServiceTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingServiceTest.java
index 55b81e0..114ded8 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingServiceTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/logging/BeamFnLoggingServiceTest.java
@@ -38,12 +38,12 @@
 import org.apache.beam.runners.dataflow.worker.fn.stream.ServerStreamObserverFactory;
 import org.apache.beam.runners.fnexecution.GrpcContextHeaderAccessorProvider;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.BindableService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.BindableService;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.net.HostAndPort;
 import org.junit.After;
 import org.junit.Test;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactoryTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactoryTest.java
index 43d6975..e41fd69 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactoryTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/fn/stream/ServerStreamObserverFactoryTest.java
@@ -24,8 +24,8 @@
 import org.apache.beam.sdk.fn.stream.BufferingStreamObserver;
 import org.apache.beam.sdk.fn.stream.DirectStreamObserver;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodesTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodesTest.java
index d1115c9..0e36473 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodesTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/graph/InsertFetchAndFilterStreamingSideInputNodesTest.java
@@ -54,7 +54,7 @@
 import org.apache.beam.sdk.transforms.View;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Equivalence;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Equivalence.Wrapper;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java
index 568fcff..84adfcd 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/logging/DataflowWorkerLoggingHandlerTest.java
@@ -35,7 +35,7 @@
 import org.apache.beam.runners.dataflow.worker.NameContextsForTests;
 import org.apache.beam.runners.dataflow.worker.TestOperationContext.TestDataflowExecutionState;
 import org.apache.beam.runners.dataflow.worker.testing.RestoreDataflowLoggingMDC;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Timestamp;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Timestamp;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Supplier;
 import org.junit.After;
 import org.junit.Before;
diff --git a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServerTest.java b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServerTest.java
index 9adce9f..b889138 100644
--- a/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServerTest.java
+++ b/runners/google-cloud-dataflow-java/worker/src/test/java/org/apache/beam/runners/dataflow/worker/windmill/GrpcWindmillServerTest.java
@@ -59,13 +59,13 @@
 import org.apache.beam.runners.dataflow.worker.windmill.WindmillServerStub.CommitWorkStream;
 import org.apache.beam.runners.dataflow.worker.windmill.WindmillServerStub.GetDataStream;
 import org.apache.beam.runners.dataflow.worker.windmill.WindmillServerStub.GetWorkStream;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.util.MutableHandlerRegistry;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.util.MutableHandlerRegistry;
 import org.hamcrest.Matchers;
 import org.joda.time.Instant;
 import org.junit.After;
diff --git a/runners/java-fn-execution/build.gradle b/runners/java-fn-execution/build.gradle
index f032d8f..434ac02 100644
--- a/runners/java-fn-execution/build.gradle
+++ b/runners/java-fn-execution/build.gradle
@@ -30,7 +30,7 @@
   compile project(":sdks:java:fn-execution")
   compile project(":runners:core-construction-java")
   compile project(path: ":vendor:sdks-java-extensions-protobuf", configuration: "shadow")
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   compile library.java.slf4j_api
   compile library.java.args4j
   testCompile project(":sdks:java:harness")
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/FnService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/FnService.java
index 3055b0b..634657a 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/FnService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/FnService.java
@@ -17,7 +17,7 @@
  */
 package org.apache.beam.runners.fnexecution;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.BindableService;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.BindableService;
 
 /** An interface sharing common behavior with services used during execution of user Fns. */
 public interface FnService extends AutoCloseable, BindableService {
@@ -26,8 +26,8 @@
    *
    * <p>There should be no more calls to any service method by the time a call to {@link #close()}
    * begins. Specifically, this means that a {@link
-   * org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server} that this service is bound to should have
-   * completed a call to the {@link org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server#shutdown()}
+   * org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server} that this service is bound to should have
+   * completed a call to the {@link org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server#shutdown()}
    * method, and all future incoming calls will be rejected.
    */
   @Override
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProvider.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProvider.java
index 5d758a2..4c7899c 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProvider.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProvider.java
@@ -17,14 +17,14 @@
  */
 package org.apache.beam.runners.fnexecution;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Context;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Contexts;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata.Key;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerCall;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerCall.Listener;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerCallHandler;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerInterceptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Context;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Contexts;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata.Key;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerCall;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerCall.Listener;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerCallHandler;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerInterceptor;
 
 /**
  * A HeaderAccessorProvider which intercept the header in a GRPC request and expose the relevant
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcFnServer.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcFnServer.java
index f7a4a4b..fbb12af 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcFnServer.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/GrpcFnServer.java
@@ -20,7 +20,7 @@
 import java.io.IOException;
 import java.util.concurrent.TimeUnit;
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 
 /**
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/InProcessServerFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/InProcessServerFactory.java
index a899cb2..e72b0bc 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/InProcessServerFactory.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/InProcessServerFactory.java
@@ -21,10 +21,10 @@
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.BindableService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerInterceptors;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.BindableService;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerInterceptors;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
 
 /**
  * A {@link ServerFactory} which creates {@link Server servers} with the {@link
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/ServerFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/ServerFactory.java
index ff0d5b4..30f6b8b 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/ServerFactory.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/ServerFactory.java
@@ -29,16 +29,16 @@
 import java.util.function.Supplier;
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.sdk.fn.channel.SocketAddressFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.BindableService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerInterceptors;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.netty.NettyServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.EpollEventLoopGroup;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.EpollServerDomainSocketChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.EpollServerSocketChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.unix.DomainSocketAddress;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.util.internal.ThreadLocalRandom;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.BindableService;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerInterceptors;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NettyServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.EpollEventLoopGroup;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.EpollServerDomainSocketChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.EpollServerSocketChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.unix.DomainSocketAddress;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.util.internal.ThreadLocalRandom;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.net.HostAndPort;
 
 /** A {@link Server gRPC server} factory. */
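ServerFactory builds servers with either the Netty or the in-process transport, all of which now come from the v1p26p0 vendored namespace. A self-contained sketch of the in-process pairing (the name "example-server" is arbitrary, and this is an illustration rather than Beam's factory code):

import java.io.IOException;
import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;

final class InProcessExample {
  static void run() throws IOException {
    // Server and channel are matched purely by name; no network sockets are opened.
    Server server = InProcessServerBuilder.forName("example-server").directExecutor().build().start();
    ManagedChannel channel = InProcessChannelBuilder.forName("example-server").directExecutor().build();
    channel.shutdownNow();
    server.shutdownNow();
  }
}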
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactRetrievalService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactRetrievalService.java
index 72af9e81..a9b04b3 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactRetrievalService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactRetrievalService.java
@@ -29,11 +29,11 @@
 import org.apache.beam.model.jobmanagement.v1.ArtifactApi.ArtifactMetadata;
 import org.apache.beam.model.jobmanagement.v1.ArtifactApi.ProxyManifest;
 import org.apache.beam.model.jobmanagement.v1.ArtifactRetrievalServiceGrpc;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.JsonFormat;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.JsonFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.Cache;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.cache.CacheBuilder;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactStagingService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactStagingService.java
index 0b0fadf..ae951d9 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactStagingService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/AbstractArtifactStagingService.java
@@ -35,11 +35,11 @@
 import org.apache.beam.model.jobmanagement.v1.ArtifactStagingServiceGrpc.ArtifactStagingServiceImplBase;
 import org.apache.beam.model.pipeline.v1.RunnerApi;
 import org.apache.beam.runners.fnexecution.FnService;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.JsonFormat;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.JsonFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hasher;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.hash.Hashing;
 import org.slf4j.Logger;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactStagingService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactStagingService.java
index c9baa17..74bce71 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactStagingService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactStagingService.java
@@ -31,8 +31,8 @@
 import org.apache.beam.sdk.io.fs.ResolveOptions.StandardResolveOptions;
 import org.apache.beam.sdk.io.fs.ResourceId;
 import org.apache.beam.sdk.util.MimeTypes;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClient.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClient.java
index 9051051..e0e5d50 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClient.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClient.java
@@ -29,9 +29,9 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest;
 import org.apache.beam.sdk.fn.stream.SynchronizedStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolService.java
index 598e1db..82409ae 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolService.java
@@ -26,7 +26,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnControlGrpc;
 import org.apache.beam.runners.fnexecution.FnService;
 import org.apache.beam.runners.fnexecution.HeaderAccessor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java
index cd81c0a..9a798ba 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/control/ProcessBundleDescriptors.java
@@ -56,7 +56,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableTable;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java
index 5d58c5c..1c283bf 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/data/GrpcDataService.java
@@ -36,7 +36,7 @@
 import org.apache.beam.sdk.fn.data.LogicalEndpoint;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.SettableFuture;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobService.java
index 2f4df48..80fbd65 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobService.java
@@ -52,11 +52,11 @@
 import org.apache.beam.sdk.fn.stream.SynchronizedStreamObserver;
 import org.apache.beam.sdk.function.ThrowingConsumer;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -344,7 +344,9 @@
           event -> {
             syncResponseObserver.onNext(
                 JobMessagesResponse.newBuilder().setStateResponse(event).build());
-            if (JobInvocation.isTerminated(event.getState())) {
+            // The terminal state is always updated after the last message, which is
+            // why we can safely end the stream here.
+            if (JobInvocation.isTerminated(invocation.getStateEvent().getState())) {
               responseObserver.onCompleted();
             }
           };
@@ -353,8 +355,11 @@
               syncResponseObserver.onNext(
                   JobMessagesResponse.newBuilder().setMessageResponse(message).build());
 
-      invocation.addStateListener(stateListener);
       invocation.addMessageListener(messageListener);
+      // The order matters here. Make sure to send all the messages first, because the stream
+      // will be ended once the terminal state is reached.
+      invocation.addStateListener(stateListener);
+
     } catch (StatusRuntimeException | StatusException e) {
       responseObserver.onError(e);
     } catch (Exception e) {
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocation.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocation.java
index 2da0592..ddc379f 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocation.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocation.java
@@ -36,7 +36,7 @@
 import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
 import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
 import org.apache.beam.sdk.PipelineResult;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.Timestamps;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.Timestamps;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.FutureCallback;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Futures;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListenableFuture;
@@ -53,12 +53,14 @@
   private final PortablePipelineRunner pipelineRunner;
   private final JobInfo jobInfo;
   private final ListeningExecutorService executorService;
-  private List<Consumer<JobStateEvent>> stateObservers;
-  private List<Consumer<JobMessage>> messageObservers;
+  private final List<JobStateEvent> stateHistory;
+  private final List<JobMessage> messageHistory;
+  private final List<Consumer<JobStateEvent>> stateObservers;
+  private final List<Consumer<JobMessage>> messageObservers;
+
   private JobApi.MetricResults metrics;
   private PortablePipelineResult resultHandle;
   @Nullable private ListenableFuture<PortablePipelineResult> invocationFuture;
-  private List<JobStateEvent> stateHistory;
 
   public JobInvocation(
       JobInfo jobInfo,
@@ -73,6 +75,7 @@
     this.messageObservers = new ArrayList<>();
     this.invocationFuture = null;
     this.stateHistory = new ArrayList<>();
+    this.messageHistory = new ArrayList<>();
     this.metrics = JobApi.MetricResults.newBuilder().build();
     this.setState(JobState.Enum.STOPPED);
   }
@@ -217,6 +220,9 @@
 
   /** Listen for job messages with a {@link Consumer}. */
   public synchronized void addMessageListener(Consumer<JobMessage> messageStreamObserver) {
+    for (JobMessage msg : messageHistory) {
+      messageStreamObserver.accept(msg);
+    }
     messageObservers.add(messageStreamObserver);
   }
 
@@ -243,6 +249,7 @@
   }
 
   private synchronized void sendMessage(JobMessage message) {
+    messageHistory.add(message);
     for (Consumer<JobMessage> observer : messageObservers) {
       observer.accept(message);
     }
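
The JobInvocation change above records every JobMessage and replays the recorded history to listeners that attach later, so a client connecting after the job has started still receives the earlier messages. Below is a minimal, hypothetical sketch of that replay-on-subscribe pattern; the class and method names are illustrative only and are not Beam APIs.

```java
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

final class MessageHistorySketch<T> {
  private final List<T> history = new ArrayList<>();
  private final List<Consumer<T>> observers = new ArrayList<>();

  public synchronized void addListener(Consumer<T> observer) {
    for (T past : history) {
      observer.accept(past); // replay everything published before this listener attached
    }
    observers.add(observer);
  }

  public synchronized void publish(T message) {
    history.add(message); // record first so late subscribers can catch up
    for (Consumer<T> observer : observers) {
      observer.accept(message);
    }
  }
}
```
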
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvoker.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvoker.java
index 7612d8b..0f66c38 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvoker.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvoker.java
@@ -22,7 +22,7 @@
 import java.util.concurrent.ThreadFactory;
 import javax.annotation.Nullable;
 import org.apache.beam.model.pipeline.v1.RunnerApi;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.MoreExecutors;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ThreadFactoryBuilder;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobPreparation.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobPreparation.java
index a304093..30b0774 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobPreparation.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/JobPreparation.java
@@ -19,7 +19,7 @@
 
 import com.google.auto.value.AutoValue;
 import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 
 /** A job that has been prepared, but not invoked. */
 @AutoValue
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarCreator.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarCreator.java
index c14098a..bb70158 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarCreator.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarCreator.java
@@ -58,9 +58,9 @@
 import org.apache.beam.sdk.metrics.MetricResults;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.MessageOrBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.JsonFormat;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.MessageOrBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.JsonFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarUtils.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarUtils.java
index 291605a..d32f1e1 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarUtils.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/jobsubmission/PortablePipelineJarUtils.java
@@ -27,9 +27,9 @@
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
 import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Message.Builder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.JsonFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Message.Builder;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.JsonFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingService.java
index a37a2f3..aa7117d 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingService.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.LogControl;
 import org.apache.beam.model.fnexecution.v1.BeamFnLoggingGrpc;
 import org.apache.beam.runners.fnexecution.FnService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/JobInfo.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/JobInfo.java
index aea6bb3..f9d9c66 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/JobInfo.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/JobInfo.java
@@ -20,7 +20,7 @@
 import com.google.auto.value.AutoValue;
 import java.io.Serializable;
 import org.apache.beam.model.fnexecution.v1.ProvisionApi;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 
 /**
  * A subset of {@link org.apache.beam.model.fnexecution.v1.ProvisionApi.ProvisionInfo} that
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java
index 4fec80c..aeece77 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionService.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.fnexecution.v1.ProvisionServiceGrpc;
 import org.apache.beam.model.fnexecution.v1.ProvisionServiceGrpc.ProvisionServiceImplBase;
 import org.apache.beam.runners.fnexecution.FnService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A {@link ProvisionServiceImplBase provision service} that returns a static response to all calls.
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/splittabledofn/SDFFeederViaStateAndTimers.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/splittabledofn/SDFFeederViaStateAndTimers.java
index 920dae6..d4edbd2 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/splittabledofn/SDFFeederViaStateAndTimers.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/splittabledofn/SDFFeederViaStateAndTimers.java
@@ -43,8 +43,8 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.util.WindowedValue.FullWindowedValueCoder;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.Durations;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.Durations;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.joda.time.Instant;
 
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/GrpcStateService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/GrpcStateService.java
index 9c72d81..df10910 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/GrpcStateService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/GrpcStateService.java
@@ -28,8 +28,8 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse;
 import org.apache.beam.model.fnexecution.v1.BeamFnStateGrpc;
 import org.apache.beam.runners.fnexecution.FnService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ServerCallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ServerCallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /** An implementation of the Beam Fn State service. */
 public class GrpcStateService extends BeamFnStateGrpc.BeamFnStateImplBase
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/InMemoryBagUserStateFactory.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/InMemoryBagUserStateFactory.java
index f840864..988f63db 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/InMemoryBagUserStateFactory.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/InMemoryBagUserStateFactory.java
@@ -31,7 +31,7 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.state.BagState;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
 
 /**
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/StateRequestHandlers.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/StateRequestHandlers.java
index 26dd6ac..b05776e 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/StateRequestHandlers.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/state/StateRequestHandlers.java
@@ -51,7 +51,7 @@
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.common.Reiterable;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.sdk.v2.sdk.extensions.protobuf.ByteStringCoder;
 
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java
index ce7498b..c3f9bba 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcService.java
@@ -35,7 +35,7 @@
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.runners.fnexecution.FnService;
 import org.apache.beam.runners.fnexecution.HeaderAccessor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClient.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClient.java
index dbd7fa8..eacb3fc 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClient.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClient.java
@@ -29,7 +29,7 @@
 import org.apache.beam.sdk.fn.IdGenerator;
 import org.apache.beam.sdk.fn.IdGenerators;
 import org.apache.beam.sdk.fn.stream.SynchronizedStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/translation/PipelineTranslatorUtils.java b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/translation/PipelineTranslatorUtils.java
index 6e6c80a..f51072b 100644
--- a/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/translation/PipelineTranslatorUtils.java
+++ b/runners/java-fn-execution/src/main/java/org/apache/beam/runners/fnexecution/translation/PipelineTranslatorUtils.java
@@ -40,7 +40,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.BiMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableBiMap;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProviderTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProviderTest.java
index 532e904..85763a7 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProviderTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/GrpcContextHeaderAccessorProviderTest.java
@@ -23,16 +23,16 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.Elements;
 import org.apache.beam.model.fnexecution.v1.BeamFnDataGrpc;
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.CallOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Channel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ClientCall;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ClientInterceptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ForwardingClientCall.SimpleForwardingClientCall;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.MethodDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.CallOptions;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Channel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ClientCall;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ClientInterceptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ForwardingClientCall.SimpleForwardingClientCall;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.MethodDescriptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.junit.Assert;
 import org.junit.Test;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/ServerFactoryTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/ServerFactoryTest.java
index 0972d7b..2dbed76 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/ServerFactoryTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/ServerFactoryTest.java
@@ -42,11 +42,11 @@
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
 import org.apache.beam.sdk.fn.test.TestStreams;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.Epoll;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.Epoll;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.net.HostAndPort;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactServicesTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactServicesTest.java
index 9585530..2479da0 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactServicesTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/BeamFileSystemArtifactServicesTest.java
@@ -58,10 +58,10 @@
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.InProcessServerFactory;
 import org.apache.beam.sdk.io.FileSystems;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/ClassLoaderArtifactServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/ClassLoaderArtifactServiceTest.java
index 65d54a9..849da70 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/ClassLoaderArtifactServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/artifact/ClassLoaderArtifactServiceTest.java
@@ -42,10 +42,10 @@
 import org.apache.beam.model.jobmanagement.v1.ArtifactStagingServiceGrpc;
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.InProcessServerFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Charsets;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.junit.Assert;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactoryTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactoryTest.java
index b5ac3c6..1d53d35 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactoryTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/DefaultJobBundleFactoryTest.java
@@ -62,8 +62,8 @@
 import org.apache.beam.sdk.options.ExperimentalOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.junit.Assert;
 import org.junit.Before;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolServiceTest.java
index cb65a0e..0af2602 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientPoolServiceTest.java
@@ -35,8 +35,8 @@
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.InProcessServerFactory;
 import org.apache.beam.sdk.util.MoreFutures;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientTest.java
index 341b53c..63fbe55 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/FnApiControlClientTest.java
@@ -31,7 +31,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionResponse;
 import org.apache.beam.sdk.util.MoreFutures;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
index d9d51d4..6ba0b84 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/control/RemoteExecutionTest.java
@@ -111,7 +111,7 @@
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.PCollectionList;
 import org.apache.beam.sdk.values.PCollectionView;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Optional;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Collections2;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/data/GrpcDataServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/data/GrpcDataServiceTest.java
index be08b58..adf843d 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/data/GrpcDataServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/data/GrpcDataServiceTest.java
@@ -47,10 +47,10 @@
 import org.apache.beam.sdk.fn.test.TestStreams;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobServiceTest.java
index e7b01af..a0a2bef 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/InMemoryJobServiceTest.java
@@ -32,9 +32,9 @@
 import org.apache.beam.model.jobmanagement.v1.JobApi;
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.model.pipeline.v1.RunnerApi;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocationTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocationTest.java
index 30e34d4..f9c256f 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocationTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/jobsubmission/JobInvocationTest.java
@@ -33,7 +33,7 @@
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.metrics.MetricResults;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.MoreExecutors;
 import org.joda.time.Duration;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingServiceTest.java
index 3bfda79..39a5e55 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/logging/GrpcLoggingServiceTest.java
@@ -37,9 +37,9 @@
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.InProcessServerFactory;
 import org.apache.beam.sdk.fn.test.TestStreams;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionServiceTest.java
index 850a070..a0fd1b7 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/provisioning/StaticGrpcProvisionServiceTest.java
@@ -31,11 +31,11 @@
 import org.apache.beam.model.fnexecution.v1.ProvisionServiceGrpc.ProvisionServiceBlockingStub;
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.InProcessServerFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ListValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.NullValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Value;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ListValue;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.NullValue;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Value;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/state/GrpcStateServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/state/GrpcStateServiceTest.java
index f8b3f29..aa986c9 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/state/GrpcStateServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/state/GrpcStateServiceTest.java
@@ -31,8 +31,8 @@
 import java.util.concurrent.TimeUnit;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
 import org.apache.beam.sdk.fn.test.TestStreams;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcServiceTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcServiceTest.java
index c9d6544..9b04914 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcServiceTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/BeamWorkerStatusGrpcServiceTest.java
@@ -39,10 +39,10 @@
 import org.apache.beam.runners.fnexecution.GrpcContextHeaderAccessorProvider;
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.InProcessServerFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.testing.GrpcCleanupRule;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.testing.GrpcCleanupRule;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Sets;
 import org.junit.After;
 import org.junit.Before;
diff --git a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClientTest.java b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClientTest.java
index 0aca49a..5fa143e 100644
--- a/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClientTest.java
+++ b/runners/java-fn-execution/src/test/java/org/apache/beam/runners/fnexecution/status/WorkerStatusClientTest.java
@@ -26,7 +26,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.WorkerStatusRequest;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.WorkerStatusResponse;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
diff --git a/runners/portability/java/build.gradle b/runners/portability/java/build.gradle
index 5425b8f..9c529a6 100644
--- a/runners/portability/java/build.gradle
+++ b/runners/portability/java/build.gradle
@@ -33,7 +33,7 @@
   compile library.java.hamcrest_library
   compile project(":runners:java-fn-execution")
   compile project(path: ":sdks:java:harness", configuration: "shadow")
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   compile library.java.slf4j_api
   testCompile project(path: ":runners:core-construction-java", configuration: "testRuntime")
   testCompile library.java.hamcrest_core
diff --git a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/ExternalWorkerService.java b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/ExternalWorkerService.java
index 028a934..363b013 100644
--- a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/ExternalWorkerService.java
+++ b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/ExternalWorkerService.java
@@ -25,7 +25,7 @@
 import org.apache.beam.runners.fnexecution.GrpcFnServer;
 import org.apache.beam.runners.fnexecution.ServerFactory;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/JobServicePipelineResult.java b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/JobServicePipelineResult.java
index bcfa321..820e93b 100644
--- a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/JobServicePipelineResult.java
+++ b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/JobServicePipelineResult.java
@@ -17,6 +17,7 @@
  */
 package org.apache.beam.runners.portability;
 
+import java.util.Iterator;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
@@ -26,24 +27,27 @@
 import org.apache.beam.model.jobmanagement.v1.JobApi.CancelJobRequest;
 import org.apache.beam.model.jobmanagement.v1.JobApi.CancelJobResponse;
 import org.apache.beam.model.jobmanagement.v1.JobApi.GetJobStateRequest;
+import org.apache.beam.model.jobmanagement.v1.JobApi.JobMessage;
+import org.apache.beam.model.jobmanagement.v1.JobApi.JobMessagesRequest;
+import org.apache.beam.model.jobmanagement.v1.JobApi.JobMessagesResponse;
 import org.apache.beam.model.jobmanagement.v1.JobApi.JobStateEvent;
 import org.apache.beam.model.jobmanagement.v1.JobServiceGrpc.JobServiceBlockingStub;
 import org.apache.beam.sdk.PipelineResult;
 import org.apache.beam.sdk.metrics.MetricResults;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.joda.time.Duration;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 class JobServicePipelineResult implements PipelineResult, AutoCloseable {
 
-  private static final long POLL_INTERVAL_MS = 10 * 1000;
+  private static final long POLL_INTERVAL_MS = 3_000;
 
   private static final Logger LOG = LoggerFactory.getLogger(JobServicePipelineResult.class);
 
   private final ByteString jobId;
   private final CloseableResource<JobServiceBlockingStub> jobService;
-  @Nullable private State terminationState;
+  @Nullable private State terminalState;
   @Nullable private final Runnable cleanup;
   private org.apache.beam.model.jobmanagement.v1.JobApi.MetricResults jobMetrics;
 
@@ -51,14 +55,14 @@
       ByteString jobId, CloseableResource<JobServiceBlockingStub> jobService, Runnable cleanup) {
     this.jobId = jobId;
     this.jobService = jobService;
-    this.terminationState = null;
+    this.terminalState = null;
     this.cleanup = cleanup;
   }
 
   @Override
   public State getState() {
-    if (terminationState != null) {
-      return terminationState;
+    if (terminalState != null) {
+      return terminalState;
     }
     JobServiceBlockingStub stub = jobService.get();
     JobStateEvent response =
@@ -98,26 +102,16 @@
 
   @Override
   public State waitUntilFinish() {
-    if (terminationState != null) {
-      return terminationState;
+    if (terminalState != null) {
+      return terminalState;
     }
-    JobServiceBlockingStub stub = jobService.get();
-    GetJobStateRequest request = GetJobStateRequest.newBuilder().setJobIdBytes(jobId).build();
-    JobStateEvent response = stub.getState(request);
-    State lastState = getJavaState(response.getState());
-    while (!lastState.isTerminal()) {
-      try {
-        Thread.sleep(POLL_INTERVAL_MS);
-      } catch (InterruptedException e) {
-        Thread.currentThread().interrupt();
-        throw new RuntimeException(e);
-      }
-      response = stub.getState(request);
-      lastState = getJavaState(response.getState());
+    try {
+      waitForTerminalState();
+      propagateErrors();
+      return terminalState;
+    } finally {
+      close();
     }
-    close();
-    terminationState = lastState;
-    return lastState;
   }
 
   @Override
@@ -139,6 +133,41 @@
     }
   }
 
+  private void waitForTerminalState() {
+    JobServiceBlockingStub stub = jobService.get();
+    GetJobStateRequest request = GetJobStateRequest.newBuilder().setJobIdBytes(jobId).build();
+    JobStateEvent response = stub.getState(request);
+    State lastState = getJavaState(response.getState());
+    while (!lastState.isTerminal()) {
+      try {
+        Thread.sleep(POLL_INTERVAL_MS);
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        throw new RuntimeException(e);
+      }
+      response = stub.getState(request);
+      lastState = getJavaState(response.getState());
+    }
+    terminalState = lastState;
+  }
+
+  private void propagateErrors() {
+    if (terminalState != State.DONE) {
+      JobMessagesRequest messageStreamRequest =
+          JobMessagesRequest.newBuilder().setJobIdBytes(jobId).build();
+      Iterator<JobMessagesResponse> messageStreamIterator =
+          jobService.get().getMessageStream(messageStreamRequest);
+      while (messageStreamIterator.hasNext()) {
+        JobMessage messageResponse = messageStreamIterator.next().getMessageResponse();
+        if (messageResponse.getImportance() == JobMessage.MessageImportance.JOB_MESSAGE_ERROR) {
+          throw new RuntimeException(
+              "The Runner experienced the following error during execution:\n"
+                  + messageResponse.getMessageText());
+        }
+      }
+    }
+  }
+
   private static State getJavaState(JobApi.JobState.Enum protoState) {
     switch (protoState) {
       case UNSPECIFIED:
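
The JobServicePipelineResult change above splits waitUntilFinish into two steps: poll until the job reports a terminal state, and, when that state is not DONE, scan the job message stream for JOB_MESSAGE_ERROR entries to raise. Below is a minimal, hypothetical sketch of that wait-then-propagate flow, assuming simple Supplier-based stand-ins for the job service calls (the names here are illustrative, not Beam APIs).

```java
import java.util.List;
import java.util.function.Supplier;

final class WaitAndPropagateSketch {
  enum State { RUNNING, DONE, FAILED }

  static State waitUntilFinish(Supplier<State> state, Supplier<List<String>> errors)
      throws InterruptedException {
    State current = state.get();
    while (current == State.RUNNING) { // poll until the job reports a terminal state
      Thread.sleep(3_000);             // mirrors the reduced POLL_INTERVAL_MS above
      current = state.get();
    }
    if (current != State.DONE) {       // surface the first runner error, if any was reported
      for (String error : errors.get()) {
        throw new RuntimeException(
            "The Runner experienced the following error during execution:\n" + error);
      }
    }
    return current;
  }
}
```
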
diff --git a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java
index 72c3cbc..31c7be0 100644
--- a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java
+++ b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/PortableRunner.java
@@ -53,8 +53,8 @@
 import org.apache.beam.sdk.options.PipelineOptionsValidator;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
 import org.apache.beam.sdk.util.ZipFiles;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Sets;
diff --git a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/testing/TestJobService.java b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/testing/TestJobService.java
index 76b8974..53578b7 100644
--- a/runners/portability/java/src/main/java/org/apache/beam/runners/portability/testing/TestJobService.java
+++ b/runners/portability/java/src/main/java/org/apache/beam/runners/portability/testing/TestJobService.java
@@ -27,7 +27,7 @@
 import org.apache.beam.model.jobmanagement.v1.JobApi.RunJobResponse;
 import org.apache.beam.model.jobmanagement.v1.JobServiceGrpc.JobServiceImplBase;
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A JobService for tests.
diff --git a/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java b/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java
index 40f7900..9f011f3 100644
--- a/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java
+++ b/runners/portability/java/src/test/java/org/apache/beam/runners/portability/PortableRunnerTest.java
@@ -38,9 +38,9 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
 import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Timestamp;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Timestamp;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaJobServerDriver.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaJobServerDriver.java
index f21d666..5788ad5 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaJobServerDriver.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/SamzaJobServerDriver.java
@@ -32,7 +32,7 @@
 import org.apache.beam.runners.fnexecution.jobsubmission.JobInvoker;
 import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/WindowAssignTranslator.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/WindowAssignTranslator.java
index 95c7328..114a256 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/WindowAssignTranslator.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/translation/WindowAssignTranslator.java
@@ -28,7 +28,7 @@
 import org.apache.beam.sdk.transforms.windowing.Window;
 import org.apache.beam.sdk.transforms.windowing.WindowFn;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.samza.operators.MessageStream;
 
 /**
diff --git a/runners/samza/src/main/java/org/apache/beam/runners/samza/util/SamzaPipelineTranslatorUtils.java b/runners/samza/src/main/java/org/apache/beam/runners/samza/util/SamzaPipelineTranslatorUtils.java
index 758515a4..b1bfc83 100644
--- a/runners/samza/src/main/java/org/apache/beam/runners/samza/util/SamzaPipelineTranslatorUtils.java
+++ b/runners/samza/src/main/java/org/apache/beam/runners/samza/util/SamzaPipelineTranslatorUtils.java
@@ -28,7 +28,7 @@
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 
 /** Utilities for pipeline translation. */
diff --git a/runners/spark/build.gradle b/runners/spark/build.gradle
index 1d70ad1..495b1d9 100644
--- a/runners/spark/build.gradle
+++ b/runners/spark/build.gradle
@@ -76,7 +76,6 @@
   provided "com.esotericsoftware.kryo:kryo:2.21"
   runtimeOnly library.java.jackson_module_scala
   runtimeOnly "org.scala-lang:scala-library:2.11.8"
-  compile "org.scala-lang.modules:scala-java8-compat_2.11:0.9.0"
   testCompile project(":sdks:java:io:kafka")
   testCompile project(path: ":sdks:java:core", configuration: "shadowTest")
   // SparkStateInternalsTest extends abstract StateInternalsTest
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkJobInvoker.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkJobInvoker.java
index 2ea261f..956e9bc 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkJobInvoker.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkJobInvoker.java
@@ -27,7 +27,7 @@
 import org.apache.beam.runners.fnexecution.jobsubmission.PortablePipelineRunner;
 import org.apache.beam.runners.fnexecution.provisioning.JobInfo;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ListeningExecutorService;
 import org.slf4j.Logger;
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineRunner.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineRunner.java
index d0c1c0f..e5166a3 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineRunner.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineRunner.java
@@ -49,7 +49,7 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.options.PortablePipelineOptions;
 import org.apache.beam.sdk.options.PortablePipelineOptions.RetrievalServiceType;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Struct;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Struct;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.kohsuke.args4j.CmdLineException;
diff --git a/sdks/go/README.md b/sdks/go/README.md
index ddbbe9c..f2d56d4 100644
--- a/sdks/go/README.md
+++ b/sdks/go/README.md
@@ -108,8 +108,7 @@
 
 
 See [BUILD.md](./BUILD.md) for how to build Go code in general. See
-[CONTAINERS.md](../CONTAINERS.md) for how to build and push the Go
-SDK harness container image.
+[container documentation](https://beam.apache.org/documentation/runtime/environments/#building-container-images) for how to build and push the Go SDK harness container image.
 
 ## Issues
 
diff --git a/sdks/java/container/build.gradle b/sdks/java/container/build.gradle
index ca5b3cf..5c182c9 100644
--- a/sdks/java/container/build.gradle
+++ b/sdks/java/container/build.gradle
@@ -74,7 +74,7 @@
           root: project.rootProject.hasProperty(["docker-repository-root"]) ?
                   project.rootProject["docker-repository-root"] : "apachebeam",
           tag: project.rootProject.hasProperty(["docker-tag"]) ?
-                  project.rootProject["docker-tag"] : project.version)
+                  project.rootProject["docker-tag"] : project.sdk_version)
   dockerfile project.file("./${dockerfileName}")
   files "./build/"
 }
diff --git a/sdks/java/core/build.gradle b/sdks/java/core/build.gradle
index a14305d..ea0f5c9 100644
--- a/sdks/java/core/build.gradle
+++ b/sdks/java/core/build.gradle
@@ -46,6 +46,7 @@
 processResources {
   filter org.apache.tools.ant.filters.ReplaceTokens, tokens: [
     'pom.version': version,
+    'pom.sdk_version': sdk_version,
     'timestamp': new Date().format("yyyy-MM-dd HH:mm")
   ]
 }
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
index 6784712..380e324 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/schemas/SchemaTranslation.java
@@ -31,7 +31,7 @@
 import org.apache.beam.sdk.schemas.Schema.TypeName;
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.values.Row;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
index 27e8517..3b2b24e 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/DoFn.java
@@ -618,7 +618,10 @@
   @Target(ElementType.PARAMETER)
   public @interface Element {}
 
-  /** Parameter annotation for the input element timestamp for a {@link ProcessElement} method. */
+  /**
+   * Parameter annotation for the input element timestamp for {@link ProcessElement}, {@link
+   * GetInitialRestriction}, {@link SplitRestriction}, and {@link NewTracker} methods.
+   */
   @Documented
   @Retention(RetentionPolicy.RUNTIME)
   @Target(ElementType.PARAMETER)
@@ -723,7 +726,23 @@
    * Annotation for the method that maps an element to an initial restriction for a <a
    * href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
    *
-   * <p>Signature: {@code RestrictionT getInitialRestriction(InputT element);}
+   * <p>Signature: {@code RestrictionT getInitialRestriction(InputT element, <optional arguments>);}
+   *
+   * <p>The optional arguments are allowed to be:
+   *
+   * <ul>
+   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
+   *       passed the timestamp of the current element being processed; the argument must be of type
+   *       {@link Instant}.
+   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
+   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
+   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
+   *       window is not accessed a runner may perform additional optimizations.
+   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
+   *       about the current triggering pane.
+   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
+   *       options for the current pipeline.
+   * </ul>
    */
   // TODO: Make the InputT parameter optional.
   @Documented
@@ -788,7 +807,23 @@
    * be processed in parallel.
    *
    * <p>Signature: {@code void splitRestriction(InputT element, RestrictionT restriction,
-   * OutputReceiver<RestrictionT> receiver);}
+   * OutputReceiver<RestrictionT> receiver, <optional arguments>);}
+   *
+   * <p>The optional arguments are allowed to be:
+   *
+   * <ul>
+   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
+   *       passed the timestamp of the current element being processed; the argument must be of type
+   *       {@link Instant}.
+   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
+   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
+   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
+   *       window is not accessed a runner may perform additional optimizations.
+   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
+   *       about the current triggering pane.
+   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
+   *       options for the current pipeline.
+   * </ul>
    *
    * <p>Optional: if this method is omitted, the restriction will not be split (equivalent to
    * defining the method and outputting the {@code restriction} unchanged).
@@ -804,8 +839,25 @@
    * Annotation for the method that creates a new {@link RestrictionTracker} for the restriction of
    * a <a href="https://s.apache.org/splittable-do-fn">splittable</a> {@link DoFn}.
    *
-   * <p>Signature: {@code MyRestrictionTracker newTracker(RestrictionT restriction);} where {@code
-   * MyRestrictionTracker} must be a subtype of {@code RestrictionTracker<RestrictionT>}.
+   * <p>Signature: {@code MyRestrictionTracker newTracker(RestrictionT restriction, <optional
+   * arguments>);} where {@code MyRestrictionTracker} must be a subtype of {@code
+   * RestrictionTracker<RestrictionT>}.
+   *
+   * <p>The optional arguments are allowed to be:
+   *
+   * <ul>
+   *   <li>If one of its arguments is tagged with the {@link Timestamp} annotation, then it will be
+   *       passed the timestamp of the current element being processed; the argument must be of type
+   *       {@link Instant}.
+   *   <li>If one of its arguments is a subtype of {@link BoundedWindow}, then it will be passed the
+   *       window of the current element. When applied by {@link ParDo} the subtype of {@link
+   *       BoundedWindow} must match the type of windows on the input {@link PCollection}. If the
+   *       window is not accessed a runner may perform additional optimizations.
+   *   <li>If one of its arguments is of type {@link PaneInfo}, then it will be passed information
+   *       about the current triggering pane.
+   *   <li>If one of the parameters is of type {@link PipelineOptions}, then it will be passed the
+   *       options for the current pipeline.
+   * </ul>
    */
   @Documented
   @Retention(RetentionPolicy.RUNTIME)
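For context on the javadoc above, a minimal sketch of a splittable DoFn that exercises the newly documented optional parameters might look as follows; OffsetRange and OffsetRangeTracker come from the Beam SDK, while the element type and processing logic are illustrative assumptions rather than part of this change.

// Illustrative sketch only (not part of this PR): a splittable DoFn whose restriction
// methods take the optional parameters now documented on @GetInitialRestriction,
// @SplitRestriction and @NewTracker.
import org.apache.beam.sdk.io.range.OffsetRange;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.splittabledofn.OffsetRangeTracker;
import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.joda.time.Instant;

class CharByCharFn extends DoFn<String, String> {

  @ProcessElement
  public void process(ProcessContext c, RestrictionTracker<OffsetRange, Long> tracker) {
    // Claim offsets from the tracker and emit one character per claimed offset.
    for (long i = tracker.currentRestriction().getFrom(); tracker.tryClaim(i); ++i) {
      c.output(String.valueOf(c.element().charAt((int) i)));
    }
  }

  @GetInitialRestriction
  public OffsetRange getInitialRestriction(
      String element,
      @Timestamp Instant timestamp,   // optional: element timestamp
      BoundedWindow window,           // optional: element window
      PaneInfo pane,                  // optional: triggering pane
      PipelineOptions options) {      // optional: pipeline options
    return new OffsetRange(0, element.length());
  }

  @SplitRestriction
  public void splitRestriction(
      String element,
      OffsetRange restriction,
      OutputReceiver<OffsetRange> receiver,
      BoundedWindow window) {         // any subset of the optional parameters may be used
    receiver.output(restriction);
  }

  @NewTracker
  public OffsetRangeTracker newTracker(OffsetRange restriction, PipelineOptions options) {
    return new OffsetRangeTracker(restriction);
  }
}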
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
index 5737ac9..2606359 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignature.java
@@ -945,7 +945,7 @@
 
   /** Describes a {@link DoFn.GetInitialRestriction} method. */
   @AutoValue
-  public abstract static class GetInitialRestrictionMethod implements DoFnMethod {
+  public abstract static class GetInitialRestrictionMethod implements MethodWithExtraParameters {
     /** The annotated method itself. */
     @Override
     public abstract Method targetMethod();
@@ -953,14 +953,28 @@
     /** Type of the returned restriction. */
     public abstract TypeDescriptor<?> restrictionT();
 
-    static GetInitialRestrictionMethod create(Method targetMethod, TypeDescriptor<?> restrictionT) {
-      return new AutoValue_DoFnSignature_GetInitialRestrictionMethod(targetMethod, restrictionT);
+    /** The window type used by this method, if any. */
+    @Nullable
+    @Override
+    public abstract TypeDescriptor<? extends BoundedWindow> windowT();
+
+    /** Types of optional parameters of the annotated method, in the order they appear. */
+    @Override
+    public abstract List<Parameter> extraParameters();
+
+    static GetInitialRestrictionMethod create(
+        Method targetMethod,
+        TypeDescriptor<?> restrictionT,
+        TypeDescriptor<? extends BoundedWindow> windowT,
+        List<Parameter> extraParameters) {
+      return new AutoValue_DoFnSignature_GetInitialRestrictionMethod(
+          targetMethod, restrictionT, windowT, extraParameters);
     }
   }
 
   /** Describes a {@link DoFn.SplitRestriction} method. */
   @AutoValue
-  public abstract static class SplitRestrictionMethod implements DoFnMethod {
+  public abstract static class SplitRestrictionMethod implements MethodWithExtraParameters {
     /** The annotated method itself. */
     @Override
     public abstract Method targetMethod();
@@ -968,14 +982,28 @@
     /** Type of the restriction taken and returned. */
     public abstract TypeDescriptor<?> restrictionT();
 
-    static SplitRestrictionMethod create(Method targetMethod, TypeDescriptor<?> restrictionT) {
-      return new AutoValue_DoFnSignature_SplitRestrictionMethod(targetMethod, restrictionT);
+    /** The window type used by this method, if any. */
+    @Nullable
+    @Override
+    public abstract TypeDescriptor<? extends BoundedWindow> windowT();
+
+    /** Types of optional parameters of the annotated method, in the order they appear. */
+    @Override
+    public abstract List<Parameter> extraParameters();
+
+    static SplitRestrictionMethod create(
+        Method targetMethod,
+        TypeDescriptor<?> restrictionT,
+        TypeDescriptor<? extends BoundedWindow> windowT,
+        List<Parameter> extraParameters) {
+      return new AutoValue_DoFnSignature_SplitRestrictionMethod(
+          targetMethod, restrictionT, windowT, extraParameters);
     }
   }
 
   /** Describes a {@link DoFn.NewTracker} method. */
   @AutoValue
-  public abstract static class NewTrackerMethod implements DoFnMethod {
+  public abstract static class NewTrackerMethod implements MethodWithExtraParameters {
     /** The annotated method itself. */
     @Override
     public abstract Method targetMethod();
@@ -986,9 +1014,23 @@
     /** Type of the returned {@link RestrictionTracker}. */
     public abstract TypeDescriptor<?> trackerT();
 
+    /** The window type used by this method, if any. */
+    @Nullable
+    @Override
+    public abstract TypeDescriptor<? extends BoundedWindow> windowT();
+
+    /** Types of optional parameters of the annotated method, in the order they appear. */
+    @Override
+    public abstract List<Parameter> extraParameters();
+
     static NewTrackerMethod create(
-        Method targetMethod, TypeDescriptor<?> restrictionT, TypeDescriptor<?> trackerT) {
-      return new AutoValue_DoFnSignature_NewTrackerMethod(targetMethod, restrictionT, trackerT);
+        Method targetMethod,
+        TypeDescriptor<?> restrictionT,
+        TypeDescriptor<?> trackerT,
+        TypeDescriptor<? extends BoundedWindow> windowT,
+        List<Parameter> extraParameters) {
+      return new AutoValue_DoFnSignature_NewTrackerMethod(
+          targetMethod, restrictionT, trackerT, windowT, extraParameters);
     }
   }
 
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java
index b3fde4f..04bf2a5 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/reflect/DoFnSignatures.java
@@ -103,6 +103,8 @@
   private static final ImmutableList<Class<? extends Parameter>>
       ALLOWED_SPLITTABLE_PROCESS_ELEMENT_PARAMETERS =
           ImmutableList.of(
+              Parameter.WindowParameter.class,
+              Parameter.PaneInfoParameter.class,
               Parameter.PipelineOptionsParameter.class,
               Parameter.ElementParameter.class,
               Parameter.TimestampParameter.class,
@@ -133,6 +135,28 @@
               Parameter.TaggedOutputReceiverParameter.class,
               Parameter.StateParameter.class);
 
+  private static final Collection<Class<? extends Parameter>>
+      ALLOWED_GET_INITIAL_RESTRICTION_PARAMETERS =
+          ImmutableList.of(
+              Parameter.WindowParameter.class,
+              Parameter.TimestampParameter.class,
+              Parameter.PaneInfoParameter.class,
+              Parameter.PipelineOptionsParameter.class);
+
+  private static final Collection<Class<? extends Parameter>> ALLOWED_SPLIT_RESTRICTION_PARAMETERS =
+      ImmutableList.of(
+          Parameter.WindowParameter.class,
+          Parameter.TimestampParameter.class,
+          Parameter.PaneInfoParameter.class,
+          Parameter.PipelineOptionsParameter.class);
+
+  private static final Collection<Class<? extends Parameter>> ALLOWED_NEW_TRACKER_PARAMETERS =
+      ImmutableList.of(
+          Parameter.WindowParameter.class,
+          Parameter.TimestampParameter.class,
+          Parameter.PaneInfoParameter.class,
+          Parameter.PipelineOptionsParameter.class);
+
   /** @return the {@link DoFnSignature} for the given {@link DoFn} instance. */
   public static <FnT extends DoFn<?, ?>> DoFnSignature signatureForDoFn(FnT fn) {
     return getSignature(fn.getClass());
@@ -438,7 +462,12 @@
           errors.forMethod(DoFn.GetInitialRestriction.class, getInitialRestrictionMethod);
       signatureBuilder.setGetInitialRestriction(
           analyzeGetInitialRestrictionMethod(
-              getInitialRestrictionErrors, fnT, getInitialRestrictionMethod, inputT));
+              getInitialRestrictionErrors,
+              fnT,
+              getInitialRestrictionMethod,
+              inputT,
+              outputT,
+              fnContext));
     }
 
     if (splitRestrictionMethod != null) {
@@ -446,7 +475,7 @@
           errors.forMethod(DoFn.SplitRestriction.class, splitRestrictionMethod);
       signatureBuilder.setSplitRestriction(
           analyzeSplitRestrictionMethod(
-              splitRestrictionErrors, fnT, splitRestrictionMethod, inputT));
+              splitRestrictionErrors, fnT, splitRestrictionMethod, inputT, outputT, fnContext));
     }
 
     if (getRestrictionCoderMethod != null) {
@@ -460,7 +489,8 @@
     if (newTrackerMethod != null) {
       ErrorReporter newTrackerErrors = errors.forMethod(DoFn.NewTracker.class, newTrackerMethod);
       signatureBuilder.setNewTracker(
-          analyzeNewTrackerMethod(newTrackerErrors, fnT, newTrackerMethod));
+          analyzeNewTrackerMethod(
+              newTrackerErrors, fnT, newTrackerMethod, inputT, outputT, fnContext));
     }
 
     signatureBuilder.setIsBoundedPerElement(inferBoundedness(fnT, processElement, errors));
@@ -812,6 +842,7 @@
 
     TypeDescriptor<?> trackerT = getTrackerType(fnClass, m);
     TypeDescriptor<? extends BoundedWindow> windowT = getWindowType(fnClass, m);
+
     for (int i = 0; i < params.length; ++i) {
       Parameter extraParam =
           analyzeExtraParameter(
@@ -1032,13 +1063,7 @@
 
       return Parameter.stateParameter(stateDecl);
     } else {
-      List<String> allowedParamTypes =
-          Arrays.asList(
-              formatType(new TypeDescriptor<BoundedWindow>() {}),
-              formatType(new TypeDescriptor<RestrictionTracker<?, ?>>() {}));
-      paramErrors.throwIllegalArgument(
-          "%s is not a valid context parameter. Should be one of %s",
-          formatType(paramT), allowedParamTypes);
+      paramErrors.throwIllegalArgument("%s is not a valid context parameter.", formatType(paramT));
       // Unreachable
       return null;
     }
@@ -1158,19 +1183,44 @@
   @VisibleForTesting
   static DoFnSignature.GetInitialRestrictionMethod analyzeGetInitialRestrictionMethod(
       ErrorReporter errors,
-      TypeDescriptor<? extends DoFn> fnT,
+      TypeDescriptor<? extends DoFn<?, ?>> fnT,
       Method m,
-      TypeDescriptor<?> inputT) {
+      TypeDescriptor<?> inputT,
+      TypeDescriptor<?> outputT,
+      FnAnalysisContext fnContext) {
     // Method is of the form:
     // @GetInitialRestriction
-    // RestrictionT getInitialRestriction(InputT element);
+    // RestrictionT getInitialRestriction(InputT element, ... additional optional parameters ...);
+
     Type[] params = m.getGenericParameterTypes();
     errors.checkArgument(
-        params.length == 1 && fnT.resolveType(params[0]).equals(inputT),
-        "Must take a single argument of type %s",
+        params.length >= 1 && fnT.resolveType(params[0]).equals(inputT),
+        "First argument must be of type %s",
         formatType(inputT));
+
+    MethodAnalysisContext methodContext = MethodAnalysisContext.create();
+    TypeDescriptor<? extends BoundedWindow> windowT = getWindowType(fnT, m);
+    for (int i = 1; i < params.length; ++i) {
+      Parameter extraParam =
+          analyzeExtraParameter(
+              errors,
+              fnContext,
+              methodContext,
+              fnT,
+              ParameterDescription.of(
+                  m, i, fnT.resolveType(params[i]), Arrays.asList(m.getParameterAnnotations()[i])),
+              inputT,
+              outputT);
+
+      methodContext.addParameter(extraParam);
+    }
+
+    for (Parameter parameter : methodContext.getExtraParameters()) {
+      checkParameterOneOf(errors, parameter, ALLOWED_GET_INITIAL_RESTRICTION_PARAMETERS);
+    }
+
     return DoFnSignature.GetInitialRestrictionMethod.create(
-        m, fnT.resolveType(m.getGenericReturnType()));
+        m, fnT.resolveType(m.getGenericReturnType()), windowT, methodContext.extraParameters);
   }
 
   /**
@@ -1186,16 +1236,19 @@
   @VisibleForTesting
   static DoFnSignature.SplitRestrictionMethod analyzeSplitRestrictionMethod(
       ErrorReporter errors,
-      TypeDescriptor<? extends DoFn> fnT,
+      TypeDescriptor<? extends DoFn<?, ?>> fnT,
       Method m,
-      TypeDescriptor<?> inputT) {
+      TypeDescriptor<?> inputT,
+      TypeDescriptor<?> outputT,
+      FnAnalysisContext fnContext) {
     // Method is of the form:
     // @SplitRestriction
-    // void splitRestriction(InputT element, RestrictionT restriction);
+    // void splitRestriction(InputT element, RestrictionT restriction, ... additional optional
+    // parameters ...);
     errors.checkArgument(void.class.equals(m.getReturnType()), "Must return void");
 
     Type[] params = m.getGenericParameterTypes();
-    errors.checkArgument(params.length == 3, "Must have exactly 3 arguments");
+    errors.checkArgument(params.length >= 3, "Must have at least 3 arguments");
     errors.checkArgument(
         fnT.resolveType(params[0]).equals(inputT),
         "First argument must be the element type %s",
@@ -1210,7 +1263,29 @@
         formatType(expectedReceiverT),
         formatType(receiverT));
 
-    return DoFnSignature.SplitRestrictionMethod.create(m, restrictionT);
+    MethodAnalysisContext methodContext = MethodAnalysisContext.create();
+    TypeDescriptor<? extends BoundedWindow> windowT = getWindowType(fnT, m);
+    for (int i = 3; i < params.length; ++i) {
+      Parameter extraParam =
+          analyzeExtraParameter(
+              errors,
+              fnContext,
+              methodContext,
+              fnT,
+              ParameterDescription.of(
+                  m, i, fnT.resolveType(params[i]), Arrays.asList(m.getParameterAnnotations()[i])),
+              inputT,
+              outputT);
+
+      methodContext.addParameter(extraParam);
+    }
+
+    for (Parameter parameter : methodContext.getExtraParameters()) {
+      checkParameterOneOf(errors, parameter, ALLOWED_SPLIT_RESTRICTION_PARAMETERS);
+    }
+
+    return DoFnSignature.SplitRestrictionMethod.create(
+        m, restrictionT, windowT, methodContext.getExtraParameters());
   }
 
   private static ImmutableMap<String, TimerDeclaration> analyzeTimerDeclarations(
@@ -1286,12 +1361,17 @@
 
   @VisibleForTesting
   static DoFnSignature.NewTrackerMethod analyzeNewTrackerMethod(
-      ErrorReporter errors, TypeDescriptor<? extends DoFn> fnT, Method m) {
+      ErrorReporter errors,
+      TypeDescriptor<? extends DoFn<?, ?>> fnT,
+      Method m,
+      TypeDescriptor<?> inputT,
+      TypeDescriptor<?> outputT,
+      FnAnalysisContext fnContext) {
     // Method is of the form:
     // @NewTracker
-    // TrackerT newTracker(RestrictionT restriction);
+    // TrackerT newTracker(RestrictionT restriction, ... additional optional parameters ...);
     Type[] params = m.getGenericParameterTypes();
-    errors.checkArgument(params.length == 1, "Must have a single argument");
+    errors.checkArgument(params.length >= 1, "Must have at least one argument");
 
     TypeDescriptor<?> restrictionT = fnT.resolveType(params[0]);
     TypeDescriptor<?> trackerT = fnT.resolveType(m.getGenericReturnType());
@@ -1301,7 +1381,30 @@
         "Returns %s, but must return a subtype of %s",
         formatType(trackerT),
         formatType(expectedTrackerT));
-    return DoFnSignature.NewTrackerMethod.create(m, restrictionT, trackerT);
+
+    MethodAnalysisContext methodContext = MethodAnalysisContext.create();
+    TypeDescriptor<? extends BoundedWindow> windowT = getWindowType(fnT, m);
+    for (int i = 1; i < params.length; ++i) {
+      Parameter extraParam =
+          analyzeExtraParameter(
+              errors,
+              fnContext,
+              methodContext,
+              fnT,
+              ParameterDescription.of(
+                  m, i, fnT.resolveType(params[i]), Arrays.asList(m.getParameterAnnotations()[i])),
+              inputT,
+              outputT);
+
+      methodContext.addParameter(extraParam);
+    }
+
+    for (Parameter parameter : methodContext.getExtraParameters()) {
+      checkParameterOneOf(errors, parameter, ALLOWED_NEW_TRACKER_PARAMETERS);
+    }
+
+    return DoFnSignature.NewTrackerMethod.create(
+        m, restrictionT, trackerT, windowT, methodContext.getExtraParameters());
   }
 
   private static Collection<Method> declaredMethodsWithAnnotation(
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java
index 741f373..08a962a 100644
--- a/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/util/ReleaseInfo.java
@@ -49,11 +49,16 @@
     return getProperties().get("name");
   }
 
-  /** Provides the SDK version. */
+  /** Provides the Beam version, e.g. 2.18.0-SNAPSHOT. */
   public String getVersion() {
     return getProperties().get("version");
   }
 
+  /** Provides the SDK version, e.g. 2.18.0 or 2.18.0.dev. */
+  public String getSdkVersion() {
+    return getProperties().get("sdk_version");
+  }
+
   /////////////////////////////////////////////////////////////////////////
   private static final Logger LOG = LoggerFactory.getLogger(ReleaseInfo.class);
   private static final String DEFAULT_NAME = "Apache Beam SDK for Java";
@@ -79,6 +84,9 @@
       if (!properties.containsKey("version")) {
         properties.setProperty("version", DEFAULT_VERSION);
       }
+      if (!properties.containsKey("sdk_version")) {
+        properties.setProperty("sdk_version", DEFAULT_VERSION);
+      }
       INSTANCE = new AutoValue_ReleaseInfo(ImmutableMap.copyOf((Map) properties));
     }
   }
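As a usage note, both properties are available at runtime from the ReleaseInfo singleton; a minimal example, assuming the existing getReleaseInfo() accessor, is:

// Hedged usage example: the two version strings can now diverge, e.g. for dev builds.
import org.apache.beam.sdk.util.ReleaseInfo;

class VersionExample {
  public static void main(String[] args) {
    ReleaseInfo info = ReleaseInfo.getReleaseInfo();
    System.out.println("Beam version: " + info.getVersion());    // e.g. 2.18.0-SNAPSHOT
    System.out.println("SDK version: " + info.getSdkVersion());  // e.g. 2.18.0 or 2.18.0.dev
  }
}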
diff --git a/sdks/java/core/src/main/resources/org/apache/beam/sdk/sdk.properties b/sdks/java/core/src/main/resources/org/apache/beam/sdk/sdk.properties
index 38181c4..3320a4c 100644
--- a/sdks/java/core/src/main/resources/org/apache/beam/sdk/sdk.properties
+++ b/sdks/java/core/src/main/resources/org/apache/beam/sdk/sdk.properties
@@ -17,6 +17,7 @@
 # SDK source version
 
 version=@pom.version@
+sdk_version=@pom.sdk_version@
 
 build.date=@timestamp@
 
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java
index bad9d55..841a75c 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/TestStreamTest.java
@@ -191,7 +191,7 @@
                 TimestampedValue.of("firstPane", new Instant(100)),
                 TimestampedValue.of("alsoFirstPane", new Instant(200)))
             .addElements(TimestampedValue.of("onTimePane", new Instant(500)))
-            .advanceWatermarkTo(new Instant(1001L))
+            .advanceWatermarkTo(new Instant(1000L))
             .addElements(
                 TimestampedValue.of("finalLatePane", new Instant(750)),
                 TimestampedValue.of("alsoFinalLatePane", new Instant(250)))
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
index 884b9a6..7cd03a7 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/ParDoTest.java
@@ -3736,6 +3736,7 @@
       ValidatesRunner.class,
       UsesStatefulParDo.class,
       UsesTimersInParDo.class,
+      UsesTestStream.class,
       UsesTestStreamWithOutputTimestamp.class
     })
     public void testOutputTimestamp() {
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesProcessElementTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesProcessElementTest.java
index 1a19eeb..593c846 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesProcessElementTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesProcessElementTest.java
@@ -37,9 +37,7 @@
   @Test
   public void testBadExtraProcessContextType() throws Exception {
     thrown.expect(IllegalArgumentException.class);
-    thrown.expectMessage(
-        "Integer is not a valid context parameter. "
-            + "Should be one of [BoundedWindow, RestrictionTracker<?, ?>]");
+    thrown.expectMessage("Integer is not a valid context parameter.");
 
     analyzeProcessElementMethod(
         new AnonymousMethod() {
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesSplittableDoFnTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesSplittableDoFnTest.java
index bac8459..d5efdc5 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesSplittableDoFnTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/reflect/DoFnSignaturesSplittableDoFnTest.java
@@ -19,23 +19,34 @@
 
 import static org.apache.beam.sdk.transforms.reflect.DoFnSignaturesTestUtils.analyzeProcessElementMethod;
 import static org.apache.beam.sdk.transforms.reflect.DoFnSignaturesTestUtils.errors;
+import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.lang.reflect.Method;
 import java.util.List;
 import org.apache.beam.sdk.coders.KvCoder;
+import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.StructuredCoder;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.state.StateSpec;
+import org.apache.beam.sdk.state.StateSpecs;
+import org.apache.beam.sdk.state.ValueState;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.DoFn.BoundedPerElement;
+import org.apache.beam.sdk.transforms.DoFn.StateId;
 import org.apache.beam.sdk.transforms.DoFn.UnboundedPerElement;
+import org.apache.beam.sdk.transforms.reflect.DoFnSignatures.FnAnalysisContext;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignaturesTestUtils.AnonymousMethod;
 import org.apache.beam.sdk.transforms.reflect.DoFnSignaturesTestUtils.FakeDoFn;
 import org.apache.beam.sdk.transforms.splittabledofn.HasDefaultTracker;
 import org.apache.beam.sdk.transforms.splittabledofn.RestrictionTracker;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
+import org.apache.beam.sdk.transforms.windowing.PaneInfo;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.TypeDescriptor;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Predicates;
+import org.joda.time.Instant;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -92,19 +103,45 @@
   }
 
   @Test
-  public void testSplittableProcessElementMustNotHaveOtherParams() throws Exception {
+  public void testSplittableProcessElementMustNotHaveUnsupportedParams() throws Exception {
     thrown.expect(IllegalArgumentException.class);
     thrown.expectMessage("Illegal parameter");
-    thrown.expectMessage("BoundedWindow");
+    thrown.expectMessage("ValueState");
 
-    DoFnSignature.ProcessElementMethod signature =
-        analyzeProcessElementMethod(
-            new AnonymousMethod() {
-              private void method(
-                  DoFn<Integer, String>.ProcessContext context,
-                  SomeRestrictionTracker tracker,
-                  BoundedWindow window) {}
-            });
+    DoFn<Integer, String> doFn =
+        new DoFn<Integer, String>() {
+          @StateId("my-state-id")
+          public final StateSpec<ValueState<String>> myStateSpec =
+              StateSpecs.value(StringUtf8Coder.of());
+
+          @ProcessElement
+          public void method(
+              DoFn<Integer, String>.ProcessContext context,
+              SomeRestrictionTracker tracker,
+              @StateId("my-state-id") ValueState<String> myState) {}
+        };
+    Method processElementMethod = null;
+    for (Method method : doFn.getClass().getDeclaredMethods()) {
+      if ("method".equals(method.getName())) {
+        processElementMethod = method;
+      }
+    }
+    checkState(processElementMethod != null);
+
+    FnAnalysisContext context = FnAnalysisContext.create();
+    context.addStateDeclaration(
+        DoFnSignature.StateDeclaration.create(
+            "my-state-id",
+            doFn.getClass().getField("myStateSpec"),
+            new TypeDescriptor<ValueState<String>>() {}));
+
+    DoFnSignatures.analyzeProcessElementMethod(
+        errors(),
+        new TypeDescriptor<DoFn<Integer, String>>() {},
+        processElementMethod,
+        TypeDescriptor.of(Integer.class),
+        TypeDescriptor.of(String.class),
+        context);
   }
 
   @Test
@@ -226,7 +263,7 @@
 
   /** Tests a splittable {@link DoFn} that defines all methods in their full form, correctly. */
   @Test
-  public void testSplittableWithAllFunctions() throws Exception {
+  public void testSplittableWithAllFunctionsAndAllParameters() throws Exception {
     class GoodSplittableDoFn extends DoFn<Integer, String> {
       @ProcessElement
       public ProcessContinuation processElement(
@@ -235,16 +272,32 @@
       }
 
       @GetInitialRestriction
-      public SomeRestriction getInitialRestriction(Integer element) {
+      public SomeRestriction getInitialRestriction(
+          Integer element,
+          PipelineOptions pipelineOptions,
+          BoundedWindow boundedWindow,
+          PaneInfo paneInfo,
+          @Timestamp Instant timestamp) {
         return null;
       }
 
       @SplitRestriction
       public void splitRestriction(
-          Integer element, SomeRestriction restriction, OutputReceiver<SomeRestriction> receiver) {}
+          Integer element,
+          SomeRestriction restriction,
+          OutputReceiver<SomeRestriction> receiver,
+          PipelineOptions pipelineOptions,
+          BoundedWindow boundedWindow,
+          PaneInfo paneInfo,
+          @Timestamp Instant timestamp) {}
 
       @NewTracker
-      public SomeRestrictionTracker newTracker(SomeRestriction restriction) {
+      public SomeRestrictionTracker newTracker(
+          SomeRestriction restriction,
+          PipelineOptions pipelineOptions,
+          BoundedWindow boundedWindow,
+          PaneInfo paneInfo,
+          @Timestamp Instant timestamp) {
         return null;
       }
 
@@ -455,7 +508,9 @@
           void method(
               Integer element, SomeRestriction restriction, DoFn.OutputReceiver<String> receiver) {}
         }.getMethod(),
-        TypeDescriptor.of(Integer.class));
+        TypeDescriptor.of(Integer.class),
+        TypeDescriptor.of(String.class),
+        FnAnalysisContext.create());
   }
 
   @Test
@@ -476,12 +531,14 @@
               SomeRestriction restriction,
               DoFn.OutputReceiver<SomeRestriction> receiver) {}
         }.getMethod(),
-        TypeDescriptor.of(Integer.class));
+        TypeDescriptor.of(Integer.class),
+        TypeDescriptor.of(String.class),
+        FnAnalysisContext.create());
   }
 
   @Test
-  public void testSplitRestrictionWrongNumArguments() throws Exception {
-    thrown.expectMessage("Must have exactly 3 arguments");
+  public void testSplitRestrictionWrongArgumentType() throws Exception {
+    thrown.expectMessage("Object is not a valid context parameter.");
     DoFnSignatures.analyzeSplitRestrictionMethod(
         errors(),
         TypeDescriptor.of(FakeDoFn.class),
@@ -492,7 +549,9 @@
               DoFn.OutputReceiver<SomeRestriction> receiver,
               Object extra) {}
         }.getMethod(),
-        TypeDescriptor.of(Integer.class));
+        TypeDescriptor.of(Integer.class),
+        TypeDescriptor.of(String.class),
+        FnAnalysisContext.create());
   }
 
   @Test
@@ -563,8 +622,8 @@
   }
 
   @Test
-  public void testNewTrackerWrongNumArguments() throws Exception {
-    thrown.expectMessage("Must have a single argument");
+  public void testNewTrackerWrongArgumentType() throws Exception {
+    thrown.expectMessage("Object is not a valid context parameter.");
     DoFnSignatures.analyzeNewTrackerMethod(
         errors(),
         TypeDescriptor.of(FakeDoFn.class),
@@ -572,7 +631,10 @@
           private SomeRestrictionTracker method(SomeRestriction restriction, Object extra) {
             return null;
           }
-        }.getMethod());
+        }.getMethod(),
+        TypeDescriptor.of(Integer.class),
+        TypeDescriptor.of(String.class),
+        FnAnalysisContext.create());
   }
 
   @Test
@@ -587,6 +649,9 @@
           private SomeRestrictionTracker method(String restriction) {
             return null;
           }
-        }.getMethod());
+        }.getMethod(),
+        TypeDescriptor.of(Integer.class),
+        TypeDescriptor.of(String.class),
+        FnAnalysisContext.create());
   }
 }
diff --git a/sdks/java/extensions/sql/perf-tests/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BigQueryIOPushDownIT.java b/sdks/java/extensions/sql/perf-tests/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BigQueryIOPushDownIT.java
index 05ad30e..caa5497 100644
--- a/sdks/java/extensions/sql/perf-tests/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BigQueryIOPushDownIT.java
+++ b/sdks/java/extensions/sql/perf-tests/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/bigquery/BigQueryIOPushDownIT.java
@@ -56,7 +56,8 @@
 
 @RunWith(JUnit4.class)
 public class BigQueryIOPushDownIT {
-  private static final String READ_FROM_TABLE = "bigquery-public-data:hacker_news.full";
+  private static final String READ_FROM_TABLE =
+      "apache-beam-testing:beam_performance.hacker_news_full";
   private static final String NAMESPACE = BigQueryIOPushDownIT.class.getName();
   private static final String FIELDS_READ_METRIC = "fields_read";
   private static final String READ_TIME_METRIC = "read_time";
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java
index 9b06a12..4da3d35e 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbTable.java
@@ -17,12 +17,19 @@
  */
 package org.apache.beam.sdk.extensions.sql.meta.provider.mongodb;
 
+import static org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.sql.SqlKind.AND;
+import static org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.sql.SqlKind.COMPARISON;
+import static org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.sql.SqlKind.OR;
 import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
 
+import com.mongodb.client.model.Filters;
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.function.IntFunction;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.extensions.sql.impl.BeamTableStatistics;
 import org.apache.beam.sdk.extensions.sql.meta.BeamSqlTableFilter;
@@ -34,7 +41,9 @@
 import org.apache.beam.sdk.io.mongodb.MongoDbIO;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.schemas.FieldAccessDescriptor;
+import org.apache.beam.sdk.schemas.FieldTypeDescriptors;
 import org.apache.beam.sdk.schemas.Schema;
+import org.apache.beam.sdk.schemas.Schema.FieldType;
 import org.apache.beam.sdk.schemas.utils.SelectHelpers;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.JsonToRow;
@@ -49,12 +58,23 @@
 import org.apache.beam.sdk.values.POutput;
 import org.apache.beam.sdk.values.Row;
 import org.apache.beam.vendor.calcite.v1_20_0.com.google.common.annotations.VisibleForTesting;
+import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexCall;
+import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexInputRef;
+import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexLiteral;
+import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.rex.RexNode;
+import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.sql.SqlKind;
+import org.apache.beam.vendor.calcite.v1_20_0.org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.bson.Document;
+import org.bson.conversions.Bson;
 import org.bson.json.JsonMode;
 import org.bson.json.JsonWriterSettings;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @Experimental
 public class MongoDbTable extends SchemaBaseBeamTable implements Serializable {
+  private static final Logger LOGGER = LoggerFactory.getLogger(MongoDbTable.class);
   // Should match: mongodb://username:password@localhost:27017/database/collection
   @VisibleForTesting
   final Pattern locationPattern =
@@ -103,14 +123,23 @@
             .resolve(getSchema());
     final Schema newSchema = SelectHelpers.getOutputSchema(getSchema(), resolved);
 
+    FindQuery findQuery = FindQuery.create();
+
     if (!(filters instanceof DefaultTableFilter)) {
-      throw new AssertionError("Predicate push-down is unsupported, yet received a predicate.");
+      MongoDbFilter mongoFilter = (MongoDbFilter) filters;
+      if (!mongoFilter.getSupported().isEmpty()) {
+        Bson filter = constructPredicate(mongoFilter.getSupported());
+        LOGGER.info("Pushing down the following filter: " + filter.toString());
+        findQuery = findQuery.withFilters(filter);
+      }
     }
 
     if (!fieldNames.isEmpty()) {
-      readInstance = readInstance.withQueryFn(FindQuery.create().withProjection(fieldNames));
+      findQuery = findQuery.withProjection(fieldNames);
     }
 
+    readInstance = readInstance.withQueryFn(findQuery);
+
     return readInstance.expand(begin).apply(DocumentToRow.withSchema(newSchema));
   }
 
@@ -127,6 +156,131 @@
   }
 
   @Override
+  public BeamSqlTableFilter constructFilter(List<RexNode> filter) {
+    return MongoDbFilter.create(filter);
+  }
+
+  /**
+   * Given a predicate in a conjunctive normal form (CNF), construct a {@code Bson} filter for
+   * MongoDB find query.
+   *
+   * @param supported A list of {@code RexNode} in CNF.
+   * @return {@code Bson} filter.
+   */
+  private Bson constructPredicate(List<RexNode> supported) {
+    assert !supported.isEmpty();
+    List<Bson> cnf =
+        supported.stream().map(this::translateRexNodeToBson).collect(Collectors.toList());
+    if (cnf.size() == 1) {
+      return cnf.get(0);
+    }
+    return Filters.and(cnf);
+  }
+
+  /**
+   * Recursively translates a single RexNode to MongoDB Bson filter. Supports simple comparison
+   * operations, negation, and nested conjunction/disjunction. Boolean fields are translated as an
+   * `$eq` operation with a boolean `true`.
+   *
+   * @param node {@code RexNode} to translate.
+   * @return {@code Bson} filter.
+   */
+  private Bson translateRexNodeToBson(RexNode node) {
+    final IntFunction<String> fieldIdToName = i -> getSchema().getField(i).getName();
+    // Supported operations are described in MongoDbFilter#isSupported
+    if (node instanceof RexCall) {
+      RexCall compositeNode = (RexCall) node;
+      List<RexLiteral> literals = new ArrayList<>();
+      List<RexInputRef> inputRefs = new ArrayList<>();
+
+      for (RexNode operand : compositeNode.getOperands()) {
+        if (operand instanceof RexLiteral) {
+          literals.add((RexLiteral) operand);
+        } else if (operand instanceof RexInputRef) {
+          inputRefs.add((RexInputRef) operand);
+        }
+      }
+
+      // Operation is a comparison, since one of the operands is a field reference.
+      if (inputRefs.size() == 1) {
+        RexInputRef inputRef = inputRefs.get(0);
+        String inputFieldName = fieldIdToName.apply(inputRef.getIndex());
+        if (literals.size() > 0) {
+          // Convert literal value to the same Java type as the field we are comparing to.
+          Object literal = convertToExpectedType(inputRef, literals.get(0));
+
+          switch (node.getKind()) {
+            case IN:
+              return Filters.in(inputFieldName, convertToExpectedType(inputRef, literals));
+            case EQUALS:
+              return Filters.eq(inputFieldName, literal);
+            case NOT_EQUALS:
+              return Filters.not(Filters.eq(inputFieldName, literal));
+            case LESS_THAN:
+              return Filters.lt(inputFieldName, literal);
+            case GREATER_THAN:
+              return Filters.gt(inputFieldName, literal);
+            case GREATER_THAN_OR_EQUAL:
+              return Filters.gte(inputFieldName, literal);
+            case LESS_THAN_OR_EQUAL:
+              return Filters.lte(inputFieldName, literal);
+            default:
+              // Encountered an unexpected node kind, RuntimeException below.
+              break;
+          }
+        } else if (node.getKind().equals(SqlKind.NOT)) {
+          // Ex: `where not boolean_field`
+          return Filters.not(translateRexNodeToBson(inputRef));
+        } else {
+          throw new RuntimeException(
+              "Cannot create a filter for an unsupported node: " + node.toString());
+        }
+      } else { // Operation is a conjunction/disjunction.
+        switch (node.getKind()) {
+          case AND:
+            // Recursively construct filter for each operand of conjunction.
+            return Filters.and(
+                compositeNode.getOperands().stream()
+                    .map(this::translateRexNodeToBson)
+                    .collect(Collectors.toList()));
+          case OR:
+            // Recursively construct filter for each operand of disjunction.
+            return Filters.or(
+                compositeNode.getOperands().stream()
+                    .map(this::translateRexNodeToBson)
+                    .collect(Collectors.toList()));
+          default:
+            // Encountered an unexpected node kind, RuntimeException below.
+            break;
+        }
+      }
+      throw new RuntimeException(
+          "Encountered an unexpected node kind: " + node.getKind().toString());
+    } else if (node instanceof RexInputRef
+        && node.getType().getSqlTypeName().equals(SqlTypeName.BOOLEAN)) {
+      // Boolean field, must be true. Ex: `select * from table where bool_field`
+      return Filters.eq(fieldIdToName.apply(((RexInputRef) node).getIndex()), true);
+    }
+
+    throw new RuntimeException(
+        "Was expecting a RexCall or a boolean RexInputRef, but received: "
+            + node.getClass().getSimpleName());
+  }
+
+  private Object convertToExpectedType(RexInputRef inputRef, RexLiteral literal) {
+    FieldType beamFieldType = getSchema().getField(inputRef.getIndex()).getType();
+
+    return literal.getValueAs(
+        FieldTypeDescriptors.javaTypeForFieldType(beamFieldType).getRawType());
+  }
+
+  private Object convertToExpectedType(RexInputRef inputRef, List<RexLiteral> literals) {
+    return literals.stream()
+        .map(l -> convertToExpectedType(inputRef, l))
+        .collect(Collectors.toList());
+  }
+
+  @Override
   public IsBounded isBounded() {
     return IsBounded.BOUNDED;
   }
@@ -205,4 +359,117 @@
       }
     }
   }
+
+  static class MongoDbFilter implements BeamSqlTableFilter {
+    private List<RexNode> supported;
+    private List<RexNode> unsupported;
+
+    public MongoDbFilter(List<RexNode> supported, List<RexNode> unsupported) {
+      this.supported = supported;
+      this.unsupported = unsupported;
+    }
+
+    @Override
+    public List<RexNode> getNotSupported() {
+      return unsupported;
+    }
+
+    @Override
+    public int numSupported() {
+      return BeamSqlTableFilter.expressionsInFilter(supported);
+    }
+
+    public List<RexNode> getSupported() {
+      return supported;
+    }
+
+    @Override
+    public String toString() {
+      String supStr =
+          "supported{"
+              + supported.stream().map(RexNode::toString).collect(Collectors.joining())
+              + "}";
+      String unsupStr =
+          "unsupported{"
+              + unsupported.stream().map(RexNode::toString).collect(Collectors.joining())
+              + "}";
+
+      return "[" + supStr + ", " + unsupStr + "]";
+    }
+
+    public static MongoDbFilter create(List<RexNode> predicateCNF) {
+      ImmutableList.Builder<RexNode> supported = ImmutableList.builder();
+      ImmutableList.Builder<RexNode> unsupported = ImmutableList.builder();
+
+      for (RexNode node : predicateCNF) {
+        if (!node.getType().getSqlTypeName().equals(SqlTypeName.BOOLEAN)) {
+          throw new RuntimeException(
+              "Predicate node '"
+                  + node.getClass().getSimpleName()
+                  + "' should be a boolean expression, but was: "
+                  + node.getType().getSqlTypeName());
+        }
+
+        if (isSupported(node)) {
+          supported.add(node);
+        } else {
+          unsupported.add(node);
+        }
+      }
+
+      return new MongoDbFilter(supported.build(), unsupported.build());
+    }
+
+    /**
+     * Check whether a {@code RexNode} is supported. To keep things simple:<br>
+     * 1. Support comparison operations in a predicate that compare a single field to literal
+     * values.<br>
+     * 2. Support nested conjunctions (AND) and disjunctions (OR) as long as the child operations are supported.<br>
+     * 3. Support boolean fields.
+     *
+     * @param node A node to check for predicate push-down support.
+     * @return True when the expression is supported for push-down, false otherwise.
+     */
+    private static boolean isSupported(RexNode node) {
+      if (node instanceof RexCall) {
+        RexCall compositeNode = (RexCall) node;
+
+        if (node.getKind().belongsTo(COMPARISON) || node.getKind().equals(SqlKind.NOT)) {
+          int fields = 0;
+          for (RexNode operand : compositeNode.getOperands()) {
+            if (operand instanceof RexInputRef) {
+              fields++;
+            } else if (operand instanceof RexLiteral) {
+              // RexLiterals are expected, but no action is needed.
+            } else {
+              // Complex predicates are not supported. Ex: `field1+5 == 10`.
+              return false;
+            }
+          }
+          // All comparison operations should have exactly one field reference.
+          // Ex: `field1 == field2` is not supported.
+          // TODO: Can be supported via Filters#where.
+          if (fields == 1) {
+            return true;
+          }
+        } else if (node.getKind().equals(AND) || node.getKind().equals(OR)) {
+          // Nested ANDs and ORs are supported as long as all operands are supported.
+          for (RexNode operand : compositeNode.getOperands()) {
+            if (!isSupported(operand)) {
+              return false;
+            }
+          }
+          return true;
+        }
+      } else if (node instanceof RexInputRef) {
+        // When field is a boolean.
+        return true;
+      } else {
+        throw new RuntimeException(
+            "Encountered an unexpected node type: " + node.getClass().getSimpleName());
+      }
+
+      return false;
+    }
+  }
 }
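To make the push-down concrete, a predicate such as `id = 2 AND (name = 'two' OR id > 1)` would translate into roughly the following Bson, written here directly with the MongoDB driver's Filters helpers (an illustrative sketch, not output generated by this code):

import com.mongodb.client.model.Filters;
import org.bson.conversions.Bson;

class PushDownFilterExample {
  // Roughly the filter constructPredicate/translateRexNodeToBson would build for
  // `id = 2 AND (name = 'two' OR id > 1)`.
  static Bson exampleFilter() {
    return Filters.and(
        Filters.eq("id", 2),
        Filters.or(Filters.eq("name", "two"), Filters.gt("id", 1)));
  }
}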
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbFilterTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbFilterTest.java
new file mode 100644
index 0000000..aafaf9b
--- /dev/null
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbFilterTest.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.meta.provider.mongodb;
+
+import static org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider.PUSH_DOWN_OPTION;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.instanceOf;
+
+import com.alibaba.fastjson.JSON;
+import java.util.Arrays;
+import java.util.Collection;
+import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
+import org.apache.beam.sdk.extensions.sql.impl.rel.BeamCalcRel;
+import org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode;
+import org.apache.beam.sdk.extensions.sql.meta.Table;
+import org.apache.beam.sdk.extensions.sql.meta.provider.mongodb.MongoDbTable.MongoDbFilter;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider.PushDownOptions;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.schemas.Schema;
+import org.apache.beam.sdk.testing.TestPipeline;
+import org.apache.beam.sdk.values.Row;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameter;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class MongoDbFilterTest {
+  private static final Schema BASIC_SCHEMA =
+      Schema.builder()
+          .addInt32Field("unused1")
+          .addInt32Field("id")
+          .addStringField("name")
+          .addInt16Field("unused2")
+          .addBooleanField("b")
+          .build();
+  private BeamSqlEnv sqlEnv;
+
+  @Parameters
+  public static Collection<Object[]> data() {
+    return Arrays.asList(
+        new Object[][] {
+          {"select * from TEST where unused1=100", true},
+          {"select * from TEST where unused1 in (100, 200)", true},
+          {"select * from TEST where b", true},
+          {"select * from TEST where not b", true},
+          {
+            "select * from TEST where unused1>100 and unused1<=200 and id<>1 and (name='two' or id=2)",
+            true
+          },
+          {"select * from TEST where name like 'o%e'", false},
+          {"select * from TEST where unused1+10=110", false},
+          {"select * from TEST where unused1=unused2 and id=2", false},
+          {"select * from TEST where unused1+unused2=10", false}
+        });
+  }
+
+  @Parameter public String query;
+
+  @Parameter(1)
+  public boolean isSupported;
+
+  @Rule public TestPipeline pipeline = TestPipeline.create();
+
+  @Before
+  public void buildUp() {
+    TestTableProvider tableProvider = new TestTableProvider();
+    Table table = getTable("TEST", PushDownOptions.NONE);
+    tableProvider.createTable(table);
+    tableProvider.addRows(
+        table.getName(),
+        row(BASIC_SCHEMA, 100, 1, "one", (short) 100, true),
+        row(BASIC_SCHEMA, 200, 2, "two", (short) 200, false));
+
+    sqlEnv =
+        BeamSqlEnv.builder(tableProvider)
+            .setPipelineOptions(PipelineOptionsFactory.create())
+            .build();
+  }
+
+  @Test
+  public void testIsSupported() {
+    BeamRelNode beamRelNode = sqlEnv.parseQuery(query);
+    assertThat(beamRelNode, instanceOf(BeamCalcRel.class));
+    MongoDbFilter filter =
+        MongoDbFilter.create(((BeamCalcRel) beamRelNode).getProgram().split().right);
+
+    assertThat(
+        "Query: '" + query + "' is expected to be " + (isSupported ? "supported." : "unsupported."),
+        filter.getNotSupported().isEmpty() == isSupported);
+  }
+
+  private static Table getTable(String name, PushDownOptions options) {
+    return Table.builder()
+        .name(name)
+        .comment(name + " table")
+        .schema(BASIC_SCHEMA)
+        .properties(
+            JSON.parseObject("{ " + PUSH_DOWN_OPTION + ": " + "\"" + options.toString() + "\" }"))
+        .type("test")
+        .build();
+  }
+
+  private static Row row(Schema schema, Object... objects) {
+    return Row.withSchema(schema).addValues(objects).build();
+  }
+}
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbReadWriteIT.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbReadWriteIT.java
index 0d4296a..47ad96a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbReadWriteIT.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/meta/provider/mongodb/MongoDbReadWriteIT.java
@@ -57,6 +57,7 @@
 import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.Row;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.bson.Document;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -145,7 +146,7 @@
   public void init() {
     sqlEnv = BeamSqlEnv.inMemory(new MongoDbTableProvider());
     MongoDatabase db = client.getDatabase(database);
-    Document r = db.runCommand(new BasicDBObject().append("profile", 2));
+    db.runCommand(new BasicDBObject().append("profile", 2));
   }
 
   @After
@@ -295,6 +296,107 @@
         containsInAnyOrder("c_varchar", "c_boolean", "c_integer"));
   }
 
+  @Test
+  public void testPredicatePushDown() {
+    final Document expectedFilter =
+        new Document()
+            .append(
+                "$or",
+                ImmutableList.of(
+                        new Document("c_varchar", "varchar"),
+                        new Document(
+                            "c_varchar", new Document("$not", new Document("$eq", "fakeString"))))
+                    .asList())
+            .append("c_boolean", true)
+            .append("c_integer", 2147483647);
+    final Schema expectedSchema =
+        Schema.builder()
+            .addNullableField("c_varchar", STRING)
+            .addNullableField("c_boolean", BOOLEAN)
+            .addNullableField("c_integer", INT32)
+            .build();
+    Row testRow = row(expectedSchema, "varchar", true, 2147483647);
+
+    String createTableStatement =
+        "CREATE EXTERNAL TABLE TEST( \n"
+            + "   c_bigint BIGINT, \n "
+            + "   c_tinyint TINYINT, \n"
+            + "   c_smallint SMALLINT, \n"
+            + "   c_integer INTEGER, \n"
+            + "   c_float FLOAT, \n"
+            + "   c_double DOUBLE, \n"
+            + "   c_boolean BOOLEAN, \n"
+            + "   c_varchar VARCHAR, \n "
+            + "   c_arr ARRAY<VARCHAR> \n"
+            + ") \n"
+            + "TYPE 'mongodb' \n"
+            + "LOCATION '"
+            + mongoSqlUrl
+            + "'";
+    sqlEnv.executeDdl(createTableStatement);
+
+    String insertStatement =
+        "INSERT INTO TEST VALUES ("
+            + "9223372036854775807, "
+            + "127, "
+            + "32767, "
+            + "2147483647, "
+            + "1.0, "
+            + "1.0, "
+            + "TRUE, "
+            + "'varchar', "
+            + "ARRAY['123', '456']"
+            + ")";
+
+    BeamRelNode insertRelNode = sqlEnv.parseQuery(insertStatement);
+    BeamSqlRelUtils.toPCollection(writePipeline, insertRelNode);
+    writePipeline.run().waitUntilFinish();
+
+    BeamRelNode node =
+        sqlEnv.parseQuery(
+            "select c_varchar, c_boolean, c_integer from TEST"
+                + " where (c_varchar='varchar' or c_varchar<>'fakeString') and c_boolean and c_integer=2147483647");
+    // The Calc should be dropped, since MongoDB can push down all predicate operations from the
+    // query above.
+    assertThat(node, instanceOf(BeamPushDownIOSourceRel.class));
+    // Only selected fields are projected.
+    assertThat(
+        node.getRowType().getFieldNames(),
+        containsInAnyOrder("c_varchar", "c_boolean", "c_integer"));
+    PCollection<Row> output = BeamSqlRelUtils.toPCollection(readPipeline, node);
+
+    assertThat(output.getSchema(), equalTo(expectedSchema));
+    PAssert.that(output).containsInAnyOrder(testRow);
+
+    readPipeline.run().waitUntilFinish();
+
+    MongoDatabase db = client.getDatabase(database);
+    MongoCollection coll = db.getCollection("system.profile");
+    // Find the last executed query.
+    Object query =
+        coll.find()
+            .filter(Filters.eq("op", "query"))
+            .sort(new BasicDBObject().append("ts", -1))
+            .iterator()
+            .next();
+
+    // Retrieve the filter and projection from the profiled command.
+    assertThat(query, instanceOf(Document.class));
+    Object command = ((Document) query).get("command");
+    assertThat(command, instanceOf(Document.class));
+    Object filter = ((Document) command).get("filter");
+    assertThat(filter, instanceOf(Document.class));
+    Object projection = ((Document) command).get("projection");
+    assertThat(projection, instanceOf(Document.class));
+
+    // Validate projected fields.
+    assertThat(
+        ((Document) projection).keySet(),
+        containsInAnyOrder("c_varchar", "c_boolean", "c_integer"));
+    // Validate filtered fields.
+    assertThat(((Document) filter), equalTo(expectedFilter));
+  }
+
   private Row row(Schema schema, Object... values) {
     return Row.withSchema(schema).addValues(values).build();
   }
diff --git a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java
index 330fb2d..153df25 100644
--- a/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java
+++ b/sdks/java/extensions/sql/zetasql/src/main/java/org/apache/beam/sdk/extensions/sql/zetasql/BeamZetaSqlCalcRel.java
@@ -34,7 +34,6 @@
 import org.apache.beam.sdk.extensions.sql.meta.provider.bigquery.BeamBigQuerySqlDialect;
 import org.apache.beam.sdk.extensions.sql.meta.provider.bigquery.BeamSqlUnparseContext;
 import org.apache.beam.sdk.schemas.Schema;
-import org.apache.beam.sdk.schemas.Schema.Field;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
 import org.apache.beam.sdk.transforms.ParDo;
@@ -67,13 +66,14 @@
   private static final SqlDialect DIALECT = BeamBigQuerySqlDialect.DEFAULT;
   private final SqlImplementor.Context context;
 
+  private static String columnName(int i) {
+    return "_" + i;
+  }
+
   public BeamZetaSqlCalcRel(
       RelOptCluster cluster, RelTraitSet traits, RelNode input, RexProgram program) {
     super(cluster, traits, input, program);
-    final IntFunction<SqlNode> fn =
-        i ->
-            new SqlIdentifier(
-                getProgram().getInputRowType().getFieldList().get(i).getName(), SqlParserPos.ZERO);
+    final IntFunction<SqlNode> fn = i -> new SqlIdentifier(columnName(i), SqlParserPos.ZERO);
     context = new BeamSqlUnparseContext(fn);
   }
 
@@ -146,20 +146,21 @@
     @Setup
     public void setup() {
       AnalyzerOptions options = SqlAnalyzer.initAnalyzerOptions();
-      for (Field field : inputSchema.getFields()) {
+      for (int i = 0; i < inputSchema.getFieldCount(); i++) {
         options.addExpressionColumn(
-            sanitize(field.getName()), ZetaSqlUtils.beamFieldTypeToZetaSqlType(field.getType()));
+            columnName(i),
+            ZetaSqlUtils.beamFieldTypeToZetaSqlType(inputSchema.getField(i).getType()));
       }
 
       // TODO[BEAM-8630]: use a single PreparedExpression for all condition and projects
       projectExps = new ArrayList<>();
       for (String project : projects) {
-        PreparedExpression projectExp = new PreparedExpression(sanitize(project));
+        PreparedExpression projectExp = new PreparedExpression(project);
         projectExp.prepare(options);
         projectExps.add(projectExp);
       }
       if (condition != null) {
-        conditionExp = new PreparedExpression(sanitize(condition));
+        conditionExp = new PreparedExpression(condition);
         conditionExp.prepare(options);
       }
     }
@@ -168,10 +169,11 @@
     public void processElement(ProcessContext c) {
       Map<String, Value> columns = new HashMap<>();
       Row row = c.element();
-      for (Field field : inputSchema.getFields()) {
+      for (int i = 0; i < inputSchema.getFieldCount(); i++) {
         columns.put(
-            sanitize(field.getName()),
-            ZetaSqlUtils.javaObjectToZetaSqlValue(row.getValue(field.getName()), field.getType()));
+            columnName(i),
+            ZetaSqlUtils.javaObjectToZetaSqlValue(
+                row.getValue(i), inputSchema.getField(i).getType()));
       }
 
       // TODO[BEAM-8630]: support parameters in expression evaluation
@@ -201,12 +203,5 @@
         conditionExp.close();
       }
     }
-
-    // Replaces "$" with "_" because "$" is not allowed in a valid ZetaSQL identifier
-    // (ZetaSQL identifier syntax: [A-Za-z_][A-Za-z_0-9]*)
-    // TODO[BEAM-8630]: check if this is sufficient and correct, or even better fix this in Calcite
-    private static String sanitize(String identifier) {
-      return identifier.replaceAll("\\$", "_");
-    }
   }
 }
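
A minimal sketch of the positional-naming convention introduced above (hypothetical schema and output; only the "_" + index naming and the by-index lookups mirror the change; Schema is org.apache.beam.sdk.schemas.Schema, already imported in this file):

    Schema schema =
        Schema.builder().addInt64Field("$col1").addStringField("name").build();
    for (int i = 0; i < schema.getFieldCount(); i++) {
      // columnName(i) yields "_0" for "$col1" and "_1" for "name", so the generated
      // identifiers are always valid ZetaSQL identifiers; values are registered and
      // looked up by index rather than by (possibly sanitized) field name.
      System.out.println("_" + i + " -> " + schema.getField(i).getName());
    }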
diff --git a/sdks/java/fn-execution/build.gradle b/sdks/java/fn-execution/build.gradle
index ea46cff..d099333 100644
--- a/sdks/java/fn-execution/build.gradle
+++ b/sdks/java/fn-execution/build.gradle
@@ -27,7 +27,7 @@
   compile project(path: ":model:pipeline", configuration: "shadow")
   compile project(path: ":model:fn-execution", configuration: "shadow")
   compile project(path: ":sdks:java:core", configuration: "shadow")
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   compile library.java.vendored_guava_26_0_jre
   compile library.java.slf4j_api
   compile library.java.joda_time
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java
index bc6da0e..c6180a3 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactory.java
@@ -20,14 +20,14 @@
 import java.net.SocketAddress;
 import java.util.List;
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ClientInterceptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.netty.NettyChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.EpollDomainSocketChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.EpollEventLoopGroup;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.EpollSocketChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.unix.DomainSocketAddress;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ClientInterceptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.netty.NettyChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.EpollDomainSocketChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.EpollEventLoopGroup;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.EpollSocketChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.unix.DomainSocketAddress;
 
 /** A Factory which creates an underlying {@link ManagedChannel} implementation. */
 public abstract class ManagedChannelFactory {
@@ -36,7 +36,7 @@
   }
 
   public static ManagedChannelFactory createEpoll() {
-    org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.Epoll.ensureAvailability();
+    org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.Epoll.ensureAvailability();
     return new Epoll();
   }
 
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/SocketAddressFactory.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/SocketAddressFactory.java
index c77e1bc..b7d9d76 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/SocketAddressFactory.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/channel/SocketAddressFactory.java
@@ -23,7 +23,7 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.unix.DomainSocketAddress;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.unix.DomainSocketAddress;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.net.HostAndPort;
 
 /** Creates a {@link SocketAddress} based upon a supplied string. */
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataBufferingOutboundObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataBufferingOutboundObserver.java
index bbc2916..e741c7e 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataBufferingOutboundObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataBufferingOutboundObserver.java
@@ -23,7 +23,7 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.options.ExperimentalOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 
 /**
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java
index 7ed83df..3140616 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexer.java
@@ -28,8 +28,8 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.Elements.Data;
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserver.java
index c0215ae..51d9595 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserver.java
@@ -20,8 +20,8 @@
 import java.io.IOException;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
 import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataTimeBasedBufferingOutboundObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataTimeBasedBufferingOutboundObserver.java
index 3595fbd..b339e33 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataTimeBasedBufferingOutboundObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/BeamFnDataTimeBasedBufferingOutboundObserver.java
@@ -25,7 +25,7 @@
 import java.util.concurrent.TimeUnit;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
 import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortRead.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortRead.java
index 9568b90..f06599e 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortRead.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortRead.java
@@ -23,7 +23,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.RemoteGrpcPort;
 import org.apache.beam.model.pipeline.v1.RunnerApi.FunctionSpec;
 import org.apache.beam.model.pipeline.v1.RunnerApi.PTransform;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 
 /**
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWrite.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWrite.java
index b1c7604..42fd798 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWrite.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWrite.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.pipeline.v1.RunnerApi.FunctionSpec;
 import org.apache.beam.model.pipeline.v1.RunnerApi.PCollection;
 import org.apache.beam.model.pipeline.v1.RunnerApi.PTransform;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 
 /**
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/BufferingStreamObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/BufferingStreamObserver.java
index da7505d..12f042d 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/BufferingStreamObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/BufferingStreamObserver.java
@@ -27,8 +27,8 @@
 import java.util.concurrent.Phaser;
 import java.util.concurrent.TimeUnit;
 import javax.annotation.concurrent.ThreadSafe;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 
 /**
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DataStreams.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DataStreams.java
index 3134ea4..140f508 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DataStreams.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DataStreams.java
@@ -27,7 +27,7 @@
 import java.util.NoSuchElementException;
 import java.util.concurrent.BlockingQueue;
 import org.apache.beam.sdk.coders.Coder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.CountingInputStream;
 
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DirectStreamObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DirectStreamObserver.java
index dce3452..3906318 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DirectStreamObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/DirectStreamObserver.java
@@ -21,8 +21,8 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import javax.annotation.concurrent.ThreadSafe;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserver.java
index a25985d..016cb11 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserver.java
@@ -17,9 +17,9 @@
  */
 package org.apache.beam.sdk.fn.stream;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientCallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientResponseObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientCallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientResponseObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A {@link ClientResponseObserver} which delegates all {@link StreamObserver} calls.
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactory.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactory.java
index 83f94f9..6693fee 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactory.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactory.java
@@ -18,8 +18,8 @@
 package org.apache.beam.sdk.fn.stream;
 
 import java.util.concurrent.ExecutorService;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * Creates factories which determine an underlying {@link StreamObserver} implementation to use in
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/SynchronizedStreamObserver.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/SynchronizedStreamObserver.java
index c960d96..31e9af2 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/SynchronizedStreamObserver.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/stream/SynchronizedStreamObserver.java
@@ -17,7 +17,7 @@
  */
 package org.apache.beam.sdk.fn.stream;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * A {@link StreamObserver} which provides synchronous access to an underlying {@link
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java
index a4c99a1..aad1fd1 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/InProcessManagedChannelFactory.java
@@ -19,8 +19,8 @@
 
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
 
 /**
  * A {@link ManagedChannelFactory} that uses in-process channels.
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/TestStreams.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/TestStreams.java
index b76997e..cd8b977 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/TestStreams.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/test/TestStreams.java
@@ -19,8 +19,8 @@
 
 import java.util.function.Consumer;
 import java.util.function.Supplier;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /** Utility methods which enable testing of {@link StreamObserver}s. */
 public class TestStreams {
diff --git a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindow.java b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindow.java
index 94d3400..3e9b21c 100644
--- a/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindow.java
+++ b/sdks/java/fn-execution/src/main/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindow.java
@@ -25,7 +25,7 @@
 import org.apache.beam.sdk.coders.CoderException;
 import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
 import org.apache.beam.sdk.util.VarInt;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;
 import org.joda.time.Instant;
 
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactoryTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactoryTest.java
index 3e60697..fc0c813 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactoryTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/ManagedChannelFactoryTest.java
@@ -21,7 +21,7 @@
 import static org.junit.Assume.assumeTrue;
 
 import org.apache.beam.model.pipeline.v1.Endpoints;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
@@ -45,7 +45,7 @@
 
   @Test
   public void testEpollHostPortChannel() {
-    assumeTrue(org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.Epoll.isAvailable());
+    assumeTrue(org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.Epoll.isAvailable());
     Endpoints.ApiServiceDescriptor apiServiceDescriptor =
         Endpoints.ApiServiceDescriptor.newBuilder().setUrl("localhost:123").build();
     ManagedChannel channel =
@@ -56,7 +56,7 @@
 
   @Test
   public void testEpollDomainSocketChannel() throws Exception {
-    assumeTrue(org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.epoll.Epoll.isAvailable());
+    assumeTrue(org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.epoll.Epoll.isAvailable());
     Endpoints.ApiServiceDescriptor apiServiceDescriptor =
         Endpoints.ApiServiceDescriptor.newBuilder()
             .setUrl("unix://" + tmpFolder.newFile().getAbsolutePath())
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/SocketAddressFactoryTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/SocketAddressFactoryTest.java
index 0107a7b..91c1e17 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/SocketAddressFactoryTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/channel/SocketAddressFactoryTest.java
@@ -23,7 +23,7 @@
 import java.io.File;
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
-import org.apache.beam.vendor.grpc.v1p21p0.io.netty.channel.unix.DomainSocketAddress;
+import org.apache.beam.vendor.grpc.v1p26p0.io.netty.channel.unix.DomainSocketAddress;
 import org.hamcrest.Matchers;
 import org.junit.Rule;
 import org.junit.Test;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexerTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexerTest.java
index bf1b1d3..51301e4 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexerTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataGrpcMultiplexerTest.java
@@ -31,7 +31,7 @@
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.fn.test.TestStreams;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
 import org.junit.Test;
 
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserverTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserverTest.java
index ed2f700..a75b9fa 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserverTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/BeamFnDataSizeBasedBufferingOutboundObserverTest.java
@@ -40,7 +40,7 @@
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortReadTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortReadTest.java
index c1b2175..97aebaf 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortReadTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortReadTest.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.model.pipeline.v1.Endpoints.OAuth2ClientCredentialsGrant;
 import org.apache.beam.model.pipeline.v1.RunnerApi.PTransform;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWriteTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWriteTest.java
index c4be16b..1775728 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWriteTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/data/RemoteGrpcPortWriteTest.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.model.pipeline.v1.Endpoints.OAuth2ClientCredentialsGrant;
 import org.apache.beam.model.pipeline.v1.RunnerApi.PTransform;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/DataStreamsTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/DataStreamsTest.java
index 3e66d50..8dd5819 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/DataStreamsTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/DataStreamsTest.java
@@ -41,7 +41,7 @@
 import org.apache.beam.sdk.fn.stream.DataStreams.DataStreamDecoder;
 import org.apache.beam.sdk.fn.stream.DataStreams.ElementDelimitedOutputStream;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterators;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.ByteStreams;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.io.CountingOutputStream;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserverTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserverTest.java
index 97fc2da..5e0e4b5 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserverTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/ForwardingClientResponseObserverTest.java
@@ -21,9 +21,9 @@
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientCallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientResponseObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientCallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientResponseObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactoryTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactoryTest.java
index 60cd8b0..de56d01 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactoryTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/stream/OutboundObserverFactoryTest.java
@@ -22,8 +22,8 @@
 import static org.junit.Assert.assertThat;
 
 import java.util.concurrent.Executors;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindowTest.java b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindowTest.java
index 18d6896..ecf5bc1 100644
--- a/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindowTest.java
+++ b/sdks/java/fn-execution/src/test/java/org/apache/beam/sdk/fn/windowing/EncodedBoundedWindowTest.java
@@ -19,7 +19,7 @@
 
 import org.apache.beam.sdk.fn.windowing.EncodedBoundedWindow.Coder;
 import org.apache.beam.sdk.testing.CoderProperties;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/sdks/java/harness/build.gradle b/sdks/java/harness/build.gradle
index 42378ea..bfc9112 100644
--- a/sdks/java/harness/build.gradle
+++ b/sdks/java/harness/build.gradle
@@ -62,7 +62,7 @@
   shadowTest library.java.powermock_mockito
   compile library.java.joda_time
   compile library.java.slf4j_api
-  compile library.java.vendored_grpc_1_21_0
+  compile library.java.vendored_grpc_1_26_0
   provided library.java.error_prone_annotations
   testCompile library.java.hamcrest_core
   testCompile library.java.hamcrest_library
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BeamFnDataReadRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BeamFnDataReadRunner.java
index e93b2ba..65f11d9 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BeamFnDataReadRunner.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BeamFnDataReadRunner.java
@@ -17,6 +17,7 @@
  */
 package org.apache.beam.fn.harness;
 
+import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
 import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables.getOnlyElement;
 
 import com.google.auto.service.AutoService;
@@ -24,12 +25,16 @@
 import java.util.Map;
 import java.util.function.Consumer;
 import java.util.function.Supplier;
+import org.apache.beam.fn.harness.HandlesSplits.SplitResult;
 import org.apache.beam.fn.harness.control.BundleSplitListener;
 import org.apache.beam.fn.harness.data.BeamFnDataClient;
 import org.apache.beam.fn.harness.data.PCollectionConsumerRegistry;
 import org.apache.beam.fn.harness.data.PTransformFunctionRegistry;
 import org.apache.beam.fn.harness.state.BeamFnStateClient;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest.DesiredSplit;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.RemoteGrpcPort;
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.model.pipeline.v1.RunnerApi;
@@ -47,6 +52,7 @@
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.primitives.Ints;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -131,6 +137,11 @@
   private final BeamFnDataClient beamFnDataClient;
   private final Coder<WindowedValue<OutputT>> coder;
 
+  private final Object splittingLock = new Object();
+  // 0-based index of the current element being processed
+  private long index = -1;
+  // 0-based index of the first element to not process, aka the first element of the residual
+  private long stopIndex = Long.MAX_VALUE;
   private InboundDataClient readFuture;
 
   BeamFnDataReadRunner(
@@ -170,7 +181,109 @@
             apiServiceDescriptor,
             LogicalEndpoint.of(processBundleInstructionIdSupplier.get(), pTransformId),
             coder,
-            consumer);
+            this::forwardElementToConsumer);
+  }
+
+  public void forwardElementToConsumer(WindowedValue<OutputT> element) throws Exception {
+    synchronized (splittingLock) {
+      if (index == stopIndex - 1) {
+        return;
+      }
+      index += 1;
+    }
+    consumer.accept(element);
+  }
+
+  public void split(
+      ProcessBundleSplitRequest request, ProcessBundleSplitResponse.Builder response) {
+    DesiredSplit desiredSplit = request.getDesiredSplitsMap().get(pTransformId);
+    if (desiredSplit == null) {
+      return;
+    }
+
+    long totalBufferSize = desiredSplit.getEstimatedInputElements();
+
+    HandlesSplits splittingConsumer = null;
+    if (consumer instanceof HandlesSplits) {
+      splittingConsumer = ((HandlesSplits) consumer);
+    }
+
+    synchronized (splittingLock) {
+      // Since we hold the splittingLock, we guarantee that we will not pass the next element
+      // to the downstream consumer. There is still a race: the downstream consumer may not yet
+      // have seen the element, or may already have finished processing it, by the time we ask it
+      // to split (even after we have asked for its progress).
+
+      // If the split request we received was delayed and is less than the known number of elements
+      // then use "index + 1" as the total size. Similarly, if we have already split and the
+      // split request is bounded incorrectly, use the stop index as the upper bound.
+      if (totalBufferSize < index + 1) {
+        totalBufferSize = index + 1;
+      } else if (totalBufferSize > stopIndex) {
+        totalBufferSize = stopIndex;
+      }
+
+      // In the case where we have yet to process an element, set the current element progress to 1.
+      double currentElementProgress = 1;
+
+      // If we have started processing at least one element, attempt to fetch the downstream
+      // progress, defaulting to 0.5 if no progress could be fetched.
+      if (index >= 0) {
+        if (splittingConsumer != null) {
+          currentElementProgress = splittingConsumer.getProgress();
+        } else {
+          currentElementProgress = 0.5;
+        }
+      }
+
+      checkArgument(
+          desiredSplit.getAllowedSplitPointsList().isEmpty(),
+          "TODO: BEAM-3836, support split point restrictions.");
+
+      // Now figure out where to split.
+      //
+      // The units here (except for keepOfElementRemainder) are all in terms of numbers of
+      // (possibly fractional) elements.
+
+      // Compute the amount of "remaining" work that we know of.
+      double remainder = totalBufferSize - index - currentElementProgress;
+      // Compute the number of elements (including fractional elements) that we should "keep".
+      double keep = remainder * desiredSplit.getFractionOfRemainder();
+
+      // If the downstream operator says the progress is less than 1 then the element could be
+      // splittable.
+      if (currentElementProgress < 1) {
+        // See if the amount we need to keep falls within the current element's remainder and if
+        // so, attempt to split it.
+        double keepOfElementRemainder = keep / (1 - currentElementProgress);
+        if (keepOfElementRemainder < 1) {
+          SplitResult splitResult =
+              splittingConsumer != null ? splittingConsumer.trySplit(keepOfElementRemainder) : null;
+          if (splitResult != null) {
+            stopIndex = index + 1;
+            response
+                .addPrimaryRoots(splitResult.getPrimaryRoot())
+                .addResidualRoots(splitResult.getResidualRoot())
+                .addChannelSplitsBuilder()
+                .setLastPrimaryElement(index - 1)
+                .setFirstResidualElement(stopIndex);
+            return;
+          }
+        }
+      }
+
+      // Otherwise, split at the closest element boundary.
+      int newStopIndex =
+          Ints.checkedCast(index + Math.max(1, Math.round(currentElementProgress + keep)));
+      if (newStopIndex < stopIndex) {
+        stopIndex = newStopIndex;
+        response
+            .addChannelSplitsBuilder()
+            .setLastPrimaryElement(stopIndex - 1)
+            .setFirstResidualElement(stopIndex);
+        return;
+      }
+    }
   }
 
   public void blockTillReadFinishes() throws Exception {
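
A worked example of the split arithmetic above, with assumed numbers that are not taken from the change itself: if the runner estimates 10 input elements (totalBufferSize = 10), the reader is on the element at index = 3, the downstream consumer reports currentElementProgress = 0.5, and fractionOfRemainder = 0.5, then remainder = 10 - 3 - 0.5 = 6.5 and keep = 6.5 * 0.5 = 3.25. Because keepOfElementRemainder = 3.25 / (1 - 0.5) = 6.5 is not below 1, the current element is not split; instead newStopIndex = 3 + max(1, round(0.5 + 3.25)) = 7, so the channel split reports lastPrimaryElement = 6 and firstResidualElement = 7.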
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BoundedSourceRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BoundedSourceRunner.java
index a632aa2..a06c002 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BoundedSourceRunner.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/BoundedSourceRunner.java
@@ -43,7 +43,7 @@
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.InvalidProtocolBufferException;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java
index deba809..0953d28 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnApiDoFnRunner.java
@@ -86,8 +86,8 @@
 import org.apache.beam.sdk.values.Row;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.WindowingStrategy;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.util.Durations;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.util.Durations;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableListMultimap;
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
index 6ec1673..579d447 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/FnHarness.java
@@ -44,7 +44,7 @@
 import org.apache.beam.sdk.io.FileSystems;
 import org.apache.beam.sdk.options.ExperimentalOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.slf4j.Logger;
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java
new file mode 100644
index 0000000..a2ac123
--- /dev/null
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/HandlesSplits.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.fn.harness;
+
+import com.google.auto.value.AutoValue;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi;
+
+public interface HandlesSplits {
+  SplitResult trySplit(double fractionOfRemainder);
+
+  double getProgress();
+
+  @AutoValue
+  abstract class SplitResult {
+    public static SplitResult of(
+        BeamFnApi.BundleApplication primaryRoot, BeamFnApi.DelayedBundleApplication residualRoot) {
+      return new AutoValue_HandlesSplits_SplitResult(primaryRoot, residualRoot);
+    }
+
+    public abstract BeamFnApi.BundleApplication getPrimaryRoot();
+
+    public abstract BeamFnApi.DelayedBundleApplication getResidualRoot();
+  }
+}
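
For context, the simplest consumer satisfying this contract is one that always declines to split (a minimal sketch with a hypothetical class name; returning null from trySplit is what BeamFnDataReadRunner.split above treats as "no sub-element split"):

    // Hypothetical example only, to illustrate the contract.
    class NonSplittingConsumer implements HandlesSplits {
      @Override
      public SplitResult trySplit(double fractionOfRemainder) {
        // Decline the split; the caller then falls back to splitting at an element boundary.
        return null;
      }

      @Override
      public double getProgress() {
        // Report that the current element is roughly half processed.
        return 0.5;
      }
    }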
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/AddHarnessIdInterceptor.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/AddHarnessIdInterceptor.java
index 7fec44e..f949716 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/AddHarnessIdInterceptor.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/AddHarnessIdInterceptor.java
@@ -19,10 +19,10 @@
 
 import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
 
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ClientInterceptor;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Metadata.Key;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.MetadataUtils;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ClientInterceptor;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Metadata.Key;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.MetadataUtils;
 
 /** A {@link ClientInterceptor} that attaches a provided SDK Harness ID to outgoing messages. */
 public class AddHarnessIdInterceptor {
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/BeamFnControlClient.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/BeamFnControlClient.java
index a6a0211..bfaddda 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/BeamFnControlClient.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/BeamFnControlClient.java
@@ -32,8 +32,8 @@
 import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.function.ThrowingFunction;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java
index a258e01..00882b5 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/ProcessBundleHandler.java
@@ -64,8 +64,8 @@
 import org.apache.beam.sdk.function.ThrowingRunnable;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.util.common.ReflectHelpers;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Message;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Message;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ArrayListMultimap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.HashMultimap;
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/RegisterHandler.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/RegisterHandler.java
index 6a02c7d..bfa0980 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/RegisterHandler.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/control/RegisterHandler.java
@@ -25,7 +25,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.RegisterResponse;
 import org.apache.beam.model.pipeline.v1.RunnerApi;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Message;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Message;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClient.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClient.java
index 61c4580..e21fee9 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClient.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClient.java
@@ -33,7 +33,7 @@
 import org.apache.beam.sdk.fn.data.LogicalEndpoint;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/ElementCountFnDataReceiver.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/ElementCountFnDataReceiver.java
deleted file mode 100644
index f234844..0000000
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/ElementCountFnDataReceiver.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.fn.harness.data;
-
-import java.io.Closeable;
-import java.util.HashMap;
-import org.apache.beam.runners.core.metrics.LabeledMetrics;
-import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
-import org.apache.beam.runners.core.metrics.MonitoringInfoConstants;
-import org.apache.beam.runners.core.metrics.MonitoringInfoConstants.Labels;
-import org.apache.beam.runners.core.metrics.MonitoringInfoMetricName;
-import org.apache.beam.sdk.fn.data.FnDataReceiver;
-import org.apache.beam.sdk.metrics.Counter;
-import org.apache.beam.sdk.metrics.MetricsContainer;
-import org.apache.beam.sdk.metrics.MetricsEnvironment;
-import org.apache.beam.sdk.util.WindowedValue;
-
-/**
- * A wrapping {@code FnDataReceiver<WindowedValue<T>>} which counts the number of elements consumed
- * by the original {@code FnDataReceiver<WindowedValue<T>>}.
- *
- * @param <T> - The receiving type of the PTransform.
- */
-public class ElementCountFnDataReceiver<T> implements FnDataReceiver<WindowedValue<T>> {
-
-  private FnDataReceiver<WindowedValue<T>> original;
-  private Counter counter;
-  private MetricsContainer unboundMetricContainer;
-
-  public ElementCountFnDataReceiver(
-      FnDataReceiver<WindowedValue<T>> original,
-      String pCollection,
-      MetricsContainerStepMap metricContainerRegistry) {
-    this.original = original;
-    HashMap<String, String> labels = new HashMap<String, String>();
-    labels.put(Labels.PCOLLECTION, pCollection);
-    MonitoringInfoMetricName metricName =
-        MonitoringInfoMetricName.named(MonitoringInfoConstants.Urns.ELEMENT_COUNT, labels);
-    this.counter = LabeledMetrics.counter(metricName);
-    // Collect the metric in a metric container which is not bound to the step name.
-    // This is required to count elements from impulse steps, which will produce elements outside
-    // of a pTransform context.
-    this.unboundMetricContainer = metricContainerRegistry.getUnboundContainer();
-  }
-
-  @Override
-  public void accept(WindowedValue<T> input) throws Exception {
-    try (Closeable close = MetricsEnvironment.scopedMetricsContainer(this.unboundMetricContainer)) {
-      // Increment the counter for each window the element occurs in.
-      this.counter.inc(input.getWindows().size());
-      this.original.accept(input);
-    }
-  }
-}
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/MultiplexingFnDataReceiver.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/MultiplexingFnDataReceiver.java
deleted file mode 100644
index 65f75b0..0000000
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/MultiplexingFnDataReceiver.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.fn.harness.data;
-
-import java.util.Collection;
-import org.apache.beam.sdk.fn.data.FnDataReceiver;
-import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
-
-/**
- * A {@link FnDataReceiver} which forwards all received inputs to a collection of {@link
- * FnDataReceiver receivers}.
- */
-public class MultiplexingFnDataReceiver<T> implements FnDataReceiver<T> {
-  public static <T> FnDataReceiver<T> forConsumers(Collection<FnDataReceiver<T>> consumers) {
-    if (consumers.size() == 1) {
-      return Iterables.getOnlyElement(consumers);
-    }
-    return new MultiplexingFnDataReceiver<>(consumers);
-  }
-
-  private final Collection<FnDataReceiver<T>> consumers;
-
-  private MultiplexingFnDataReceiver(Collection<FnDataReceiver<T>> consumers) {
-    this.consumers = consumers;
-  }
-
-  @Override
-  public void accept(T input) throws Exception {
-    for (FnDataReceiver<T> consumer : consumers) {
-      consumer.accept(input);
-    }
-  }
-}
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java
index 80d270f..3276c46 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistry.java
@@ -17,23 +17,32 @@
  */
 package org.apache.beam.fn.harness.data;
 
+import com.google.auto.value.AutoValue;
 import java.io.Closeable;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import org.apache.beam.fn.harness.HandlesSplits;
 import org.apache.beam.model.pipeline.v1.MetricsApi.MonitoringInfo;
 import org.apache.beam.runners.core.metrics.ExecutionStateTracker;
+import org.apache.beam.runners.core.metrics.LabeledMetrics;
 import org.apache.beam.runners.core.metrics.MetricsContainerImpl;
 import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
 import org.apache.beam.runners.core.metrics.MonitoringInfoConstants;
+import org.apache.beam.runners.core.metrics.MonitoringInfoConstants.Labels;
+import org.apache.beam.runners.core.metrics.MonitoringInfoMetricName;
 import org.apache.beam.runners.core.metrics.SimpleExecutionState;
 import org.apache.beam.runners.core.metrics.SimpleStateRegistry;
 import org.apache.beam.sdk.fn.data.FnDataReceiver;
+import org.apache.beam.sdk.metrics.Counter;
+import org.apache.beam.sdk.metrics.MetricsContainer;
 import org.apache.beam.sdk.metrics.MetricsEnvironment;
 import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ArrayListMultimap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ListMultimap;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
 
 /**
  * The {@code PCollectionConsumerRegistry} is used to maintain a collection of consuming
@@ -43,8 +52,24 @@
  */
 public class PCollectionConsumerRegistry {
 
-  private ListMultimap<String, FnDataReceiver<WindowedValue<?>>> pCollectionIdsToConsumers;
-  private Map<String, ElementCountFnDataReceiver> pCollectionIdsToWrappedConsumer;
+  /** Stores metadata about each consumer so that the appropriate metrics tracking can occur. */
+  @AutoValue
+  abstract static class ConsumerAndMetadata {
+    public static ConsumerAndMetadata forConsumer(
+        FnDataReceiver consumer, String pTransformId, SimpleExecutionState state) {
+      return new AutoValue_PCollectionConsumerRegistry_ConsumerAndMetadata(
+          consumer, pTransformId, state);
+    }
+
+    public abstract FnDataReceiver getConsumer();
+
+    public abstract String getPTransformId();
+
+    public abstract SimpleExecutionState getExecutionState();
+  }
+
+  private ListMultimap<String, ConsumerAndMetadata> pCollectionIdsToConsumers;
+  private Map<String, FnDataReceiver> pCollectionIdsToWrappedConsumer;
   private MetricsContainerStepMap metricsContainerRegistry;
   private ExecutionStateTracker stateTracker;
   private SimpleStateRegistry executionStates = new SimpleStateRegistry();
@@ -54,7 +79,7 @@
     this.metricsContainerRegistry = metricsContainerRegistry;
     this.stateTracker = stateTracker;
     this.pCollectionIdsToConsumers = ArrayListMultimap.create();
-    this.pCollectionIdsToWrappedConsumer = new HashMap<String, ElementCountFnDataReceiver>();
+    this.pCollectionIdsToWrappedConsumer = new HashMap<>();
   }
 
   /**
@@ -77,15 +102,13 @@
     // Just save these consumers for now, but package them up later with an
     // ElementCountFnDataReceiver and possibly a MultiplexingFnDataReceiver
     // if there are multiple consumers.
-    ElementCountFnDataReceiver wrappedConsumer =
-        pCollectionIdsToWrappedConsumer.getOrDefault(pCollectionId, null);
-    if (wrappedConsumer != null) {
+    if (pCollectionIdsToWrappedConsumer.containsKey(pCollectionId)) {
       throw new RuntimeException(
           "New consumers for a pCollectionId cannot be register()-d after "
               + "calling getMultiplexingConsumer.");
     }
 
-    HashMap<String, String> labelsMetadata = new HashMap<String, String>();
+    HashMap<String, String> labelsMetadata = new HashMap<>();
     labelsMetadata.put(MonitoringInfoConstants.Labels.PTRANSFORM, pTransformId);
     SimpleExecutionState state =
         new SimpleExecutionState(
@@ -93,20 +116,9 @@
             MonitoringInfoConstants.Urns.PROCESS_BUNDLE_MSECS,
             labelsMetadata);
     executionStates.register(state);
-    // Wrap the consumer with extra logic to set the metric container with the appropriate
-    // PTransform context. This ensures that user metrics obtain the pTransform ID when they are
-    // created. Also use the ExecutionStateTracker and enter an appropriate state to track the
-    // Process Bundle Execution time metric.
-    FnDataReceiver<WindowedValue<T>> wrapAndEnableMetricContainer =
-        (WindowedValue<T> input) -> {
-          MetricsContainerImpl container = metricsContainerRegistry.getContainer(pTransformId);
-          try (Closeable closeable = MetricsEnvironment.scopedMetricsContainer(container)) {
-            try (Closeable trackerCloseable = this.stateTracker.enterState(state)) {
-              consumer.accept(input);
-            }
-          }
-        };
-    pCollectionIdsToConsumers.put(pCollectionId, (FnDataReceiver) wrapAndEnableMetricContainer);
+
+    pCollectionIdsToConsumers.put(
+        pCollectionId, ConsumerAndMetadata.forConsumer(consumer, pTransformId, state));
   }
 
   /** Reset the execution states of the registered functions. */
@@ -121,24 +133,28 @@
 
   /**
    * New consumers should not be register()-ed after calling this method. This will cause a
-   * RuntimeException, as this would fail to properly wrap the late-added consumer to the
-   * ElementCountFnDataReceiver.
+   * RuntimeException, as a late-added consumer would not be wrapped for metrics tracking.
    *
-   * @return A single ElementCountFnDataReceiver which directly wraps all the registered consumers.
+   * @return A {@link FnDataReceiver} which directly wraps all the registered consumers.
    */
   public FnDataReceiver<WindowedValue<?>> getMultiplexingConsumer(String pCollectionId) {
-    ElementCountFnDataReceiver wrappedConsumer =
-        pCollectionIdsToWrappedConsumer.getOrDefault(pCollectionId, null);
-    if (wrappedConsumer == null) {
-      List<FnDataReceiver<WindowedValue<?>>> consumers =
-          pCollectionIdsToConsumers.get(pCollectionId);
-      FnDataReceiver<WindowedValue<?>> consumer =
-          MultiplexingFnDataReceiver.forConsumers(consumers);
-      wrappedConsumer =
-          new ElementCountFnDataReceiver(consumer, pCollectionId, metricsContainerRegistry);
-      pCollectionIdsToWrappedConsumer.put(pCollectionId, wrappedConsumer);
-    }
-    return wrappedConsumer;
+    return pCollectionIdsToWrappedConsumer.computeIfAbsent(
+        pCollectionId,
+        pcId -> {
+          List<ConsumerAndMetadata> consumerAndMetadatas = pCollectionIdsToConsumers.get(pcId);
+          if (consumerAndMetadatas == null) {
+            throw new IllegalArgumentException(
+                String.format("Unknown PCollectionId %s", pCollectionId));
+          } else if (consumerAndMetadatas.size() == 1) {
+            if (consumerAndMetadatas.get(0).getConsumer() instanceof HandlesSplits) {
+              return new SplittingMetricTrackingFnDataReceiver(pcId, consumerAndMetadatas.get(0));
+            }
+            return new MetricTrackingFnDataReceiver(pcId, consumerAndMetadatas.get(0));
+          } else {
+            /* TODO(SDF): Consider supporting splitting each consumer individually. This would never come up in the existing SDF expansion, but might be useful to support fused SDF nodes. This would require dedicated delivery of the split results to each of the consumers separately. */
+            return new MultiplexingMetricTrackingFnDataReceiver(pcId, consumerAndMetadatas);
+          }
+        });
   }
 
   /** @return Execution Time MonitoringInfos based on the tracked start or finish function. */
@@ -146,11 +162,141 @@
     return executionStates.getExecutionTimeMonitoringInfos();
   }
 
+  /** @return the underlying consumers for a pCollectionId; some tests may wish to check this. */
+  @VisibleForTesting
+  public List<FnDataReceiver> getUnderlyingConsumers(String pCollectionId) {
+    return Lists.transform(
+        pCollectionIdsToConsumers.get(pCollectionId), input -> input.getConsumer());
+  }
+
   /**
-   * @return the number of underlying consumers for a pCollectionId, some tests may wish to check
-   *     this.
+   * A wrapping {@code FnDataReceiver<WindowedValue<T>>} which counts the number of elements
+   * consumed by the original {@code FnDataReceiver<WindowedValue<T>> consumer} and sets up metrics
+   * for tracking PTransform processing time.
+   *
+   * @param <T> - The receiving type of the PTransform.
    */
-  public List<FnDataReceiver<WindowedValue<?>>> getUnderlyingConsumers(String pCollectionId) {
-    return pCollectionIdsToConsumers.get(pCollectionId);
+  private class MetricTrackingFnDataReceiver<T> implements FnDataReceiver<WindowedValue<T>> {
+    private final FnDataReceiver<WindowedValue<T>> delegate;
+    private final String pTransformId;
+    private final SimpleExecutionState state;
+    private final Counter counter;
+    private final MetricsContainer unboundMetricContainer;
+
+    public MetricTrackingFnDataReceiver(
+        String pCollectionId, ConsumerAndMetadata consumerAndMetadata) {
+      this.delegate = consumerAndMetadata.getConsumer();
+      this.state = consumerAndMetadata.getExecutionState();
+      this.pTransformId = consumerAndMetadata.getPTransformId();
+      HashMap<String, String> labels = new HashMap<>();
+      labels.put(Labels.PCOLLECTION, pCollectionId);
+      MonitoringInfoMetricName metricName =
+          MonitoringInfoMetricName.named(MonitoringInfoConstants.Urns.ELEMENT_COUNT, labels);
+      this.counter = LabeledMetrics.counter(metricName);
+      // Collect the metric in a metric container which is not bound to the step name.
+      // This is required to count elements from impulse steps, which will produce elements outside
+      // of a pTransform context.
+      this.unboundMetricContainer = metricsContainerRegistry.getUnboundContainer();
+    }
+
+    @Override
+    public void accept(WindowedValue<T> input) throws Exception {
+      try (Closeable close =
+          MetricsEnvironment.scopedMetricsContainer(this.unboundMetricContainer)) {
+        // Increment the counter for each window the element occurs in.
+        this.counter.inc(input.getWindows().size());
+
+        // Set the metric container with the appropriate PTransform context. This ensures that
+        // user metrics obtain the pTransform ID when they are created. Also use the
+        // ExecutionStateTracker and enter an appropriate state to track the Process Bundle
+        // Execution time metric.
+        MetricsContainerImpl container = metricsContainerRegistry.getContainer(pTransformId);
+        try (Closeable closeable = MetricsEnvironment.scopedMetricsContainer(container)) {
+          try (Closeable trackerCloseable = stateTracker.enterState(state)) {
+            this.delegate.accept(input);
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * A wrapping {@code FnDataReceiver<WindowedValue<T>>} which counts the number of elements
+   * consumed by the original {@code FnDataReceiver<WindowedValue<T>> consumers} and sets up metrics
+   * for tracking PTransform processing time.
+   *
+   * @param <T> - The receiving type of the PTransform.
+   */
+  private class MultiplexingMetricTrackingFnDataReceiver<T>
+      implements FnDataReceiver<WindowedValue<T>> {
+    private final List<ConsumerAndMetadata> consumerAndMetadatas;
+    private final Counter counter;
+    private final MetricsContainer unboundMetricContainer;
+
+    public MultiplexingMetricTrackingFnDataReceiver(
+        String pCollectionId, List<ConsumerAndMetadata> consumerAndMetadatas) {
+      this.consumerAndMetadatas = consumerAndMetadatas;
+      HashMap<String, String> labels = new HashMap<>();
+      labels.put(Labels.PCOLLECTION, pCollectionId);
+      MonitoringInfoMetricName metricName =
+          MonitoringInfoMetricName.named(MonitoringInfoConstants.Urns.ELEMENT_COUNT, labels);
+      this.counter = LabeledMetrics.counter(metricName);
+      // Collect the metric in a metric container which is not bound to the step name.
+      // This is required to count elements from impulse steps, which will produce elements outside
+      // of a pTransform context.
+      this.unboundMetricContainer = metricsContainerRegistry.getUnboundContainer();
+    }
+
+    @Override
+    public void accept(WindowedValue<T> input) throws Exception {
+      try (Closeable close =
+          MetricsEnvironment.scopedMetricsContainer(this.unboundMetricContainer)) {
+        // Increment the counter for each window the element occurs in.
+        this.counter.inc(input.getWindows().size());
+
+        // Set the metric container with the appropriate PTransform context for each consumer.
+        // This ensures that user metrics obtain the pTransform ID when they are created. Also
+        // use the ExecutionStateTracker and enter an appropriate state to track the Process
+        // Bundle Execution time metric.
+        for (ConsumerAndMetadata consumerAndMetadata : consumerAndMetadatas) {
+          MetricsContainerImpl container =
+              metricsContainerRegistry.getContainer(consumerAndMetadata.getPTransformId());
+          try (Closeable closeable = MetricsEnvironment.scopedMetricsContainer(container)) {
+            try (Closeable trackerCloseable =
+                stateTracker.enterState(consumerAndMetadata.getExecutionState())) {
+              consumerAndMetadata.getConsumer().accept(input);
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * A wrapping {@code FnDataReceiver<WindowedValue<T>>} which counts the number of elements
+   * consumed by the original {@code FnDataReceiver<WindowedValue<T>> consumer} and forwards split
+   * and progress requests to the original consumer.
+   *
+   * @param <T> - The receiving type of the PTransform.
+   */
+  private class SplittingMetricTrackingFnDataReceiver<T> extends MetricTrackingFnDataReceiver<T>
+      implements HandlesSplits {
+    private final HandlesSplits delegate;
+
+    public SplittingMetricTrackingFnDataReceiver(
+        String pCollection, ConsumerAndMetadata consumerAndMetadata) {
+      super(pCollection, consumerAndMetadata);
+      this.delegate = (HandlesSplits) consumerAndMetadata.getConsumer();
+    }
+
+    @Override
+    public SplitResult trySplit(double fractionOfRemainder) {
+      return delegate.trySplit(fractionOfRemainder);
+    }
+
+    @Override
+    public double getProgress() {
+      return delegate.getProgress();
+    }
   }
 }
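
A minimal usage sketch (not part of this patch) of the reworked PCollectionConsumerRegistry, mirroring the test wiring further below; the Mockito mock and the "outputPC"/"pTransformId" identifiers are illustrative assumptions, not values taken from the runner.

    // Sketch: register consumers before requesting the multiplexing consumer; the returned
    // FnDataReceiver wraps them with element-count and execution-time metric tracking.
    import static org.mockito.Mockito.mock;

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.beam.fn.harness.data.PCollectionConsumerRegistry;
    import org.apache.beam.runners.core.metrics.ExecutionStateTracker;
    import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
    import org.apache.beam.sdk.fn.data.FnDataReceiver;
    import org.apache.beam.sdk.util.WindowedValue;

    public class ConsumerRegistrySketch {
      public static void main(String[] args) throws Exception {
        MetricsContainerStepMap metrics = new MetricsContainerStepMap();
        PCollectionConsumerRegistry registry =
            new PCollectionConsumerRegistry(metrics, mock(ExecutionStateTracker.class));

        List<WindowedValue<String>> received = new ArrayList<>();
        // register() must be called before getMultiplexingConsumer(); the reverse order throws.
        registry.register("outputPC", "pTransformId", received::add);

        FnDataReceiver<WindowedValue<?>> wrapped = registry.getMultiplexingConsumer("outputPC");
        // Each accept() increments the PCollection element-count counter and enters the
        // registered PTransform's execution state before delegating to the consumer.
        wrapped.accept(WindowedValue.valueInGlobalWindow("elem"));
      }
    }

Internally, getMultiplexingConsumer returns a MetricTrackingFnDataReceiver for a single consumer (a SplittingMetricTrackingFnDataReceiver when that consumer implements HandlesSplits) and a MultiplexingMetricTrackingFnDataReceiver when several consumers are registered.
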
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClient.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClient.java
index 01cf0c7..8fae554 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClient.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClient.java
@@ -21,7 +21,6 @@
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.TimeUnit;
 import org.apache.beam.fn.harness.control.ProcessBundleHandler;
-import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest;
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.sdk.coders.Coder;
@@ -96,7 +95,7 @@
    *
    * <p>This method is NOT thread safe. This should only be invoked by a single thread, and is
    * intended for use with a newly constructed QueueingBeamFnDataClient in {@link
-   * ProcessBundleHandler#processBundle(InstructionRequest)}.
+   * ProcessBundleHandler#processBundle}.
    */
   public void drainAndBlock() throws Exception {
     while (true) {
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClient.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClient.java
index 1941a10..e17f5eb 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClient.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClient.java
@@ -46,12 +46,12 @@
 import org.apache.beam.sdk.extensions.gcp.options.GcsOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.SdkHarnessOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Timestamp;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientCallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ClientResponseObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Timestamp;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientCallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.ClientResponseObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java
index b3e6f64..38e2027 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BagUserState.java
@@ -27,7 +27,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.fn.stream.DataStreams;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 
 /**
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java
index f85b3c8..bb4a661 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCache.java
@@ -31,8 +31,8 @@
 import org.apache.beam.model.pipeline.v1.Endpoints.ApiServiceDescriptor;
 import org.apache.beam.sdk.fn.IdGenerator;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiStateAccessor.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiStateAccessor.java
index 26b0dfa..1d2dc1f 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiStateAccessor.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/FnApiStateAccessor.java
@@ -54,7 +54,7 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
 
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapSideInput.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapSideInput.java
index 996b87e..d795d44 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapSideInput.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/MultimapSideInput.java
@@ -22,7 +22,7 @@
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.fn.stream.DataStreams;
 import org.apache.beam.sdk.transforms.Materializations.MultimapView;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 
 /**
  * An implementation of a multimap side input that utilizes the Beam Fn State API to fetch values.
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java
index 1ebadb5..a7cb38d 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/state/StateFetchingIterators.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateGetRequest;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Throwables;
 
 /**
diff --git a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactories.java b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactories.java
index 7f21991..8707c3b 100644
--- a/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactories.java
+++ b/sdks/java/harness/src/main/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactories.java
@@ -22,7 +22,7 @@
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.options.ExperimentalOptions;
 import org.apache.beam.sdk.options.PipelineOptions;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 
 /**
  * Uses {@link PipelineOptions} to configure which underlying {@link StreamObserver} implementation
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/AssignWindowsRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/AssignWindowsRunnerTest.java
index e6e5297..c27400f 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/AssignWindowsRunnerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/AssignWindowsRunnerTest.java
@@ -184,7 +184,7 @@
             null /* pipelineOptions */,
             null /* beamFnDataClient */,
             null /* beamFnStateClient */,
-            null /* pTransformId */,
+            "ptransform",
             PTransform.newBuilder()
                 .putInputs("in", "input")
                 .putOutputs("out", "output")
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java
index e8a814c..39e1f9e 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BeamFnDataReadRunnerTest.java
@@ -21,9 +21,11 @@
 import static org.hamcrest.Matchers.contains;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.empty;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.anyDouble;
 import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
@@ -39,12 +41,18 @@
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
+import org.apache.beam.fn.harness.HandlesSplits.SplitResult;
 import org.apache.beam.fn.harness.PTransformRunnerFactory.Registrar;
 import org.apache.beam.fn.harness.data.BeamFnDataClient;
-import org.apache.beam.fn.harness.data.MultiplexingFnDataReceiver;
 import org.apache.beam.fn.harness.data.PCollectionConsumerRegistry;
 import org.apache.beam.fn.harness.data.PTransformFunctionRegistry;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.BundleApplication;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.DelayedBundleApplication;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitRequest.DesiredSplit;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse;
+import org.apache.beam.model.fnexecution.v1.BeamFnApi.ProcessBundleSplitResponse.ChannelSplit;
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.model.pipeline.v1.RunnerApi;
 import org.apache.beam.model.pipeline.v1.RunnerApi.MessageWithComponents;
@@ -65,7 +73,6 @@
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Suppliers;
-import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
@@ -203,10 +210,8 @@
     when(mockBeamFnDataClient.receive(any(), any(), any(), any()))
         .thenReturn(bundle1Future)
         .thenReturn(bundle2Future);
-    List<WindowedValue<String>> valuesA = new ArrayList<>();
-    List<WindowedValue<String>> valuesB = new ArrayList<>();
-    FnDataReceiver<WindowedValue<String>> consumers =
-        MultiplexingFnDataReceiver.forConsumers(ImmutableList.of(valuesA::add, valuesB::add));
+    List<WindowedValue<String>> values = new ArrayList<>();
+    FnDataReceiver<WindowedValue<String>> consumers = values::add;
     AtomicReference<String> bundleId = new AtomicReference<>("0");
     BeamFnDataReadRunner<String> readRunner =
         new BeamFnDataReadRunner<>(
@@ -245,13 +250,11 @@
 
     readRunner.blockTillReadFinishes();
     future.get();
-    assertThat(valuesA, contains(valueInGlobalWindow("ABC"), valueInGlobalWindow("DEF")));
-    assertThat(valuesB, contains(valueInGlobalWindow("ABC"), valueInGlobalWindow("DEF")));
+    assertThat(values, contains(valueInGlobalWindow("ABC"), valueInGlobalWindow("DEF")));
 
     // Process for bundle id 1
     bundleId.set("1");
-    valuesA.clear();
-    valuesB.clear();
+    values.clear();
     readRunner.registerInputLocation();
 
     verify(mockBeamFnDataClient)
@@ -278,8 +281,7 @@
 
     readRunner.blockTillReadFinishes();
     future.get();
-    assertThat(valuesA, contains(valueInGlobalWindow("GHI"), valueInGlobalWindow("JKL")));
-    assertThat(valuesB, contains(valueInGlobalWindow("GHI"), valueInGlobalWindow("JKL")));
+    assertThat(values, contains(valueInGlobalWindow("GHI"), valueInGlobalWindow("JKL")));
 
     verifyNoMoreInteractions(mockBeamFnDataClient);
   }
@@ -296,4 +298,185 @@
     }
     fail("Expected registrar not found.");
   }
+
+  @Test
+  public void testSplittingWhenNoElementsProcessed() throws Exception {
+    List<WindowedValue<String>> outputValues = new ArrayList<>();
+    BeamFnDataReadRunner<String> readRunner = createReadRunner(outputValues::add);
+
+    ProcessBundleSplitRequest request =
+        ProcessBundleSplitRequest.newBuilder()
+            .putDesiredSplits(
+                "pTransformId",
+                DesiredSplit.newBuilder()
+                    .setEstimatedInputElements(10)
+                    .setFractionOfRemainder(0.5)
+                    .build())
+            .build();
+    ProcessBundleSplitResponse.Builder responseBuilder = ProcessBundleSplitResponse.newBuilder();
+    readRunner.split(request, responseBuilder);
+
+    ProcessBundleSplitResponse expected =
+        ProcessBundleSplitResponse.newBuilder()
+            .addChannelSplits(
+                ChannelSplit.newBuilder()
+                    .setLastPrimaryElement(4)
+                    .setFirstResidualElement(5)
+                    .build())
+            .build();
+    assertEquals(expected, responseBuilder.build());
+
+    // Ensure that we process the correct number of elements after splitting.
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("A"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("B"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("C"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("D"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("E"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("F"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("G"));
+    assertThat(
+        outputValues,
+        contains(
+            valueInGlobalWindow("A"),
+            valueInGlobalWindow("B"),
+            valueInGlobalWindow("C"),
+            valueInGlobalWindow("D"),
+            valueInGlobalWindow("E")));
+  }
+
+  @Test
+  public void testSplittingWhenSomeElementsProcessed() throws Exception {
+    List<WindowedValue<String>> outputValues = new ArrayList<>();
+    BeamFnDataReadRunner<String> readRunner = createReadRunner(outputValues::add);
+
+    ProcessBundleSplitRequest request =
+        ProcessBundleSplitRequest.newBuilder()
+            .putDesiredSplits(
+                "pTransformId",
+                DesiredSplit.newBuilder()
+                    .setEstimatedInputElements(10)
+                    .setFractionOfRemainder(0.5)
+                    .build())
+            .build();
+    ProcessBundleSplitResponse.Builder responseBuilder = ProcessBundleSplitResponse.newBuilder();
+
+    // Process 2 elements, then split.
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("A"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("B"));
+    readRunner.split(request, responseBuilder);
+
+    ProcessBundleSplitResponse expected =
+        ProcessBundleSplitResponse.newBuilder()
+            .addChannelSplits(
+                ChannelSplit.newBuilder()
+                    .setLastPrimaryElement(5)
+                    .setFirstResidualElement(6)
+                    .build())
+            .build();
+    assertEquals(expected, responseBuilder.build());
+
+    // Ensure that we process the correct number of elements after splitting.
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("C"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("D"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("E"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("F"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("G"));
+    assertThat(
+        outputValues,
+        contains(
+            valueInGlobalWindow("A"),
+            valueInGlobalWindow("B"),
+            valueInGlobalWindow("C"),
+            valueInGlobalWindow("D"),
+            valueInGlobalWindow("E"),
+            valueInGlobalWindow("F")));
+  }
+
+  @Test
+  public void testSplittingDownstreamReceiver() throws Exception {
+    SplitResult splitResult =
+        SplitResult.of(
+            BundleApplication.newBuilder().setInputId("primary").build(),
+            DelayedBundleApplication.newBuilder()
+                .setApplication(BundleApplication.newBuilder().setInputId("residual").build())
+                .build());
+    SplittingReceiver splittingReceiver = mock(SplittingReceiver.class);
+    when(splittingReceiver.getProgress()).thenReturn(0.3);
+    when(splittingReceiver.trySplit(anyDouble())).thenReturn(splitResult);
+    BeamFnDataReadRunner<String> readRunner = createReadRunner(splittingReceiver);
+
+    ProcessBundleSplitRequest request =
+        ProcessBundleSplitRequest.newBuilder()
+            .putDesiredSplits(
+                "pTransformId",
+                DesiredSplit.newBuilder()
+                    .setEstimatedInputElements(10)
+                    .setFractionOfRemainder(0.05)
+                    .build())
+            .build();
+    ProcessBundleSplitResponse.Builder responseBuilder = ProcessBundleSplitResponse.newBuilder();
+
+    // We will be "processing" the 'C' element, i.e. the element at zero-based index 2.
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("A"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("B"));
+    readRunner.forwardElementToConsumer(valueInGlobalWindow("C"));
+    readRunner.split(request, responseBuilder);
+
+    ProcessBundleSplitResponse expected =
+        ProcessBundleSplitResponse.newBuilder()
+            .addPrimaryRoots(splitResult.getPrimaryRoot())
+            .addResidualRoots(splitResult.getResidualRoot())
+            .addChannelSplits(
+                ChannelSplit.newBuilder()
+                    .setLastPrimaryElement(1)
+                    .setFirstResidualElement(3)
+                    .build())
+            .build();
+    assertEquals(expected, responseBuilder.build());
+  }
+
+  private abstract static class SplittingReceiver
+      implements FnDataReceiver<WindowedValue<String>>, HandlesSplits {}
+
+  private BeamFnDataReadRunner<String> createReadRunner(
+      FnDataReceiver<WindowedValue<String>> consumer) throws Exception {
+    String bundleId = "57";
+
+    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
+    PCollectionConsumerRegistry consumers =
+        new PCollectionConsumerRegistry(
+            metricsContainerRegistry, mock(ExecutionStateTracker.class));
+    String localOutputId = "outputPC";
+    String pTransformId = "pTransformId";
+    consumers.register(localOutputId, pTransformId, consumer);
+    PTransformFunctionRegistry startFunctionRegistry =
+        new PTransformFunctionRegistry(
+            mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "start");
+    PTransformFunctionRegistry finishFunctionRegistry =
+        new PTransformFunctionRegistry(
+            mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "finish");
+    List<ThrowingRunnable> teardownFunctions = new ArrayList<>();
+
+    RunnerApi.PTransform pTransform =
+        RemoteGrpcPortRead.readFromPort(PORT_SPEC, localOutputId).toPTransform();
+
+    return new BeamFnDataReadRunner.Factory<String>()
+        .createRunnerForPTransform(
+            PipelineOptionsFactory.create(),
+            mockBeamFnDataClient,
+            null /* beamFnStateClient */,
+            pTransformId,
+            pTransform,
+            Suppliers.ofInstance(bundleId)::get,
+            ImmutableMap.of(
+                localOutputId,
+                RunnerApi.PCollection.newBuilder().setCoderId(ELEMENT_CODER_SPEC_ID).build()),
+            COMPONENTS.getCodersMap(),
+            COMPONENTS.getWindowingStrategiesMap(),
+            consumers,
+            startFunctionRegistry,
+            finishFunctionRegistry,
+            teardownFunctions::add,
+            null /* splitListener */);
+  }
 }
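
A small arithmetic sketch (not part of this patch) of the ChannelSplit indices that the two splitting tests above assert, under the assumption that the primary keeps the already-processed elements plus floor(fractionOfRemainder * remaining) of the elements still expected on the channel.

    // Sketch of the expected ChannelSplit indices, matching the test assertions above
    // (assumed formula: primary = processed + floor(fraction * remaining) elements).
    public class ChannelSplitArithmeticSketch {
      static long lastPrimaryElement(long estimatedInputElements, long processed, double fraction) {
        long remaining = estimatedInputElements - processed;
        long keptFromRemaining = (long) (remaining * fraction);
        return processed + keptFromRemaining - 1; // zero-based index
      }

      public static void main(String[] args) {
        // No elements processed yet: 0 + floor(0.5 * 10) - 1 = 4, so the residual starts at 5.
        System.out.println(lastPrimaryElement(10, 0, 0.5)); // 4
        // Two elements processed: 2 + floor(0.5 * 8) - 1 = 5, so the residual starts at 6.
        System.out.println(lastPrimaryElement(10, 2, 0.5)); // 5
      }
    }
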
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BoundedSourceRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BoundedSourceRunnerTest.java
index bc31e9c..0f461d2 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BoundedSourceRunnerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/BoundedSourceRunnerTest.java
@@ -44,7 +44,7 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Suppliers;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java
index 0f4b375..8bdce50 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnApiDoFnRunnerTest.java
@@ -100,7 +100,7 @@
 import org.apache.beam.sdk.values.PCollectionView;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.TupleTagList;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Suppliers;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnHarnessTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnHarnessTest.java
index eaf0b07..90d6848 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnHarnessTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/FnHarnessTest.java
@@ -42,10 +42,10 @@
 import org.apache.beam.sdk.harness.JvmInitializer;
 import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.TextFormat;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.TextFormat;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java
index 36a4779..5adb001 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/BeamFnControlClientTest.java
@@ -43,10 +43,10 @@
 import org.apache.beam.sdk.fn.test.InProcessManagedChannelFactory;
 import org.apache.beam.sdk.fn.test.TestStreams;
 import org.apache.beam.sdk.function.ThrowingFunction;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
index a7a233d..a3e959b 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/control/ProcessBundleHandlerTest.java
@@ -77,8 +77,8 @@
 import org.apache.beam.sdk.util.SerializableUtils;
 import org.apache.beam.sdk.util.WindowedValue;
 import org.apache.beam.sdk.values.TupleTag;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Message;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Message;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Multimap;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClientTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClientTest.java
index deb6218..7e5dda9 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClientTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataGrpcClientTest.java
@@ -46,13 +46,13 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataInboundObserverTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataInboundObserverTest.java
index aa45df6..f51f006 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataInboundObserverTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/BeamFnDataInboundObserverTest.java
@@ -36,7 +36,7 @@
 import org.apache.beam.sdk.fn.data.InboundDataClient;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/ElementCountFnDataReceiverTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/ElementCountFnDataReceiverTest.java
deleted file mode 100644
index ace16c6..0000000
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/ElementCountFnDataReceiverTest.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.fn.harness.data;
-
-import static junit.framework.TestCase.assertEquals;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.withSettings;
-import static org.powermock.api.mockito.PowerMockito.mockStatic;
-
-import org.apache.beam.model.pipeline.v1.MetricsApi.MonitoringInfo;
-import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
-import org.apache.beam.runners.core.metrics.MonitoringInfoConstants;
-import org.apache.beam.runners.core.metrics.SimpleMonitoringInfoBuilder;
-import org.apache.beam.sdk.fn.data.FnDataReceiver;
-import org.apache.beam.sdk.metrics.MetricsEnvironment;
-import org.apache.beam.sdk.util.WindowedValue;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.powermock.api.mockito.PowerMockito;
-import org.powermock.core.classloader.annotations.PrepareForTest;
-import org.powermock.modules.junit4.PowerMockRunner;
-
-/** Tests for {@link ElementCountFnDataReceiver}. */
-@RunWith(PowerMockRunner.class)
-@PrepareForTest(MetricsEnvironment.class)
-public class ElementCountFnDataReceiverTest {
-  /**
-   * Test that the elements are counted, and a MonitoringInfo can be extracted from a
-   * metricsContainer, if it is in scope.
-   *
-   * @throws Exception
-   */
-  @Test
-  public void testCountsElements() throws Exception {
-    final String pCollectionA = "pCollectionA";
-
-    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
-
-    FnDataReceiver<WindowedValue<String>> consumer = mock(FnDataReceiver.class);
-    ElementCountFnDataReceiver<String> wrapperConsumer =
-        new ElementCountFnDataReceiver(consumer, pCollectionA, metricsContainerRegistry);
-    WindowedValue<String> element = WindowedValue.valueInGlobalWindow("elem");
-    int numElements = 20;
-    for (int i = 0; i < numElements; i++) {
-      wrapperConsumer.accept(element);
-    }
-    verify(consumer, times(numElements)).accept(element);
-
-    SimpleMonitoringInfoBuilder builder = new SimpleMonitoringInfoBuilder();
-    builder.setUrn(MonitoringInfoConstants.Urns.ELEMENT_COUNT);
-    builder.setLabel(MonitoringInfoConstants.Labels.PCOLLECTION, pCollectionA);
-    builder.setInt64Value(numElements);
-    MonitoringInfo expected = builder.build();
-
-    // Clear the timestamp before comparison.
-    MonitoringInfo first = metricsContainerRegistry.getMonitoringInfos().iterator().next();
-    MonitoringInfo result = SimpleMonitoringInfoBuilder.copyAndClearTimestamp(first);
-    assertEquals(expected, result);
-  }
-
-  @Test
-  public void testScopedMetricContainerInvokedUponAccept() throws Exception {
-    mockStatic(MetricsEnvironment.class, withSettings().verboseLogging());
-    final String pCollectionA = "pCollectionA";
-
-    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
-
-    FnDataReceiver<WindowedValue<String>> consumer =
-        mock(FnDataReceiver.class, withSettings().verboseLogging());
-    ElementCountFnDataReceiver<String> wrapperConsumer =
-        new ElementCountFnDataReceiver(consumer, pCollectionA, metricsContainerRegistry);
-    WindowedValue<String> element = WindowedValue.valueInGlobalWindow("elem");
-    wrapperConsumer.accept(element);
-
-    verify(consumer, times(1)).accept(element);
-
-    // Verify that static scopedMetricsContainer is called with unbound container.
-    PowerMockito.verifyStatic(MetricsEnvironment.class, times(1));
-    MetricsEnvironment.scopedMetricsContainer(metricsContainerRegistry.getUnboundContainer());
-  }
-}
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/MultiplexingFnDataReceiverTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/MultiplexingFnDataReceiverTest.java
deleted file mode 100644
index f3e21c2..0000000
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/MultiplexingFnDataReceiverTest.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.beam.fn.harness.data;
-
-import static org.hamcrest.Matchers.contains;
-import static org.hamcrest.Matchers.containsInAnyOrder;
-import static org.junit.Assert.assertThat;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import org.apache.beam.sdk.fn.data.FnDataReceiver;
-import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;
-
-/** Tests for {@link MultiplexingFnDataReceiver}. */
-@RunWith(JUnit4.class)
-public class MultiplexingFnDataReceiverTest {
-  @Rule public ExpectedException thrown = ExpectedException.none();
-
-  @Test
-  public void singleConsumer() throws Exception {
-    List<String> consumer = new ArrayList<>();
-    FnDataReceiver<String> multiplexer =
-        MultiplexingFnDataReceiver.forConsumers(
-            ImmutableList.<FnDataReceiver<String>>of(consumer::add));
-
-    multiplexer.accept("foo");
-    multiplexer.accept("bar");
-
-    assertThat(consumer, contains("foo", "bar"));
-  }
-
-  @Test
-  public void singleConsumerException() throws Exception {
-    String message = "my_exception";
-    FnDataReceiver<Integer> multiplexer =
-        MultiplexingFnDataReceiver.forConsumers(
-            ImmutableList.<FnDataReceiver<Integer>>of(
-                (Integer i) -> {
-                  if (i > 1) {
-                    throw new Exception(message);
-                  }
-                }));
-
-    multiplexer.accept(0);
-    multiplexer.accept(1);
-    thrown.expectMessage(message);
-    thrown.expect(Exception.class);
-    multiplexer.accept(2);
-  }
-
-  @Test
-  public void multipleConsumers() throws Exception {
-    List<String> consumer = new ArrayList<>();
-    Set<String> otherConsumer = new HashSet<>();
-    FnDataReceiver<String> multiplexer =
-        MultiplexingFnDataReceiver.forConsumers(
-            ImmutableList.<FnDataReceiver<String>>of(consumer::add, otherConsumer::add));
-
-    multiplexer.accept("foo");
-    multiplexer.accept("bar");
-    multiplexer.accept("foo");
-
-    assertThat(consumer, contains("foo", "bar", "foo"));
-    assertThat(otherConsumer, containsInAnyOrder("foo", "bar"));
-  }
-
-  @Test
-  public void multipleConsumersException() throws Exception {
-    String message = "my_exception";
-    List<Integer> consumer = new ArrayList<>();
-    FnDataReceiver<Integer> multiplexer =
-        MultiplexingFnDataReceiver.forConsumers(
-            ImmutableList.<FnDataReceiver<Integer>>of(
-                consumer::add,
-                (Integer i) -> {
-                  if (i > 1) {
-                    throw new Exception(message);
-                  }
-                }));
-
-    multiplexer.accept(0);
-    multiplexer.accept(1);
-    assertThat(consumer, containsInAnyOrder(0, 1));
-
-    thrown.expectMessage(message);
-    thrown.expect(Exception.class);
-    multiplexer.accept(2);
-  }
-}
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistryTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistryTest.java
index e8b377b..dac1fbe 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistryTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/PCollectionConsumerRegistryTest.java
@@ -17,18 +17,30 @@
  */
 package org.apache.beam.fn.harness.data;
 
+import static org.apache.beam.sdk.util.WindowedValue.valueInGlobalWindow;
 import static org.hamcrest.Matchers.contains;
+import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.doThrow;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.withSettings;
 import static org.powermock.api.mockito.PowerMockito.mockStatic;
 
+import org.apache.beam.fn.harness.HandlesSplits;
+import org.apache.beam.model.pipeline.v1.MetricsApi.MonitoringInfo;
 import org.apache.beam.runners.core.metrics.ExecutionStateTracker;
 import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
+import org.apache.beam.runners.core.metrics.MonitoringInfoConstants;
+import org.apache.beam.runners.core.metrics.MonitoringInfoConstants.Labels;
+import org.apache.beam.runners.core.metrics.SimpleMonitoringInfoBuilder;
 import org.apache.beam.sdk.fn.data.FnDataReceiver;
 import org.apache.beam.sdk.metrics.MetricsEnvironment;
 import org.apache.beam.sdk.util.WindowedValue;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -44,6 +56,72 @@
 
   @Rule public ExpectedException expectedException = ExpectedException.none();
 
+  @Test
+  public void singleConsumer() throws Exception {
+    final String pCollectionA = "pCollectionA";
+    final String pTransformIdA = "pTransformIdA";
+
+    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
+    PCollectionConsumerRegistry consumers =
+        new PCollectionConsumerRegistry(
+            metricsContainerRegistry, mock(ExecutionStateTracker.class));
+    FnDataReceiver<WindowedValue<String>> consumerA1 = mock(FnDataReceiver.class);
+
+    consumers.register(pCollectionA, pTransformIdA, consumerA1);
+
+    FnDataReceiver<WindowedValue<String>> wrapperConsumer =
+        (FnDataReceiver<WindowedValue<String>>)
+            (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
+
+    WindowedValue<String> element = valueInGlobalWindow("elem");
+    int numElements = 20;
+    for (int i = 0; i < numElements; i++) {
+      wrapperConsumer.accept(element);
+    }
+
+    // Check that the underlying consumer is invoked once per element.
+    verify(consumerA1, times(numElements)).accept(element);
+    assertThat(consumers.keySet(), contains(pCollectionA));
+
+    SimpleMonitoringInfoBuilder builder = new SimpleMonitoringInfoBuilder();
+    builder.setUrn(MonitoringInfoConstants.Urns.ELEMENT_COUNT);
+    builder.setLabel(MonitoringInfoConstants.Labels.PCOLLECTION, pCollectionA);
+    builder.setInt64Value(numElements);
+    MonitoringInfo expected = builder.build();
+
+    // Clear the timestamp before comparison.
+    MonitoringInfo pCollectionCount =
+        Iterables.find(
+            metricsContainerRegistry.getMonitoringInfos(),
+            monitoringInfo -> monitoringInfo.containsLabels(Labels.PCOLLECTION));
+    MonitoringInfo result = SimpleMonitoringInfoBuilder.copyAndClearTimestamp(pCollectionCount);
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void singleConsumerException() throws Exception {
+    final String pCollectionA = "pCollectionA";
+    final String pTransformId = "pTransformId";
+    final String message = "testException";
+
+    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
+    PCollectionConsumerRegistry consumers =
+        new PCollectionConsumerRegistry(
+            metricsContainerRegistry, mock(ExecutionStateTracker.class));
+    FnDataReceiver<WindowedValue<String>> consumer = mock(FnDataReceiver.class);
+
+    consumers.register(pCollectionA, pTransformId, consumer);
+
+    FnDataReceiver<WindowedValue<String>> wrapperConsumer =
+        (FnDataReceiver<WindowedValue<String>>)
+            (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
+    doThrow(new Exception(message)).when(consumer).accept(any());
+
+    expectedException.expectMessage(message);
+    expectedException.expect(Exception.class);
+    wrapperConsumer.accept(valueInGlobalWindow("elem"));
+  }
+
   /**
    * Test that the counter increments only once when multiple consumers of same pCollection read the
    * same element.
@@ -51,7 +129,54 @@
   @Test
   public void multipleConsumersSamePCollection() throws Exception {
     final String pCollectionA = "pCollectionA";
+    final String pTransformIdA = "pTransformIdA";
+    final String pTransformIdB = "pTransformIdB";
+
+    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
+    PCollectionConsumerRegistry consumers =
+        new PCollectionConsumerRegistry(
+            metricsContainerRegistry, mock(ExecutionStateTracker.class));
+    FnDataReceiver<WindowedValue<String>> consumerA1 = mock(FnDataReceiver.class);
+    FnDataReceiver<WindowedValue<String>> consumerA2 = mock(FnDataReceiver.class);
+
+    consumers.register(pCollectionA, pTransformIdA, consumerA1);
+    consumers.register(pCollectionA, pTransformIdB, consumerA2);
+
+    FnDataReceiver<WindowedValue<String>> wrapperConsumer =
+        (FnDataReceiver<WindowedValue<String>>)
+            (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
+
+    WindowedValue<String> element = valueInGlobalWindow("elem");
+    int numElements = 20;
+    for (int i = 0; i < numElements; i++) {
+      wrapperConsumer.accept(element);
+    }
+
+    // Check that the underlying consumers are each invoked per element.
+    verify(consumerA1, times(numElements)).accept(element);
+    verify(consumerA2, times(numElements)).accept(element);
+    assertThat(consumers.keySet(), contains(pCollectionA));
+
+    SimpleMonitoringInfoBuilder builder = new SimpleMonitoringInfoBuilder();
+    builder.setUrn(MonitoringInfoConstants.Urns.ELEMENT_COUNT);
+    builder.setLabel(MonitoringInfoConstants.Labels.PCOLLECTION, pCollectionA);
+    builder.setInt64Value(numElements);
+    MonitoringInfo expected = builder.build();
+
+    // Clear the timestamp before comparison.
+    MonitoringInfo pCollectionCount =
+        Iterables.find(
+            metricsContainerRegistry.getMonitoringInfos(),
+            monitoringInfo -> monitoringInfo.containsLabels(Labels.PCOLLECTION));
+    MonitoringInfo result = SimpleMonitoringInfoBuilder.copyAndClearTimestamp(pCollectionCount);
+    assertEquals(expected, result);
+  }
+
+  @Test
+  public void multipleConsumersSamePCollectionException() throws Exception {
+    final String pCollectionA = "pCollectionA";
     final String pTransformId = "pTransformId";
+    final String message = "testException";
 
     MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
     PCollectionConsumerRegistry consumers =
@@ -66,17 +191,11 @@
     FnDataReceiver<WindowedValue<String>> wrapperConsumer =
         (FnDataReceiver<WindowedValue<String>>)
             (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
+    doThrow(new Exception(message)).when(consumerA2).accept(any());
 
-    WindowedValue<String> element = WindowedValue.valueInGlobalWindow("elem");
-    int numElements = 20;
-    for (int i = 0; i < numElements; i++) {
-      wrapperConsumer.accept(element);
-    }
-
-    // Check that the underlying consumers are each invoked per element.
-    verify(consumerA1, times(numElements)).accept(element);
-    verify(consumerA2, times(numElements)).accept(element);
-    assertThat(consumers.keySet(), contains(pCollectionA));
+    expectedException.expectMessage(message);
+    expectedException.expect(Exception.class);
+    wrapperConsumer.accept(valueInGlobalWindow("elem"));
   }
 
   @Test
@@ -118,7 +237,7 @@
         (FnDataReceiver<WindowedValue<String>>)
             (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
 
-    WindowedValue<String> element = WindowedValue.valueInGlobalWindow("elem");
+    WindowedValue<String> element = valueInGlobalWindow("elem");
     wrapperConsumer.accept(element);
 
     // Verify that static scopedMetricsContainer is called with pTransformA's container.
@@ -129,4 +248,61 @@
     PowerMockito.verifyStatic(MetricsEnvironment.class, times(1));
     MetricsEnvironment.scopedMetricsContainer(metricsContainerRegistry.getContainer("pTransformB"));
   }
+
+  @Test
+  public void testScopedMetricContainerInvokedUponAccept() throws Exception {
+    mockStatic(MetricsEnvironment.class, withSettings().verboseLogging());
+    final String pCollectionA = "pCollectionA";
+    final String pTransformIdA = "pTransformIdA";
+
+    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
+    PCollectionConsumerRegistry consumers =
+        new PCollectionConsumerRegistry(
+            metricsContainerRegistry, mock(ExecutionStateTracker.class));
+    FnDataReceiver<WindowedValue<String>> consumer =
+        mock(FnDataReceiver.class, withSettings().verboseLogging());
+
+    consumers.register(pCollectionA, pTransformIdA, consumer);
+
+    FnDataReceiver<WindowedValue<String>> wrapperConsumer =
+        (FnDataReceiver<WindowedValue<String>>)
+            (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
+
+    WindowedValue<String> element = valueInGlobalWindow("elem");
+    wrapperConsumer.accept(element);
+
+    verify(consumer, times(1)).accept(element);
+
+    // Verify that static scopedMetricsContainer is called with unbound container.
+    PowerMockito.verifyStatic(MetricsEnvironment.class, times(1));
+    MetricsEnvironment.scopedMetricsContainer(metricsContainerRegistry.getUnboundContainer());
+  }
+
+  @Test
+  public void testHandlesSplitsPassedToOriginalConsumer() throws Exception {
+    final String pCollectionA = "pCollectionA";
+    final String pTransformIdA = "pTransformIdA";
+
+    MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
+    PCollectionConsumerRegistry consumers =
+        new PCollectionConsumerRegistry(
+            metricsContainerRegistry, mock(ExecutionStateTracker.class));
+    SplittingReceiver consumerA1 = mock(SplittingReceiver.class);
+
+    consumers.register(pCollectionA, pTransformIdA, consumerA1);
+
+    FnDataReceiver<WindowedValue<String>> wrapperConsumer =
+        (FnDataReceiver<WindowedValue<String>>)
+            (FnDataReceiver) consumers.getMultiplexingConsumer(pCollectionA);
+
+    assertTrue(wrapperConsumer instanceof HandlesSplits);
+
+    ((HandlesSplits) wrapperConsumer).getProgress();
+    verify(consumerA1).getProgress();
+
+    ((HandlesSplits) wrapperConsumer).trySplit(0.3);
+    verify(consumerA1).trySplit(0.3);
+  }
+
+  private abstract static class SplittingReceiver<T> implements FnDataReceiver<T>, HandlesSplits {}
 }
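
The new PCollectionConsumerRegistry tests above cover three behaviors of the wrapper returned by getMultiplexingConsumer: every accepted element increments the ELEMENT_COUNT monitoring info exactly once regardless of how many consumers are registered, exceptions thrown by any registered consumer propagate to the caller, and split/progress requests are forwarded when the single registered consumer implements HandlesSplits. A minimal sketch of that counting-and-forwarding idea, with hypothetical names that are not the harness implementation:

```java
// Hypothetical sketch in the spirit of the tests above; not the Beam harness code.
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

interface Receiver<T> {
  void accept(T value) throws Exception;
}

final class CountingMultiplexer<T> implements Receiver<T> {
  private final List<Receiver<T>> consumers;
  private final AtomicLong elementCount = new AtomicLong();

  CountingMultiplexer(List<Receiver<T>> consumers) {
    this.consumers = consumers;
  }

  @Override
  public void accept(T value) throws Exception {
    // The element count is incremented once per element, no matter how many
    // downstream consumers receive it.
    elementCount.incrementAndGet();
    for (Receiver<T> consumer : consumers) {
      consumer.accept(value); // Exceptions propagate to the caller, as asserted above.
    }
  }

  long getElementCount() {
    return elementCount.get();
  }
}
```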
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClientTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClientTest.java
index 8bcacfa..094d9f7 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClientTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/data/QueueingBeamFnDataClientTest.java
@@ -47,13 +47,13 @@
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
 import org.apache.beam.sdk.util.WindowedValue;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClientTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClientTest.java
index e3a4266..dc49275 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClientTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/logging/BeamFnLoggingClientTest.java
@@ -37,14 +37,14 @@
 import org.apache.beam.model.pipeline.v1.Endpoints;
 import org.apache.beam.sdk.fn.test.TestStreams;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.Timestamp;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.Timestamp;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BagUserStateTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BagUserStateTest.java
index 5b01c0f..dbf9885 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BagUserStateTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BagUserStateTest.java
@@ -25,7 +25,7 @@
 import java.io.IOException;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateKey;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.junit.Rule;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java
index e1feac1..e8f1780 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/BeamFnStateGrpcClientCacheTest.java
@@ -36,14 +36,14 @@
 import org.apache.beam.sdk.fn.IdGenerators;
 import org.apache.beam.sdk.fn.stream.OutboundObserverFactory;
 import org.apache.beam.sdk.fn.test.TestStreams;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ManagedChannel;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessChannelBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.inprocess.InProcessServerBuilder;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ManagedChannel;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Status;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.StatusRuntimeException;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessChannelBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.inprocess.InProcessServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.util.concurrent.Uninterruptibles;
 import org.junit.After;
 import org.junit.Before;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/FakeBeamFnStateClient.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/FakeBeamFnStateClient.java
index 7762e66..e634652 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/FakeBeamFnStateClient.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/FakeBeamFnStateClient.java
@@ -32,7 +32,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest.RequestCase;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 
 /** A fake implementation of a {@link BeamFnStateClient} to aid with testing. */
 public class FakeBeamFnStateClient implements BeamFnStateClient {
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/MultimapSideInputTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/MultimapSideInputTest.java
index 9705267..635c111 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/MultimapSideInputTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/MultimapSideInputTest.java
@@ -22,7 +22,7 @@
 import java.io.IOException;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateKey;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.junit.Test;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateFetchingIteratorsTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateFetchingIteratorsTest.java
index 630627d..d184ca2 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateFetchingIteratorsTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/state/StateFetchingIteratorsTest.java
@@ -24,7 +24,7 @@
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateGetResponse;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateRequest;
 import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateResponse;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterators;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactoriesTest.java b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactoriesTest.java
index d8f5872..76294ee 100644
--- a/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactoriesTest.java
+++ b/sdks/java/harness/src/test/java/org/apache/beam/fn/harness/stream/HarnessStreamObserverFactoriesTest.java
@@ -25,8 +25,8 @@
 import org.apache.beam.sdk.fn.stream.DirectStreamObserver;
 import org.apache.beam.sdk.fn.stream.ForwardingClientResponseObserver;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.CallStreamObserver;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.CallStreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
diff --git a/sdks/java/io/google-cloud-platform/build.gradle b/sdks/java/io/google-cloud-platform/build.gradle
index 0c1befd..559488f 100644
--- a/sdks/java/io/google-cloud-platform/build.gradle
+++ b/sdks/java/io/google-cloud-platform/build.gradle
@@ -33,6 +33,7 @@
   compile project(":sdks:java:extensions:protobuf")
   compile library.java.avro
   compile library.java.bigdataoss_util
+  compile library.java.gax
   compile library.java.gax_grpc
   compile library.java.google_api_client
   compile library.java.google_api_services_bigquery
@@ -50,6 +51,7 @@
   compile library.java.grpc_all
   compile library.java.grpc_auth
   compile library.java.grpc_core
+  compile library.java.grpc_context
   compile library.java.grpc_netty
   compile library.java.grpc_stub
   compile library.java.grpc_google_cloud_pubsub_v1
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java
index f097e47..d477b08 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BatchLoads.java
@@ -136,6 +136,7 @@
   private final Coder<ElementT> elementCoder;
   private final RowWriterFactory<ElementT, DestinationT> rowWriterFactory;
   private String kmsKey;
+  private boolean clusteringEnabled;
 
   // The maximum number of times to retry failed load or copy jobs.
   private int maxRetryJobs = DEFAULT_MAX_RETRY_JOBS;
@@ -151,7 +152,8 @@
       boolean ignoreUnknownValues,
       Coder<ElementT> elementCoder,
       RowWriterFactory<ElementT, DestinationT> rowWriterFactory,
-      @Nullable String kmsKey) {
+      @Nullable String kmsKey,
+      boolean clusteringEnabled) {
     bigQueryServices = new BigQueryServicesImpl();
     this.writeDisposition = writeDisposition;
     this.createDisposition = createDisposition;
@@ -170,6 +172,7 @@
     this.elementCoder = elementCoder;
     this.kmsKey = kmsKey;
     this.rowWriterFactory = rowWriterFactory;
+    this.clusteringEnabled = clusteringEnabled;
     schemaUpdateOptions = Collections.emptySet();
   }
 
@@ -319,6 +322,9 @@
                     .withOutputTags(multiPartitionsTag, TupleTagList.of(singlePartitionTag)));
     PCollection<KV<TableDestination, String>> tempTables =
         writeTempTables(partitions.get(multiPartitionsTag), loadJobIdPrefixView);
+
+    Coder<TableDestination> tableDestinationCoder =
+        clusteringEnabled ? TableDestinationCoderV3.of() : TableDestinationCoderV2.of();
     tempTables
         // Now that the load job has happened, we want the rename to happen immediately.
         .apply(
@@ -326,8 +332,7 @@
                 .triggering(Repeatedly.forever(AfterPane.elementCountAtLeast(1))))
         .apply(WithKeys.of((Void) null))
         .setCoder(
-            KvCoder.of(
-                VoidCoder.of(), KvCoder.of(TableDestinationCoderV2.of(), StringUtf8Coder.of())))
+            KvCoder.of(VoidCoder.of(), KvCoder.of(tableDestinationCoder, StringUtf8Coder.of())))
         .apply(GroupByKey.create())
         .apply(Values.create())
         .apply(
@@ -391,9 +396,11 @@
     PCollection<KV<TableDestination, String>> tempTables =
         writeTempTables(partitions.get(multiPartitionsTag), loadJobIdPrefixView);
 
+    Coder<TableDestination> tableDestinationCoder =
+        clusteringEnabled ? TableDestinationCoderV3.of() : TableDestinationCoderV2.of();
     tempTables
         .apply("ReifyRenameInput", new ReifyAsIterable<>())
-        .setCoder(IterableCoder.of(KvCoder.of(TableDestinationCoderV2.of(), StringUtf8Coder.of())))
+        .setCoder(IterableCoder.of(KvCoder.of(tableDestinationCoder, StringUtf8Coder.of())))
         .apply(
             "WriteRenameUntriggered",
             ParDo.of(
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
index 0462269..1fa8408 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIO.java
@@ -2645,7 +2645,8 @@
                 getIgnoreUnknownValues(),
                 elementCoder,
                 rowWriterFactory,
-                getKmsKey());
+                getKmsKey(),
+                getClustering() != null);
         batchLoads.setTestServices(getBigQueryServices());
         if (getSchemaUpdateOptions() != null) {
           batchLoads.setSchemaUpdateOptions(getSchemaUpdateOptions());
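
Together, the BatchLoads and BigQueryIO changes above switch the temp-table rename path to TableDestinationCoderV3 whenever a clustering spec is configured, so clustering metadata survives the GroupByKey that precedes the rename. A hedged sketch of a write that would take this path, assuming the withTimePartitioning/withClustering setters available on this branch and placeholder project, table, and field names:

```java
// Hedged sketch: a load-job write with a non-null clustering spec, which is the
// condition that sets clusteringEnabled above. Schema/dispositions are omitted.
import com.google.api.services.bigquery.model.Clustering;
import com.google.api.services.bigquery.model.TableRow;
import com.google.api.services.bigquery.model.TimePartitioning;
import java.util.Arrays;
import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO;
import org.apache.beam.sdk.values.PCollection;

class ClusteredWriteExample {
  static void write(PCollection<TableRow> rows) {
    rows.apply(
        BigQueryIO.writeTableRows()
            .to("my-project:my_dataset.my_table") // hypothetical table spec
            .withTimePartitioning(new TimePartitioning().setType("DAY").setField("event_ts"))
            // A non-null Clustering is what flips clusteringEnabled to true.
            .withClustering(new Clustering().setFields(Arrays.asList("country", "user_id"))));
  }
}
```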
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOExternalTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOExternalTest.java
index 50f7528..abecf89 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOExternalTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubIOExternalTest.java
@@ -36,8 +36,8 @@
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
 import org.hamcrest.Matchers;
 import org.junit.Test;
diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
index a7b7f8a..500673c 100644
--- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
+++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
@@ -39,8 +39,8 @@
 import org.apache.beam.sdk.transforms.Impulse;
 import org.apache.beam.sdk.transforms.WithKeys;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf.ByteString;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver;
+import org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf.ByteString;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.stub.StreamObserver;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
diff --git a/sdks/java/testing/expansion-service/src/test/java/org/apache/beam/sdk/expansion/TestExpansionService.java b/sdks/java/testing/expansion-service/src/test/java/org/apache/beam/sdk/expansion/TestExpansionService.java
index 68c0e71..d1a832f 100644
--- a/sdks/java/testing/expansion-service/src/test/java/org/apache/beam/sdk/expansion/TestExpansionService.java
+++ b/sdks/java/testing/expansion-service/src/test/java/org/apache/beam/sdk/expansion/TestExpansionService.java
@@ -31,8 +31,8 @@
 import org.apache.beam.sdk.transforms.Values;
 import org.apache.beam.sdk.values.PBegin;
 import org.apache.beam.sdk.values.PCollection;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Server;
-import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ServerBuilder;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.Server;
+import org.apache.beam.vendor.grpc.v1p26p0.io.grpc.ServerBuilder;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 
 /**
@@ -57,11 +57,7 @@
       Schema schema = new Schema.Parser().parse(rawSchema);
       return ImmutableMap.of(
           TEST_COUNT_URN, spec -> Count.perElement(),
-          TEST_FILTER_URN,
-              spec ->
-                  Filter.lessThanEq(
-                      // TODO(BEAM-6587): Use strings directly rather than longs.
-                      (long) spec.getPayload().toStringUtf8().charAt(0)),
+          TEST_FILTER_URN, spec -> Filter.lessThanEq(spec.getPayload().toStringUtf8()),
           TEST_PARQUET_READ_URN,
               spec ->
                   new PTransform<PBegin, PCollection<GenericRecord>>() {
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkConfiguration.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkConfiguration.java
index 2b91e07..2ff5744 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkConfiguration.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkConfiguration.java
@@ -205,14 +205,7 @@
       debug = options.getDebug();
     }
     if (options.getQuery() != null) {
-      try {
-        query = NexmarkQueryName.valueOf(options.getQuery());
-      } catch (IllegalArgumentException exc) {
-        query = NexmarkQueryName.fromNumber(Integer.parseInt(options.getQuery()));
-      }
-      if (query == null) {
-        throw new IllegalArgumentException("Unknown query: " + query);
-      }
+      query = NexmarkQueryName.fromId(options.getQuery());
     }
     if (options.getSourceType() != null) {
       sourceType = options.getSourceType();
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java
index 12109ec..794d154 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkLauncher.java
@@ -27,8 +27,10 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import javax.annotation.Nullable;
 import org.apache.beam.sdk.Pipeline;
@@ -93,10 +95,12 @@
 import org.apache.beam.sdk.values.TimestampedValue;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.TupleTagList;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Splitter;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Lists;
+import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Maps;
 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
 import org.apache.kafka.common.serialization.ByteArraySerializer;
 import org.apache.kafka.common.serialization.LongDeserializer;
@@ -159,10 +163,14 @@
   @Nullable private String pubsubSubscription;
 
   @Nullable private PubsubHelper pubsubHelper;
+  private final Map<NexmarkQueryName, NexmarkQuery> queries;
+  private final Map<NexmarkQueryName, NexmarkQueryModel> models;
 
   public NexmarkLauncher(OptionT options, NexmarkConfiguration configuration) {
     this.options = options;
     this.configuration = configuration;
+    queries = createQueries();
+    models = createQueryModels();
   }
 
   /** Is this query running in streaming mode? */
@@ -1193,12 +1201,10 @@
   }
 
   private NexmarkQueryModel getNexmarkQueryModel() {
-    Map<NexmarkQueryName, NexmarkQueryModel> models = createQueryModels();
     return models.get(configuration.query);
   }
 
   private NexmarkQuery<?> getNexmarkQuery() {
-    Map<NexmarkQueryName, NexmarkQuery> queries = createQueries();
     return queries.get(configuration.query);
   }
 
@@ -1228,7 +1234,22 @@
   }
 
   private Map<NexmarkQueryName, NexmarkQuery> createQueries() {
-    return isSql() ? createSqlQueries() : createJavaQueries();
+    Map<NexmarkQueryName, NexmarkQuery> defaultQueries =
+        isSql() ? createSqlQueries() : createJavaQueries();
+    Set<NexmarkQueryName> skippableQueries = getSkippableQueries();
+    return ImmutableMap.copyOf(
+        Maps.filterKeys(defaultQueries, query -> !skippableQueries.contains(query)));
+  }
+
+  private Set<NexmarkQueryName> getSkippableQueries() {
+    Set<NexmarkQueryName> skipQueries = new LinkedHashSet<>();
+    if (options.getSkipQueries() != null && !options.getSkipQueries().trim().equals("")) {
+      Iterable<String> queries = Splitter.on(',').split(options.getSkipQueries());
+      for (String query : queries) {
+        skipQueries.add(NexmarkQueryName.fromId(query.trim()));
+      }
+    }
+    return skipQueries;
   }
 
   private Map<NexmarkQueryName, NexmarkQuery> createSqlQueries() {
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java
index 681316a..1245c6f 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkOptions.java
@@ -77,6 +77,12 @@
 
   void setQuery(String query);
 
+  @Description("Skip the execution of the given queries (comma separated)")
+  @Nullable
+  String getSkipQueries();
+
+  void setSkipQueries(String queries);
+
   @Description("Prefix for output files if using text output for results or running Query 10.")
   @Nullable
   String getOutputPath();
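
The new --skipQueries option is a comma-separated list of query ids (names or legacy numbers); getSkippableQueries in NexmarkLauncher trims each entry and resolves it with NexmarkQueryName.fromId before filtering the query map. A standalone sketch of that parsing step, using plain String.split instead of the vendored Guava Splitter:

```java
// Standalone sketch of the skip-list parsing added in getSkippableQueries above.
import java.util.LinkedHashSet;
import java.util.Set;

final class SkipListParser {
  static Set<String> parse(String skipQueries) {
    Set<String> skip = new LinkedHashSet<>();
    if (skipQueries != null && !skipQueries.trim().isEmpty()) {
      for (String id : skipQueries.split(",")) {
        skip.add(id.trim()); // each id may be a query name or a legacy query number
      }
    }
    return skip;
  }
}
```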
diff --git a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkQueryName.java b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkQueryName.java
index 7159c86..27a488d 100644
--- a/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkQueryName.java
+++ b/sdks/java/testing/nexmark/src/main/java/org/apache/beam/sdk/nexmark/NexmarkQueryName.java
@@ -81,4 +81,21 @@
     }
     return null;
   }
+
+  /**
+   * @return The {@link NexmarkQueryName} for the given id. The id can be the query number (for
+   *     backwards compatibility) or the query name.
+   */
+  public static NexmarkQueryName fromId(String id) {
+    NexmarkQueryName query;
+    try {
+      query = NexmarkQueryName.valueOf(id);
+    } catch (IllegalArgumentException exc) {
+      query = NexmarkQueryName.fromNumber(Integer.parseInt(id));
+    }
+    if (query == null) {
+      throw new IllegalArgumentException("Unknown query: " + id);
+    }
+    return query;
+  }
 }
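
fromId first tries the enum name and only then falls back to a numeric id, so both spellings of a query keep working on the command line. A generic sketch of the same name-or-number lookup pattern, with a hypothetical enum (the real numeric fallback goes through fromNumber rather than the ordinal used here):

```java
// Illustrative only; the enum and its numbering are hypothetical.
enum Fruit {
  APPLE, BANANA, CHERRY;

  static Fruit fromId(String id) {
    try {
      return Fruit.valueOf(id); // try the symbolic name first
    } catch (IllegalArgumentException exc) {
      return Fruit.values()[Integer.parseInt(id)]; // fall back to a numeric id
    }
  }
}
```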
diff --git a/sdks/python/apache_beam/coders/coder_impl.py b/sdks/python/apache_beam/coders/coder_impl.py
index fb76875..8f78b95 100644
--- a/sdks/python/apache_beam/coders/coder_impl.py
+++ b/sdks/python/apache_beam/coders/coder_impl.py
@@ -909,7 +909,8 @@
           buffer = create_OutputStream()
           if (self._write_state is not None
               and out.size() - start_size > self._write_state_threshold):
-            tail = (value_iter[index + 1:] if isinstance(value, (list, tuple))
+            tail = (value_iter[index + 1:]
+                    if isinstance(value_iter, (list, tuple))
                     else value_iter)
             state_token = self._write_state(tail, self._elem_coder)
             out.write_var_int64(-1)
diff --git a/sdks/python/apache_beam/coders/row_coder.py b/sdks/python/apache_beam/coders/row_coder.py
index 73886c1..bc5fd69 100644
--- a/sdks/python/apache_beam/coders/row_coder.py
+++ b/sdks/python/apache_beam/coders/row_coder.py
@@ -70,7 +70,7 @@
   def as_cloud_object(self, coders_context=None):
     raise NotImplementedError("as_cloud_object not supported for RowCoder")
 
-  __hash__ = None
+  __hash__ = None  # type: ignore[assignment]
 
   def __eq__(self, other):
     return type(self) == type(other) and self.schema == other.schema
diff --git a/sdks/python/apache_beam/examples/complete/game/game_stats.py b/sdks/python/apache_beam/examples/complete/game/game_stats.py
index 13be705..f0ab8fc 100644
--- a/sdks/python/apache_beam/examples/complete/game/game_stats.py
+++ b/sdks/python/apache_beam/examples/complete/game/game_stats.py
@@ -186,11 +186,6 @@
     return ', '.join(
         '%s:%s' % (col, self.schema[col]) for col in self.schema)
 
-  def get_schema(self):
-    """Build the output table schema."""
-    return ', '.join(
-        '%s:%s' % (col, self.schema[col]) for col in self.schema)
-
   def expand(self, pcoll):
     return (
         pcoll
diff --git a/sdks/python/apache_beam/examples/snippets/transforms/aggregation/min.py b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/min.py
new file mode 100644
index 0000000..d004cf2
--- /dev/null
+++ b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/min.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+
+def min_globally(test=None):
+  # [START min_globally]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    min_element = (
+        pipeline
+        | 'Create numbers' >> beam.Create([3, 4, 1, 2])
+        | 'Get min value' >> beam.CombineGlobally(
+            lambda elements: min(elements or [-1]))
+        | beam.Map(print)
+    )
+    # [END min_globally]
+    if test:
+      test(min_element)
+
+
+def min_per_key(test=None):
+  # [START min_per_key]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    elements_with_min_value_per_key = (
+        pipeline
+        | 'Create produce' >> beam.Create([
+            ('🥕', 3),
+            ('🥕', 2),
+            ('🍆', 1),
+            ('🍅', 4),
+            ('🍅', 5),
+            ('🍅', 3),
+        ])
+        | 'Get min value per key' >> beam.CombinePerKey(min)
+        | beam.Map(print)
+    )
+    # [END min_per_key]
+    if test:
+      test(elements_with_min_value_per_key)
diff --git a/sdks/python/apache_beam/examples/snippets/transforms/aggregation/min_test.py b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/min_test.py
new file mode 100644
index 0000000..321fd12
--- /dev/null
+++ b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/min_test.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import unittest
+
+import mock
+
+from apache_beam.examples.snippets.util import assert_matches_stdout
+from apache_beam.testing.test_pipeline import TestPipeline
+
+from . import min as beam_min
+
+
+def check_min_element(actual):
+  expected = '''[START min_element]
+1
+[END min_element]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_elements_with_min_value_per_key(actual):
+  expected = '''[START elements_with_min_value_per_key]
+('🥕', 2)
+('🍆', 1)
+('🍅', 3)
+[END elements_with_min_value_per_key]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+@mock.patch('apache_beam.Pipeline', TestPipeline)
+@mock.patch(
+    'apache_beam.examples.snippets.transforms.aggregation.min.print', str)
+class MinTest(unittest.TestCase):
+  def test_min_globally(self):
+    beam_min.min_globally(check_min_element)
+
+  def test_min_per_key(self):
+    beam_min.min_per_key(check_elements_with_min_value_per_key)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/sdks/python/apache_beam/examples/snippets/transforms/aggregation/sum.py b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/sum.py
new file mode 100644
index 0000000..26094a5
--- /dev/null
+++ b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/sum.py
@@ -0,0 +1,59 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+
+def sum_globally(test=None):
+  # [START sum_globally]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    total = (
+        pipeline
+        | 'Create numbers' >> beam.Create([3, 4, 1, 2])
+        | 'Sum values' >> beam.CombineGlobally(sum)
+        | beam.Map(print)
+    )
+    # [END sum_globally]
+    if test:
+      test(total)
+
+
+def sum_per_key(test=None):
+  # [START sum_per_key]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    totals_per_key = (
+        pipeline
+        | 'Create produce' >> beam.Create([
+            ('🥕', 3),
+            ('🥕', 2),
+            ('🍆', 1),
+            ('🍅', 4),
+            ('🍅', 5),
+            ('🍅', 3),
+        ])
+        | 'Sum values per key' >> beam.CombinePerKey(sum)
+        | beam.Map(print)
+    )
+    # [END sum_per_key]
+    if test:
+      test(totals_per_key)
diff --git a/sdks/python/apache_beam/examples/snippets/transforms/aggregation/sum_test.py b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/sum_test.py
new file mode 100644
index 0000000..dd59770
--- /dev/null
+++ b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/sum_test.py
@@ -0,0 +1,60 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import unittest
+
+import mock
+
+from apache_beam.examples.snippets.util import assert_matches_stdout
+from apache_beam.testing.test_pipeline import TestPipeline
+
+from . import sum as beam_sum
+
+
+def check_total(actual):
+  expected = '''[START total]
+10
+[END total]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_totals_per_key(actual):
+  expected = '''[START totals_per_key]
+('🥕', 5)
+('🍆', 1)
+('🍅', 12)
+[END totals_per_key]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+@mock.patch('apache_beam.Pipeline', TestPipeline)
+@mock.patch(
+    'apache_beam.examples.snippets.transforms.aggregation.sum.print', str)
+class SumTest(unittest.TestCase):
+  def test_sum_globally(self):
+    beam_sum.sum_globally(check_total)
+
+  def test_sum_per_key(self):
+    beam_sum.sum_per_key(check_totals_per_key)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/sdks/python/apache_beam/examples/snippets/transforms/aggregation/top.py b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/top.py
new file mode 100644
index 0000000..0e496f7
--- /dev/null
+++ b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/top.py
@@ -0,0 +1,153 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+
+def top_largest(test=None):
+  # [START top_largest]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    largest_elements = (
+        pipeline
+        | 'Create numbers' >> beam.Create([3, 4, 1, 2])
+        | 'Largest N values' >> beam.combiners.Top.Largest(2)
+        | beam.Map(print)
+    )
+    # [END top_largest]
+    if test:
+      test(largest_elements)
+
+
+def top_largest_per_key(test=None):
+  # [START top_largest_per_key]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    largest_elements_per_key = (
+        pipeline
+        | 'Create produce' >> beam.Create([
+            ('🥕', 3),
+            ('🥕', 2),
+            ('🍆', 1),
+            ('🍅', 4),
+            ('🍅', 5),
+            ('🍅', 3),
+        ])
+        | 'Largest N values per key' >> beam.combiners.Top.LargestPerKey(2)
+        | beam.Map(print)
+    )
+    # [END top_largest_per_key]
+    if test:
+      test(largest_elements_per_key)
+
+
+def top_smallest(test=None):
+  # [START top_smallest]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    smallest_elements = (
+        pipeline
+        | 'Create numbers' >> beam.Create([3, 4, 1, 2])
+        | 'Smallest N values' >> beam.combiners.Top.Smallest(2)
+        | beam.Map(print)
+    )
+    # [END top_smallest]
+    if test:
+      test(smallest_elements)
+
+
+def top_smallest_per_key(test=None):
+  # [START top_smallest_per_key]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    smallest_elements_per_key = (
+        pipeline
+        | 'Create produce' >> beam.Create([
+            ('🥕', 3),
+            ('🥕', 2),
+            ('🍆', 1),
+            ('🍅', 4),
+            ('🍅', 5),
+            ('🍅', 3),
+        ])
+        | 'Smallest N values per key' >> beam.combiners.Top.SmallestPerKey(2)
+        | beam.Map(print)
+    )
+    # [END top_smallest_per_key]
+    if test:
+      test(smallest_elements_per_key)
+
+
+def top_of(test=None):
+  # [START top_of]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    shortest_elements = (
+        pipeline
+        | 'Create produce names' >> beam.Create([
+            '🍓 Strawberry',
+            '🥕 Carrot',
+            '🍏 Green apple',
+            '🍆 Eggplant',
+            '🌽 Corn',
+        ])
+        | 'Shortest names' >> beam.combiners.Top.Of(
+            2,             # number of elements
+            key=len,       # optional, defaults to the element itself
+            reverse=True,  # optional, defaults to False (largest/descending)
+        )
+        | beam.Map(print)
+    )
+    # [END top_of]
+    if test:
+      test(shortest_elements)
+
+
+def top_per_key(test=None):
+  # [START top_per_key]
+  import apache_beam as beam
+
+  with beam.Pipeline() as pipeline:
+    shortest_elements_per_key = (
+        pipeline
+        | 'Create produce names' >> beam.Create([
+            ('spring', '🥕 Carrot'),
+            ('spring', '🍓 Strawberry'),
+            ('summer', '🥕 Carrot'),
+            ('summer', '🌽 Corn'),
+            ('summer', '🍏 Green apple'),
+            ('fall', '🥕 Carrot'),
+            ('fall', '🍏 Green apple'),
+            ('winter', '🍆 Eggplant'),
+        ])
+        | 'Shortest names per key' >> beam.combiners.Top.PerKey(
+            2,             # number of elements
+            key=len,       # optional, defaults to the value itself
+            reverse=True,  # optional, defaults to False (largest/descending)
+        )
+        | beam.Map(print)
+    )
+    # [END top_per_key]
+    if test:
+      test(shortest_elements_per_key)
diff --git a/sdks/python/apache_beam/examples/snippets/transforms/aggregation/top_test.py b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/top_test.py
new file mode 100644
index 0000000..0489f6d
--- /dev/null
+++ b/sdks/python/apache_beam/examples/snippets/transforms/aggregation/top_test.py
@@ -0,0 +1,111 @@
+# coding=utf-8
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import absolute_import
+from __future__ import print_function
+
+import sys
+import unittest
+
+import mock
+
+from apache_beam.examples.snippets.util import assert_matches_stdout
+from apache_beam.testing.test_pipeline import TestPipeline
+
+from . import top
+
+
+def check_largest_elements(actual):
+  expected = '''[START largest_elements]
+[4, 3]
+[END largest_elements]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_largest_elements_per_key(actual):
+  expected = '''[START largest_elements_per_key]
+('🥕', [3, 2])
+('🍆', [1])
+('🍅', [5, 4])
+[END largest_elements_per_key]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_smallest_elements(actual):
+  expected = '''[START smallest_elements]
+[1, 2]
+[END smallest_elements]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_smallest_elements_per_key(actual):
+  expected = '''[START smallest_elements_per_key]
+('🥕', [2, 3])
+('🍆', [1])
+('🍅', [3, 4])
+[END smallest_elements_per_key]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_shortest_elements(actual):
+  expected = '''[START shortest_elements]
+['🌽 Corn', '🥕 Carrot']
+[END shortest_elements]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+def check_shortest_elements_per_key(actual):
+  expected = '''[START shortest_elements_per_key]
+('spring', ['🥕 Carrot', '🍓 Strawberry'])
+('summer', ['🌽 Corn', '🥕 Carrot'])
+('fall', ['🥕 Carrot', '🍏 Green apple'])
+('winter', ['🍆 Eggplant'])
+[END shortest_elements_per_key]'''.splitlines()[1:-1]
+  assert_matches_stdout(actual, expected)
+
+
+@mock.patch('apache_beam.Pipeline', TestPipeline)
+@mock.patch(
+    'apache_beam.examples.snippets.transforms.aggregation.top.print', str)
+class TopTest(unittest.TestCase):
+  def test_top_largest(self):
+    top.top_largest(check_largest_elements)
+
+  def test_top_largest_per_key(self):
+    top.top_largest_per_key(check_largest_elements_per_key)
+
+  def test_top_smallest(self):
+    top.top_smallest(check_smallest_elements)
+
+  def test_top_smallest_per_key(self):
+    top.top_smallest_per_key(check_smallest_elements_per_key)
+
+  def test_top_of(self):
+    top.top_of(check_shortest_elements)
+
+  # TODO: Remove this after Python 2 deprecation.
+  # https://issues.apache.org/jira/browse/BEAM-8124
+  @unittest.skipIf(sys.version_info[0] == 2,
+                   'nosetests in Python 2 uses ascii instead of utf-8 in '
+                   'the Top.PerKey transform and causes this to fail')
+  def test_top_per_key(self):
+    top.top_per_key(check_shortest_elements_per_key)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/sdks/python/apache_beam/examples/wordcount_xlang.py b/sdks/python/apache_beam/examples/wordcount_xlang.py
index d8c8745..12e2b6b 100644
--- a/sdks/python/apache_beam/examples/wordcount_xlang.py
+++ b/sdks/python/apache_beam/examples/wordcount_xlang.py
@@ -55,13 +55,7 @@
       The processed element.
     """
     text_line = element.strip()
-    # Using bytes type to match input and output coders between Python
-    # and Java SDKs. Any element type can be used for crossing the language
-    # boundary if a matching coder implementation exists in both SDKs.
-    # TODO(BEAM-6587): Use strings once they're understood by the
-    # Java SDK.
-    words = [bytes(x) for x in re.findall(r'[\w\']+', text_line)]
-    return words
+    return re.findall(r'[\w\']+', text_line)
 
 
 def run(p, input_file, output_file):
diff --git a/sdks/python/apache_beam/io/fileio_test.py b/sdks/python/apache_beam/io/fileio_test.py
index 946fac0..f8803d1 100644
--- a/sdks/python/apache_beam/io/fileio_test.py
+++ b/sdks/python/apache_beam/io/fileio_test.py
@@ -95,7 +95,7 @@
 
       assert_that(files_pc, equal_to(files))
 
-  def test_match_files_one_directory_failure(self):
+  def test_match_files_one_directory_failure1(self):
     directories = [
         '%s%s' % (self._new_tempdir(), os.sep),
         '%s%s' % (self._new_tempdir(), os.sep)]
@@ -114,7 +114,7 @@
 
         assert_that(files_pc, equal_to(files))
 
-  def test_match_files_one_directory_failure(self):
+  def test_match_files_one_directory_failure2(self):
     directories = [
         '%s%s' % (self._new_tempdir(), os.sep),
         '%s%s' % (self._new_tempdir(), os.sep)]
diff --git a/sdks/python/apache_beam/io/filesystems_test.py b/sdks/python/apache_beam/io/filesystems_test.py
index d2133b0..298644f 100644
--- a/sdks/python/apache_beam/io/filesystems_test.py
+++ b/sdks/python/apache_beam/io/filesystems_test.py
@@ -26,6 +26,7 @@
 import logging
 import os
 import shutil
+import sys
 import tempfile
 import unittest
 
@@ -49,6 +50,12 @@
 
 class FileSystemsTest(unittest.TestCase):
 
+  @classmethod
+  def setUpClass(cls):
+    # Method has been renamed in Python 3
+    if sys.version_info[0] < 3:
+      cls.assertCountEqual = cls.assertItemsEqual
+
   def setUp(self):
     self.tmpdir = tempfile.mkdtemp()
 
@@ -132,7 +139,7 @@
       FileSystems.match([None])
     self.assertEqual(list(error.exception.exception_details), [None])
 
-  def test_match_directory(self):
+  def test_match_directory_with_files(self):
     path1 = os.path.join(self.tmpdir, 'f1')
     path2 = os.path.join(self.tmpdir, 'f2')
     open(path1, 'a').close()
@@ -142,7 +149,7 @@
     path = os.path.join(self.tmpdir, '*')
     result = FileSystems.match([path])[0]
     files = [f.path for f in result.metadata_list]
-    self.assertEqual(files, [path1, path2])
+    self.assertCountEqual(files, [path1, path2])
 
   def test_match_directory(self):
     result = FileSystems.match([self.tmpdir])[0]
diff --git a/sdks/python/apache_beam/io/iobase.py b/sdks/python/apache_beam/io/iobase.py
index affb6c1..fdb6c00 100644
--- a/sdks/python/apache_beam/io/iobase.py
+++ b/sdks/python/apache_beam/io/iobase.py
@@ -1246,6 +1246,7 @@
   """
 
   def __init__(self, restriction_tracker):
+    # type: (RestrictionTracker) -> None
     if not isinstance(restriction_tracker, RestrictionTracker):
       raise ValueError(
           'Initialize ThreadsafeRestrictionTracker requires'
@@ -1379,6 +1380,7 @@
 
   @property
   def completed_work(self):
+    # type: () -> float
     if self._completed:
       return self._completed
     elif self._remaining and self._fraction:
@@ -1386,6 +1388,7 @@
 
   @property
   def remaining_work(self):
+    # type: () -> float
     if self._remaining:
       return self._remaining
     elif self._completed:
@@ -1393,10 +1396,12 @@
 
   @property
   def total_work(self):
+    # type: () -> float
     return self.completed_work + self.remaining_work
 
   @property
   def fraction_completed(self):
+    # type: () -> float
     if self._fraction is not None:
       return self._fraction
     else:
@@ -1404,6 +1409,7 @@
 
   @property
   def fraction_remaining(self):
+    # type: () -> float
     if self._fraction is not None:
       return 1 - self._fraction
     else:
diff --git a/sdks/python/apache_beam/io/restriction_trackers_test.py b/sdks/python/apache_beam/io/restriction_trackers_test.py
index aaebb09..8c2c7f7 100644
--- a/sdks/python/apache_beam/io/restriction_trackers_test.py
+++ b/sdks/python/apache_beam/io/restriction_trackers_test.py
@@ -141,13 +141,6 @@
     self.assertFalse(tracker.try_claim(220))
     tracker.check_done()
 
-  def test_check_done_after_try_claim_past_end_of_range(self):
-    tracker = OffsetRestrictionTracker(OffsetRange(100, 200))
-    self.assertTrue(tracker.try_claim(150))
-    self.assertTrue(tracker.try_claim(175))
-    self.assertFalse(tracker.try_claim(200))
-    tracker.check_done()
-
   def test_check_done_after_try_claim_right_before_end_of_range(self):
     tracker = OffsetRestrictionTracker(OffsetRange(100, 200))
     self.assertTrue(tracker.try_claim(150))
diff --git a/sdks/python/apache_beam/io/tfrecordio_test.py b/sdks/python/apache_beam/io/tfrecordio_test.py
index 9ed525b..90f484a 100644
--- a/sdks/python/apache_beam/io/tfrecordio_test.py
+++ b/sdks/python/apache_beam/io/tfrecordio_test.py
@@ -290,7 +290,7 @@
                       validate=True))
         assert_that(result, equal_to([b'foo', b'bar']))
 
-  def test_process_gzip(self):
+  def test_process_gzip_with_coder(self):
     with TempDir() as temp_dir:
       path = temp_dir.create_temp_file('result')
       _write_file_gzip(path, FOO_BAR_RECORD_BASE64)
@@ -303,6 +303,17 @@
                       validate=True))
         assert_that(result, equal_to([b'foo', b'bar']))
 
+  def test_process_gzip_without_coder(self):
+    with TempDir() as temp_dir:
+      path = temp_dir.create_temp_file('result')
+      _write_file_gzip(path, FOO_BAR_RECORD_BASE64)
+      with TestPipeline() as p:
+        result = (p
+                  | ReadFromTFRecord(
+                      path,
+                      compression_type=CompressionTypes.GZIP))
+        assert_that(result, equal_to([b'foo', b'bar']))
+
   def test_process_auto(self):
     with TempDir() as temp_dir:
       path = temp_dir.create_temp_file('result.gz')
@@ -316,16 +327,6 @@
                       validate=True))
         assert_that(result, equal_to([b'foo', b'bar']))
 
-  def test_process_gzip(self):
-    with TempDir() as temp_dir:
-      path = temp_dir.create_temp_file('result')
-      _write_file_gzip(path, FOO_BAR_RECORD_BASE64)
-      with TestPipeline() as p:
-        result = (p
-                  | ReadFromTFRecord(
-                      path, compression_type=CompressionTypes.GZIP))
-        assert_that(result, equal_to([b'foo', b'bar']))
-
   def test_process_gzip_auto(self):
     with TempDir() as temp_dir:
       path = temp_dir.create_temp_file('result.gz')
diff --git a/sdks/python/apache_beam/io/vcfio.py b/sdks/python/apache_beam/io/vcfio.py
index c6e5502..7cff80a 100644
--- a/sdks/python/apache_beam/io/vcfio.py
+++ b/sdks/python/apache_beam/io/vcfio.py
@@ -181,9 +181,6 @@
 
     return self < other or self == other
 
-  def __ne__(self, other):
-    return not self == other
-
   def __gt__(self, other):
     if not isinstance(other, Variant):
       return NotImplemented
diff --git a/sdks/python/apache_beam/metrics/cells.py b/sdks/python/apache_beam/metrics/cells.py
index 0b6caa6..8f91288 100644
--- a/sdks/python/apache_beam/metrics/cells.py
+++ b/sdks/python/apache_beam/metrics/cells.py
@@ -29,6 +29,7 @@
 import threading
 import time
 from builtins import object
+from typing import Optional
 
 from apache_beam.portability.api import beam_fn_api_pb2
 from apache_beam.portability.api import metrics_pb2
@@ -86,6 +87,7 @@
     self.value = CounterAggregator.identity_element()
 
   def combine(self, other):
+    # type: (CounterCell) -> CounterCell
     result = CounterCell()
     result.inc(self.value + other.value)
     return result
@@ -106,6 +108,7 @@
         self.value += value
 
   def get_cumulative(self):
+    # type: () -> int
     with self._lock:
       return self.value
 
@@ -144,6 +147,7 @@
     self.data = DistributionAggregator.identity_element()
 
   def combine(self, other):
+    # type: (DistributionCell) -> DistributionCell
     result = DistributionCell()
     result.data = self.data.combine(other.data)
     return result
@@ -169,6 +173,7 @@
       self.data.max = ivalue
 
   def get_cumulative(self):
+    # type: () -> DistributionData
     with self._lock:
       return self.data.get_cumulative()
 
@@ -204,6 +209,7 @@
     self.data = GaugeAggregator.identity_element()
 
   def combine(self, other):
+    # type: (GaugeCell) -> GaugeCell
     result = GaugeCell()
     result.data = self.data.combine(other.data)
     return result
@@ -220,6 +226,7 @@
       self.data.timestamp = time.time()
 
   def get_cumulative(self):
+    # type: () -> GaugeData
     with self._lock:
       return self.data.get_cumulative()
 
@@ -239,6 +246,7 @@
 class DistributionResult(object):
   """The result of a Distribution metric."""
   def __init__(self, data):
+    # type: (DistributionData) -> None
     self.data = data
 
   def __eq__(self, other):
@@ -290,6 +298,7 @@
 
 class GaugeResult(object):
   def __init__(self, data):
+    # type: (GaugeData) -> None
     self.data = data
 
   def __eq__(self, other):
@@ -349,9 +358,11 @@
         self.timestamp)
 
   def get_cumulative(self):
+    # type: () -> GaugeData
     return GaugeData(self.value, timestamp=self.timestamp)
 
   def combine(self, other):
+    # type: (Optional[GaugeData]) -> GaugeData
     if other is None:
       return self
 
@@ -362,6 +373,7 @@
 
   @staticmethod
   def singleton(value, timestamp=None):
+    # type: (...) -> GaugeData
     return GaugeData(value, timestamp=timestamp)
 
   def to_runner_api(self):
@@ -427,9 +439,11 @@
         self.max)
 
   def get_cumulative(self):
+    # type: () -> DistributionData
     return DistributionData(self.sum, self.count, self.min, self.max)
 
   def combine(self, other):
+    # type: (Optional[DistributionData]) -> DistributionData
     if other is None:
       return self
 
@@ -474,7 +488,7 @@
     """
     raise NotImplementedError
 
-  def combine(self, updates):
+  def combine(self, x, y):
     raise NotImplementedError
 
   def result(self, x):
@@ -490,12 +504,15 @@
   """
   @staticmethod
   def identity_element():
+    # type: () -> int
     return 0
 
   def combine(self, x, y):
+    # type: (...) -> int
     return int(x) + int(y)
 
   def result(self, x):
+    # type: (...) -> int
     return int(x)
 
 
@@ -508,12 +525,15 @@
   """
   @staticmethod
   def identity_element():
+    # type: () -> DistributionData
     return DistributionData(0, 0, 2**63 - 1, -2**63)
 
   def combine(self, x, y):
+    # type: (DistributionData, DistributionData) -> DistributionData
     return x.combine(y)
 
   def result(self, x):
+    # type: (DistributionData) -> DistributionResult
     return DistributionResult(x.get_cumulative())
 
 
@@ -526,11 +546,14 @@
   """
   @staticmethod
   def identity_element():
+    # type: () -> GaugeData
     return GaugeData(None, timestamp=0)
 
   def combine(self, x, y):
+    # type: (GaugeData, GaugeData) -> GaugeData
     result = x.combine(y)
     return result
 
   def result(self, x):
+    # type: (GaugeData) -> GaugeResult
     return GaugeResult(x.get_cumulative())
diff --git a/sdks/python/apache_beam/pipeline.py b/sdks/python/apache_beam/pipeline.py
index 1dbafd9..7b1be39 100644
--- a/sdks/python/apache_beam/pipeline.py
+++ b/sdks/python/apache_beam/pipeline.py
@@ -51,7 +51,6 @@
 import abc
 import logging
 import os
-import re
 import shutil
 import tempfile
 from builtins import object
@@ -80,7 +79,9 @@
 from apache_beam.portability import common_urns
 from apache_beam.runners import PipelineRunner
 from apache_beam.runners import create_runner
+from apache_beam.transforms import ParDo
 from apache_beam.transforms import ptransform
+from apache_beam.transforms.sideinputs import get_sideinput_index
 #from apache_beam.transforms import external
 from apache_beam.typehints import TypeCheckError
 from apache_beam.typehints import typehints
@@ -91,6 +92,7 @@
   from apache_beam.portability.api import beam_runner_api_pb2
   from apache_beam.runners.pipeline_context import PipelineContext
   from apache_beam.runners.runner import PipelineResult
+  from apache_beam.transforms import environments
 
 __all__ = ['Pipeline', 'PTransformOverride']
 
@@ -679,7 +681,7 @@
                     return_context=False,
                     context=None,  # type: Optional[PipelineContext]
                     use_fake_coders=False,
-                    default_environment=None  # type: Optional[beam_runner_api_pb2.Environment]
+                    default_environment=None  # type: Optional[environments.Environment]
                    ):
     # type: (...) -> beam_runner_api_pb2.Pipeline
     """For internal use only; no backwards-compatibility guarantees."""
@@ -821,7 +823,7 @@
 
   def __init__(self,
                parent,
-               transform,  # type: ptransform.PTransform
+               transform,  # type: Optional[ptransform.PTransform]
                full_label,  # type: str
                inputs,  # type: Optional[Sequence[Union[pvalue.PBegin, pvalue.PCollection]]]
                environment_id=None  # type: Optional[str]
@@ -909,17 +911,18 @@
     # type: (...) -> None
     """Visits all nodes reachable from the current node."""
 
-    for pval in self.inputs:
-      if pval not in visited and not isinstance(pval, pvalue.PBegin):
-        if pval.producer is not None:
-          pval.producer.visit(visitor, pipeline, visited)
+    for in_pval in self.inputs:
+      if in_pval not in visited and not isinstance(in_pval, pvalue.PBegin):
+        if in_pval.producer is not None:
+          in_pval.producer.visit(visitor, pipeline, visited)
           # The value should be visited now since we visit outputs too.
-          assert pval in visited, pval
+          assert in_pval in visited, in_pval
 
     # Visit side inputs.
-    for pval in self.side_inputs:
-      if isinstance(pval, pvalue.AsSideInput) and pval.pvalue not in visited:
-        pval = pval.pvalue  # Unpack marker-object-wrapped pvalue.
+    for side_input in self.side_inputs:
+      if (isinstance(side_input, pvalue.AsSideInput)
+          and side_input.pvalue not in visited):
+        pval = side_input.pvalue  # Unpack marker-object-wrapped pvalue.
         if pval.producer is not None:
           pval.producer.visit(visitor, pipeline, visited)
           # The value should be visited now since we visit outputs too.
@@ -944,11 +947,11 @@
     # output of such a transform is the containing DoOutputsTuple, not the
     # PCollection inside it. Without the code below a tagged PCollection will
     # not be marked as visited while visiting its producer.
-    for pval in self.outputs.values():
-      if isinstance(pval, pvalue.DoOutputsTuple):
-        pvals = (v for v in pval)
+    for out_pval in self.outputs.values():
+      if isinstance(out_pval, pvalue.DoOutputsTuple):
+        pvals = (v for v in out_pval)
       else:
-        pvals = (pval,)
+        pvals = (out_pval,)
       for v in pvals:
         if v not in visited:
           visited.add(v)
@@ -1019,15 +1022,17 @@
     main_inputs = [context.pcollections.get_by_id(id)
                    for tag, id in proto.inputs.items()
                    if not is_side_input(tag)]
+
     # Ordering is important here.
-    indexed_side_inputs = [(int(re.match('side([0-9]+)(-.*)?$', tag).group(1)),
+    indexed_side_inputs = [(get_sideinput_index(tag),
                             context.pcollections.get_by_id(id))
                            for tag, id in proto.inputs.items()
                            if is_side_input(tag)]
     side_inputs = [si for _, si in sorted(indexed_side_inputs)]
+    transform = ptransform.PTransform.from_runner_api(proto.spec, context)
     result = AppliedPTransform(
         parent=None,
-        transform=ptransform.PTransform.from_runner_api(proto.spec, context),
+        transform=transform,
         full_label=proto.unique_name,
         inputs=main_inputs,
         environment_id=proto.environment_id)
@@ -1045,6 +1050,7 @@
         for tag, id in proto.outputs.items()}
     # This annotation is expected by some runners.
     if proto.spec.urn == common_urns.primitives.PAR_DO.urn:
+      assert isinstance(result.transform, ParDo)
       result.transform.output_tags = set(proto.outputs.keys()).difference(
           {'None'})
     if not result.parts:
diff --git a/sdks/python/apache_beam/portability/common_urns.py b/sdks/python/apache_beam/portability/common_urns.py
index 9c43570..6f2943e 100644
--- a/sdks/python/apache_beam/portability/common_urns.py
+++ b/sdks/python/apache_beam/portability/common_urns.py
@@ -21,70 +21,36 @@
 
 from __future__ import absolute_import
 
-from builtins import object
+from apache_beam.portability.api.beam_runner_api_pb2_urns import BeamConstants
+from apache_beam.portability.api.beam_runner_api_pb2_urns import StandardCoders
+from apache_beam.portability.api.beam_runner_api_pb2_urns import StandardEnvironments
+from apache_beam.portability.api.beam_runner_api_pb2_urns import StandardPTransforms
+from apache_beam.portability.api.beam_runner_api_pb2_urns import StandardSideInputTypes
+from apache_beam.portability.api.metrics_pb2_urns import MonitoringInfo
+from apache_beam.portability.api.metrics_pb2_urns import MonitoringInfoSpecs
+from apache_beam.portability.api.metrics_pb2_urns import MonitoringInfoTypeUrns
+from apache_beam.portability.api.standard_window_fns_pb2_urns import FixedWindowsPayload
+from apache_beam.portability.api.standard_window_fns_pb2_urns import GlobalWindowsPayload
+from apache_beam.portability.api.standard_window_fns_pb2_urns import SessionsPayload
+from apache_beam.portability.api.standard_window_fns_pb2_urns import SlidingWindowsPayload
 
-from apache_beam.portability.api import beam_runner_api_pb2
-from apache_beam.portability.api import metrics_pb2
-from apache_beam.portability.api import standard_window_fns_pb2
+primitives = StandardPTransforms.Primitives
+deprecated_primitives = StandardPTransforms.DeprecatedPrimitives
+composites = StandardPTransforms.Composites
+combine_components = StandardPTransforms.CombineComponents
+sdf_components = StandardPTransforms.SplittableParDoComponents
 
+side_inputs = StandardSideInputTypes.Enum
+coders = StandardCoders.Enum
+constants = BeamConstants.Constants
 
-class PropertiesFromEnumValue(object):
-  def __init__(self, value_descriptor):
-    self.urn = (value_descriptor.GetOptions().Extensions[
-        beam_runner_api_pb2.beam_urn])
-    self.constant = (value_descriptor.GetOptions().Extensions[
-        beam_runner_api_pb2.beam_constant])
-    self.spec = (value_descriptor.GetOptions().Extensions[
-        metrics_pb2.monitoring_info_spec])
-    self.label_props = (value_descriptor.GetOptions().Extensions[
-        metrics_pb2.label_props])
+environments = StandardEnvironments.Environments
 
+global_windows = GlobalWindowsPayload.Enum.PROPERTIES
+fixed_windows = FixedWindowsPayload.Enum.PROPERTIES
+sliding_windows = SlidingWindowsPayload.Enum.PROPERTIES
+session_windows = SessionsPayload.Enum.PROPERTIES
 
-class PropertiesFromEnumType(object):
-  def __init__(self, enum_type):
-    for v in enum_type.DESCRIPTOR.values:
-      setattr(self, v.name, PropertiesFromEnumValue(v))
-
-
-primitives = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardPTransforms.Primitives)
-deprecated_primitives = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardPTransforms.DeprecatedPrimitives)
-composites = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardPTransforms.Composites)
-combine_components = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardPTransforms.CombineComponents)
-sdf_components = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardPTransforms.SplittableParDoComponents)
-
-side_inputs = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardSideInputTypes.Enum)
-
-coders = PropertiesFromEnumType(beam_runner_api_pb2.StandardCoders.Enum)
-
-constants = PropertiesFromEnumType(
-    beam_runner_api_pb2.BeamConstants.Constants)
-
-environments = PropertiesFromEnumType(
-    beam_runner_api_pb2.StandardEnvironments.Environments)
-
-
-def PropertiesFromPayloadType(payload_type):
-  return PropertiesFromEnumType(payload_type.Enum).PROPERTIES
-
-
-global_windows = PropertiesFromPayloadType(
-    standard_window_fns_pb2.GlobalWindowsPayload)
-fixed_windows = PropertiesFromPayloadType(
-    standard_window_fns_pb2.FixedWindowsPayload)
-sliding_windows = PropertiesFromPayloadType(
-    standard_window_fns_pb2.SlidingWindowsPayload)
-session_windows = PropertiesFromPayloadType(
-    standard_window_fns_pb2.SessionsPayload)
-
-monitoring_info_specs = PropertiesFromEnumType(
-    metrics_pb2.MonitoringInfoSpecs.Enum)
-monitoring_info_types = PropertiesFromEnumType(
-    metrics_pb2.MonitoringInfoTypeUrns.Enum)
-monitoring_info_labels = PropertiesFromEnumType(
-    metrics_pb2.MonitoringInfo.MonitoringInfoLabels)
+monitoring_info_specs = MonitoringInfoSpecs.Enum
+monitoring_info_types = MonitoringInfoTypeUrns.Enum
+monitoring_info_labels = MonitoringInfo.MonitoringInfoLabels
diff --git a/sdks/python/apache_beam/portability/utils.py b/sdks/python/apache_beam/portability/utils.py
new file mode 100644
index 0000000..7c60598
--- /dev/null
+++ b/sdks/python/apache_beam/portability/utils.py
@@ -0,0 +1,34 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""For internal use only; no backwards-compatibility guarantees."""
+from __future__ import absolute_import
+
+from typing import TYPE_CHECKING
+from typing import NamedTuple
+
+if TYPE_CHECKING:
+  from apache_beam.portability.api import metrics_pb2
+
+
+PropertiesFromEnumValue = NamedTuple(
+    'PropertiesFromEnumValue', [
+        ('urn', str),
+        ('constant', str),
+        ('spec', 'metrics_pb2.MonitoringInfoSpec'),
+        ('label_props', 'metrics_pb2.MonitoringInfoLabelProps'),
+    ])
diff --git a/sdks/python/apache_beam/pvalue.py b/sdks/python/apache_beam/pvalue.py
index 7e5e7c5..bf1f761 100644
--- a/sdks/python/apache_beam/pvalue.py
+++ b/sdks/python/apache_beam/pvalue.py
@@ -160,6 +160,7 @@
   def windowing(self):
     # type: () -> Windowing
     if not hasattr(self, '_windowing'):
+      assert self.producer is not None and self.producer.transform is not None
       self._windowing = self.producer.transform.get_windowing(
           self.producer.inputs)
     return self._windowing
@@ -201,10 +202,14 @@
   @staticmethod
   def from_runner_api(proto, context):
     # type: (beam_runner_api_pb2.PCollection, PipelineContext) -> PCollection
-    # Producer and tag will be filled in later, the key point is that the
-    # same object is returned for the same pcollection id.
+    # Producer and tag will be filled in later; the key point is that the
+    # same object is returned for the same pcollection id.
+    # We pass None for the PCollection's Pipeline to avoid a cycle during
+    # deserialization.  It will be populated soon after this call, in
+    # Pipeline.from_runner_api(). This brief period is the only time that
+    # PCollection.pipeline is allowed to be None.
     return PCollection(
-        None,
+        None,  # type: ignore[arg-type]
         element_type=context.element_type_from_coder_id(proto.coder_id),
         windowing=context.windowing_strategies.get_by_id(
             proto.windowing_strategy_id),
diff --git a/sdks/python/apache_beam/runners/common.pxd b/sdks/python/apache_beam/runners/common.pxd
index 37e05bf..00840fe 100644
--- a/sdks/python/apache_beam/runners/common.pxd
+++ b/sdks/python/apache_beam/runners/common.pxd
@@ -69,7 +69,6 @@
 
   cpdef invoke_process(self, WindowedValue windowed_value,
                        restriction_tracker=*,
-                       OutputProcessor output_processor=*,
                        additional_args=*, additional_kwargs=*)
   cpdef invoke_start_bundle(self)
   cpdef invoke_finish_bundle(self)
diff --git a/sdks/python/apache_beam/runners/common.py b/sdks/python/apache_beam/runners/common.py
index 220e3d3..a6145ac 100644
--- a/sdks/python/apache_beam/runners/common.py
+++ b/sdks/python/apache_beam/runners/common.py
@@ -340,7 +340,7 @@
   represented by a given DoFnSignature."""
 
   def __init__(self,
-               output_processor,  # type: Optional[_OutputProcessor]
+               output_processor,  # type: OutputProcessor
                signature  # type: DoFnSignature
               ):
     # type: (...) -> None
@@ -359,7 +359,7 @@
   @staticmethod
   def create_invoker(
       signature,  # type: DoFnSignature
-      output_processor=None,  # type: Optional[_OutputProcessor]
+      output_processor,  # type: OutputProcessor
       context=None,  # type: Optional[DoFnContext]
       side_inputs=None,   # type: Optional[List[sideinputs.SideInputMap]]
       input_args=None, input_kwargs=None,
@@ -400,6 +400,8 @@
     if use_simple_invoker:
       return SimpleInvoker(output_processor, signature)
     else:
+      if context is None:
+        raise TypeError("Must provide context when not using SimpleInvoker")
       return PerWindowInvoker(
           output_processor,
           signature, context, side_inputs, input_args, input_kwargs,
@@ -408,7 +410,6 @@
   def invoke_process(self,
                      windowed_value,  # type: WindowedValue
                      restriction_tracker=None,  # type: Optional[iobase.RestrictionTracker]
-                     output_processor=None,  # type: Optional[OutputProcessor]
                      additional_args=None,
                      additional_kwargs=None
                     ):
@@ -419,7 +420,7 @@
       windowed_value: a WindowedValue object that gives the element for which
                       process() method should be invoked along with the window
                       the element belongs to.
-      output_procesor: if provided given OutputProcessor will be used.
+      output_processor: if provided, the given OutputProcessor will be used.
       additional_args: additional arguments to be passed to the current
                       `DoFn.process()` invocation, usually as side inputs.
       additional_kwargs: additional keyword arguments to be passed to the
@@ -437,14 +438,16 @@
     # type: () -> None
     """Invokes the DoFn.start_bundle() method.
     """
-    self.output_processor.start_bundle_outputs(
+    # self.output_processor is Optional, but in practice it won't be None here
+    self.output_processor.start_bundle_outputs(  # type: ignore[union-attr]
         self.signature.start_bundle_method.method_value())
 
   def invoke_finish_bundle(self):
     # type: () -> None
     """Invokes the DoFn.finish_bundle() method.
     """
-    self.output_processor.finish_bundle_outputs(
+    # self.output_processor is Optional, but in practice it won't be None here
+    self.output_processor.finish_bundle_outputs(  # type: ignore[union-attr]
         self.signature.finish_bundle_method.method_value())
 
   def invoke_teardown(self):
@@ -454,7 +457,8 @@
     self.signature.teardown_lifecycle_method.method_value()
 
   def invoke_user_timer(self, timer_spec, key, window, timestamp):
-    self.output_processor.process_outputs(
+    # self.output_processor is Optional, but in practice it won't be None here
+    self.output_processor.process_outputs(  # type: ignore[union-attr]
         WindowedValue(None, timestamp, (window,)),
         self.signature.timer_methods[timer_spec].invoke_timer_callback(
             self.user_state_context, key, window, timestamp))
@@ -476,7 +480,7 @@
   """An invoker that processes elements ignoring windowing information."""
 
   def __init__(self,
-               output_processor,  # type: Optional[_OutputProcessor]
+               output_processor,  # type: OutputProcessor
                signature  # type: DoFnSignature
               ):
     # type: (...) -> None
@@ -486,14 +490,11 @@
   def invoke_process(self,
                      windowed_value,  # type: WindowedValue
                      restriction_tracker=None,  # type: Optional[iobase.RestrictionTracker]
-                     output_processor=None,  # type: Optional[OutputProcessor]
                      additional_args=None,
                      additional_kwargs=None
                     ):
     # type: (...) -> None
-    if not output_processor:
-      output_processor = self.output_processor
-    output_processor.process_outputs(
+    self.output_processor.process_outputs(
         windowed_value, self.process_method(windowed_value.value))
 
 
@@ -501,7 +502,7 @@
   """An invoker that processes elements considering windowing information."""
 
   def __init__(self,
-               output_processor,  # type: Optional[_OutputProcessor]
+               output_processor,  # type: OutputProcessor
                signature,  # type: DoFnSignature
                context,  # type: DoFnContext
                side_inputs,  # type: Iterable[sideinputs.SideInputMap]
@@ -525,8 +526,8 @@
     self.watermark_estimator_param = (
         self.signature.process_method.watermark_estimator_arg_name
         if self.watermark_estimator else None)
-    self.threadsafe_restriction_tracker = None
-    self.current_windowed_value = None
+    self.threadsafe_restriction_tracker = None  # type: Optional[iobase.ThreadsafeRestrictionTracker]
+    self.current_windowed_value = None  # type: Optional[WindowedValue]
     self.bundle_finalizer_param = bundle_finalizer_param
     self.is_key_param_required = False
 
@@ -608,7 +609,6 @@
   def invoke_process(self,
                      windowed_value,  # type: WindowedValue
                      restriction_tracker=None,
-                     output_processor=None,  # type: Optional[OutputProcessor]
                      additional_args=None,
                      additional_kwargs=None
                     ):
@@ -618,8 +618,6 @@
     if not additional_kwargs:
       additional_kwargs = {}
 
-    if not output_processor:
-      output_processor = self.output_processor
     self.context.set_element(windowed_value)
     # Call for the process function for each window if has windowed side inputs
     # or if the process accesses the window parameter. We can just call it once
@@ -655,8 +653,7 @@
       try:
         self.current_windowed_value = windowed_value
         return self._invoke_process_per_window(
-            windowed_value, additional_args, additional_kwargs,
-            output_processor)
+            windowed_value, additional_args, additional_kwargs)
       finally:
         self.threadsafe_restriction_tracker = None
         self.current_windowed_value = windowed_value
@@ -665,16 +662,16 @@
       for w in windowed_value.windows:
         self._invoke_process_per_window(
             WindowedValue(windowed_value.value, windowed_value.timestamp, (w,)),
-            additional_args, additional_kwargs, output_processor)
+            additional_args, additional_kwargs)
     else:
       self._invoke_process_per_window(
-          windowed_value, additional_args, additional_kwargs, output_processor)
+          windowed_value, additional_args, additional_kwargs)
+    return None
 
   def _invoke_process_per_window(self,
                                  windowed_value,  # type: WindowedValue
                                  additional_args,
                                  additional_kwargs,
-                                 output_processor  # type: OutputProcessor
                                 ):
     # type: (...) -> Optional[Tuple[WindowedValue, Timestamp]]
     if self.has_windowed_inputs:
@@ -725,9 +722,11 @@
       elif core.DoFn.PaneInfoParam == p:
         args_for_process[i] = windowed_value.pane_info
       elif isinstance(p, core.DoFn.StateParam):
+        assert self.user_state_context is not None
         args_for_process[i] = (
             self.user_state_context.get_state(p.state_spec, key, window))
       elif isinstance(p, core.DoFn.TimerParam):
+        assert self.user_state_context is not None
         args_for_process[i] = (
             self.user_state_context.get_timer(p.timer_spec, key, window))
       elif core.DoFn.BundleFinalizerParam == p:
@@ -741,14 +740,15 @@
           kwargs_for_process[key] = additional_kwargs[key]
 
     if kwargs_for_process:
-      output_processor.process_outputs(
+      self.output_processor.process_outputs(
           windowed_value,
           self.process_method(*args_for_process, **kwargs_for_process))
     else:
-      output_processor.process_outputs(
+      self.output_processor.process_outputs(
           windowed_value, self.process_method(*args_for_process))
 
     if self.is_splittable:
+      assert self.threadsafe_restriction_tracker is not None
       # TODO: Consider calling check_done right after SDF.Process() finishing.
       # In order to do this, we need to know that current invoking dofn is
       # ProcessSizedElementAndRestriction.
@@ -765,11 +765,11 @@
         return ((
             windowed_value.with_value(((element, deferred_restriction), size)),
             output_watermark), deferred_watermark)
+    return None
 
   def try_split(self, fraction):
-    restriction_tracker = self.threadsafe_restriction_tracker
-    current_windowed_value = self.current_windowed_value
-    if restriction_tracker and current_windowed_value:
+    # type: (...) -> Optional[Tuple[SplitResultType, SplitResultType]]
+    if self.threadsafe_restriction_tracker and self.current_windowed_value:
       # Temporary workaround for [BEAM-7473]: get current_watermark before
       # split, in case watermark gets advanced before getting split results.
       # In worst case, current_watermark is always stale, which is ok.
@@ -777,7 +777,7 @@
         current_watermark = self.watermark_estimator.current_watermark()
       else:
         current_watermark = None
-      split = restriction_tracker.try_split(fraction)
+      split = self.threadsafe_restriction_tracker.try_split(fraction)
       if split:
         primary, residual = split
         element = self.current_windowed_value.value
@@ -795,6 +795,8 @@
     restriction_tracker = self.threadsafe_restriction_tracker
     if restriction_tracker:
       return restriction_tracker.current_progress()
+    else:
+      return None
 
 
 class DoFnRunner(Receiver):
@@ -809,7 +811,7 @@
                kwargs,
                side_inputs,  # type: Iterable[sideinputs.SideInputMap]
                windowing,
-               tagged_receivers=None,  # type: Mapping[Optional[str], Receiver]
+               tagged_receivers,  # type: Mapping[Optional[str], Receiver]
                step_name=None,  # type: Optional[str]
                logging_context=None,
                state=None,
@@ -881,6 +883,7 @@
       return self.do_fn_invoker.invoke_process(windowed_value)
     except BaseException as exn:
       self._reraise_augmented(exn)
+      return None
 
   def process_with_sized_restriction(self, windowed_value):
     # type: (WindowedValue) -> Optional[Tuple[WindowedValue, Timestamp]]
@@ -895,6 +898,7 @@
 
   def current_element_progress(self):
     # type: () -> Optional[iobase.RestrictionProgress]
+    assert isinstance(self.do_fn_invoker, PerWindowInvoker)
     return self.do_fn_invoker.current_element_progress()
 
   def process_user_timer(self, timer_spec, key, window, timestamp):
diff --git a/sdks/python/apache_beam/runners/direct/bundle_factory.py b/sdks/python/apache_beam/runners/direct/bundle_factory.py
index c7677f4..6958fb9 100644
--- a/sdks/python/apache_beam/runners/direct/bundle_factory.py
+++ b/sdks/python/apache_beam/runners/direct/bundle_factory.py
@@ -26,6 +26,7 @@
 from typing import Iterator
 from typing import List
 from typing import Union
+from typing import cast
 
 from apache_beam import pvalue
 from apache_beam.runners import common
@@ -145,9 +146,11 @@
       or as a list of copied WindowedValues.
     """
     if not self._stacked:
+      # we can safely assume self._elements contains only WindowedValues
+      elements = cast('List[WindowedValue]', self._elements)
       if self._committed and not make_copy:
-        return self._elements
-      return list(self._elements)
+        return elements
+      return list(elements)
 
     def iterable_stacked_or_elements(elements):
       for e in elements:
diff --git a/sdks/python/apache_beam/runners/direct/sdf_direct_runner.py b/sdks/python/apache_beam/runners/direct/sdf_direct_runner.py
index e7234de..7659a65 100644
--- a/sdks/python/apache_beam/runners/direct/sdf_direct_runner.py
+++ b/sdks/python/apache_beam/runners/direct/sdf_direct_runner.py
@@ -261,9 +261,11 @@
     self._restriction_tag = _ValueStateTag('restriction')
     self.watermark_hold_tag = _ValueStateTag('watermark_hold')
     self._process_element_invoker = None
+    self._output_processor = _OutputProcessor()
 
     self.sdf_invoker = DoFnInvoker.create_invoker(
         DoFnSignature(self.sdf), context=DoFnContext('unused_context'),
+        output_processor=self._output_processor,
         input_args=args_for_invoker, input_kwargs=kwargs_for_invoker)
 
     self._step_context = None
@@ -329,7 +331,8 @@
                       SDFProcessElementInvoker)
 
     output_values = self._process_element_invoker.invoke_process_element(
-        self.sdf_invoker, windowed_element, tracker, *args, **kwargs)
+        self.sdf_invoker, self._output_processor, windowed_element, tracker,
+        *args, **kwargs)
 
     sdf_result = None
     for output in output_values:
@@ -424,7 +427,7 @@
     raise ValueError
 
   def invoke_process_element(
-      self, sdf_invoker, element, tracker, *args, **kwargs):
+      self, sdf_invoker, output_processor, element, tracker, *args, **kwargs):
     """Invokes `process()` method of a Splittable `DoFn` for a given element.
 
      Args:
@@ -453,10 +456,10 @@
       checkpoint_state.residual_restriction = tracker.checkpoint()
       checkpoint_state.checkpointed = object()
 
-    output_processor = _OutputProcessor()
+    output_processor.reset()
     Timer(self._max_duration, initiate_checkpoint).start()
     sdf_invoker.invoke_process(
-        element, restriction_tracker=tracker, output_processor=output_processor,
+        element, restriction_tracker=tracker,
         additional_args=args, additional_kwargs=kwargs)
 
     assert output_processor.output_iter is not None
@@ -505,3 +508,6 @@
   def process_outputs(self, windowed_input_element, output_iter):
     # type: (WindowedValue, Iterable[Any]) -> None
     self.output_iter = output_iter
+
+  def reset(self):
+    self.output_iter = None
diff --git a/sdks/python/apache_beam/runners/direct/transform_evaluator.py b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
index 0f47b1c..71fd133 100644
--- a/sdks/python/apache_beam/runners/direct/transform_evaluator.py
+++ b/sdks/python/apache_beam/runners/direct/transform_evaluator.py
@@ -586,6 +586,7 @@
       bundles = [bundle]
     else:
       bundles = []
+    assert self._applied_ptransform.transform is not None
     if self._applied_ptransform.inputs:
       input_pvalue = self._applied_ptransform.inputs[0]  # type: Union[pvalue.PBegin, pvalue.PCollection]
     else:
diff --git a/sdks/python/apache_beam/runners/pipeline_context.py b/sdks/python/apache_beam/runners/pipeline_context.py
index b3c4d5b..6b6c3a0 100644
--- a/sdks/python/apache_beam/runners/pipeline_context.py
+++ b/sdks/python/apache_beam/runners/pipeline_context.py
@@ -136,14 +136,6 @@
   Used for accessing and constructing the referenced objects of a Pipeline.
   """
 
-  _COMPONENT_TYPES = {
-      'transforms': pipeline.AppliedPTransform,
-      'pcollections': pvalue.PCollection,
-      'coders': coders.Coder,
-      'windowing_strategies': core.Windowing,
-      'environments': environments.Environment,
-  }
-
   def __init__(self,
                proto=None,  # type: Optional[Union[beam_runner_api_pb2.Components, beam_fn_api_pb2.ProcessBundleDescriptor]]
                default_environment=None,  # type: Optional[environments.Environment]
@@ -158,13 +150,26 @@
           coders=dict(proto.coders.items()),
           windowing_strategies=dict(proto.windowing_strategies.items()),
           environments=dict(proto.environments.items()))
-    for name, cls in self._COMPONENT_TYPES.items():
-      setattr(
-          self, name, _PipelineContextMap(
-              self, cls, namespace, getattr(proto, name, None)))
+
+    self.transforms = _PipelineContextMap(
+        self, pipeline.AppliedPTransform, namespace,
+        proto.transforms if proto is not None else None)
+    self.pcollections = _PipelineContextMap(
+        self, pvalue.PCollection, namespace,
+        proto.pcollections if proto is not None else None)
+    self.coders = _PipelineContextMap(
+        self, coders.Coder, namespace,
+        proto.coders if proto is not None else None)
+    self.windowing_strategies = _PipelineContextMap(
+        self, core.Windowing, namespace,
+        proto.windowing_strategies if proto is not None else None)
+    self.environments = _PipelineContextMap(
+        self, environments.Environment, namespace,
+        proto.environments if proto is not None else None)
+
     if default_environment:
       self._default_environment_id = self.environments.get_id(
-          default_environment, label='default_environment')
+          default_environment, label='default_environment')  # type: Optional[str]
     else:
       self._default_environment_id = None
     self.use_fake_coders = use_fake_coders
@@ -179,7 +184,7 @@
   def coder_id_from_element_type(self, element_type):
     # type: (Any) -> str
     if self.use_fake_coders:
-      return pickler.dumps(element_type)
+      return pickler.dumps(element_type).decode('ascii')
     else:
       return self.coders.get_id(coders.registry.get_coder(element_type))
 
@@ -199,8 +204,13 @@
   def to_runner_api(self):
     # type: () -> beam_runner_api_pb2.Components
     context_proto = beam_runner_api_pb2.Components()
-    for name in self._COMPONENT_TYPES:
-      getattr(self, name).populate_map(getattr(context_proto, name))
+
+    self.transforms.populate_map(context_proto.transforms)
+    self.pcollections.populate_map(context_proto.pcollections)
+    self.coders.populate_map(context_proto.coders)
+    self.windowing_strategies.populate_map(context_proto.windowing_strategies)
+    self.environments.populate_map(context_proto.environments)
+
     return context_proto
 
   def default_environment_id(self):
diff --git a/sdks/python/apache_beam/runners/portability/abstract_job_service.py b/sdks/python/apache_beam/runners/portability/abstract_job_service.py
index 50532f2..8a97b73 100644
--- a/sdks/python/apache_beam/runners/portability/abstract_job_service.py
+++ b/sdks/python/apache_beam/runners/portability/abstract_job_service.py
@@ -31,6 +31,7 @@
 from typing import Dict
 from typing import Iterator
 from typing import Optional
+from typing import Tuple
 from typing import Union
 
 import grpc
@@ -50,6 +51,7 @@
 
 _LOGGER = logging.getLogger(__name__)
 
+StateEvent = Tuple[int, Union[timestamp_pb2.Timestamp, Timestamp]]
 
 def make_state_event(state, timestamp):
   if isinstance(timestamp, Timestamp):
@@ -129,7 +131,7 @@
                request,  # type: beam_job_api_pb2.GetJobStateRequest
                context=None
               ):
-    # type: (...) -> beam_job_api_pb2.GetJobStateResponse
+    # type: (...) -> beam_job_api_pb2.JobStateEvent
     return beam_job_api_pb2.JobStateEvent(
         state=self._jobs[request.job_id].get_state())
 
@@ -153,7 +155,7 @@
         state=self._jobs[request.job_id].get_state())
 
   def GetStateStream(self, request, context=None, timeout=None):
-    # type: (...) -> Iterator[beam_job_api_pb2.GetJobStateResponse]
+    # type: (...) -> Iterator[beam_job_api_pb2.JobStateEvent]
     """Yields state transitions since the stream started.
       """
     if request.job_id not in self._jobs:
@@ -218,11 +220,11 @@
     raise NotImplementedError(self)
 
   def get_state_stream(self):
-    # type: () -> Iterator[Optional[beam_job_api_pb2.JobState.Enum]]
+    # type: () -> Iterator[StateEvent]
     raise NotImplementedError(self)
 
   def get_message_stream(self):
-    # type: () -> Iterator[Union[int, Optional[beam_job_api_pb2.JobMessage]]]
+    # type: () -> Iterator[Union[StateEvent, Optional[beam_job_api_pb2.JobMessage]]]
     raise NotImplementedError(self)
 
 
diff --git a/sdks/python/apache_beam/runners/portability/artifact_service.py b/sdks/python/apache_beam/runners/portability/artifact_service.py
index 17f9b70..708aceb 100644
--- a/sdks/python/apache_beam/runners/portability/artifact_service.py
+++ b/sdks/python/apache_beam/runners/portability/artifact_service.py
@@ -150,7 +150,7 @@
         with self._open(artifact.uri, 'r') as fin:
           # This value is not emitted, but lets us yield a single empty
           # chunk on an empty file.
-          chunk = True
+          chunk = b'1'
           while chunk:
             chunk = fin.read(self._chunk_size)
             yield beam_artifact_api_pb2.ArtifactChunk(data=chunk)
diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner.py b/sdks/python/apache_beam/runners/portability/fn_api_runner.py
index 6100efa..c5e1dac 100644
--- a/sdks/python/apache_beam/runners/portability/fn_api_runner.py
+++ b/sdks/python/apache_beam/runners/portability/fn_api_runner.py
@@ -50,6 +50,7 @@
 from typing import Type
 from typing import TypeVar
 from typing import Union
+from typing import overload
 
 import grpc
 
@@ -152,8 +153,17 @@
     for data in self._input:
       self._futures_by_id.pop(data.instruction_id).set(data)
 
+  @overload
   def push(self, req):
-    # type: (...) -> Optional[ControlFuture]
+    # type: (BeamFnControlServicer.DoneMarker) -> None
+    pass
+
+  @overload
+  def push(self, req):
+    # type: (beam_fn_api_pb2.InstructionRequest) -> ControlFuture
+    pass
+
+  def push(self, req):
     if req == BeamFnControlServicer._DONE_MARKER:
       self._push_queue.put(req)
       return None
@@ -195,7 +205,10 @@
   STARTED_STATE = 'started'
   DONE_STATE = 'done'
 
-  _DONE_MARKER = object()
+  class DoneMarker(object):
+    pass
+
+  _DONE_MARKER = DoneMarker()
 
   def __init__(self):
     self._lock = threading.Lock()
@@ -367,8 +380,9 @@
     # type: (bytes) -> None
     input_stream = create_InputStream(elements_data)
     while input_stream.size() > 0:
-      windowed_value = self._windowed_value_coder.get_impl(
-          ).decode_from_stream(input_stream, True)
+      windowed_val_coder_impl = self._windowed_value_coder.get_impl()  # type: WindowedValueCoderImpl
+      windowed_value = windowed_val_coder_impl.decode_from_stream(
+          input_stream, True)
       key, value = self._kv_extractor(windowed_value.value)
       for window in windowed_value.windows:
         self._values_by_window[key, window].append(value)
@@ -390,7 +404,7 @@
 
   def __init__(
       self,
-      default_environment=None,  # type: Optional[beam_runner_api_pb2.Environment]
+      default_environment=None,  # type: Optional[environments.Environment]
       bundle_repeat=0,
       use_state_iterables=False,
       provision_info=None,  # type: Optional[ExtendedProvisionInfo]
@@ -819,9 +833,10 @@
             pipeline_components.windowing_strategies.items()),
         environments=dict(pipeline_components.environments.items()))
 
-    if worker_handler.state_api_service_descriptor():
+    state_api_service_descriptor = worker_handler.state_api_service_descriptor()
+    if state_api_service_descriptor:
       process_bundle_descriptor.state_api_service_descriptor.url = (
-          worker_handler.state_api_service_descriptor().url)
+          state_api_service_descriptor.url)
 
     # Store the required side inputs into state so it is accessible for the
     # worker when it runs this bundle.
@@ -930,7 +945,8 @@
         # merged results. Without residual_roots, pipeline stops earlier and we
         # may miss some data.
         bundle_manager._num_workers = 1
-        bundle_manager._skip_registration = True
+        # TODO(BEAM-8486): this should be changed to _registered
+        bundle_manager._skip_registration = True  # type: ignore[attr-defined]
         last_result, splits = bundle_manager.process_bundle(
             deferred_inputs, data_output)
         last_sent = deferred_inputs
@@ -1007,7 +1023,8 @@
 
   # These classes are used to interact with the worker.
 
-  class StateServicer(beam_fn_api_pb2_grpc.BeamFnStateServicer):
+  class StateServicer(beam_fn_api_pb2_grpc.BeamFnStateServicer,
+                      sdk_worker.StateHandler):
 
     class CopyOnWriteState(object):
       def __init__(self, underlying):
@@ -1089,11 +1106,11 @@
           else:
             token_base, index = continuation_token.split(':')
             ix = int(index)
-            full_state = self._continuations[token_base]
-            if ix == len(full_state):
+            full_state_cont = self._continuations[token_base]
+            if ix == len(full_state_cont):
               return b'', None
             else:
-              return full_state[ix], '%s:%d' % (token_base, ix + 1)
+              return full_state_cont[ix], '%s:%d' % (token_base, ix + 1)
         else:
           assert not continuation_token
           return b''.join(full_state), None
@@ -1163,16 +1180,16 @@
     """A singleton cache for a StateServicer."""
 
     def __init__(self, state_handler):
-      # type: (sdk_worker.StateHandler) -> None
+      # type: (sdk_worker.CachingStateHandler) -> None
       self._state_handler = state_handler
 
     def create_state_handler(self, api_service_descriptor):
-      # type: (endpoints_pb2.ApiServiceDescriptor) -> sdk_worker.StateHandler
+      # type: (endpoints_pb2.ApiServiceDescriptor) -> sdk_worker.CachingStateHandler
       """Returns the singleton state handler."""
       return self._state_handler
 
     def close(self):
-      # type: (...) -> None
+      # type: () -> None
       """Does nothing."""
       pass
 
@@ -1229,6 +1246,9 @@
   _worker_id_counter = -1
   _lock = threading.Lock()
 
+  control_conn = None  # type: ControlConnection
+  data_conn = None  # type: data_plane._GrpcDataChannel
+
   def __init__(self,
                control_handler,
                data_plane_handler,
@@ -1310,7 +1330,7 @@
 
   def __init__(self,
                unused_payload,  # type: None
-               state,
+               state,  # type: sdk_worker.StateHandler
                provision_info,  # type: Optional[ExtendedProvisionInfo]
                unused_grpc_server=None
               ):
@@ -1444,10 +1464,10 @@
     # If we have provision info, serve these off the control port as well.
     if self.provision_info:
       if self.provision_info.provision_info:
-        provision_info = self.provision_info.provision_info
-        if not provision_info.worker_id:
-          provision_info = copy.copy(provision_info)
-          provision_info.worker_id = str(uuid.uuid4())
+        provision_info_proto = self.provision_info.provision_info
+        if not provision_info_proto.worker_id:
+          provision_info_proto = copy.copy(provision_info_proto)
+          provision_info_proto.worker_id = str(uuid.uuid4())
         beam_provision_api_pb2_grpc.add_ProvisionServiceServicer_to_server(
             BasicProvisionService(self.provision_info.provision_info),
             self.control_server)
@@ -1872,6 +1892,7 @@
                             read_transform_id,  # type: str
                             byte_streams
                            ):
+    assert self._worker_handler is not None
     data_out = self._worker_handler.data_conn.output_stream(
         process_bundle_id, read_transform_id)
     for byte_stream in byte_streams:
@@ -1883,6 +1904,7 @@
     if self._registered:
       registration_future = None
     else:
+      assert self._worker_handler is not None
       process_bundle_registration = beam_fn_api_pb2.InstructionRequest(
           register=beam_fn_api_pb2.RegisterRequest(
               process_bundle_descriptor=[self._bundle_descriptor]))
@@ -1930,6 +1952,8 @@
     self._send_input_to_worker(
         process_bundle_id, read_transform_id, [byte_stream])
 
+    assert self._worker_handler is not None
+
     # Execute the requested splits.
     while not done:
       if split_fraction is None:
@@ -2067,13 +2091,18 @@
 
     merged_result = None  # type: Optional[beam_fn_api_pb2.InstructionResponse]
     split_result_list = []  # type: List[beam_fn_api_pb2.ProcessBundleSplitResponse]
-    with UnboundedThreadPoolExecutor() as executor:
-      for result, split_result in executor.map(lambda part: BundleManager(
+
+    def execute(part_map):
+      # type: (...) -> BundleProcessResult
+      bundle_manager = BundleManager(
           self._worker_handler_list, self._get_buffer,
           self._get_input_coder_impl, self._bundle_descriptor,
           self._progress_frequency, self._registered,
-          cache_token_generator=self._cache_token_generator).process_bundle(
-              part, expected_outputs), part_inputs):
+          cache_token_generator=self._cache_token_generator)
+      return bundle_manager.process_bundle(part_map, expected_outputs)
+
+    with UnboundedThreadPoolExecutor() as executor:
+      for result, split_result in executor.map(execute, part_inputs):
 
         split_result_list += split_result
         if merged_result is None:
@@ -2086,6 +2115,7 @@
                           result.process_bundle.monitoring_infos,
                           merged_result.process_bundle.monitoring_infos))),
               error=result.error or merged_result.error)
+    assert merged_result is not None
 
     return merged_result, split_result_list
 
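Note: the hunk above replaces a multi-line lambda passed to executor.map with a named execute function. A minimal standalone sketch of the same pattern, purely illustrative (run_parts and process_bundle are made-up names, not Beam APIs):

    from concurrent.futures import ThreadPoolExecutor

    def run_parts(part_inputs, process_bundle):
      # A named inner function replaces the multi-line lambda: it is easier to
      # read, can carry a type comment, and shows up by name in tracebacks.
      def execute(part):
        return process_bundle(part)

      with ThreadPoolExecutor() as executor:
        # executor.map yields results in input order, one per part.
        return list(executor.map(execute, part_inputs))

    assert run_parts([1, 2, 3], lambda part: part * 10) == [10, 20, 30]
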
diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner_test.py b/sdks/python/apache_beam/runners/portability/fn_api_runner_test.py
index 8125a5a..e82f31c 100644
--- a/sdks/python/apache_beam/runners/portability/fn_api_runner_test.py
+++ b/sdks/python/apache_beam/runners/portability/fn_api_runner_test.py
@@ -566,6 +566,12 @@
              p | 'd' >> beam.Create(additional)) | beam.Flatten()
       assert_that(res, equal_to(['a', 'b', 'c'] + additional))
 
+  def test_flatten_same_pcollections(self, with_transcoding=True):
+    with self.create_pipeline() as p:
+      pc = p | beam.Create(['a', 'b'])
+      assert_that((pc, pc, pc) | beam.Flatten(), equal_to(['a', 'b'] * 3))
+
+
   def test_combine_per_key(self):
     with self.create_pipeline() as p:
       res = (p
diff --git a/sdks/python/apache_beam/runners/portability/fn_api_runner_transforms.py b/sdks/python/apache_beam/runners/portability/fn_api_runner_transforms.py
index 97c6cfa..d206122 100644
--- a/sdks/python/apache_beam/runners/portability/fn_api_runner_transforms.py
+++ b/sdks/python/apache_beam/runners/portability/fn_api_runner_transforms.py
@@ -277,7 +277,7 @@
           stage_components.transforms[side.transform_id].inputs[side.local_name]
           for side in side_inputs
       }, main_input=main_input_id)
-      payload = beam_runner_api_pb2.ExecutableStagePayload(
+      exec_payload = beam_runner_api_pb2.ExecutableStagePayload(
           environment=components.environments[self.environment],
           input=main_input_id,
           outputs=external_outputs,
@@ -291,7 +291,7 @@
           unique_name=unique_name(None, self.name),
           spec=beam_runner_api_pb2.FunctionSpec(
               urn='beam:runner:executable_stage:v1',
-              payload=payload.SerializeToString()),
+              payload=exec_payload.SerializeToString()),
           inputs=named_inputs,
           outputs={'output_%d' % ix: pcoll
                    for ix, pcoll in enumerate(external_outputs)},)
@@ -317,7 +317,8 @@
 class TransformContext(object):
 
   _KNOWN_CODER_URNS = set(
-      value.urn for value in common_urns.coders.__dict__.values())
+      value.urn for key, value in common_urns.coders.__dict__.items()
+      if not key.startswith('_'))
 
   def __init__(self,
                components,  # type: beam_runner_api_pb2.Components
@@ -580,15 +581,18 @@
   This representation is also amenable to simple recomputation on fusion.
   """
   consumers = collections.defaultdict(list)  # type: DefaultDict[str, List[Stage]]
+  def get_all_side_inputs():
+    # type: () -> Set[str]
+    all_side_inputs = set()  # type: Set[str]
+    for stage in stages:
+      for transform in stage.transforms:
+        for input in transform.inputs.values():
+          consumers[input].append(stage)
+      for si in stage.side_inputs():
+        all_side_inputs.add(si)
+    return all_side_inputs
 
-  all_side_inputs = set()
-  for stage in stages:
-    for transform in stage.transforms:
-      for input in transform.inputs.values():
-        consumers[input].append(stage)
-    for si in stage.side_inputs():
-      all_side_inputs.add(si)
-  all_side_inputs = frozenset(all_side_inputs)
+  all_side_inputs = frozenset(get_all_side_inputs())
 
   downstream_side_inputs_by_stage = {}  # type: Dict[Stage, FrozenSet[str]]
 
@@ -1440,4 +1444,5 @@
 def split_buffer_id(buffer_id):
   # type: (bytes) -> Tuple[str, str]
   """A buffer id is "kind:pcollection_id". Split into (kind, pcoll_id). """
-  return buffer_id.decode('utf-8').split(':', 1)
+  kind, pcoll_id = buffer_id.decode('utf-8').split(':', 1)
+  return kind, pcoll_id
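Note: the new split_buffer_id body unpacks the split result so the two-element tuple return is explicit for the type checker; behavior is unchanged. A quick illustration (the buffer ids shown are made up):

    def split_buffer_id(buffer_id):
      # type: (bytes) -> tuple
      kind, pcoll_id = buffer_id.decode('utf-8').split(':', 1)
      return kind, pcoll_id

    assert split_buffer_id(b'group:pcoll_1') == ('group', 'pcoll_1')
    # Only the first ':' splits, so pcollection ids may themselves contain colons.
    assert split_buffer_id(b'materialize:ref_PCollection_2:out') == (
        'materialize', 'ref_PCollection_2:out')
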
diff --git a/sdks/python/apache_beam/runners/portability/portable_runner.py b/sdks/python/apache_beam/runners/portability/portable_runner.py
index 0715311..b5dd1d4 100644
--- a/sdks/python/apache_beam/runners/portability/portable_runner.py
+++ b/sdks/python/apache_beam/runners/portability/portable_runner.py
@@ -88,7 +88,7 @@
 
   @staticmethod
   def _create_environment(options):
-    # type: (PipelineOptions) -> beam_runner_api_pb2.Environment
+    # type: (PipelineOptions) -> environments.Environment
     portable_options = options.view_as(PortableOptions)
     # Do not set a Runner. Otherwise this can cause problems in Java's
     # PipelineOptions, i.e. ClassNotFoundException, if the corresponding Runner
diff --git a/sdks/python/apache_beam/runners/worker/bundle_processor.py b/sdks/python/apache_beam/runners/worker/bundle_processor.py
index 052582c..0612e5e 100644
--- a/sdks/python/apache_beam/runners/worker/bundle_processor.py
+++ b/sdks/python/apache_beam/runners/worker/bundle_processor.py
@@ -28,7 +28,6 @@
 import json
 import logging
 import random
-import re
 import threading
 from builtins import next
 from builtins import object
@@ -160,7 +159,7 @@
   """A source-like operation that gathers input from the runner."""
 
   def __init__(self,
-               operation_name,  # type: str
+               operation_name,  # type: Union[str, common.NameContext]
                step_name,
                consumers,  # type: Mapping[Any, Iterable[operations.Operation]]
                counter_factory,
@@ -246,6 +245,7 @@
       if stop_index < self.stop:
         self.stop = stop_index
         return self.stop - 1, None, None, self.stop
+    return None
 
   def progress_metrics(self):
     # type: () -> beam_fn_api_pb2.Metrics.PTransform
@@ -265,7 +265,7 @@
 
 class _StateBackedIterable(object):
   def __init__(self,
-               state_handler,
+               state_handler,  # type: sdk_worker.CachingStateHandler
                state_key,  # type: beam_fn_api_pb2.StateKey
                coder_or_impl,  # type: Union[coders.Coder, coder_impl.CoderImpl]
                is_cached=False
@@ -294,7 +294,7 @@
 
 class StateBackedSideInputMap(object):
   def __init__(self,
-               state_handler,
+               state_handler,  # type: sdk_worker.CachingStateHandler
                transform_id,  # type: str
                tag,  # type: Optional[str]
                side_input_data,  # type: pvalue.SideInputData
@@ -434,7 +434,7 @@
 class SynchronousBagRuntimeState(userstate.BagRuntimeState):
 
   def __init__(self,
-               state_handler,
+               state_handler,  # type: sdk_worker.CachingStateHandler
                state_key,  # type: beam_fn_api_pb2.StateKey
                value_coder  # type: coders.Coder
               ):
@@ -481,7 +481,7 @@
 class SynchronousSetRuntimeState(userstate.SetRuntimeState):
 
   def __init__(self,
-               state_handler,
+               state_handler,  # type: sdk_worker.CachingStateHandler
                state_key,  # type: beam_fn_api_pb2.StateKey
                value_coder  # type: coders.Coder
               ):
@@ -578,7 +578,7 @@
   """Interface for state and timers from SDK to Fn API servicer of state.."""
 
   def __init__(self,
-               state_handler,
+               state_handler,  # type: sdk_worker.CachingStateHandler
                transform_id,  # type: str
                key_coder,  # type: coders.Coder
                window_coder,  # type: coders.Coder
@@ -616,6 +616,7 @@
                 window  # type: windowed_value.BoundedWindow
                ):
     # type: (...) -> OutputTimer
+    assert self._timer_receivers is not None
     return OutputTimer(
         key, window, self._timer_receivers[timer_spec.name])
 
@@ -695,7 +696,7 @@
 
   def __init__(self,
                process_bundle_descriptor,  # type: beam_fn_api_pb2.ProcessBundleDescriptor
-               state_handler,  # type: Union[FnApiRunner.StateServicer, GrpcStateHandler]
+               state_handler,  # type: sdk_worker.CachingStateHandler
                data_channel_factory  # type: data_plane.DataChannelFactory
               ):
     # type: (...) -> None
@@ -868,6 +869,7 @@
                                  deferred_remainder  # type: Tuple[windowed_value.WindowedValue, Timestamp]
                                 ):
     # type: (...) -> beam_fn_api_pb2.DelayedBundleApplication
+    assert op.input_info is not None
     # TODO(SDF): For non-root nodes, need main_input_coder + residual_coder.
     ((element_and_restriction, output_watermark),
      deferred_watermark) = deferred_remainder
@@ -1011,7 +1013,7 @@
                data_channel_factory,  # type: data_plane.DataChannelFactory
                counter_factory,
                state_sampler,  # type: statesampler.StateSampler
-               state_handler
+               state_handler  # type: sdk_worker.CachingStateHandler
               ):
     self.descriptor = descriptor
     self.data_channel_factory = data_channel_factory
@@ -1130,19 +1132,26 @@
 
 @BeamTransformFactory.register_urn(
     DATA_INPUT_URN, beam_fn_api_pb2.RemoteGrpcPort)
-def create(factory, transform_id, transform_proto, grpc_port, consumers):
+def create_source_runner(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    grpc_port,  # type: beam_fn_api_pb2.RemoteGrpcPort
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> DataInputOperation
   # Timers are the one special case where we don't want to call the
   # (unlabeled) operation.process() method, which we detect here.
   # TODO(robertwb): Consider generalizing if there are any more cases.
   output_pcoll = only_element(transform_proto.outputs.values())
   output_consumers = only_element(consumers.values())
-  if (len(output_consumers) == 1
-      and isinstance(only_element(output_consumers), operations.DoOperation)):
+  if len(output_consumers) == 1:
     do_op = only_element(output_consumers)
-    for tag, pcoll_id in do_op.timer_inputs.items():
-      if pcoll_id == output_pcoll:
-        output_consumers[:] = [TimerConsumer(tag, do_op)]
-        break
+    if isinstance(do_op, operations.DoOperation):
+      for tag, pcoll_id in do_op.timer_inputs.items():
+        if pcoll_id == output_pcoll:
+          output_consumers[:] = [TimerConsumer(tag, do_op)]
+          break
 
   if grpc_port.coder_id:
     output_coder = factory.get_coder(grpc_port.coder_id)
@@ -1165,7 +1174,14 @@
 
 @BeamTransformFactory.register_urn(
     DATA_OUTPUT_URN, beam_fn_api_pb2.RemoteGrpcPort)
-def create(factory, transform_id, transform_proto, grpc_port, consumers):
+def create_sink_runner(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    grpc_port,  # type: beam_fn_api_pb2.RemoteGrpcPort
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> DataOutputOperation
   if grpc_port.coder_id:
     output_coder = factory.get_coder(grpc_port.coder_id)
   else:
@@ -1186,7 +1202,14 @@
 
 
 @BeamTransformFactory.register_urn(OLD_DATAFLOW_RUNNER_HARNESS_READ_URN, None)
-def create(factory, transform_id, transform_proto, parameter, consumers):
+def create_source_java(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    parameter,
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> operations.ReadOperation
   # The Dataflow runner harness strips the base64 encoding.
   source = pickler.loads(base64.b64encode(parameter))
   spec = operation_specs.WorkerRead(
@@ -1228,7 +1251,14 @@
 
 @BeamTransformFactory.register_urn(
     python_urns.IMPULSE_READ_TRANSFORM, beam_runner_api_pb2.ReadPayload)
-def create(factory, transform_id, transform_proto, parameter, consumers):
+def create_read_from_impulse_python(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    parameter,  # type: beam_runner_api_pb2.ReadPayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> operations.ImpulseReadOperation
   return operations.ImpulseReadOperation(
       common.NameContext(transform_proto.unique_name, transform_id),
       factory.counter_factory,
@@ -1240,7 +1270,13 @@
 
 
 @BeamTransformFactory.register_urn(OLD_DATAFLOW_RUNNER_HARNESS_PARDO_URN, None)
-def create(factory, transform_id, transform_proto, serialized_fn, consumers):
+def create_dofn_javasdk(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    serialized_fn,
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   return _create_pardo_operation(
       factory, transform_id, transform_proto, consumers, serialized_fn)
 
@@ -1248,7 +1284,7 @@
 @BeamTransformFactory.register_urn(
     common_urns.sdf_components.PAIR_WITH_RESTRICTION.urn,
     beam_runner_api_pb2.ParDoPayload)
-def create(*args):
+def create_pair_with_restriction(*args):
 
   class PairWithRestriction(beam.DoFn):
     def __init__(self, fn, restriction_provider):
@@ -1269,7 +1305,7 @@
 @BeamTransformFactory.register_urn(
     common_urns.sdf_components.SPLIT_AND_SIZE_RESTRICTIONS.urn,
     beam_runner_api_pb2.ParDoPayload)
-def create(*args):
+def create_split_and_size_restrictions(*args):
 
   class SplitAndSizeRestrictions(beam.DoFn):
     def __init__(self, fn, restriction_provider):
@@ -1287,7 +1323,13 @@
 @BeamTransformFactory.register_urn(
     common_urns.sdf_components.PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS.urn,
     beam_runner_api_pb2.ParDoPayload)
-def create(factory, transform_id, transform_proto, parameter, consumers):
+def create_process_sized_elements_and_restrictions(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    parameter,  # type: beam_runner_api_pb2.ParDoPayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   assert parameter.do_fn.urn == python_urns.PICKLED_DOFN_INFO
   serialized_fn = parameter.do_fn.payload
   return _create_pardo_operation(
@@ -1312,7 +1354,14 @@
 
 @BeamTransformFactory.register_urn(
     common_urns.primitives.PAR_DO.urn, beam_runner_api_pb2.ParDoPayload)
-def create(factory, transform_id, transform_proto, parameter, consumers):
+def create_par_do(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    parameter,  # type: beam_runner_api_pb2.ParDoPayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> operations.DoOperation
   assert parameter.do_fn.urn == python_urns.PICKLED_DOFN_INFO
   serialized_fn = parameter.do_fn.payload
   return _create_pardo_operation(
@@ -1336,9 +1385,7 @@
         (tag, beam.pvalue.SideInputData.from_runner_api(si, factory.context))
         for tag, si in pardo_proto.side_inputs.items()]
     tagged_side_inputs.sort(
-        key=lambda tag_si: int(re.match('side([0-9]+)(-.*)?$',
-                                        tag_si[0],
-                                        re.DOTALL).group(1)))
+        key=lambda tag_si: sideinputs.get_sideinput_index(tag_si[0]))
     side_input_maps = [
         StateBackedSideInputMap(
             factory.state_handler,
@@ -1447,7 +1494,13 @@
 @BeamTransformFactory.register_urn(
     common_urns.primitives.ASSIGN_WINDOWS.urn,
     beam_runner_api_pb2.WindowingStrategy)
-def create(factory, transform_id, transform_proto, parameter, consumers):
+def create_assign_windows(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    parameter,  # type: beam_runner_api_pb2.WindowingStrategy
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   class WindowIntoDoFn(beam.DoFn):
     def __init__(self, windowing):
       self.windowing = windowing
@@ -1466,7 +1519,14 @@
 
 
 @BeamTransformFactory.register_urn(IDENTITY_DOFN_URN, None)
-def create(factory, transform_id, transform_proto, unused_parameter, consumers):
+def create_identity_dofn(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    parameter,
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> operations.FlattenOperation
   return factory.augment_oldstyle_op(
       operations.FlattenOperation(
           common.NameContext(transform_proto.unique_name, transform_id),
@@ -1481,7 +1541,14 @@
 @BeamTransformFactory.register_urn(
     common_urns.combine_components.COMBINE_PER_KEY_PRECOMBINE.urn,
     beam_runner_api_pb2.CombinePayload)
-def create(factory, transform_id, transform_proto, payload, consumers):
+def create_combine_per_key_precombine(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    payload,  # type: beam_runner_api_pb2.CombinePayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> operations.PGBKCVOperation
   serialized_combine_fn = pickler.dumps(
       (beam.CombineFn.from_runner_api(payload.combine_fn, factory.context),
        [], {}))
@@ -1502,7 +1569,13 @@
 @BeamTransformFactory.register_urn(
     common_urns.combine_components.COMBINE_PER_KEY_MERGE_ACCUMULATORS.urn,
     beam_runner_api_pb2.CombinePayload)
-def create(factory, transform_id, transform_proto, payload, consumers):
+def create_combine_per_key_merge_accumulators(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    payload,  # type: beam_runner_api_pb2.CombinePayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   return _create_combine_phase_operation(
       factory, transform_id, transform_proto, payload, consumers, 'merge')
 
@@ -1510,7 +1583,13 @@
 @BeamTransformFactory.register_urn(
     common_urns.combine_components.COMBINE_PER_KEY_EXTRACT_OUTPUTS.urn,
     beam_runner_api_pb2.CombinePayload)
-def create(factory, transform_id, transform_proto, payload, consumers):
+def create_combine_per_key_extract_outputs(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    payload,  # type: beam_runner_api_pb2.CombinePayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   return _create_combine_phase_operation(
       factory, transform_id, transform_proto, payload, consumers, 'extract')
 
@@ -1518,7 +1597,13 @@
 @BeamTransformFactory.register_urn(
     common_urns.combine_components.COMBINE_GROUPED_VALUES.urn,
     beam_runner_api_pb2.CombinePayload)
-def create(factory, transform_id, transform_proto, payload, consumers):
+def create_combine_grouped_values(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    payload,  # type: beam_runner_api_pb2.CombinePayload
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   return _create_combine_phase_operation(
       factory, transform_id, transform_proto, payload, consumers, 'all')
 
@@ -1544,7 +1629,14 @@
 
 
 @BeamTransformFactory.register_urn(common_urns.primitives.FLATTEN.urn, None)
-def create(factory, transform_id, transform_proto, unused_parameter, consumers):
+def create_flatten(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    payload,
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
+  # type: (...) -> operations.FlattenOperation
   return factory.augment_oldstyle_op(
       operations.FlattenOperation(
           common.NameContext(transform_proto.unique_name, transform_id),
@@ -1560,7 +1652,13 @@
 @BeamTransformFactory.register_urn(
     common_urns.primitives.MAP_WINDOWS.urn,
     beam_runner_api_pb2.FunctionSpec)
-def create(factory, transform_id, transform_proto, mapping_fn_spec, consumers):
+def create_map_windows(
+    factory,  # type: BeamTransformFactory
+    transform_id,  # type: str
+    transform_proto,  # type: beam_runner_api_pb2.PTransform
+    mapping_fn_spec,  # type: beam_runner_api_pb2.SdkFunctionSpec
+    consumers  # type: Dict[str, List[operations.Operation]]
+):
   assert mapping_fn_spec.urn == python_urns.PICKLED_WINDOW_MAPPING_FN
   window_mapping_fn = pickler.loads(mapping_fn_spec.payload)
 
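Note: the renames above give every @BeamTransformFactory.register_urn handler a distinct name instead of redefining create repeatedly. A minimal sketch of the registration pattern with a hypothetical registry (not the Beam factory), showing why distinct names help:

    _URN_HANDLERS = {}  # hypothetical registry, not BeamTransformFactory

    def register_urn(urn):
      def wrapper(fn):
        _URN_HANDLERS[urn] = fn
        return fn
      return wrapper

    @register_urn('beam:transform:pardo:v1')
    def create_par_do(payload):
      return 'DoOperation(%r)' % (payload,)

    @register_urn('beam:transform:flatten:v1')
    def create_flatten(payload):
      return 'FlattenOperation(%r)' % (payload,)

    # Each handler keeps its own __name__, so logs, profiles and tracebacks
    # point at create_par_do rather than an ambiguous create.
    assert _URN_HANDLERS['beam:transform:pardo:v1'].__name__ == 'create_par_do'
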
diff --git a/sdks/python/apache_beam/runners/worker/operations.py b/sdks/python/apache_beam/runners/worker/operations.py
index 6bb8bf1..aa95057 100644
--- a/sdks/python/apache_beam/runners/worker/operations.py
+++ b/sdks/python/apache_beam/runners/worker/operations.py
@@ -657,6 +657,7 @@
     with self.scoped_process_state:
       delayed_application = self.dofn_receiver.receive(o)
       if delayed_application:
+        assert self.execution_context is not None
         self.execution_context.delayed_applications.append(
             (self, delayed_application))
 
@@ -748,6 +749,7 @@
 
   def process(self, o):
     # type: (WindowedValue) -> None
+    assert self.tagged_receivers is not None
     with self.scoped_process_state:
       try:
         with self.lock:
@@ -758,6 +760,7 @@
         # the lock.
         delayed_application = self.dofn_runner.process_with_sized_restriction(o)
         if delayed_application:
+          assert self.execution_context is not None
           self.execution_context.delayed_applications.append(
               (self, delayed_application))
       finally:
@@ -784,6 +787,7 @@
       metrics = super(SdfProcessSizedElements, self).progress_metrics()
       current_element_progress = self.current_element_progress()
     if current_element_progress:
+      assert self.input_info is not None
       metrics.active_elements.measured.input_element_counts[
           self.input_info[1]] = 1
       metrics.active_elements.fraction_remaining = (
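Note: several hunks in this file (and in fn_api_runner.py above) insert `assert x is not None` before attribute access. The point is type narrowing: mypy treats the attribute as Optional until the assert rules out None. A small illustrative sketch (the class is hypothetical):

    from typing import Optional

    class Recorder(object):
      def __init__(self):
        self.execution_context = None  # type: Optional[dict]

      def record(self, item):
        # type: (str) -> None
        assert self.execution_context is not None
        # After the assert, mypy narrows Optional[dict] to dict for this access.
        self.execution_context.setdefault('items', []).append(item)

    r = Recorder()
    r.execution_context = {}
    r.record('delayed_application')
    assert r.execution_context == {'items': ['delayed_application']}
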
diff --git a/sdks/python/apache_beam/runners/worker/sdk_worker.py b/sdks/python/apache_beam/runners/worker/sdk_worker.py
index 66724a7..193ac1c 100644
--- a/sdks/python/apache_beam/runners/worker/sdk_worker.py
+++ b/sdks/python/apache_beam/runners/worker/sdk_worker.py
@@ -33,9 +33,11 @@
 from builtins import object
 from concurrent import futures
 from typing import TYPE_CHECKING
+from typing import Any
 from typing import Callable
 from typing import DefaultDict
 from typing import Dict
+from typing import Iterable
 from typing import Iterator
 from typing import List
 from typing import Optional
@@ -491,16 +493,43 @@
       yield
 
 
-class StateHandlerFactory(with_metaclass(abc.ABCMeta, object)):
+class StateHandler(with_metaclass(abc.ABCMeta, object)):  # type: ignore[misc]
+  """An abstract object representing a ``StateHandler``."""
+
+  @abc.abstractmethod
+  def get_raw(self,
+              state_key,  # type: beam_fn_api_pb2.StateKey
+              continuation_token=None  # type: Optional[bytes]
+             ):
+    # type: (...) -> Tuple[bytes, Optional[bytes]]
+    raise NotImplementedError(type(self))
+
+  @abc.abstractmethod
+  def append_raw(self,
+                 state_key,  # type: beam_fn_api_pb2.StateKey
+                 data  # type: bytes
+                ):
+    # type: (...) -> _Future
+    raise NotImplementedError(type(self))
+
+  @abc.abstractmethod
+  def clear(self, state_key):
+    # type: (beam_fn_api_pb2.StateKey) -> _Future
+    raise NotImplementedError(type(self))
+
+
+class StateHandlerFactory(with_metaclass(abc.ABCMeta, object)):  # type: ignore[misc]
   """An abstract factory for creating ``DataChannel``."""
 
   @abc.abstractmethod
   def create_state_handler(self, api_service_descriptor):
+    # type: (endpoints_pb2.ApiServiceDescriptor) -> CachingStateHandler
     """Returns a ``StateHandler`` from the given ApiServiceDescriptor."""
     raise NotImplementedError(type(self))
 
   @abc.abstractmethod
   def close(self):
+    # type: () -> None
     """Close all channels that this factory owns."""
     raise NotImplementedError(type(self))
 
@@ -512,14 +541,14 @@
   """
 
   def __init__(self, state_cache, credentials=None):
-    self._state_handler_cache = {}  # type: Dict[str, GrpcStateHandler]
+    self._state_handler_cache = {}  # type: Dict[str, CachingStateHandler]
     self._lock = threading.Lock()
     self._throwing_state_handler = ThrowingStateHandler()
     self._credentials = credentials
     self._state_cache = state_cache
 
   def create_state_handler(self, api_service_descriptor):
-    # type: (endpoints_pb2.ApiServiceDescriptor) -> GrpcStateHandler
+    # type: (endpoints_pb2.ApiServiceDescriptor) -> CachingStateHandler
     if not api_service_descriptor:
       return self._throwing_state_handler
     url = api_service_descriptor.url
@@ -550,6 +579,7 @@
     return self._state_handler_cache[url]
 
   def close(self):
+    # type: () -> None
     _LOGGER.info('Closing all cached gRPC state handlers.')
     for _, state_handler in self._state_handler_cache.items():
       state_handler.done()
@@ -557,15 +587,15 @@
     self._state_cache.evict_all()
 
 
-class ThrowingStateHandler(object):
+class ThrowingStateHandler(StateHandler):
   """A state handler that errors on any requests."""
 
-  def blocking_get(self, state_key, coder):
+  def get_raw(self, state_key, coder):
     raise RuntimeError(
         'Unable to handle state requests for ProcessBundleDescriptor without '
         'state ApiServiceDescriptor for state key %s.' % state_key)
 
-  def append(self, state_key, coder, elements):
+  def append_raw(self, state_key, coder, elements):
     raise RuntimeError(
         'Unable to handle state requests for ProcessBundleDescriptor without '
         'state ApiServiceDescriptor for state key %s.' % state_key)
@@ -576,7 +606,7 @@
         'state ApiServiceDescriptor for state key %s.' % state_key)
 
 
-class GrpcStateHandler(object):
+class GrpcStateHandler(StateHandler):
 
   _DONE = object()
 
@@ -674,6 +704,7 @@
     return future
 
   def _blocking_request(self, request):
+    # type: (beam_fn_api_pb2.StateRequest) -> beam_fn_api_pb2.StateResponse
     req_future = self._request(request)
     while not req_future.wait(timeout=1):
       if self._exc_info:
@@ -702,7 +733,10 @@
 class CachingStateHandler(object):
   """ A State handler which retrieves and caches state. """
 
-  def __init__(self, global_state_cache, underlying_state):
+  def __init__(self,
+               global_state_cache,  # type: StateCache
+               underlying_state  # type: StateHandler
+              ):
     self._underlying = underlying_state
     self._state_cache = global_state_cache
     self._context = threading.local()
@@ -728,7 +762,12 @@
     finally:
       self._context.cache_token = None
 
-  def blocking_get(self, state_key, coder, is_cached=False):
+  def blocking_get(self,
+                   state_key,  # type: beam_fn_api_pb2.StateKey
+                   coder,  # type: coder_impl.CoderImpl
+                   is_cached=False
+                  ):
+    # type: (...) -> Iterator[Any]
     if not self._should_be_cached(is_cached):
       # Cache disabled / no cache token. Can't do a lookup/store in the cache.
       # Fall back to lazily materializing the state, one element at a time.
@@ -769,6 +808,7 @@
     return self._underlying.append_raw(state_key, out.get())
 
   def clear(self, state_key, is_cached=False):
+    # type: (beam_fn_api_pb2.StateKey, bool) -> _Future
     if self._should_be_cached(is_cached):
       cache_key = self._convert_to_cache_key(state_key)
       self._state_cache.clear(cache_key, self._context.cache_token)
@@ -778,7 +818,11 @@
     # type: () -> None
     self._underlying.done()
 
-  def _materialize_iter(self, state_key, coder):
+  def _materialize_iter(self,
+                        state_key,  # type: beam_fn_api_pb2.StateKey
+                        coder  # type: coder_impl.CoderImpl
+                       ):
+    # type: (...) -> Iterator[Any]
     """Materializes the state lazily, one element at a time.
        :return A generator which returns the next element if advanced.
     """
diff --git a/sdks/python/apache_beam/runners/worker/statecache.py b/sdks/python/apache_beam/runners/worker/statecache.py
index e8fd2ae..1e304c4 100644
--- a/sdks/python/apache_beam/runners/worker/statecache.py
+++ b/sdks/python/apache_beam/runners/worker/statecache.py
@@ -23,17 +23,24 @@
 import collections
 import logging
 import threading
+from typing import Callable
+from typing import DefaultDict
+from typing import Hashable
+from typing import Set
+from typing import TypeVar
 
 from apache_beam.metrics import monitoring_infos
 
 _LOGGER = logging.getLogger(__name__)
 
+CallableT = TypeVar('CallableT', bound='Callable')
+
 
 class Metrics(object):
   """Metrics container for state cache metrics."""
 
   # A set of all registered metrics
-  ALL_METRICS = set()
+  ALL_METRICS = set()  # type: Set[Hashable]
   PREFIX = "beam:metric:statecache:"
 
   def __init__(self):
@@ -44,12 +51,14 @@
     """
     if hasattr(self._context, 'metrics'):
       return # Already initialized
-    self._context.metrics = collections.defaultdict(int)
+    self._context.metrics = collections.defaultdict(int)  # type: DefaultDict[Hashable, int]
 
   def count(self, name):
+    # type: (str) -> None
     self._context.metrics[name] += 1
 
   def hit_miss(self, total_name, hit_miss_name):
+    # type: (str, str) -> None
     self._context.metrics[total_name] += 1
     self._context.metrics[hit_miss_name] += 1
 
@@ -80,6 +89,7 @@
 
   @staticmethod
   def counter_hit_miss(total_name, hit_name, miss_name):
+    # type: (str, str, str) -> Callable[[CallableT], CallableT]
     """Decorator for counting function calls and whether
        the return value equals None (=miss) or not (=hit)."""
     Metrics.ALL_METRICS.update([total_name, hit_name, miss_name])
@@ -100,6 +110,7 @@
 
   @staticmethod
   def counter(metric_name):
+    # type: (str) -> Callable[[CallableT], CallableT]
     """Decorator for counting function calls."""
     Metrics.ALL_METRICS.add(metric_name)
 
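Note: the CallableT TypeVar added above lets counter and counter_hit_miss declare that the decorator hands back a callable of the same kind it received, so decorated methods keep their signatures for the type checker. A rough sketch of the idea with a hypothetical decorator:

    from typing import Callable, TypeVar

    CallableT = TypeVar('CallableT', bound=Callable)

    def counted(metric_name):
      # type: (str) -> Callable[[CallableT], CallableT]
      calls = {metric_name: 0}

      def decorator(fn):
        def wrapper(*args, **kwargs):
          calls[metric_name] += 1
          return fn(*args, **kwargs)
        wrapper.calls = calls
        return wrapper
      return decorator

    @counted('beam:metric:statecache:get')
    def get_value():
      return 42

    get_value()
    get_value()
    assert get_value.calls['beam:metric:statecache:get'] == 2
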
diff --git a/sdks/python/apache_beam/runners/worker/statesampler.py b/sdks/python/apache_beam/runners/worker/statesampler.py
index a9de8b1..36a6568 100644
--- a/sdks/python/apache_beam/runners/worker/statesampler.py
+++ b/sdks/python/apache_beam/runners/worker/statesampler.py
@@ -23,9 +23,9 @@
 
 import contextlib
 import threading
-from collections import namedtuple
 from typing import TYPE_CHECKING
 from typing import Dict
+from typing import NamedTuple
 from typing import Optional
 from typing import Union
 
@@ -82,12 +82,12 @@
   return get_current_tracker()
 
 
-StateSamplerInfo = namedtuple(
+StateSamplerInfo = NamedTuple(
     'StateSamplerInfo',
-    ['state_name',
-     'transition_count',
-     'time_since_transition',
-     'tracked_thread'])
+    [('state_name', CounterName),
+     ('transition_count', int),
+     ('time_since_transition', int),
+     ('tracked_thread', Optional[threading.Thread])])
 
 
 # Default period for sampling current state of pipeline execution.
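Note: the StateSamplerInfo change above swaps collections.namedtuple for the functional form of typing.NamedTuple so each field carries a type. A small standalone illustration (the field names here are made up):

    import threading
    from typing import NamedTuple, Optional

    SamplerInfo = NamedTuple(
        'SamplerInfo',
        [('state_name', str),
         ('transition_count', int),
         ('tracked_thread', Optional[threading.Thread])])

    info = SamplerInfo('process', 3, None)
    # The result behaves like a plain namedtuple, but type checkers now know
    # that transition_count is an int and tracked_thread may be None.
    assert info.transition_count == 3
    assert info._fields == ('state_name', 'transition_count', 'tracked_thread')
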
diff --git a/sdks/python/apache_beam/runners/worker/statesampler_slow.py b/sdks/python/apache_beam/runners/worker/statesampler_slow.py
index fb3cbf6..0034046 100644
--- a/sdks/python/apache_beam/runners/worker/statesampler_slow.py
+++ b/sdks/python/apache_beam/runners/worker/statesampler_slow.py
@@ -80,8 +80,7 @@
 
   def reset(self):
     # type: () -> None
-    for state in self._states_by_name.values():
-      state.nsecs = 0
+    pass
 
 
 class ScopedState(object):
diff --git a/sdks/python/apache_beam/testing/test_pipeline.py b/sdks/python/apache_beam/testing/test_pipeline.py
index a34af95..26819e5 100644
--- a/sdks/python/apache_beam/testing/test_pipeline.py
+++ b/sdks/python/apache_beam/testing/test_pipeline.py
@@ -23,8 +23,7 @@
 
 import argparse
 import shlex
-
-from nose.plugins.skip import SkipTest
+from unittest import SkipTest
 
 from apache_beam.internal import pickler
 from apache_beam.options.pipeline_options import PipelineOptions
diff --git a/sdks/python/apache_beam/testing/util_test.py b/sdks/python/apache_beam/testing/util_test.py
index 6716b05..c9385ee 100644
--- a/sdks/python/apache_beam/testing/util_test.py
+++ b/sdks/python/apache_beam/testing/util_test.py
@@ -140,7 +140,7 @@
     with TestPipeline() as p:
       assert_that(p | Create([1, 2, 3]), is_not_empty())
 
-  def test_assert_that_fails_on_empty_expected(self):
+  def test_assert_that_fails_on_is_not_empty_expected(self):
     with self.assertRaises(BeamAssertException):
       with TestPipeline() as p:
         assert_that(p | Create([]), is_not_empty())
diff --git a/sdks/python/apache_beam/transforms/combiners.py b/sdks/python/apache_beam/transforms/combiners.py
index 0aedaf7..80bbdfc 100644
--- a/sdks/python/apache_beam/transforms/combiners.py
+++ b/sdks/python/apache_beam/transforms/combiners.py
@@ -405,33 +405,44 @@
 
   def process(self, key_and_bundles):
     _, bundles = key_and_bundles
-    heap = []
-    for bundle in bundles:
-      if not heap:
-        if self._less_than or self._key:
-          heap = [
+
+    def push(hp, e):
+      if len(hp) < self._n:
+        heapq.heappush(hp, e)
+        return False
+      elif e < hp[0]:
+        # Because _TopPerBundle returns sorted lists, all other elements
+        # will also be smaller.
+        return True
+      else:
+        heapq.heappushpop(hp, e)
+        return False
+
+    if self._less_than or self._key:
+      heapc = []  # type: List[cy_combiners.ComparableValue]
+      for bundle in bundles:
+        if not heapc:
+          heapc = [
               cy_combiners.ComparableValue(element, self._less_than, self._key)
               for element in bundle]
-        else:
-          heap = bundle
-        continue
-      for element in reversed(bundle):
-        if self._less_than or self._key:
-          element = cy_combiners.ComparableValue(
-              element, self._less_than, self._key)
-        if len(heap) < self._n:
-          heapq.heappush(heap, element)
-        elif element < heap[0]:
-          # Because _TopPerBundle returns sorted lists, all other elements
-          # will also be smaller.
-          break
-        else:
-          heapq.heappushpop(heap, element)
+          continue
+        for element in reversed(bundle):
+          if push(heapc, cy_combiners.ComparableValue(
+              element, self._less_than, self._key)):
+            break
+      heapc.sort()
+      yield [wrapper.value for wrapper in reversed(heapc)]
 
-    heap.sort()
-    if self._less_than or self._key:
-      yield [wrapper.value for wrapper in reversed(heap)]
     else:
+      heap = []  # type: List[T]
+      for bundle in bundles:
+        if not heap:
+          heap = bundle
+          continue
+        for element in reversed(bundle):
+          if push(heap, element):
+            break
+      heap.sort()
       yield heap[::-1]
 
 
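Note: the refactor above pulls the per-element heap logic into a push helper: heappush while the heap holds fewer than n elements, heappushpop once it is full, so only the n largest survive. The early True return relies on each incoming bundle already being sorted. A standalone sketch of the bounded-heap idea, without that sorted-bundle shortcut:

    import heapq

    def top_n(iterable, n):
      # Illustrative helper: return the n largest elements, largest first.
      heap = []
      for element in iterable:
        if len(heap) < n:
          heapq.heappush(heap, element)
        else:
          # Pushes element, then pops the smallest, so the heap keeps the
          # n largest values seen so far.
          heapq.heappushpop(heap, element)
      heap.sort()
      return heap[::-1]

    assert top_n([5, 1, 9, 3, 7], 3) == [9, 7, 5]
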
diff --git a/sdks/python/apache_beam/transforms/core.py b/sdks/python/apache_beam/transforms/core.py
index 5410b5b..1d13f26 100644
--- a/sdks/python/apache_beam/transforms/core.py
+++ b/sdks/python/apache_beam/transforms/core.py
@@ -25,7 +25,6 @@
 import inspect
 import logging
 import random
-import re
 import types
 import typing
 from builtins import map
@@ -50,6 +49,7 @@
 from apache_beam.transforms.display import HasDisplayData
 from apache_beam.transforms.ptransform import PTransform
 from apache_beam.transforms.ptransform import PTransformWithSideInputs
+from apache_beam.transforms.sideinputs import get_sideinput_index
 from apache_beam.transforms.userstate import StateSpec
 from apache_beam.transforms.userstate import TimerSpec
 from apache_beam.transforms.window import GlobalWindows
@@ -421,9 +421,10 @@
   TODO(BEAM-8537): Create WatermarkEstimatorProvider to support different types.
   """
   def __init__(self):
-    self._watermark = None
+    self._watermark = None  # type: typing.Optional[timestamp.Timestamp]
 
   def set_watermark(self, watermark):
+    # type: (timestamp.Timestamp) -> None
     """Update tracking output_watermark with latest output_watermark.
     This function is called inside an SDF.Process() to track the watermark of
     output element.
@@ -439,6 +440,7 @@
       self._watermark = min(self._watermark, watermark)
 
   def current_watermark(self):
+    # type: () -> typing.Optional[timestamp.Timestamp]
     """Get current output_watermark. This function is called by system."""
     return self._watermark
 
@@ -1338,7 +1340,7 @@
     # This is an ordered list stored as a dict (see the comments in
     # to_runner_api_parameter above).
     indexed_side_inputs = [
-        (int(re.match('side([0-9]+)(-.*)?$', tag).group(1)),
+        (get_sideinput_index(tag),
          pvalue.AsSideInput.from_runner_api(si, context))
         for tag, si in pardo_payload.side_inputs.items()]
     result.side_inputs = [si for _, si in sorted(indexed_side_inputs)]
diff --git a/sdks/python/apache_beam/transforms/display.py b/sdks/python/apache_beam/transforms/display.py
index 0f9fa53..21e9d32 100644
--- a/sdks/python/apache_beam/transforms/display.py
+++ b/sdks/python/apache_beam/transforms/display.py
@@ -132,15 +132,18 @@
 
   @classmethod
   def create_from_options(cls, pipeline_options):
-    """ Creates :class:`DisplayData` from a
+    """ Creates :class:`~apache_beam.transforms.display.DisplayData` from a
     :class:`~apache_beam.options.pipeline_options.PipelineOptions` instance.
 
-    When creating :class:`DisplayData`, this method will convert the value of
-    any item of a non-supported type to its string representation.
+    When creating :class:`~apache_beam.transforms.display.DisplayData`, this
+    method will convert the value of any item of a non-supported type to its
+    string representation.
     The normal :meth:`.create_from()` method rejects those items.
 
     Returns:
-      DisplayData: A :class:`DisplayData` instance with populated items.
+      ~apache_beam.transforms.display.DisplayData:
+        A :class:`~apache_beam.transforms.display.DisplayData` instance with
+        populated items.
 
     Raises:
       ~exceptions.ValueError: If the **has_display_data** argument is
@@ -160,10 +163,13 @@
 
   @classmethod
   def create_from(cls, has_display_data):
-    """ Creates :class:`DisplayData` from a :class:`HasDisplayData` instance.
+    """ Creates :class:`~apache_beam.transforms.display.DisplayData` from a
+    :class:`HasDisplayData` instance.
 
     Returns:
-      DisplayData: A :class:`DisplayData` instance with populated items.
+      ~apache_beam.transforms.display.DisplayData:
+        A :class:`~apache_beam.transforms.display.DisplayData` instance with
+        populated items.
 
     Raises:
       ~exceptions.ValueError: If the **has_display_data** argument is
diff --git a/sdks/python/apache_beam/transforms/environments.py b/sdks/python/apache_beam/transforms/environments.py
index f3b3c22..9a0c7e2 100644
--- a/sdks/python/apache_beam/transforms/environments.py
+++ b/sdks/python/apache_beam/transforms/environments.py
@@ -26,6 +26,16 @@
 import json
 import logging
 import sys
+from typing import TYPE_CHECKING
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Optional
+from typing import Tuple
+from typing import Type
+from typing import TypeVar
+from typing import Union
+from typing import overload
 
 from google.protobuf import message
 
@@ -35,11 +45,20 @@
 from apache_beam.portability.api import endpoints_pb2
 from apache_beam.utils import proto_utils
 
+if TYPE_CHECKING:
+  from apache_beam.options.pipeline_options import PipelineOptions
+  from apache_beam.runners.pipeline_context import PipelineContext
+
 __all__ = ['Environment',
            'DockerEnvironment', 'ProcessEnvironment', 'ExternalEnvironment',
            'EmbeddedPythonEnvironment', 'EmbeddedPythonGrpcEnvironment',
            'SubprocessSDKEnvironment', 'RunnerAPIEnvironmentHolder']
 
+T = TypeVar('T')
+EnvironmentT = TypeVar('EnvironmentT', bound='Environment')
+ConstructorFn = Callable[
+    [Optional[Any], 'PipelineContext'],
+    Any]
 
 def looks_like_json(s):
   import re
@@ -55,12 +74,52 @@
   For internal use only. No backwards compatibility guarantees.
   """
 
-  _known_urns = {}
-  _urn_to_env_cls = {}
+  _known_urns = {}  # type: Dict[str, Tuple[Optional[type], ConstructorFn]]
+  _urn_to_env_cls = {}  # type: Dict[str, type]
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, Optional[Union[message.Message, bytes, str]]]
     raise NotImplementedError
 
+
+  @classmethod
+  @overload
+  def register_urn(cls,
+                   urn,  # type: str
+                   parameter_type,  # type: Type[T]
+                  ):
+    # type: (...) -> Callable[[Union[type, Callable[[T, PipelineContext], Any]]], Callable[[T, PipelineContext], Any]]
+    pass
+
+  @classmethod
+  @overload
+  def register_urn(cls,
+                   urn,  # type: str
+                   parameter_type,  # type: None
+                  ):
+    # type: (...) -> Callable[[Union[type, Callable[[bytes, PipelineContext], Any]]], Callable[[bytes, PipelineContext], Any]]
+    pass
+
+  @classmethod
+  @overload
+  def register_urn(cls,
+                   urn,  # type: str
+                   parameter_type,  # type: Type[T]
+                   constructor  # type: Callable[[T, PipelineContext], Any]
+                  ):
+    # type: (...) -> None
+    pass
+
+  @classmethod
+  @overload
+  def register_urn(cls,
+                   urn,  # type: str
+                   parameter_type,  # type: None
+                   constructor  # type: Callable[[bytes, PipelineContext], Any]
+                  ):
+    # type: (...) -> None
+    pass
+
   @classmethod
   def register_urn(cls, urn, parameter_type, constructor=None):
 
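Note: the @overload stubs above describe the different call shapes of register_urn (with and without an explicit constructor) for the type checker; only the final, undecorated definition executes at runtime. A minimal sketch of the pattern with a hypothetical function, using the same type-comment style:

    from typing import overload

    @overload
    def parse(value):
      # type: (int) -> str
      pass

    @overload
    def parse(value):
      # type: (str) -> int
      pass

    def parse(value):
      # The single runtime implementation; the overloads above only guide
      # static type checking and are never called.
      if isinstance(value, int):
        return str(value)
      return int(value)

    assert parse(7) == '7'
    assert parse('7') == 7
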
@@ -88,6 +147,7 @@
     return cls._urn_to_env_cls[urn]
 
   def to_runner_api(self, context):
+    # type: (PipelineContext) -> beam_runner_api_pb2.Environment
     urn, typed_param = self.to_runner_api_parameter(context)
     return beam_runner_api_pb2.Environment(
         urn=urn,
@@ -99,7 +159,11 @@
     )
 
   @classmethod
-  def from_runner_api(cls, proto, context):
+  def from_runner_api(cls,
+                      proto,  # type: Optional[beam_runner_api_pb2.FunctionSpec]
+                      context  # type: PipelineContext
+                     ):
+    # type: (...) -> Optional[Environment]
     if proto is None or not proto.urn:
       return None
     parameter_type, constructor = cls._known_urns[proto.urn]
@@ -115,6 +179,7 @@
 
   @classmethod
   def from_options(cls, options):
+    # type: (Type[EnvironmentT], PipelineOptions) -> EnvironmentT
     """Creates an Environment object from PipelineOptions.
 
     Args:
@@ -148,6 +213,7 @@
     return 'DockerEnvironment(container_image=%s)' % self.container_image
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, beam_runner_api_pb2.DockerPayload]
     return (common_urns.environments.DOCKER.urn,
             beam_runner_api_pb2.DockerPayload(
                 container_image=self.container_image))
@@ -158,6 +224,7 @@
 
   @classmethod
   def from_options(cls, options):
+    # type: (PipelineOptions) -> DockerEnvironment
     return cls(container_image=options.environment_config)
 
   @staticmethod
@@ -212,6 +279,7 @@
     return 'ProcessEnvironment(%s)' % ','.join(repr_parts)
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, beam_runner_api_pb2.ProcessPayload]
     return (common_urns.environments.PROCESS.urn,
             beam_runner_api_pb2.ProcessPayload(
                 os=self.os,
@@ -257,6 +325,7 @@
     return 'ExternalEnvironment(url=%s,params=%s)' % (self.url, self.params)
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, beam_runner_api_pb2.ExternalPayload]
     return (common_urns.environments.EXTERNAL.urn,
             beam_runner_api_pb2.ExternalPayload(
                 endpoint=endpoints_pb2.ApiServiceDescriptor(url=self.url),
@@ -297,6 +366,7 @@
     return hash(self.__class__)
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, None]
     return python_urns.EMBEDDED_PYTHON, None
 
   @staticmethod
@@ -338,6 +408,7 @@
     return 'EmbeddedPythonGrpcEnvironment(%s)' % ','.join(repr_parts)
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, bytes]
     params = {}
     if self.state_cache_size is not None:
       params['state_cache_size'] = self.state_cache_size
@@ -404,6 +475,7 @@
     return 'SubprocessSDKEnvironment(command_string=%s)' % self.command_string
 
   def to_runner_api_parameter(self, context):
+    # type: (PipelineContext) -> Tuple[str, bytes]
     return python_urns.SUBPROCESS_SDK, self.command_string.encode('utf-8')
 
   @staticmethod
diff --git a/sdks/python/apache_beam/transforms/external_test.py b/sdks/python/apache_beam/transforms/external_test.py
index 2674b01..a2b7800 100644
--- a/sdks/python/apache_beam/transforms/external_test.py
+++ b/sdks/python/apache_beam/transforms/external_test.py
@@ -394,12 +394,8 @@
         p
         | beam.Create(list('aaabccxyyzzz'))
         | beam.Map(unicode)
-        # TODO(BEAM-6587): Use strings directly rather than ints.
-        | beam.Map(lambda x: int(ord(x)))
         | beam.ExternalTransform(TEST_FILTER_URN, b'middle', expansion_service)
         | beam.ExternalTransform(TEST_COUNT_URN, None, expansion_service)
-        # # TODO(BEAM-6587): Remove when above is removed.
-        | beam.Map(lambda kv: (chr(kv[0]), kv[1]))
         | beam.Map(lambda kv: '%s: %s' % kv))
 
     assert_that(res, equal_to(['a: 3', 'b: 1', 'c: 2']))
diff --git a/sdks/python/apache_beam/transforms/sideinputs.py b/sdks/python/apache_beam/transforms/sideinputs.py
index 8e57ede..79b8290 100644
--- a/sdks/python/apache_beam/transforms/sideinputs.py
+++ b/sdks/python/apache_beam/transforms/sideinputs.py
@@ -28,6 +28,7 @@
 
 from __future__ import absolute_import
 
+import re
 from builtins import object
 from typing import TYPE_CHECKING
 from typing import Any
@@ -60,6 +61,16 @@
   return map_via_end
 
 
+def get_sideinput_index(tag):
+  # type: (str) -> int
+  match = re.match('side([0-9]+)(-.*)?$', tag,
+                   re.DOTALL)
+  if match:
+    return int(match.group(1))
+  else:
+    raise RuntimeError("Invalid tag %r" % tag)
+
+
 class SideInputMap(object):
   """Represents a mapping of windows to side input values."""
 
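Note: the new get_sideinput_index helper centralizes the side-input tag parsing that core.py and bundle_processor.py previously did with an inline regex. Its behavior, as a quick illustration (the tags shown are made up but follow the side<N> naming convention):

    import re

    def get_sideinput_index(tag):
      # type: (str) -> int
      match = re.match('side([0-9]+)(-.*)?$', tag, re.DOTALL)
      if match:
        return int(match.group(1))
      raise RuntimeError("Invalid tag %r" % tag)

    assert get_sideinput_index('side0') == 0
    assert get_sideinput_index('side12-AsList') == 12
    try:
      get_sideinput_index('main_input')
    except RuntimeError:
      pass  # Tags that do not follow the side<N> convention are rejected.
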
diff --git a/sdks/python/apache_beam/transforms/userstate.py b/sdks/python/apache_beam/transforms/userstate.py
index dd2b296..c9b2038 100644
--- a/sdks/python/apache_beam/transforms/userstate.py
+++ b/sdks/python/apache_beam/transforms/userstate.py
@@ -50,8 +50,14 @@
 class StateSpec(object):
   """Specification for a user DoFn state cell."""
 
-  def __init__(self):
-    raise NotImplementedError
+  def __init__(self, name, coder):
+    # type: (str, Coder) -> None
+    if not isinstance(name, str):
+      raise TypeError("name is not a string")
+    if not isinstance(coder, Coder):
+      raise TypeError("coder is not of type Coder")
+    self.name = name
+    self.coder = coder
 
   def __repr__(self):
     return '%s(%s)' % (self.__class__.__name__, self.name)
@@ -63,13 +69,6 @@
 class BagStateSpec(StateSpec):
   """Specification for a user DoFn bag state cell."""
 
-  def __init__(self, name, coder):
-    # type: (str, Coder) -> None
-    assert isinstance(name, str)
-    assert isinstance(coder, Coder)
-    self.name = name
-    self.coder = coder
-
   def to_runner_api(self, context):
     # type: (PipelineContext) -> beam_runner_api_pb2.StateSpec
     return beam_runner_api_pb2.StateSpec(
@@ -80,15 +79,6 @@
 class SetStateSpec(StateSpec):
   """Specification for a user DoFn Set State cell"""
 
-  def __init__(self, name, coder):
-    # type: (str, Coder) -> None
-    if not isinstance(name, str):
-      raise TypeError("SetState name is not a string")
-    if not isinstance(coder, Coder):
-      raise TypeError("SetState coder is not of type Coder")
-    self.name = name
-    self.coder = coder
-
   def to_runner_api(self, context):
     return beam_runner_api_pb2.StateSpec(
         set_spec=beam_runner_api_pb2.SetStateSpec(
@@ -128,14 +118,11 @@
       else:
         coder, combine_fn = None, coder
     self.combine_fn = CombineFn.maybe_from_callable(combine_fn)
+    # The coder here should be for the accumulator type of the given CombineFn.
     if coder is None:
       coder = self.combine_fn.get_accumulator_coder()
 
-    assert isinstance(name, str)
-    assert isinstance(coder, Coder)
-    self.name = name
-    # The coder here should be for the accumulator type of the given CombineFn.
-    self.coder = coder
+    super(CombiningValueStateSpec, self).__init__(name, coder)
 
   def to_runner_api(self, context):
     # type: (PipelineContext) -> beam_runner_api_pb2.StateSpec
diff --git a/sdks/python/apache_beam/transforms/userstate_test.py b/sdks/python/apache_beam/transforms/userstate_test.py
index 8c1ace0..9c79551 100644
--- a/sdks/python/apache_beam/transforms/userstate_test.py
+++ b/sdks/python/apache_beam/transforms/userstate_test.py
@@ -113,10 +113,10 @@
 
   def test_spec_construction(self):
     BagStateSpec('statename', VarIntCoder())
-    with self.assertRaises(AssertionError):
+    with self.assertRaises(TypeError):
       BagStateSpec(123, VarIntCoder())
     CombiningValueStateSpec('statename', VarIntCoder(), TopCombineFn(10))
-    with self.assertRaises(AssertionError):
+    with self.assertRaises(TypeError):
       CombiningValueStateSpec(123, VarIntCoder(), TopCombineFn(10))
     with self.assertRaises(TypeError):
       CombiningValueStateSpec('statename', VarIntCoder(), object())
diff --git a/sdks/python/apache_beam/typehints/decorators_test_py3.py b/sdks/python/apache_beam/typehints/decorators_test_py3.py
index 647a4fa..a9e2aad 100644
--- a/sdks/python/apache_beam/typehints/decorators_test_py3.py
+++ b/sdks/python/apache_beam/typehints/decorators_test_py3.py
@@ -43,7 +43,7 @@
 class IOTypeHintsTest(unittest.TestCase):
 
   def test_from_callable(self):
-    def fn(a: int, b: str = None, *args: Tuple[T], foo: List[int],
+    def fn(a: int, b: str = '', *args: Tuple[T], foo: List[int],
            **kwargs: Dict[str, str]) -> Tuple[Any, ...]:
       return a, b, args, foo, kwargs
     th = decorators.IOTypeHints.from_callable(fn)
@@ -96,7 +96,7 @@
     self.assertEqual(th.output_types, ((Tuple[Any, ...],), {}))
 
   def test_getcallargs_forhints(self):
-    def fn(a: int, b: str = None, *args: Tuple[T], foo: List[int],
+    def fn(a: int, b: str = '', *args: Tuple[T], foo: List[int],
            **kwargs: Dict[str, str]) -> Tuple[Any, ...]:
       return a, b, args, foo, kwargs
     callargs = decorators.getcallargs_forhints(fn, float, foo=List[str])
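Note: the signature tweaks above (b: str = None becoming b: str = '') avoid an implicit Optional[str]; a None default combined with a non-Optional annotation is something stricter type checking rejects. A short comparison of the two spellings with a hypothetical function:

    from typing import Optional

    def greet_explicit(name: Optional[str] = None) -> str:
      # Explicit Optional: None is a valid default and a valid argument.
      return 'hello %s' % (name or 'world')

    def greet_plain(name: str = '') -> str:
      # Non-Optional annotation: the default must itself be a str.
      return 'hello %s' % (name or 'world')

    assert greet_explicit() == 'hello world'
    assert greet_plain('beam') == 'hello beam'
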
diff --git a/sdks/python/apache_beam/typehints/opcodes.py b/sdks/python/apache_beam/typehints/opcodes.py
index 8061470..8034d0d 100644
--- a/sdks/python/apache_beam/typehints/opcodes.py
+++ b/sdks/python/apache_beam/typehints/opcodes.py
@@ -18,7 +18,8 @@
 """Defines the actions various bytecodes have on the frame.
 
 Each function here corresponds to a bytecode documented in
-https://docs.python.org/2/library/dis.html.  The first argument is a (mutable)
+https://docs.python.org/2/library/dis.html or
+https://docs.python.org/3/library/dis.html. The first argument is a (mutable)
 FrameState object, the second the integer opcode argument.
 
 Bytecodes with more complicated behavior (e.g. modifying the program counter)
@@ -38,17 +39,17 @@
 
 from past.builtins import unicode
 
-from . import typehints
-from .trivial_inference import BoundMethod
-from .trivial_inference import Const
-from .trivial_inference import element_type
-from .trivial_inference import union
-from .typehints import Any
-from .typehints import Dict
-from .typehints import Iterable
-from .typehints import List
-from .typehints import Tuple
-from .typehints import Union
+from apache_beam.typehints import typehints
+from apache_beam.typehints.trivial_inference import BoundMethod
+from apache_beam.typehints.trivial_inference import Const
+from apache_beam.typehints.trivial_inference import element_type
+from apache_beam.typehints.trivial_inference import union
+from apache_beam.typehints.typehints import Any
+from apache_beam.typehints.typehints import Dict
+from apache_beam.typehints.typehints import Iterable
+from apache_beam.typehints.typehints import List
+from apache_beam.typehints.typehints import Tuple
+from apache_beam.typehints.typehints import Union
 
 
 def pop_one(state, unused_arg):
@@ -154,7 +155,7 @@
 
 def binary_subscr(state, unused_arg):
   index = state.stack.pop()
-  base = state.stack.pop()
+  base = Const.unwrap(state.stack.pop())
   if base in (str, unicode):
     out = base
   elif (isinstance(index, Const) and isinstance(index.value, int)
@@ -177,33 +178,16 @@
 binary_xor = inplace_xor = symmetric_binary_op
 binary_or = inpalce_or = symmetric_binary_op
 
-# As far as types are concerned.
-slice_0 = nop
-slice_1 = slice_2 = pop_top
-slice_3 = pop_two
-store_slice_0 = store_slice_1 = store_slice_2 = store_slice_3 = nop
-delete_slice_0 = delete_slice_1 = delete_slice_2 = delete_slice_3 = nop
-
 
 def store_subscr(unused_state, unused_args):
   # TODO(robertwb): Update element/value type of iterable/dict.
   pass
 
 
-binary_divide = binary_floor_divide = binary_modulo = symmetric_binary_op
-binary_divide = binary_floor_divide = binary_modulo = symmetric_binary_op
-binary_divide = binary_floor_divide = binary_modulo = symmetric_binary_op
-
-# print_expr
 print_item = pop_top
-# print_item_to
 print_newline = nop
 
-# print_newline_to
 
-
-# break_loop
-# continue_loop
 def list_append(state, arg):
   new_element_type = Const.unwrap(state.stack.pop())
   state.stack[-arg] = List[Union[element_type(state.stack[-arg]),
@@ -219,21 +203,10 @@
 
 
 load_locals = push_value(Dict[str, Any])
-
-# return_value
-# yield_value
-# import_star
 exec_stmt = pop_three
-# pop_block
-# end_finally
 build_class = pop_three
 
-# setup_with
-# with_cleanup
 
-
-# store_name
-# delete_name
 def unpack_sequence(state, arg):
   t = state.stack.pop()
   if isinstance(t, Const):
@@ -333,18 +306,11 @@
 
 import_from = push_value(Any)
 
-# jump
-
-# for_iter
-
 
 def load_global(state, arg):
   state.stack.append(state.get_global(arg))
 
 
-# setup_loop
-# setup_except
-# setup_finally
 store_map = pop_two
 
 
@@ -366,7 +332,6 @@
 
 def load_deref(state, arg):
   state.stack.append(state.closure_type(arg))
-# raise_varargs
 
 
 def make_function(state, arg):
diff --git a/sdks/python/apache_beam/typehints/trivial_inference_test.py b/sdks/python/apache_beam/typehints/trivial_inference_test.py
index f163520..cd2ce29 100644
--- a/sdks/python/apache_beam/typehints/trivial_inference_test.py
+++ b/sdks/python/apache_beam/typehints/trivial_inference_test.py
@@ -72,6 +72,11 @@
     self.assertReturnType(str, lambda v: v[::-1], [str])
     self.assertReturnType(typehints.Any, lambda v: v[::-1], [typehints.Any])
     self.assertReturnType(typehints.Any, lambda v: v[::-1], [object])
+    if sys.version_info >= (3,):
+      # Test binary_subscr on a slice of a Const. On Py2.7 this will use the
+      # unsupported opcode SLICE+0.
+      test_list = ['a', 'b']
+      self.assertReturnType(typehints.List[str], lambda: test_list[:], [])
 
   def testUnpack(self):
     def reverse(a_b):
diff --git a/sdks/python/apache_beam/utils/profiler.py b/sdks/python/apache_beam/utils/profiler.py
index 1b42104..ab1da2f 100644
--- a/sdks/python/apache_beam/utils/profiler.py
+++ b/sdks/python/apache_beam/utils/profiler.py
@@ -107,6 +107,7 @@
         if random.random() < options.profile_sample_rate:
           return Profile(profile_id, options.profile_location, **kwargs)
       return create_profiler
+    return None
 
 
 class MemoryReporter(object):
diff --git a/sdks/python/apache_beam/utils/thread_pool_executor.py b/sdks/python/apache_beam/utils/thread_pool_executor.py
index 903d9f7..a71a113 100644
--- a/sdks/python/apache_beam/utils/thread_pool_executor.py
+++ b/sdks/python/apache_beam/utils/thread_pool_executor.py
@@ -27,7 +27,7 @@
 try:  # Python3
   import queue
 except Exception:  # Python2
-  import Queue as queue
+  import Queue as queue  # type: ignore[no-redef]
 
 
 class _WorkItem(object):
diff --git a/sdks/python/apache_beam/version.py b/sdks/python/apache_beam/version.py
index ba28fb7..be5d97f 100644
--- a/sdks/python/apache_beam/version.py
+++ b/sdks/python/apache_beam/version.py
@@ -18,4 +18,4 @@
 """Apache Beam SDK version information and utilities."""
 
 
-__version__ = '2.19.0.dev'
+__version__ = '2.20.0.dev'
diff --git a/sdks/python/build.gradle b/sdks/python/build.gradle
index d3f65a9..ca9cae8 100644
--- a/sdks/python/build.gradle
+++ b/sdks/python/build.gradle
@@ -48,7 +48,7 @@
       args '-c', ". ${envdir}/bin/activate && python setup.py -q sdist --formats zip,gztar --dist-dir ${buildDir}"
     }
 
-    def collection = fileTree(buildDir){ include "**/*${project['python_sdk_version']}*.tar.gz" exclude 'srcs/**'}
+    def collection = fileTree(buildDir){ include "**/*${project.sdk_version}*.tar.gz" exclude 'srcs/**'}
 
     // we need a fixed name for the artifact
     copy { from collection.singleFile; into buildDir; rename { tarball } }
diff --git a/sdks/python/container/py2/build.gradle b/sdks/python/container/py2/build.gradle
index 64f39f0..4e29de0 100644
--- a/sdks/python/container/py2/build.gradle
+++ b/sdks/python/container/py2/build.gradle
@@ -57,7 +57,7 @@
           root: project.rootProject.hasProperty(["docker-repository-root"]) ?
                   project.rootProject["docker-repository-root"] : "apachebeam",
           tag: project.rootProject.hasProperty(["docker-tag"]) ?
-                  project.rootProject["docker-tag"] : project['python_sdk_version'])
+                  project.rootProject["docker-tag"] : project.sdk_version)
   files "../Dockerfile", "./build"
   buildArgs(['py_version': "2.7"])
 }
diff --git a/sdks/python/container/py35/build.gradle b/sdks/python/container/py35/build.gradle
index 024847b..f8cd8c2 100644
--- a/sdks/python/container/py35/build.gradle
+++ b/sdks/python/container/py35/build.gradle
@@ -57,7 +57,7 @@
           root: project.rootProject.hasProperty(["docker-repository-root"]) ?
                   project.rootProject["docker-repository-root"] : "apachebeam",
           tag: project.rootProject.hasProperty(["docker-tag"]) ?
-                  project.rootProject["docker-tag"] : project['python_sdk_version'])
+                  project.rootProject["docker-tag"] : project.sdk_version)
   files "../Dockerfile", "./build"
   buildArgs(['py_version': "3.5"])
 }
diff --git a/sdks/python/container/py36/build.gradle b/sdks/python/container/py36/build.gradle
index f81f6ec..b7ced3d 100644
--- a/sdks/python/container/py36/build.gradle
+++ b/sdks/python/container/py36/build.gradle
@@ -57,7 +57,7 @@
           root: project.rootProject.hasProperty(["docker-repository-root"]) ?
                   project.rootProject["docker-repository-root"] : "apachebeam",
           tag: project.rootProject.hasProperty(["docker-tag"]) ?
-                  project.rootProject["docker-tag"] : project['python_sdk_version'])
+                  project.rootProject["docker-tag"] : project.sdk_version)
   files "../Dockerfile", "./build"
   buildArgs(['py_version': "3.6"])
 }
diff --git a/sdks/python/container/py37/build.gradle b/sdks/python/container/py37/build.gradle
index a7f10c4..d9e9b5d 100644
--- a/sdks/python/container/py37/build.gradle
+++ b/sdks/python/container/py37/build.gradle
@@ -57,7 +57,7 @@
           root: project.rootProject.hasProperty(["docker-repository-root"]) ?
                   project.rootProject["docker-repository-root"] : "apachebeam",
           tag: project.rootProject.hasProperty(["docker-tag"]) ?
-                  project.rootProject["docker-tag"] : project['python_sdk_version'])
+                  project.rootProject["docker-tag"] : project.sdk_version)
   files "../Dockerfile", "./build"
   buildArgs(['py_version': "3.7"])
 }
diff --git a/sdks/python/gen_protos.py b/sdks/python/gen_protos.py
index 5105ad5..3a7b23b 100644
--- a/sdks/python/gen_protos.py
+++ b/sdks/python/gen_protos.py
@@ -19,11 +19,14 @@
 from __future__ import absolute_import
 from __future__ import print_function
 
+import contextlib
 import glob
+import inspect
 import logging
 import multiprocessing
 import os
 import platform
+import re
 import shutil
 import subprocess
 import sys
@@ -47,6 +50,172 @@
 ]
 
 
+def generate_urn_files(log, out_dir):
+  """
+  Create python files with statically defined URN constants.
+
+  Creates a <proto>_pb2_urn.py file for each <proto>_pb2.py file that contains
+  an enum type.
+
+  This works by importing each api.<proto>_pb2 module created by `protoc`,
+  inspecting the module's contents, and generating a new side-car urn module.
+  This is executed at build time rather than dynamically on import to ensure
+  that it is compatible with static type checkers like mypy.
+  """
+  import google.protobuf.message as message
+  import google.protobuf.pyext._message as pyext_message
+
+  class Context(object):
+    INDENT = '  '
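+    # Splits a CamelCase type name into words (e.g. 'FooBar' -> ['Foo', 'Bar'])
+    # when building the EMPTY_* constant names below.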
+    CAP_SPLIT = re.compile('([A-Z][^A-Z]*|^[a-z]+)')
+
+    def __init__(self, indent=0):
+      self.lines = []
+      self.imports = set()
+      self.empty_types = set()
+      self._indent = indent
+
+    @contextlib.contextmanager
+    def indent(self):
+      self._indent += 1
+      yield
+      self._indent -= 1
+
+    def prepend(self, s):
+      if s:
+        self.lines.insert(0, (self.INDENT * self._indent) + s + '\n')
+      else:
+        self.lines.insert(0, '\n')
+
+    def line(self, s):
+      if s:
+        self.lines.append((self.INDENT * self._indent) + s + '\n')
+      else:
+        self.lines.append('\n')
+
+    def import_type(self, typ):
+      modname = typ.__module__
+      if modname in ('__builtin__', 'builtin'):
+        return typ.__name__
+      else:
+        self.imports.add(modname)
+        return modname + '.' + typ.__name__
+
+    @staticmethod
+    def is_message_type(obj):
+      return isinstance(obj, type) and \
+             issubclass(obj, message.Message)
+
+    @staticmethod
+    def is_enum_type(obj):
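+      # Protobuf enum wrappers live in protobuf internals, so duck-type on the
+      # wrapper class name rather than importing EnumTypeWrapper directly.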
+      return type(obj).__name__ == 'EnumTypeWrapper'
+
+    def python_repr(self, obj):
+      if isinstance(obj, message.Message):
+        return self.message_repr(obj)
+      elif isinstance(obj, (list,
+                            pyext_message.RepeatedCompositeContainer,  # pylint: disable=c-extension-no-member
+                            pyext_message.RepeatedScalarContainer)):  # pylint: disable=c-extension-no-member
+        return '[%s]' % ', '.join(self.python_repr(x) for x in obj)
+      else:
+        return repr(obj)
+
+    def empty_type(self, typ):
+      name = ('EMPTY_' +
+              '_'.join(x.upper()
+                       for x in self.CAP_SPLIT.findall(typ.__name__)))
+      self.empty_types.add('%s = %s()' % (name, self.import_type(typ)))
+      return name
+
+    def message_repr(self, msg):
+      parts = []
+      for field, value in msg.ListFields():
+        parts.append('%s=%s' % (field.name, self.python_repr(value)))
+      if parts:
+        return '%s(%s)' % (self.import_type(type(msg)), ', '.join(parts))
+      else:
+        return self.empty_type(type(msg))
+
+    def write_enum(self, enum_name, enum, indent):
+      ctx = Context(indent=indent)
+
+      with ctx.indent():
+        for v in enum.DESCRIPTOR.values:
+          extensions = v.GetOptions().Extensions
+
+          prop = (
+              extensions[beam_runner_api_pb2.beam_urn],
+              extensions[beam_runner_api_pb2.beam_constant],
+              extensions[metrics_pb2.monitoring_info_spec],
+              extensions[metrics_pb2.label_props],
+          )
+          reprs = [self.python_repr(x) for x in prop]
+          if all(x == "''" or x.startswith('EMPTY_') for x in reprs):
+            continue
+          ctx.line('%s = PropertiesFromEnumValue(%s)' %
+                   (v.name, ', '.join(self.python_repr(x) for x in prop)))
+
+      if ctx.lines:
+        ctx.prepend('class %s(object):' % enum_name)
+        ctx.prepend('')
+        ctx.line('')
+      return ctx.lines
+
+    def write_message(self, message_name, message, indent=0):
+      ctx = Context(indent=indent)
+
+      with ctx.indent():
+        for obj_name, obj in inspect.getmembers(message):
+          if self.is_message_type(obj):
+            ctx.lines += self.write_message(obj_name, obj, ctx._indent)
+          elif self.is_enum_type(obj):
+            ctx.lines += self.write_enum(obj_name, obj, ctx._indent)
+
+      if ctx.lines:
+        ctx.prepend('class %s(object):' % message_name)
+        ctx.prepend('')
+      return ctx.lines
+
+  pb2_files = glob.glob(os.path.join(out_dir, '*_pb2.py'))
+  api_path = os.path.dirname(pb2_files[0])
+  sys.path.insert(0, os.path.dirname(api_path))
+
+  def _import(m):
+    # TODO: replace with importlib when we drop support for python2.
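+    # (importlib.import_module('api.%s' % m) should be the drop-in equivalent.)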
+    return __import__('api.%s' % m, fromlist=[None])
+
+  try:
+    beam_runner_api_pb2 = _import('beam_runner_api_pb2')
+    metrics_pb2 = _import('metrics_pb2')
+
+    for pb2_file in pb2_files:
+      modname = os.path.splitext(pb2_file)[0]
+      out_file = modname + '_urns.py'
+      modname = os.path.basename(modname)
+      mod = _import(modname)
+
+      ctx = Context()
+      for obj_name, obj in inspect.getmembers(mod):
+        if ctx.is_message_type(obj):
+          ctx.lines += ctx.write_message(obj_name, obj)
+
+      if ctx.lines:
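+        # prepend() pushes to the top, so the finished module reads: the
+        # PropertiesFromEnumValue import, then the proto imports, then the
+        # EMPTY_* constants, and finally the generated classes.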
+        for line in reversed(sorted(ctx.empty_types)):
+          ctx.prepend(line)
+
+        for modname in reversed(sorted(ctx.imports)):
+          ctx.prepend('from . import %s' % modname)
+
+        ctx.prepend('from ..utils import PropertiesFromEnumValue')
+
+        log.info("Writing urn stubs: %s" % out_file)
+        with open(out_file, 'w') as f:
+          f.writelines(ctx.lines)
+
+  finally:
+    sys.path.pop(0)
+
+
 def generate_proto_files(force=False, log=None):
 
   try:
@@ -114,7 +283,8 @@
       # Note that this requires a separate module from setup.py for Windows:
       # https://docs.python.org/2/library/multiprocessing.html#windows
       p = multiprocessing.Process(
-          target=_install_grpcio_tools_and_generate_proto_files)
+          target=_install_grpcio_tools_and_generate_proto_files,
+          kwargs={'force': force})
       p.start()
       p.join()
       if p.exitcode:
@@ -151,6 +321,11 @@
         raise RuntimeError(
             'Error applying futurize to generated protobuf python files.')
 
+      generate_urn_files(log, out_dir)
+
+  else:
+    log.info('Skipping proto regeneration: all files up to date')
+
 
 # Though wheels are available for grpcio-tools, setup_requires uses
 # easy_install which doesn't understand them.  This means that it is
@@ -158,7 +333,7 @@
 # protoc compiler).  Instead, we attempt to install a wheel in a temporary
 # directory and add it to the path as needed.
 # See https://github.com/pypa/setuptools/issues/377
-def _install_grpcio_tools_and_generate_proto_files():
+def _install_grpcio_tools_and_generate_proto_files(force=False):
   py_sdk_root = os.path.dirname(os.path.abspath(__file__))
   install_path = os.path.join(py_sdk_root, '.eggs', 'grpcio-wheels')
   build_path = install_path + '-build'
@@ -179,10 +354,11 @@
     shutil.rmtree(build_path, ignore_errors=True)
   sys.path.append(install_path)
   try:
-    generate_proto_files()
+    generate_proto_files(force=force)
   finally:
     sys.stderr.flush()
 
 
 if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.INFO)
   generate_proto_files(force=True)
diff --git a/vendor/sdks-java-extensions-protobuf/build.gradle b/vendor/sdks-java-extensions-protobuf/build.gradle
index e3f0c94..a174db6 100644
--- a/vendor/sdks-java-extensions-protobuf/build.gradle
+++ b/vendor/sdks-java-extensions-protobuf/build.gradle
@@ -16,14 +16,16 @@
  * limitations under the License.
  */
 
+import org.apache.beam.gradle.GrpcVendoring_1_26_0
+
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
   automaticModuleName: 'org.apache.beam.vendor.sdks.java.extensions.protobuf',
   exportJavadoc: false,
   shadowClosure: {
     dependencies {
-        include(dependency('com.google.guava:guava:26.0-jre'))
-        include(dependency('com.google.protobuf:protobuf-java:3.7.1'))
+        include(dependency("com.google.guava:guava:${GrpcVendoring_1_26_0.guava_version}"))
+        include(dependency("com.google.protobuf:protobuf-java:${GrpcVendoring_1_26_0.protobuf_version}"))
     }
     // We specifically relocate beam-sdks-extensions-protobuf under a vendored namespace
     // but also vendor guava and protobuf to the same vendored namespace as the model/*
@@ -32,10 +34,10 @@
     relocate "org.apache.beam.sdk.extensions.protobuf", "org.apache.beam.vendor.sdk.v2.sdk.extensions.protobuf"
 
     // guava uses the com.google.common and com.google.thirdparty package namespaces
-    relocate "com.google.common", "org.apache.beam.vendor.grpc.v1p21p0.com.google.common"
-    relocate "com.google.thirdparty", "org.apache.beam.vendor.grpc.v1p21p0.com.google.thirdparty"
+    relocate "com.google.common", "org.apache.beam.vendor.grpc.v1p26p0.com.google.common"
+    relocate "com.google.thirdparty", "org.apache.beam.vendor.grpc.v1p26p0.com.google.thirdparty"
 
-    relocate "com.google.protobuf", "org.apache.beam.vendor.grpc.v1p21p0.com.google.protobuf"
+    relocate "com.google.protobuf", "org.apache.beam.vendor.grpc.v1p26p0.com.google.protobuf"
   }
 )
 
@@ -54,7 +56,7 @@
 }
 
 dependencies {
-    compile 'com.google.guava:guava:26.0-jre'
-    compile 'com.google.protobuf:protobuf-java:3.7.1'
+    compile "com.google.guava:guava:${GrpcVendoring_1_26_0.guava_version}"
+    compile "com.google.protobuf:protobuf-java:${GrpcVendoring_1_26_0.protobuf_version}"
     shadow project(path: ":sdks:java:core", configuration: "shadow")
 }
diff --git a/website/src/_includes/section-menu/get-started.html b/website/src/_includes/section-menu/get-started.html
index 61270a4..96facb3 100644
--- a/website/src/_includes/section-menu/get-started.html
+++ b/website/src/_includes/section-menu/get-started.html
@@ -30,4 +30,4 @@
    </ul>
 </li>
 <li><a href="{{ site.baseurl }}/get-started/downloads">Downloads</a></li>
-
+<li><a href="{{ site.baseurl }}/security">Security</a></li>
diff --git a/website/src/_posts/2020-01-06-beam-2.17.0.md b/website/src/_posts/2020-01-06-beam-2.17.0.md
index c5c4716..5c7caeb 100644
--- a/website/src/_posts/2020-01-06-beam-2.17.0.md
+++ b/website/src/_posts/2020-01-06-beam-2.17.0.md
@@ -25,6 +25,8 @@
 -->
 
 We are happy to present the new 2.17.0 release of Beam. This release includes both improvements and new functionality.
+Users of the MongoDbIO connector are encouraged to upgrade to this release to address a [security vulnerability]({{ site.baseurl }}/security/CVE-2020-1929/).
+
 See the [download page]({{ site.baseurl }}/get-started/downloads/#2170-2020-01-06) for this release.<!--more-->
 For more information on changes in 2.17.0, check out the
 [detailed release notes](https://issues.apache.org/jira/secure/ReleaseNote.jspa?version=12345970&projectId=12319527).
diff --git a/website/src/contribute/index.md b/website/src/contribute/index.md
index 62a1827..984d522 100644
--- a/website/src/contribute/index.md
+++ b/website/src/contribute/index.md
@@ -38,9 +38,9 @@
  - review proposed design ideas on [dev@beam.apache.org]({{ site.baseurl
 }}/community/contact-us/)
  - improve the documentation
- - contribute [bug reports](https://issues.apache.org/jira/projects/BEAM/issues)
- - contribute by testing releases
- - contribute by reviewing [changes](https://github.com/apache/beam/pulls)
+ - file [bug reports](https://issues.apache.org/jira/projects/BEAM/issues)
+ - test releases
+ - review [changes](https://github.com/apache/beam/pulls)
  - write new examples
  - improve your favorite language SDK (Java, Python, Go, etc)
  - improve specific runners (Apache Apex, Apache Flink, Apache Spark, Google
@@ -58,7 +58,7 @@
 
 ## Contributing code
 
-Below is a tutorial for contributing [code to Beam](https://github.com/apache/beam), covering our tools and typical process in
+Below is a tutorial for contributing code to Beam, covering our tools and typical process in
 detail.
 
 ### Prerequisites
diff --git a/website/src/security/CVE-2020-1929.md b/website/src/security/CVE-2020-1929.md
new file mode 100644
index 0000000..27facc4
--- /dev/null
+++ b/website/src/security/CVE-2020-1929.md
@@ -0,0 +1,17 @@
+---
+permalink: /security/CVE-2020-1929/
+redirect_to: /security/index.html#cve-2020-1929
+---
+<!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
diff --git a/website/src/security/index.md b/website/src/security/index.md
new file mode 100644
index 0000000..c8db8e1
--- /dev/null
+++ b/website/src/security/index.md
@@ -0,0 +1,56 @@
+---
+layout: section
+title: "Beam Security"
+permalink: security/
+section_menu: section-menu/get-started.html
+---
+<!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+# Reporting Security Issues
+
+Apache Beam uses the standard process outlined by the [Apache Security
+Team](https://www.apache.org/security/) for reporting vulnerabilities. Note
+that vulnerabilities should not be publicly disclosed until the project has
+responded.
+
+To report a possible security vulnerability, please email
+`security@apache.org` and `pmc@beam.apache.org`. This is a non-public list
+that will reach the Beam PMC.
+
+# Known Security Issues
+
+## CVE-2020-1929
+
+[CVE-2020-1929] Apache Beam MongoDB IO connector disables certificate trust verification
+
+Severity: Major  
+Vendor: The Apache Software Foundation   
+
+Versions Affected:  
+Apache Beam 2.10.0 to 2.16.0
+
+Description:  
+The Apache Beam MongoDB connector in versions 2.10.0 to 2.16.0 has an option to
+disable SSL trust verification. However, this option is not respected: trust
+verification is disabled in every case, regardless of the configuration. The
+exclusion is also registered globally, which disables trust verification for
+any other code running in the same JVM.
+
+Mitigation:  
+Users of the affected versions should apply the following mitigation:
+- Upgrade to Apache Beam 2.17.0 or later
+
+Acknowledgements:  
+This issue was reported (and fixed) by Colm Ó hÉigeartaigh.