Merge pull request #6564 from udim/pubsub-0-35-4

[BEAM-5513] Upgrade Python SDK to PubSub 0.35.4
diff --git a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
index 454eb02..006db18 100644
--- a/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
+++ b/buildSrc/src/main/groovy/org/apache/beam/gradle/BeamModulePlugin.groovy
@@ -209,12 +209,21 @@
       includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
       excludeCategories 'org.apache.beam.sdk.testing.FlattenWithHeterogeneousCoders'
       excludeCategories 'org.apache.beam.sdk.testing.LargeKeys$Above100MB'
-      excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics'
-      excludeCategories 'org.apache.beam.sdk.testing.UsesGaugeMetrics'
-      excludeCategories 'org.apache.beam.sdk.testing.UsesDistributionMetrics'
       excludeCategories 'org.apache.beam.sdk.testing.UsesAttemptedMetrics'
-      excludeCategories 'org.apache.beam.sdk.testing.UsesTimersInParDo'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesCommittedMetrics'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesCounterMetrics'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesCustomWindowMerging'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesDistributionMetrics'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesFailureMessage'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesGaugeMetrics'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesParDoLifecycle'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesStatefulParDo'
       excludeCategories 'org.apache.beam.sdk.testing.UsesTestStream'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesTimersInParDo'
+      // SplittableDoFnTests
+      excludeCategories 'org.apache.beam.sdk.testing.UsesBoundedSplittableParDo'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesSplittableParDoWithWindowedSideInputs'
+      excludeCategories 'org.apache.beam.sdk.testing.UsesUnboundedSplittableParDo'
     }
     // Configuration for the classpath when running the test.
     Configuration testClasspathConfiguration
@@ -312,7 +321,7 @@
     def hamcrest_version = "1.3"
     def hadoop_version = "2.7.3"
     def jackson_version = "2.9.5"
-    def spark_version = "2.3.1"
+    def spark_version = "2.3.2"
     def apex_core_version = "3.7.0"
     def apex_malhar_version = "3.4.0"
     def postgres_version = "42.2.2"
diff --git a/release/src/main/groovy/QuickstartArchetype.groovy b/release/src/main/groovy/QuickstartArchetype.groovy
index eb3fa43..5c77b29 100644
--- a/release/src/main/groovy/QuickstartArchetype.groovy
+++ b/release/src/main/groovy/QuickstartArchetype.groovy
@@ -21,6 +21,7 @@
   def static generate(TestScripts t) {
     // Generate a maven project from the snapshot repository
     String output_text = t.run """mvn archetype:generate \
+      --update-snapshots \
       -DarchetypeGroupId=org.apache.beam \
       -DarchetypeArtifactId=beam-sdks-java-maven-archetypes-examples \
       -DarchetypeVersion=${t.ver()} \
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java
index bb3a890..e67020d 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkBatchPortablePipelineTranslator.java
@@ -140,6 +140,13 @@
     translatorMap.put(
         PTransformTranslation.RESHUFFLE_URN,
         FlinkBatchPortablePipelineTranslator::translateReshuffle);
+    translatorMap.put(
+        PTransformTranslation.CREATE_VIEW_TRANSFORM_URN,
+        // https://issues.apache.org/jira/browse/BEAM-5649
+        // Need to support this via a NOOP until the primitive is removed
+        (PTransformNode transform,
+            RunnerApi.Pipeline pipeline,
+            BatchTranslationContext context) -> {});
     return new FlinkBatchPortablePipelineTranslator(translatorMap.build());
   }
 
diff --git a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java
index 3f4f7c8..9fe2b2b 100644
--- a/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java
+++ b/runners/flink/src/main/java/org/apache/beam/runners/flink/FlinkStreamingPortablePipelineTranslator.java
@@ -169,6 +169,12 @@
     translatorMap.put(ExecutableStage.URN, this::translateExecutableStage);
     translatorMap.put(PTransformTranslation.RESHUFFLE_URN, this::translateReshuffle);
 
+    translatorMap.put(
+        // https://issues.apache.org/jira/browse/BEAM-5649
+        // Need to support this via a NOOP until the primitive is removed
+        PTransformTranslation.CREATE_VIEW_TRANSFORM_URN,
+        (String id, RunnerApi.Pipeline pipeline, StreamingTranslationContext context) -> {});
+
     this.urnToTransformTranslator = translatorMap.build();
   }
 
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
index 7a7f990..651e522 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/SparkPipelineOptions.java
@@ -19,6 +19,7 @@
 package org.apache.beam.runners.spark;
 
 import java.util.List;
+import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.options.ApplicationNameOptions;
 import org.apache.beam.sdk.options.Default;
 import org.apache.beam.sdk.options.DefaultValueFactory;
@@ -99,6 +100,16 @@
 
   void setCheckpointDurationMillis(Long durationMillis);
 
+  @Description(
+      "If set bundleSize will be used for splitting BoundedSources, otherwise default to "
+          + "splitting BoundedSources on Spark defaultParallelism. Most effective when used with "
+          + "Spark dynamicAllocation.")
+  @Default.Long(0)
+  Long getBundleSize();
+
+  @Experimental
+  void setBundleSize(Long value);
+
   @Description("Enable/disable sending aggregator values to Spark's metric sinks")
   @Default.Boolean(true)
   Boolean getEnableSparkMetricSinks();
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SourceRDD.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SourceRDD.java
index 67d0ab2..c94c4ec 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SourceRDD.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/io/SourceRDD.java
@@ -30,6 +30,7 @@
 import java.util.NoSuchElementException;
 import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
 import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
+import org.apache.beam.runners.spark.SparkPipelineOptions;
 import org.apache.beam.runners.spark.metrics.MetricsAccumulator;
 import org.apache.beam.sdk.io.BoundedSource;
 import org.apache.beam.sdk.io.Source;
@@ -65,6 +66,7 @@
     private final BoundedSource<T> source;
     private final SerializablePipelineOptions options;
     private final int numPartitions;
+    private final long bundleSize;
     private final String stepName;
     private final Accumulator<MetricsContainerStepMap> metricsAccum;
 
@@ -88,6 +90,7 @@
       // ** the configuration "spark.default.parallelism" takes precedence over all of the above **
       this.numPartitions = sc.defaultParallelism();
       checkArgument(this.numPartitions > 0, "Number of partitions must be greater than zero.");
+      this.bundleSize = options.get().as(SparkPipelineOptions.class).getBundleSize();
       this.stepName = stepName;
       this.metricsAccum = MetricsAccumulator.getInstance();
     }
@@ -96,19 +99,23 @@
 
     @Override
     public Partition[] getPartitions() {
-      long desiredSizeBytes = DEFAULT_BUNDLE_SIZE;
       try {
-        desiredSizeBytes = source.getEstimatedSizeBytes(options.get()) / numPartitions;
-      } catch (Exception e) {
-        LOG.warn(
-            "Failed to get estimated bundle size for source {}, using default bundle "
-                + "size of {} bytes.",
-            source,
-            DEFAULT_BUNDLE_SIZE);
-      }
-      try {
-        List<? extends Source<T>> partitionedSources =
-            source.split(desiredSizeBytes, options.get());
+        List<? extends Source<T>> partitionedSources;
+        if (bundleSize > 0) {
+          partitionedSources = source.split(bundleSize, options.get());
+        } else {
+          long desiredSizeBytes = DEFAULT_BUNDLE_SIZE;
+          try {
+            desiredSizeBytes = source.getEstimatedSizeBytes(options.get()) / numPartitions;
+          } catch (Exception e) {
+            LOG.warn(
+                "Failed to get estimated bundle size for source {}, using default bundle "
+                    + "size of {} bytes.",
+                source,
+                DEFAULT_BUNDLE_SIZE);
+          }
+          partitionedSources = source.split(desiredSizeBytes, options.get());
+        }
         Partition[] partitions = new SourcePartition[partitionedSources.size()];
         for (int i = 0; i < partitionedSources.size(); i++) {
           partitions[i] = new SourcePartition<>(id(), i, partitionedSources.get(i));
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
index 24ea8ad..21b9ff9 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/GroupCombineFunctions.java
@@ -30,7 +30,6 @@
 import org.apache.beam.sdk.values.KV;
 import org.apache.beam.sdk.values.WindowingStrategy;
 import org.apache.spark.HashPartitioner;
-import org.apache.spark.Partitioner;
 import org.apache.spark.api.java.JavaPairRDD;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.function.Function;
@@ -44,7 +43,10 @@
    * org.apache.beam.runners.core.GroupByKeyViaGroupByKeyOnly.GroupByKeyOnly} for the Spark runner.
    */
   public static <K, V> JavaRDD<WindowedValue<KV<K, Iterable<WindowedValue<V>>>>> groupByKeyOnly(
-      JavaRDD<WindowedValue<KV<K, V>>> rdd, Coder<K> keyCoder, WindowedValueCoder<V> wvCoder) {
+      JavaRDD<WindowedValue<KV<K, V>>> rdd,
+      Coder<K> keyCoder,
+      WindowedValueCoder<V> wvCoder,
+      boolean defaultParallelism) {
     // we use coders to convert objects in the PCollection to byte arrays, so they
     // can be transferred over the network for the shuffle.
     JavaPairRDD<ByteArray, byte[]> pairRDD =
@@ -52,13 +54,17 @@
             .map(WindowingHelpers.unwindowFunction())
             .mapToPair(TranslationUtils.toPairFunction())
             .mapToPair(CoderHelpers.toByteFunction(keyCoder, wvCoder));
-    // use a default parallelism HashPartitioner.
-    Partitioner partitioner = new HashPartitioner(rdd.rdd().sparkContext().defaultParallelism());
+    JavaPairRDD<ByteArray, Iterable<byte[]>> groupedRDD;
+    if (defaultParallelism) {
+      groupedRDD =
+          pairRDD.groupByKey(new HashPartitioner(rdd.rdd().sparkContext().defaultParallelism()));
+    } else {
+      groupedRDD = pairRDD.groupByKey();
+    }
 
     // using mapPartitions allows to preserve the partitioner
     // and avoid unnecessary shuffle downstream.
-    return pairRDD
-        .groupByKey(partitioner)
+    return groupedRDD
         .mapPartitionsToPair(
             TranslationUtils.pairFunctionToPairFlatMapFunction(
                 CoderHelpers.fromByteFunctionIterable(keyCoder, wvCoder)),
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
index 3f508b1..aa25aaa 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/TransformTranslator.java
@@ -30,6 +30,7 @@
 import java.util.Map;
 import org.apache.beam.runners.core.SystemReduceFn;
 import org.apache.beam.runners.core.metrics.MetricsContainerStepMap;
+import org.apache.beam.runners.spark.SparkPipelineOptions;
 import org.apache.beam.runners.spark.aggregators.AggregatorsAccumulator;
 import org.apache.beam.runners.spark.aggregators.NamedAggregators;
 import org.apache.beam.runners.spark.coders.CoderHelpers;
@@ -131,7 +132,16 @@
 
         // --- group by key only.
         JavaRDD<WindowedValue<KV<K, Iterable<WindowedValue<V>>>>> groupedByKey =
-            GroupCombineFunctions.groupByKeyOnly(inRDD, keyCoder, wvCoder);
+            GroupCombineFunctions.groupByKeyOnly(
+                inRDD,
+                keyCoder,
+                wvCoder,
+                context
+                        .getSerializableOptions()
+                        .get()
+                        .as(SparkPipelineOptions.class)
+                        .getBundleSize()
+                    > 0);
 
         // --- now group also by window.
         // for batch, GroupAlsoByWindow uses an in-memory StateInternals.
@@ -424,7 +434,7 @@
         WindowedValue.FullWindowedValueCoder.of(kvCoder.getValueCoder(), windowCoder);
 
     JavaRDD<WindowedValue<KV<K, Iterable<WindowedValue<V>>>>> groupRDD =
-        GroupCombineFunctions.groupByKeyOnly(kvInRDD, keyCoder, wvCoder);
+        GroupCombineFunctions.groupByKeyOnly(kvInRDD, keyCoder, wvCoder, true);
 
     return groupRDD
         .map(
diff --git a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
index a307cc9..e4dda18 100644
--- a/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
+++ b/runners/spark/src/main/java/org/apache/beam/runners/spark/translation/streaming/StreamingTransformTranslator.java
@@ -300,7 +300,8 @@
         // --- group by key only.
         JavaDStream<WindowedValue<KV<K, Iterable<WindowedValue<V>>>>> groupedByKeyStream =
             dStream.transform(
-                rdd -> GroupCombineFunctions.groupByKeyOnly(rdd, coder.getKeyCoder(), wvCoder));
+                rdd ->
+                    GroupCombineFunctions.groupByKeyOnly(rdd, coder.getKeyCoder(), wvCoder, true));
 
         // --- now group also by window.
         JavaDStream<WindowedValue<KV<K, Iterable<V>>>> outStream =
diff --git a/sdks/go/pkg/beam/core/runtime/exec/emit.go b/sdks/go/pkg/beam/core/runtime/exec/emit.go
index 9b47821..b26c5dd 100644
--- a/sdks/go/pkg/beam/core/runtime/exec/emit.go
+++ b/sdks/go/pkg/beam/core/runtime/exec/emit.go
@@ -53,6 +53,12 @@
 	emitters[key] = maker
 }
 
+// IsEmitterRegistered returns whether an emitter maker has already been registered.
+func IsEmitterRegistered(t reflect.Type) bool {
+	_, exists := emitters[t.String()]
+	return exists
+}
+
 func makeEmit(t reflect.Type, n ElementProcessor) ReusableEmitter {
 	emittersMu.Lock()
 	maker, exists := emitters[t.String()]
diff --git a/sdks/go/pkg/beam/core/runtime/exec/input.go b/sdks/go/pkg/beam/core/runtime/exec/input.go
index dff79fe..a3ccb78 100644
--- a/sdks/go/pkg/beam/core/runtime/exec/input.go
+++ b/sdks/go/pkg/beam/core/runtime/exec/input.go
@@ -60,6 +60,12 @@
 	inputs[t] = maker
 }
 
+// IsInputRegistered returns whether an input maker has already been registered.
+func IsInputRegistered(t reflect.Type) bool {
+	_, exists := inputs[t]
+	return exists
+}
+
 type reIterValue struct {
 	t  reflect.Type
 	s  ReStream
diff --git a/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/UsesFailureMessage.java b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/UsesFailureMessage.java
new file mode 100644
index 0000000..85d6290
--- /dev/null
+++ b/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/UsesFailureMessage.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.testing;
+
+/**
+ * Category tag for tests which validate that the correct failure message is provided by a
+ * failed pipeline.
+ */
+public interface UsesFailureMessage extends NeedsRunner {}
diff --git a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
index c4aa9ca..bedbb5d 100644
--- a/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
+++ b/sdks/java/core/src/test/java/org/apache/beam/sdk/testing/PAssertTest.java
@@ -335,7 +335,7 @@
 
   /** Test that we throw an error for false assertion on singleton. */
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testPAssertEqualsSingletonFalse() throws Exception {
     PCollection<Integer> pcollection = pipeline.apply(Create.of(42));
     PAssert.thatSingleton("The value was not equal to 44", pcollection).isEqualTo(44);
@@ -351,7 +351,7 @@
 
   /** Test that we throw an error for false assertion on singleton. */
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testPAssertEqualsSingletonFalseDefaultReasonString() throws Exception {
     PCollection<Integer> pcollection = pipeline.apply(Create.of(42));
     PAssert.thatSingleton(pcollection).isEqualTo(44);
@@ -431,7 +431,7 @@
 
   /** Tests that {@code containsInAnyOrder} fails when and how it should. */
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testContainsInAnyOrderFalse() throws Exception {
     PCollection<Integer> pcollection = pipeline.apply(Create.of(1, 2, 3, 4));
 
@@ -452,7 +452,7 @@
   }
 
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testEmptyFalse() throws Exception {
     PCollection<Long> vals = pipeline.apply(GenerateSequence.from(0).to(5));
     PAssert.that("Vals should have been empty", vals).empty();
@@ -466,7 +466,7 @@
   }
 
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testEmptyFalseDefaultReasonString() throws Exception {
     PCollection<Long> vals = pipeline.apply(GenerateSequence.from(0).to(5));
     PAssert.that(vals).empty();
@@ -494,7 +494,7 @@
   }
 
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testAssertionSiteIsCapturedWithMessage() throws Exception {
     PCollection<Long> vals = pipeline.apply(GenerateSequence.from(0).to(5));
     assertThatCollectionIsEmptyWithMessage(vals);
@@ -509,7 +509,7 @@
   }
 
   @Test
-  @Category(ValidatesRunner.class)
+  @Category({ValidatesRunner.class, UsesFailureMessage.class})
   public void testAssertionSiteIsCapturedWithoutMessage() throws Exception {
     PCollection<Long> vals = pipeline.apply(GenerateSequence.from(0).to(5));
     assertThatCollectionIsEmptyWithoutMessage(vals);
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java
index 1aca83b..44eed39 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/BeamSqlEnv.java
@@ -22,7 +22,6 @@
 import org.apache.beam.sdk.annotations.Internal;
 import org.apache.beam.sdk.extensions.sql.BeamSqlTable;
 import org.apache.beam.sdk.extensions.sql.BeamSqlUdf;
-import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.UdafImpl;
 import org.apache.beam.sdk.extensions.sql.impl.rel.BeamRelNode;
 import org.apache.beam.sdk.extensions.sql.meta.provider.ReadOnlyTableProvider;
 import org.apache.beam.sdk.extensions.sql.meta.provider.TableProvider;
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/UdafImpl.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdafImpl.java
similarity index 93%
rename from sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/UdafImpl.java
rename to sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdafImpl.java
index a0890596..fd3f472 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/UdafImpl.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/UdafImpl.java
@@ -15,12 +15,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.beam.sdk.extensions.sql.impl.interpreter.operator;
+package org.apache.beam.sdk.extensions.sql.impl;
 
 import java.io.Serializable;
 import java.lang.reflect.ParameterizedType;
 import java.util.ArrayList;
 import java.util.List;
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.annotations.Internal;
 import org.apache.beam.sdk.transforms.Combine.CombineFn;
 import org.apache.calcite.adapter.enumerable.AggImplementor;
 import org.apache.calcite.rel.type.RelDataType;
@@ -30,11 +32,13 @@
 import org.apache.calcite.schema.ImplementableAggFunction;
 
 /** Implement {@link AggregateFunction} to take a {@link CombineFn} as UDAF. */
+@Experimental
+@Internal
 public final class UdafImpl<InputT, AccumT, OutputT>
     implements AggregateFunction, ImplementableAggFunction, Serializable {
   private CombineFn<InputT, AccumT, OutputT> combineFn;
 
-  public UdafImpl(CombineFn<InputT, AccumT, OutputT> combineFn) {
+  UdafImpl(CombineFn<InputT, AccumT, OutputT> combineFn) {
     this.combineFn = combineFn;
   }
 
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
index 61342c4..53fdee8 100644
--- a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/impl/transform/BeamAggregationTransforms.java
@@ -39,7 +39,7 @@
 import org.apache.beam.sdk.coders.KvCoder;
 import org.apache.beam.sdk.coders.RowCoder;
 import org.apache.beam.sdk.coders.VarIntCoder;
-import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.UdafImpl;
+import org.apache.beam.sdk.extensions.sql.impl.UdafImpl;
 import org.apache.beam.sdk.extensions.sql.impl.transform.agg.CovarianceFn;
 import org.apache.beam.sdk.extensions.sql.impl.transform.agg.VarianceFn;
 import org.apache.beam.sdk.extensions.sql.impl.utils.BigDecimalConverter;
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java
similarity index 80%
rename from sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java
rename to sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java
index 8fc0555..bbee5f5 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedBoundedTable.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestBoundedTable.java
@@ -15,15 +15,13 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.beam.sdk.extensions.sql.mock;
-
-import static org.apache.beam.sdk.extensions.sql.TestUtils.buildBeamSqlSchema;
-import static org.apache.beam.sdk.extensions.sql.TestUtils.buildRows;
+package org.apache.beam.sdk.extensions.sql.meta.provider.test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.ConcurrentLinkedQueue;
+import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.transforms.Create;
 import org.apache.beam.sdk.transforms.DoFn;
@@ -35,13 +33,14 @@
 import org.apache.beam.sdk.values.Row;
 
 /** Mocked table for bounded data sources. */
-public class MockedBoundedTable extends MockedTable {
+@Experimental
+public class TestBoundedTable extends TestTable {
   /** rows written to this table. */
   private static final ConcurrentLinkedQueue<Row> CONTENT = new ConcurrentLinkedQueue<>();
   /** rows flow out from this table. */
   private final List<Row> rows = new ArrayList<>();
 
-  public MockedBoundedTable(Schema beamSchema) {
+  public TestBoundedTable(Schema beamSchema) {
     super(beamSchema);
   }
 
@@ -51,20 +50,20 @@
    * <p>e.g.
    *
    * <pre>{@code
-   * MockedUnboundedTable
+   * TestBoundedTable
    *   .of(Types.BIGINT, "order_id",
    *       Types.INTEGER, "site_id",
    *       Types.DOUBLE, "price",
    *       Types.TIMESTAMP, "order_time")
    * }</pre>
    */
-  public static MockedBoundedTable of(final Object... args) {
-    return new MockedBoundedTable(buildBeamSqlSchema(args));
+  public static TestBoundedTable of(final Object... args) {
+    return new TestBoundedTable(TestTableUtils.buildBeamSqlSchema(args));
   }
 
   /** Build a mocked bounded table with the specified type. */
-  public static MockedBoundedTable of(final Schema type) {
-    return new MockedBoundedTable(type);
+  public static TestBoundedTable of(final Schema type) {
+    return new TestBoundedTable(type);
   }
 
   /**
@@ -80,8 +79,8 @@
    * )
    * }</pre>
    */
-  public MockedBoundedTable addRows(Object... args) {
-    List<Row> rows = buildRows(getSchema(), Arrays.asList(args));
+  public TestBoundedTable addRows(Object... args) {
+    List<Row> rows = TestTableUtils.buildRows(getSchema(), Arrays.asList(args));
     this.rows.addAll(rows);
     return this;
   }
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestTable.java
similarity index 85%
rename from sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java
rename to sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestTable.java
index fed4eb3..83b645c 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedTable.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestTable.java
@@ -16,9 +16,10 @@
  * limitations under the License.
  */
 
-package org.apache.beam.sdk.extensions.sql.mock;
+package org.apache.beam.sdk.extensions.sql.meta.provider.test;
 
 import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.values.PCollection;
@@ -26,10 +27,11 @@
 import org.apache.beam.sdk.values.Row;
 
 /** Base class for mocked table. */
-public abstract class MockedTable extends BaseBeamTable {
+@Experimental
+public abstract class TestTable extends BaseBeamTable {
   public static final AtomicInteger COUNTER = new AtomicInteger();
 
-  public MockedTable(Schema beamSchema) {
+  public TestTable(Schema beamSchema) {
     super(beamSchema);
   }
 
diff --git a/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestTableUtils.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestTableUtils.java
new file mode 100644
index 0000000..1c8ffd1
--- /dev/null
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestTableUtils.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.extensions.sql.meta.provider.test;
+
+import static java.util.stream.Collectors.toList;
+import static org.apache.beam.sdk.schemas.Schema.toSchema;
+import static org.apache.beam.sdk.values.Row.toRow;
+
+import com.google.common.collect.Lists;
+import java.util.List;
+import java.util.stream.Stream;
+import org.apache.beam.sdk.annotations.Experimental;
+import org.apache.beam.sdk.schemas.Schema;
+import org.apache.beam.sdk.schemas.Schema.FieldType;
+import org.apache.beam.sdk.values.Row;
+
+/** Utility functions for mock classes. */
+@Experimental
+public class TestTableUtils {
+
+  /**
+   * Convenient way to build a {@link Schema} with the specified column types and names.
+   *
+   * <p>For example:
+   *
+   * <pre>{@code
+   * buildBeamSqlSchema(
+   *   Types.INTEGER, "order_id",
+   *   Types.INTEGER, "sum_site_id",
+   *   Types.VARCHAR, "buyer"
+   * )
+   * }</pre>
+   *
+   * @param args pairs of column type and column names.
+   */
+  public static Schema buildBeamSqlSchema(Object... args) {
+    return Stream.iterate(0, i -> i + 2)
+        .limit(args.length / 2)
+        .map(i -> toRecordField(args, i))
+        .collect(toSchema());
+  }
+
+  // TODO: support nested.
+  public static Schema.Field toRecordField(Object[] args, int i) {
+    return Schema.Field.of((String) args[i + 1], (FieldType) args[i]);
+  }
+
+  /**
+   * Convenient way to build {@code BeamSqlRow}s.
+   *
+   * <p>e.g.
+   *
+   * <pre>{@code
+   * buildRows(
+   *     schema,
+   *     1, 1, 1, // the first row
+   *     2, 2, 2, // the second row
+   *     ...
+   * )
+   * }</pre>
+   */
+  public static List<Row> buildRows(Schema type, List<?> rowsValues) {
+    return Lists.partition(rowsValues, type.getFieldCount())
+        .stream()
+        .map(values -> values.stream().collect(toRow(type)))
+        .collect(toList());
+  }
+}
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java
similarity index 83%
rename from sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java
rename to sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java
index 82b47aa..de15f8a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/mock/MockedUnboundedTable.java
+++ b/sdks/java/extensions/sql/src/main/java/org/apache/beam/sdk/extensions/sql/meta/provider/test/TestUnboundedTable.java
@@ -16,12 +16,12 @@
  * limitations under the License.
  */
 
-package org.apache.beam.sdk.extensions.sql.mock;
+package org.apache.beam.sdk.extensions.sql.meta.provider.test;
 
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import org.apache.beam.sdk.extensions.sql.TestUtils;
+import org.apache.beam.sdk.annotations.Experimental;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.TestStream;
 import org.apache.beam.sdk.transforms.SerializableFunctions;
@@ -34,13 +34,14 @@
 import org.joda.time.Instant;
 
 /** A mocked unbounded table. */
-public class MockedUnboundedTable extends MockedTable {
+@Experimental
+public class TestUnboundedTable extends TestTable {
   /** rows flow out from this table with the specified watermark instant. */
   private final List<Pair<Duration, List<Row>>> timestampedRows = new ArrayList<>();
   /** specify the index of column in the row which stands for the event time field. */
   private int timestampField;
 
-  private MockedUnboundedTable(Schema beamSchema) {
+  private TestUnboundedTable(Schema beamSchema) {
     super(beamSchema);
   }
 
@@ -50,18 +51,18 @@
    * <p>e.g.
    *
    * <pre>{@code
-   * MockedUnboundedTable
+   * TestUnboundedTable
    *   .of(Types.BIGINT, "order_id",
    *       Types.INTEGER, "site_id",
    *       Types.DOUBLE, "price",
    *       Types.TIMESTAMP, "order_time")
    * }</pre>
    */
-  public static MockedUnboundedTable of(final Object... args) {
-    return new MockedUnboundedTable(TestUtils.buildBeamSqlSchema(args));
+  public static TestUnboundedTable of(final Object... args) {
+    return new TestUnboundedTable(TestTableUtils.buildBeamSqlSchema(args));
   }
 
-  public MockedUnboundedTable timestampColumnIndex(int idx) {
+  public TestUnboundedTable timestampColumnIndex(int idx) {
     this.timestampField = idx;
     return this;
   }
@@ -80,8 +81,8 @@
    * )
    * }</pre>
    */
-  public MockedUnboundedTable addRows(Duration duration, Object... args) {
-    List<Row> rows = TestUtils.buildRows(getSchema(), Arrays.asList(args));
+  public TestUnboundedTable addRows(Duration duration, Object... args) {
+    List<Row> rows = TestTableUtils.buildRows(getSchema(), Arrays.asList(args));
     // record the watermark + rows
     this.timestampedRows.add(Pair.of(duration, rows));
     return this;
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamComplexTypeTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamComplexTypeTest.java
index 3ce2ac5..9360344 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamComplexTypeTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamComplexTypeTest.java
@@ -22,7 +22,7 @@
 import org.apache.beam.sdk.extensions.sql.impl.BeamSqlEnv;
 import org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils;
 import org.apache.beam.sdk.extensions.sql.meta.provider.ReadOnlyTableProvider;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.schemas.Schema.FieldType;
 import org.apache.beam.sdk.testing.PAssert;
@@ -83,14 +83,14 @@
           "test_provider",
           ImmutableMap.of(
               "arrayWithRowTestTable",
-              MockedBoundedTable.of(FieldType.array(FieldType.row(innerRowSchema)), "col")
+              TestBoundedTable.of(FieldType.array(FieldType.row(innerRowSchema)), "col")
                   .addRows(
                       Arrays.asList(Row.withSchema(innerRowSchema).addValues("str", 1L).build())),
               "nestedArrayTestTable",
-              MockedBoundedTable.of(FieldType.array(FieldType.array(FieldType.INT64)), "col")
+              TestBoundedTable.of(FieldType.array(FieldType.array(FieldType.INT64)), "col")
                   .addRows(Arrays.asList(Arrays.asList(1L, 2L, 3L), Arrays.asList(4L, 5L))),
               "nestedRowTestTable",
-              MockedBoundedTable.of(Schema.FieldType.row(nestedRowSchema), "col")
+              TestBoundedTable.of(Schema.FieldType.row(nestedRowSchema), "col")
                   .addRows(
                       Row.withSchema(nestedRowSchema)
                           .addValues(
@@ -100,10 +100,10 @@
                               Row.withSchema(innerRowSchema).addValues("inner_str_two", 3L).build())
                           .build()),
               "basicRowTestTable",
-              MockedBoundedTable.of(Schema.FieldType.row(innerRowSchema), "col")
+              TestBoundedTable.of(Schema.FieldType.row(innerRowSchema), "col")
                   .addRows(Row.withSchema(innerRowSchema).addValues("innerStr", 1L).build()),
               "rowWithArrayTestTable",
-              MockedBoundedTable.of(Schema.FieldType.row(rowWithArraySchema), "col")
+              TestBoundedTable.of(Schema.FieldType.row(rowWithArraySchema), "col")
                   .addRows(
                       Row.withSchema(rowWithArraySchema)
                           .addValues("str", 4L, Arrays.asList(5L, 6L))
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslExistsTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslExistsTest.java
index 64a8c72..f9f57e4 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslExistsTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/BeamSqlDslExistsTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql;
 
 import org.apache.beam.sdk.extensions.sql.impl.rel.BaseRelTest;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -37,7 +37,7 @@
   public static void prepare() {
     registerTable(
         "CUSTOMER",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT32, "c_custkey",
                 Schema.FieldType.DOUBLE, "c_acctbal",
                 Schema.FieldType.STRING, "c_city")
@@ -45,7 +45,7 @@
 
     registerTable(
         "ORDERS",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT32, "o_orderkey",
                 Schema.FieldType.INT32, "o_custkey",
                 Schema.FieldType.DOUBLE, "o_totalprice")
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
index fbfe28a..7dca69a 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/TestUtils.java
@@ -19,18 +19,13 @@
 package org.apache.beam.sdk.extensions.sql;
 
 import static com.google.common.base.Preconditions.checkArgument;
-import static java.util.stream.Collectors.toList;
-import static org.apache.beam.sdk.schemas.Schema.toSchema;
-import static org.apache.beam.sdk.values.Row.toRow;
 
-import com.google.common.collect.Lists;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import java.util.stream.Stream;
 import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableUtils;
 import org.apache.beam.sdk.schemas.Schema;
-import org.apache.beam.sdk.schemas.Schema.FieldType;
 import org.apache.beam.sdk.testing.TestStream;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.SerializableFunctions;
@@ -103,7 +98,7 @@
      * @args pairs of column type and column names.
      */
     public static RowsBuilder of(final Object... args) {
-      Schema beamSQLSchema = buildBeamSqlSchema(args);
+      Schema beamSQLSchema = TestTableUtils.buildBeamSqlSchema(args);
       RowsBuilder builder = new RowsBuilder();
       builder.type = beamSQLSchema;
 
@@ -134,7 +129,7 @@
      * <p>Note: check the class javadoc for a detailed example.
      */
     public RowsBuilder addRows(final Object... args) {
-      this.rows.addAll(buildRows(type, Arrays.asList(args)));
+      this.rows.addAll(TestTableUtils.buildRows(type, Arrays.asList(args)));
       return this;
     }
 
@@ -144,7 +139,7 @@
      * <p>Note: check the class javadoc for a detailed example.
      */
     public RowsBuilder addRows(final List args) {
-      this.rows.addAll(buildRows(type, args));
+      this.rows.addAll(TestTableUtils.buildRows(type, args));
       return this;
     }
 
@@ -223,53 +218,6 @@
     }
   }
 
-  /**
-   * Convenient way to build a {@link Schema}.
-   *
-   * <p>e.g.
-   *
-   * <pre>{@code
-   * buildBeamSqlSchema(
-   *     SqlCoders.BIGINT, "order_id",
-   *     SqlCoders.INTEGER, "site_id",
-   *     SqlCoders.DOUBLE, "price",
-   *     SqlCoders.TIMESTAMP, "order_time"
-   * )
-   * }</pre>
-   */
-  public static Schema buildBeamSqlSchema(Object... args) {
-    return Stream.iterate(0, i -> i + 2)
-        .limit(args.length / 2)
-        .map(i -> toRecordField(args, i))
-        .collect(toSchema());
-  }
-
-  // TODO: support nested.
-  private static Schema.Field toRecordField(Object[] args, int i) {
-    return Schema.Field.of((String) args[i + 1], (FieldType) args[i]);
-  }
-
-  /**
-   * Convenient way to build a {@code BeamSqlRow}s.
-   *
-   * <p>e.g.
-   *
-   * <pre>{@code
-   * buildRows(
-   *     schema,
-   *     1, 1, 1, // the first row
-   *     2, 2, 2, // the second row
-   *     ...
-   * )
-   * }</pre>
-   */
-  public static List<Row> buildRows(Schema type, List<?> rowsValues) {
-    return Lists.partition(rowsValues, type.getFieldCount())
-        .stream()
-        .map(values -> values.stream().collect(toRow(type)))
-        .collect(toList());
-  }
-
   public static <T> PCollectionTuple tuple(String tag, PCollection<T> pCollection) {
 
     return PCollectionTuple.of(new TupleTag<>(tag), pCollection);
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/JdbcDriverTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/JdbcDriverTest.java
index 8eec1b6..7899c86 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/JdbcDriverTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/JdbcDriverTest.java
@@ -43,9 +43,9 @@
 import java.util.TimeZone;
 import java.util.stream.Collectors;
 import org.apache.beam.sdk.extensions.sql.meta.provider.ReadOnlyTableProvider;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
-import org.apache.beam.sdk.extensions.sql.mock.MockedUnboundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestUnboundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.values.Row;
 import org.apache.calcite.jdbc.CalciteConnection;
@@ -82,7 +82,7 @@
           "test",
           ImmutableMap.of(
               "test",
-              MockedBoundedTable.of(
+              TestBoundedTable.of(
                       Schema.FieldType.INT32, "id",
                       Schema.FieldType.STRING, "name")
                   .addRows(1, "first")));
@@ -375,7 +375,7 @@
             "test",
             ImmutableMap.of(
                 "test",
-                MockedBoundedTable.of(
+                TestBoundedTable.of(
                         Schema.FieldType.INT32, "id",
                         Schema.FieldType.STRING, "name")
                     .addRows(1, "first")
@@ -414,7 +414,7 @@
             "test",
             ImmutableMap.of(
                 "test",
-                MockedUnboundedTable.of(
+                TestUnboundedTable.of(
                         Schema.FieldType.INT32, "order_id",
                         Schema.FieldType.INT32, "site_id",
                         Schema.FieldType.INT32, "price",
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
index dcfa2a1..9ce389b 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamIntersectRelTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.impl.rel;
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -38,7 +38,7 @@
   public static void prepare() {
     registerTable(
         "ORDER_DETAILS1",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price")
@@ -46,7 +46,7 @@
 
     registerTable(
         "ORDER_DETAILS2",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price")
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
index e5cda16..5f8f41f 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelBoundedVsBoundedTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.impl.rel;
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -33,15 +33,15 @@
 public class BeamJoinRelBoundedVsBoundedTest extends BaseRelTest {
   @Rule public final TestPipeline pipeline = TestPipeline.create();
 
-  public static final MockedBoundedTable ORDER_DETAILS1 =
-      MockedBoundedTable.of(
+  public static final TestBoundedTable ORDER_DETAILS1 =
+      TestBoundedTable.of(
               Schema.FieldType.INT32, "order_id",
               Schema.FieldType.INT32, "site_id",
               Schema.FieldType.INT32, "price")
           .addRows(1, 2, 3, 2, 3, 3, 3, 4, 5);
 
-  public static final MockedBoundedTable ORDER_DETAILS2 =
-      MockedBoundedTable.of(
+  public static final TestBoundedTable ORDER_DETAILS2 =
+      TestBoundedTable.of(
               Schema.FieldType.INT32, "order_id",
               Schema.FieldType.INT32, "site_id",
               Schema.FieldType.INT32, "price")
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
index 149a57e..250cf18 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsBoundedTest.java
@@ -24,8 +24,9 @@
 import org.apache.beam.sdk.extensions.sql.TestUtils;
 import org.apache.beam.sdk.extensions.sql.impl.schema.BaseBeamTable;
 import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSqlOutputToConsoleFn;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
-import org.apache.beam.sdk.extensions.sql.mock.MockedUnboundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableUtils;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestUnboundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -52,7 +53,7 @@
   public static void prepare() {
     registerTable(
         "ORDER_DETAILS",
-        MockedUnboundedTable.of(
+        TestUnboundedTable.of(
                 Schema.FieldType.INT32, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.INT32, "price",
@@ -88,7 +89,7 @@
 
     registerTable(
         "ORDER_DETAILS1",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT32, "order_id",
                 Schema.FieldType.STRING, "buyer")
             .addRows(
@@ -98,7 +99,7 @@
     registerTable(
         "SITE_LKP",
         new SiteLookupTable(
-            TestUtils.buildBeamSqlSchema(
+            TestTableUtils.buildBeamSqlSchema(
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.STRING, "site_name")));
   }
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
index c99435a..51f48ea 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamJoinRelUnboundedVsUnboundedTest.java
@@ -20,7 +20,7 @@
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
 import org.apache.beam.sdk.extensions.sql.impl.transform.BeamSqlOutputToConsoleFn;
-import org.apache.beam.sdk.extensions.sql.mock.MockedUnboundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestUnboundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -45,7 +45,7 @@
   public static void prepare() {
     registerTable(
         "ORDER_DETAILS",
-        MockedUnboundedTable.of(
+        TestUnboundedTable.of(
                 Schema.FieldType.INT32, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.INT32, "price",
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
index 3736bde..02e84b9 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamMinusRelTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.impl.rel;
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -37,7 +37,7 @@
   public static void prepare() {
     registerTable(
         "ORDER_DETAILS1",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price")
@@ -45,7 +45,7 @@
 
     registerTable(
         "ORDER_DETAILS2",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price")
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
index 3f87fda..20ab75b 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSetOperatorRelBaseTest.java
@@ -20,7 +20,7 @@
 
 import org.apache.beam.sdk.Pipeline;
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.options.PipelineOptionsFactory;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
@@ -42,7 +42,7 @@
   public static void prepare() {
     registerTable(
         "ORDER_DETAILS",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price",
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
index 10dace3..fe542e9 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamSortRelTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.impl.rel;
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -41,7 +41,7 @@
   public void prepare() {
     registerTable(
         "ORDER_DETAILS",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price",
@@ -89,7 +89,7 @@
                 new DateTime(9)));
     registerTable(
         "SUB_ORDER_RAM",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
             Schema.builder()
                 .addField("order_id", Schema.FieldType.INT64)
                 .addField("site_id", Schema.FieldType.INT32)
@@ -164,9 +164,9 @@
 
     registerTable(
         "ORDER_DETAILS",
-        MockedBoundedTable.of(schema)
+        TestBoundedTable.of(schema)
             .addRows(1L, 2, 1.0, 1L, null, 2.0, 2L, 1, 3.0, 2L, null, 4.0, 5L, 5, 5.0));
-    registerTable("SUB_ORDER_RAM", MockedBoundedTable.of(schema));
+    registerTable("SUB_ORDER_RAM", TestBoundedTable.of(schema));
 
     String sql =
         "INSERT INTO SUB_ORDER_RAM(order_id, site_id, price)  SELECT "
@@ -194,9 +194,9 @@
 
     registerTable(
         "ORDER_DETAILS",
-        MockedBoundedTable.of(schema)
+        TestBoundedTable.of(schema)
             .addRows(1L, 2, 1.0, 1L, null, 2.0, 2L, 1, 3.0, 2L, null, 4.0, 5L, 5, 5.0));
-    registerTable("SUB_ORDER_RAM", MockedBoundedTable.of(schema));
+    registerTable("SUB_ORDER_RAM", TestBoundedTable.of(schema));
 
     String sql =
         "INSERT INTO SUB_ORDER_RAM(order_id, site_id, price)  SELECT "
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
index efbb5a6..63e63e8 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamUnionRelTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.impl.rel;
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -37,7 +37,7 @@
   public static void prepare() {
     registerTable(
         "ORDER_DETAILS",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
                 Schema.FieldType.INT64, "order_id",
                 Schema.FieldType.INT32, "site_id",
                 Schema.FieldType.DOUBLE, "price")
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
index 19cdd62..dd88857 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/rel/BeamValuesRelTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.impl.rel;
 
 import org.apache.beam.sdk.extensions.sql.TestUtils;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.testing.PAssert;
 import org.apache.beam.sdk.testing.TestPipeline;
@@ -37,12 +37,12 @@
   public static void prepare() {
     registerTable(
         "string_table",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
             Schema.FieldType.STRING, "name",
             Schema.FieldType.STRING, "description"));
     registerTable(
         "int_table",
-        MockedBoundedTable.of(
+        TestBoundedTable.of(
             Schema.FieldType.INT32, "c0",
             Schema.FieldType.INT32, "c1"));
   }
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
index ccd1f21..178d682 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlBuiltinFunctionsIntegrationTestBase.java
@@ -37,8 +37,8 @@
 import org.apache.beam.sdk.extensions.sql.TestUtils;
 import org.apache.beam.sdk.extensions.sql.impl.JdbcDriver;
 import org.apache.beam.sdk.extensions.sql.meta.provider.ReadOnlyTableProvider;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestTableProvider;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.schemas.Schema.FieldType;
 import org.apache.beam.sdk.schemas.Schema.TypeName;
@@ -116,7 +116,7 @@
 
   protected PCollection<Row> getTestPCollection() {
     try {
-      return MockedBoundedTable.of(ROW_TYPE)
+      return TestBoundedTable.of(ROW_TYPE)
           .addRows(
               parseDate("1986-02-15 11:35:26"),
               (byte) 1,
@@ -139,7 +139,7 @@
 
   protected PCollection<Row> getFloorCeilingTestPCollection() {
     try {
-      return MockedBoundedTable.of(ROW_TYPE_THREE)
+      return TestBoundedTable.of(ROW_TYPE_THREE)
           .addRows(parseDate("1986-02-15 11:35:26"), 1.4)
           .buildIOReader(pipeline.begin())
           .setRowSchema(ROW_TYPE_THREE);
@@ -150,7 +150,7 @@
 
   protected PCollection<Row> getAggregationTestPCollection() {
     try {
-      return MockedBoundedTable.of(ROW_TYPE_TWO)
+      return TestBoundedTable.of(ROW_TYPE_TWO)
           .addRows(
               parseDate("1986-02-15 11:35:26"),
               (byte) 1,
@@ -346,7 +346,7 @@
             "test",
             ImmutableMap.of(
                 "test",
-                MockedBoundedTable.of(
+                TestBoundedTable.of(
                         Schema.FieldType.INT32, "id",
                         Schema.FieldType.STRING, "name")
                     .addRows(1, "first")));
diff --git a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
index 936c4ed..7028b8c 100644
--- a/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
+++ b/sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/integrationtest/BeamSqlComparisonOperatorsIntegrationTest.java
@@ -19,7 +19,7 @@
 package org.apache.beam.sdk.extensions.sql.integrationtest;
 
 import java.math.BigDecimal;
-import org.apache.beam.sdk.extensions.sql.mock.MockedBoundedTable;
+import org.apache.beam.sdk.extensions.sql.meta.provider.test.TestBoundedTable;
 import org.apache.beam.sdk.schemas.Schema;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.Row;
@@ -326,7 +326,7 @@
             .build();
 
     try {
-      return MockedBoundedTable.of(type)
+      return TestBoundedTable.of(type)
           .addRows(
               (byte) 0,
               (byte) 1,
diff --git a/sdks/java/io/synthetic/build.gradle b/sdks/java/io/synthetic/build.gradle
index 22bc680..900f61d 100644
--- a/sdks/java/io/synthetic/build.gradle
+++ b/sdks/java/io/synthetic/build.gradle
@@ -24,10 +24,12 @@
 
 dependencies {
   compile library.java.joda_time
-  compile library.java.commons_math3
+  shadow library.java.commons_math3
   shadow library.java.jackson_core
   shadow library.java.jackson_annotations
   shadow library.java.jackson_databind
+  shadow library.java.guava
+
   testCompile library.java.guava
   testCompile library.java.junit
   testCompile library.java.hamcrest_core
diff --git a/sdks/java/load-tests/OWNERS b/sdks/java/load-tests/OWNERS
new file mode 100644
index 0000000..2a14010
--- /dev/null
+++ b/sdks/java/load-tests/OWNERS
@@ -0,0 +1,5 @@
+# See the OWNERS docs at https://s.apache.org/beam-owners
+
+reviewers:
+  - lgajowy
+  - kkucharc
diff --git a/sdks/java/load-tests/build.gradle b/sdks/java/load-tests/build.gradle
new file mode 100644
index 0000000..c3a1f63
--- /dev/null
+++ b/sdks/java/load-tests/build.gradle
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+apply plugin: org.apache.beam.gradle.BeamModulePlugin
+applyJavaNature()
+
+description = "Apache Beam :: SDKs :: Java :: Load Tests"
+
+
+def mainClassProperty = "loadTest.mainClass"
+def mainClass = project.findProperty(mainClassProperty)
+
+// When running via Gradle, this property can be used to pass command-line arguments
+// to the load test launch
+def loadTestArgsProperty = "loadTest.args"
+def loadTestArgs = project.hasProperty(loadTestArgsProperty) ?
+        project.getProperty(loadTestArgsProperty).split() : []
+
+// When running via Gradle, this property sets the runner dependency
+def runnerProperty = "runner"
+def runnerDependency = (project.hasProperty(runnerProperty)
+        ? project.getProperty(runnerProperty)
+        : ":beam-runners-direct-java")
+
+def shouldProvideSpark = ":beam-runners-spark".equals(runnerDependency)
+
+configurations {
+  // A configuration for running the load test launcher directly from Gradle, which
+  // uses Gradle to put the appropriate dependencies on the classpath rather than
+  // bundling them into a fat jar
+  gradleRun
+}
+
+dependencies {
+  shadow project(path: ":beam-sdks-java-core", configuration: "shadow")
+  shadow project(path: ":beam-runners-direct-java", configuration: "shadow")
+  shadow project(path: ":beam-sdks-java-io-synthetic", configuration: "shadow")
+
+  gradleRun project(path: project.path, configuration: "shadow")
+  gradleRun project(path: runnerDependency, configuration: "shadow")
+
+  // The Spark runner requires the user to provide a Spark dependency. For self-contained
+  // runs with the Spark runner, we can provide such a dependency. This is deliberately phrased
+  // to not hardcode any runner other than :beam-runners-direct-java
+  if (shouldProvideSpark) {
+    gradleRun library.java.spark_streaming
+    gradleRun library.java.spark_core, {
+      exclude group:"org.slf4j", module:"jul-to-slf4j"
+    }
+  }
+}
+
+if (shouldProvideSpark) {
+  configurations.gradleRun {
+    // Using the Spark runner causes a StackOverflowError if slf4j-jdk14 is on the classpath
+    exclude group: "org.slf4j", module: "slf4j-jdk14"
+  }
+}
+
+task run(type: JavaExec) {
+  main = mainClass
+  classpath = configurations.gradleRun
+  args loadTestArgs
+}
+
diff --git a/sdks/java/load-tests/src/main/java/org/apache/beam/sdk/loadtests/GroupByKeyLoadTest.java b/sdks/java/load-tests/src/main/java/org/apache/beam/sdk/loadtests/GroupByKeyLoadTest.java
new file mode 100644
index 0000000..775220c
--- /dev/null
+++ b/sdks/java/load-tests/src/main/java/org/apache/beam/sdk/loadtests/GroupByKeyLoadTest.java
@@ -0,0 +1,175 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.beam.sdk.loadtests;
+
+import static java.lang.String.format;
+import static org.apache.beam.sdk.loadtests.GroupByKeyLoadTest.Options.fromJsonString;
+import static org.apache.beam.sdk.loadtests.GroupByKeyLoadTest.Options.readFromArgs;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.IOException;
+import java.util.Optional;
+import org.apache.beam.sdk.Pipeline;
+import org.apache.beam.sdk.io.synthetic.SyntheticBoundedIO;
+import org.apache.beam.sdk.io.synthetic.SyntheticBoundedIO.SyntheticSourceOptions;
+import org.apache.beam.sdk.io.synthetic.SyntheticOptions;
+import org.apache.beam.sdk.io.synthetic.SyntheticStep;
+import org.apache.beam.sdk.options.ApplicationNameOptions;
+import org.apache.beam.sdk.options.Default;
+import org.apache.beam.sdk.options.Description;
+import org.apache.beam.sdk.options.PipelineOptions;
+import org.apache.beam.sdk.options.PipelineOptionsFactory;
+import org.apache.beam.sdk.options.Validation;
+import org.apache.beam.sdk.transforms.DoFn;
+import org.apache.beam.sdk.transforms.GroupByKey;
+import org.apache.beam.sdk.transforms.ParDo;
+import org.apache.beam.sdk.values.KV;
+import org.apache.beam.sdk.values.PCollection;
+
+/**
+ * Load test for {@link GroupByKey} operation.
+ *
+ * <p>The purpose of this test is to measure {@link GroupByKey}'s behaviour in stressful conditions.
+ * It uses {@link SyntheticBoundedIO} and {@link SyntheticStep}, both of which can be parametrized to
+ * generate keys and values of various sizes, impose delays (sleep or CPU burnout) at various moments
+ * during the pipeline execution and provide some other performance challenges (see the Source's and
+ * Step's documentation for more details).
+ *
+ * <p>In addition, this test allows one to: fanout (produce one input using the synthetic source
+ * and process it with multiple branches performing the same set of operations) and reiterate the
+ * produced PCollection multiple times.
+ *
+ * <p>To run it manually, use the following command:
+ *
+ * <pre>
+ *    ./gradlew run -p sdks/java/load-tests -PloadTest.args='
+ *      --fanout=1
+ *      --iterations=1
+ *      --sourceOptions={"numRecords":1000,...}
+ *      --stepOptions={"outputRecordsPerInputRecord":2...}'
+ *  </pre>
+ */
+public class GroupByKeyLoadTest {
+
+  /** Pipeline options for the test. */
+  public interface Options extends PipelineOptions, ApplicationNameOptions {
+
+    @Description("Options for synthetic source")
+    @Validation.Required
+    String getSourceOptions();
+
+    void setSourceOptions(String sourceOptions);
+
+    @Description("Options for synthetic step")
+    String getStepOptions();
+
+    void setStepOptions(String stepOptions);
+
+    @Description("The number of GroupByKey operations to perform in parallel (fanout)")
+    @Default.Integer(1)
+    Integer getFanout();
+
+    void setFanout(Integer fanout);
+
+    @Description("Number of reiterations over per-key-grouped values to perform.")
+    @Default.Integer(1)
+    Integer getIterations();
+
+    void setIterations(Integer iterations);
+
+    static Options readFromArgs(String[] args) {
+      return PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
+    }
+
+    static <T extends SyntheticOptions> T fromJsonString(String json, Class<T> type)
+        throws IOException {
+      ObjectMapper mapper = new ObjectMapper();
+      T result = mapper.readValue(json, type);
+      result.validate();
+      return result;
+    }
+  }
+
+  public static void main(String[] args) throws IOException {
+    Options options = readFromArgs(args);
+
+    SyntheticSourceOptions sourceOptions =
+        fromJsonString(options.getSourceOptions(), SyntheticSourceOptions.class);
+
+    Optional<SyntheticStep> syntheticStep = createSyntheticStep(options);
+
+    Pipeline pipeline = Pipeline.create(options);
+
+    PCollection<KV<byte[], byte[]>> input =
+        pipeline.apply(SyntheticBoundedIO.readFrom(sourceOptions));
+
+    for (int branch = 0; branch < options.getFanout(); branch++) {
+      applySyntheticStep(input, branch, syntheticStep)
+          .apply(format("Group by key (%s)", branch), GroupByKey.create())
+          .apply(
+              format("Ungroup and reiterate (%s)", branch),
+              ParDo.of(new UngroupAndReiterate(options.getIterations())));
+    }
+
+    pipeline.run().waitUntilFinish();
+  }
+
+  private static PCollection<KV<byte[], byte[]>> applySyntheticStep(
+      PCollection<KV<byte[], byte[]>> input, int branch, Optional<SyntheticStep> syntheticStep) {
+
+    if (syntheticStep.isPresent()) {
+      return input.apply(format("Synthetic step (%s)", branch), ParDo.of(syntheticStep.get()));
+    } else {
+      return input;
+    }
+  }
+
+  private static Optional<SyntheticStep> createSyntheticStep(Options options) throws IOException {
+    if (options.getStepOptions() != null && !options.getStepOptions().isEmpty()) {
+      return Optional.of(
+          new SyntheticStep(fromJsonString(options.getStepOptions(), SyntheticStep.Options.class)));
+    } else {
+      return Optional.empty();
+    }
+  }
+
+  private static class UngroupAndReiterate
+      extends DoFn<KV<byte[], Iterable<byte[]>>, KV<byte[], byte[]>> {
+
+    private int iterations;
+
+    UngroupAndReiterate(int iterations) {
+      this.iterations = iterations;
+    }
+
+    @ProcessElement
+    public void processElement(ProcessContext c) {
+      byte[] key = c.element().getKey();
+
+      // reiterate "iterations" times, emit output only once
+      for (int i = 0; i < iterations; i++) {
+        for (byte[] value : c.element().getValue()) {
+
+          if (i == iterations - 1) {
+            c.output(KV.of(key, value));
+          }
+        }
+      }
+    }
+  }
+}
diff --git a/sdks/java/load-tests/src/main/java/org/apache/beam/sdk/loadtests/package-info.java b/sdks/java/load-tests/src/main/java/org/apache/beam/sdk/loadtests/package-info.java
new file mode 100644
index 0000000..5bf3d56
--- /dev/null
+++ b/sdks/java/load-tests/src/main/java/org/apache/beam/sdk/loadtests/package-info.java
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/** Load test suite. */
+package org.apache.beam.sdk.loadtests;
diff --git a/sdks/python/apache_beam/options/pipeline_options.py b/sdks/python/apache_beam/options/pipeline_options.py
index a0059db..a86cab9 100644
--- a/sdks/python/apache_beam/options/pipeline_options.py
+++ b/sdks/python/apache_beam/options/pipeline_options.py
@@ -20,6 +20,7 @@
 from __future__ import absolute_import
 
 import argparse
+import logging
 from builtins import list
 from builtins import object
 
@@ -213,7 +214,20 @@
       subset[str(cls)] = cls
     for cls in subset.values():
       cls._add_argparse_args(parser)  # pylint: disable=protected-access
-    known_args, _ = parser.parse_known_args(self._flags)
+    known_args, unknown_args = parser.parse_known_args(self._flags)
+    # Parse args which are not known at this point but might be recognized
+    # at a later point in time, i.e. by the actual Runner.
+    if unknown_args and unknown_args[0] != '':
+      logging.info("Parsing unknown args: %s", unknown_args)
+      for arg in unknown_args:
+        # https://issues.apache.org/jira/browse/BEAM-5442
+        if arg.startswith('--') and not arg.startswith('--beam_plugins'):
+          parser.add_argument(arg.split('=', 1)[0], nargs='?')
+      # repeat parsing with unknown options added
+      known_args, unknown_args = parser.parse_known_args(self._flags)
+      if unknown_args:
+        logging.warning("Discarding unparseable args: %s", unknown_args)
+
     result = vars(known_args)
 
     # Apply the overrides if any
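For readers skimming the hunk above: unknown `--flag` style arguments are registered as optional string arguments and the flags are parsed a second time, so options that only a later consumer (such as the runner) understands still end up in the parsed result. A minimal standalone sketch of that two-pass approach (the `--num_workers` option and the sample values below are illustrative, not Beam code):

    import argparse

    def parse_with_unknown(flags):
        parser = argparse.ArgumentParser()
        parser.add_argument('--num_workers', type=int)  # a known option, for illustration
        known, unknown = parser.parse_known_args(flags)
        if unknown:
            # Register every unknown '--flag' as an optional argument, then re-parse.
            for arg in unknown:
                if arg.startswith('--'):
                    parser.add_argument(arg.split('=', 1)[0], nargs='?')
            known, unknown = parser.parse_known_args(flags)
        return vars(known)

    # Prints {'num_workers': 5, 'unknown_arg': 'unknown_value'}
    print(parse_with_unknown(['--num_workers', '5', '--unknown_arg', 'unknown_value']))
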
diff --git a/sdks/python/apache_beam/options/pipeline_options_test.py b/sdks/python/apache_beam/options/pipeline_options_test.py
index 9c14c25..651e733 100644
--- a/sdks/python/apache_beam/options/pipeline_options_test.py
+++ b/sdks/python/apache_beam/options/pipeline_options_test.py
@@ -45,16 +45,24 @@
        'display_data': [DisplayDataItemMatcher('num_workers', 5)]},
       {
           'flags': [
-              '--profile_cpu', '--profile_location', 'gs://bucket/', 'ignored'],
+              '--profile_cpu', '--profile_location', 'gs://bucket/',
+              'ignored', '-invalid=arg', '--unknown_arg', 'unknown_value',
+              '--unknown_flag'
+          ],
           'expected': {
               'profile_cpu': True, 'profile_location': 'gs://bucket/',
               'mock_flag': False, 'mock_option': None,
-              'mock_multi_option': None},
+              'mock_multi_option': None,
+              'unknown_arg': 'unknown_value',
+              'unknown_flag': None},
           'display_data': [
               DisplayDataItemMatcher('profile_cpu',
                                      True),
               DisplayDataItemMatcher('profile_location',
-                                     'gs://bucket/')]
+                                     'gs://bucket/'),
+              DisplayDataItemMatcher('unknown_arg',
+                                     'unknown_value')
+          ]
       },
       {'flags': ['--num_workers', '5', '--mock_flag'],
        'expected': {'num_workers': 5,
diff --git a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
index 868d264..1acd348 100644
--- a/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
+++ b/sdks/python/apache_beam/runners/dataflow/dataflow_runner.py
@@ -155,9 +155,15 @@
               or str(response.currentState) == 'JOB_STATE_UPDATED'
               or str(response.currentState) == 'JOB_STATE_DRAINED'):
             break
-          # The job has failed; ensure we see any final error messages.
-          sleep_secs = 1.0      # poll faster during the final countdown
-          final_countdown_timer_secs -= sleep_secs
+
+          # Check that job is in a post-preparation state before starting the
+          # final countdown.
+          if (str(response.currentState) not in (
+              'JOB_STATE_PENDING', 'JOB_STATE_QUEUED')):
+            # The job has failed; ensure we see any final error messages.
+            sleep_secs = 1.0      # poll faster during the final countdown
+            final_countdown_timer_secs -= sleep_secs
+
       time.sleep(sleep_secs)
 
       # Get all messages since beginning of the job run or since last message.
diff --git a/sdks/python/apache_beam/runners/portability/flink_runner_test.py b/sdks/python/apache_beam/runners/portability/flink_runner_test.py
index 09261c9..044e0e9 100644
--- a/sdks/python/apache_beam/runners/portability/flink_runner_test.py
+++ b/sdks/python/apache_beam/runners/portability/flink_runner_test.py
@@ -81,6 +81,9 @@
     def test_pardo_state_only(self):
       raise unittest.SkipTest("BEAM-2918 - User state not yet supported.")
 
+    def test_pardo_timers(self):
+      raise unittest.SkipTest("BEAM-4681 - User timers not yet supported.")
+
     # Inherits all other tests.
 
   # Run the tests.
diff --git a/sdks/python/apache_beam/runners/worker/sdk_worker_main_test.py b/sdks/python/apache_beam/runners/worker/sdk_worker_main_test.py
index 6b5972e..e9b584a 100644
--- a/sdks/python/apache_beam/runners/worker/sdk_worker_main_test.py
+++ b/sdks/python/apache_beam/runners/worker/sdk_worker_main_test.py
@@ -56,8 +56,8 @@
   def test_work_count_default_value(self):
     self._check_worker_count('{}', 12)
 
-  def test_parse_pipeine_options(self):
-    expected_options = PipelineOptions()
+  def test_parse_pipeline_options(self):
+    expected_options = PipelineOptions([])
     expected_options.view_as(
         SdkWorkerMainTest.MockOptions).m_m_option = [
             'worker_threads=1', 'beam_fn_api'
diff --git a/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py b/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py
index f2ca4e7..7b2c9cf 100644
--- a/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py
+++ b/sdks/python/apache_beam/runners/worker/worker_id_interceptor.py
@@ -21,7 +21,6 @@
 
 import collections
 import os
-import uuid
 
 import grpc
 
@@ -39,7 +38,7 @@
   # and throw exception in worker_id_interceptor.py after we have rolled out
   # the corresponding container changes.
   # Unique worker Id for this worker.
-  _worker_id = os.environ.get('WORKER_ID', str(uuid.uuid4()))
+  _worker_id = os.environ.get('WORKER_ID')
 
   def __init__(self):
     pass
diff --git a/sdks/python/apache_beam/testing/pipeline_verifiers_test.py b/sdks/python/apache_beam/testing/pipeline_verifiers_test.py
index dd7cf6d..0cce915 100644
--- a/sdks/python/apache_beam/testing/pipeline_verifiers_test.py
+++ b/sdks/python/apache_beam/testing/pipeline_verifiers_test.py
@@ -92,7 +92,7 @@
 
   def create_temp_file(self, content, directory=None):
     with tempfile.NamedTemporaryFile(delete=False, dir=directory) as f:
-      f.write(content)
+      f.write(content.encode('utf-8'))
       return f.name
 
   def test_file_checksum_matcher_success(self):
@@ -133,7 +133,7 @@
     self.assertEqual(cm.exception.args[0],
                      'Sleep seconds, if received, must be int. '
                      'But received: \'invalid_sleep_time\', '
-                     '<type \'str\'>')
+                     '{}'.format(str))
 
   @patch('time.sleep', return_value=None)
   def test_file_checksum_matcher_sleep_before_verify(self, mocked_sleep):
diff --git a/sdks/python/apache_beam/testing/test_utils.py b/sdks/python/apache_beam/testing/test_utils.py
index 50dd7b4..1f0e99e 100644
--- a/sdks/python/apache_beam/testing/test_utils.py
+++ b/sdks/python/apache_beam/testing/test_utils.py
@@ -79,7 +79,7 @@
   content.sort()
   m = hashlib.new(hashing_alg)
   for elem in content:
-    m.update(str(elem))
+    m.update(str(elem).encode('utf-8'))
   return m.hexdigest()
 
 
diff --git a/sdks/python/apache_beam/testing/test_utils_test.py b/sdks/python/apache_beam/testing/test_utils_test.py
index cd22df0..2b16c30c 100644
--- a/sdks/python/apache_beam/testing/test_utils_test.py
+++ b/sdks/python/apache_beam/testing/test_utils_test.py
@@ -74,13 +74,13 @@
     with utils.TempDir() as tempdir:
       filename = tempdir.create_temp_file(
           suffix='.txt',
-          lines=['line1\n', 'line2\n', 'line3\n'])
+          lines=[b'line1\n', b'line2\n', b'line3\n'])
       self.assertTrue(filename.endswith('.txt'))
 
       with open(filename, 'rb') as f:
-        self.assertEqual(f.readline(), 'line1\n')
-        self.assertEqual(f.readline(), 'line2\n')
-        self.assertEqual(f.readline(), 'line3\n')
+        self.assertEqual(f.readline(), b'line1\n')
+        self.assertEqual(f.readline(), b'line2\n')
+        self.assertEqual(f.readline(), b'line3\n')
 
   def test_cleanup_subscriptions(self):
     sub_client = mock.Mock()
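The encode calls and the b'...' literals in the two hunks above are Python 3 compatibility fixes: hashlib digests and files opened in binary mode only accept bytes, so text must be encoded explicitly. A minimal standalone sketch of a checksum helper in that style (the function below is illustrative, not the actual Beam utility):

    import hashlib

    def compute_hash(content, hashing_alg='sha1'):
        # Sort for a deterministic digest, then hash the UTF-8 encoded elements.
        m = hashlib.new(hashing_alg)
        for elem in sorted(str(e) for e in content):
            m.update(elem.encode('utf-8'))  # str must become bytes on Python 3
        return m.hexdigest()

    print(compute_hash(['line1', 'line2', 'line3']))
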
diff --git a/sdks/python/container/Dockerfile b/sdks/python/container/Dockerfile
index 7464592..4203b74 100644
--- a/sdks/python/container/Dockerfile
+++ b/sdks/python/container/Dockerfile
@@ -34,7 +34,7 @@
 # SDK dependencies not listed in base_image_requirements.txt will be installed when we install SDK
 # in the next RUN statement.
 
-COPY base_image_requirements.txt /tmp/base_image_requirements.txt
+COPY target/base_image_requirements.txt /tmp/base_image_requirements.txt
 RUN \
     pip install -r /tmp/base_image_requirements.txt && \
     # Check that the fast implementation of protobuf is used.
@@ -43,11 +43,11 @@
     rm -rf /root/.cache/pip
 
 
-COPY apache-beam.tar.gz /opt/apache/beam/tars/
+COPY target/apache-beam.tar.gz /opt/apache/beam/tars/
 RUN pip install /opt/apache/beam/tars/apache-beam.tar.gz[gcp] && \
     # Remove pip cache.
     rm -rf /root/.cache/pip
 
-ADD linux_amd64/boot /opt/apache/beam/
+ADD target/launcher/linux_amd64/boot /opt/apache/beam/
 
 ENTRYPOINT ["/opt/apache/beam/boot"]
diff --git a/sdks/python/container/base_image_requirements.txt b/sdks/python/container/base_image_requirements.txt
index 095dc6d..8e352a6 100644
--- a/sdks/python/container/base_image_requirements.txt
+++ b/sdks/python/container/base_image_requirements.txt
@@ -23,12 +23,14 @@
 # Any SDK dependencies not listed here will be installed when SDK is installed
 # into the container.
 
+avro==1.8.2;python_version<="2.7"
+avro-python3==1.8.2;python_version>="3.4"
 fastavro==0.21.4
 crcmod==1.7
 dill==0.2.8.2
 future==0.16.0
 futures==3.1.1
-grpcio==1.10.0
+grpcio==1.15.0
 hdfs==2.1.0
 httplib2==0.9.2
 mock==2.0.0
@@ -54,10 +56,10 @@
 python-snappy==0.5.3
 
 # These are additional packages likely to be used by customers.
-numpy==1.14.5
+numpy==1.15.2
 scipy==1.1.0
-pandas==0.22.0
+pandas==0.23.4
 protorpc==0.11.1
 python-gflags==3.0.6
 setuptools<=39.1.0 # requirement for Tensorflow.
-tensorflow==1.10.0
+tensorflow==1.11.0
diff --git a/sdks/python/container/build.gradle b/sdks/python/container/build.gradle
index 8f4256d..17bdcab 100644
--- a/sdks/python/container/build.gradle
+++ b/sdks/python/container/build.gradle
@@ -44,7 +44,6 @@
 
 task copyDockerfileDependencies(type: Copy, dependsOn: build) {
   from configurations.sdkSourceTarball
-  from file("build/launcher")
   from file("./base_image_requirements.txt")
   into "build/target"
   configurations.sdkSourceTarball.stopExecutionIfEmpty()
@@ -55,17 +54,17 @@
   build {
     // TODO(herohde): build local platform + linux-amd64, if possible.
     targetPlatform = ['linux-amd64']
-    outputLocation = './build/launcher/${GOOS}_${GOARCH}/boot'
+    outputLocation = './build/target/launcher/${GOOS}_${GOARCH}/boot'
   }
 }
 
 docker {
   name containerImageName(name: "python")
-  files "./build/target"
+  files "./build"
 }
 
 artifacts {
-  sdkHarnessLauncher file: file('./build/launcher'), builtBy: build
+  sdkHarnessLauncher file: file('./build/target/launcher'), builtBy: build
 }
 
 // Ensure that making the docker image builds any required artifacts
diff --git a/sdks/python/container/py3/Dockerfile b/sdks/python/container/py3/Dockerfile
index 66e17e0..3425653 100644
--- a/sdks/python/container/py3/Dockerfile
+++ b/sdks/python/container/py3/Dockerfile
@@ -34,7 +34,7 @@
 # SDK dependencies not listed in base_image_requirements.txt will be installed when we install SDK
 # in the next RUN statement.
 
-COPY base_image_requirements.txt /tmp/base_image_requirements.txt
+COPY target/base_image_requirements.txt /tmp/base_image_requirements.txt
 RUN \
     pip install -r /tmp/base_image_requirements.txt && \
     # Check that the fast implementation of protobuf is used.
@@ -44,11 +44,11 @@
 
 
 ENV BEAM_EXPERIMENTAL_PY3=1
-COPY apache-beam.tar.gz /opt/apache/beam/tars/
+COPY target/apache-beam.tar.gz /opt/apache/beam/tars/
 RUN pip install /opt/apache/beam/tars/apache-beam.tar.gz[gcp] && \
     # Remove pip cache.
     rm -rf /root/.cache/pip
 
-ADD linux_amd64/boot /opt/apache/beam/
+ADD target/linux_amd64/boot /opt/apache/beam/
 
 ENTRYPOINT ["/opt/apache/beam/boot"]
diff --git a/sdks/python/container/py3/build.gradle b/sdks/python/container/py3/build.gradle
index 5fef2be..d8d25a1 100644
--- a/sdks/python/container/py3/build.gradle
+++ b/sdks/python/container/py3/build.gradle
@@ -42,7 +42,7 @@
 
 docker {
   name containerImageName(name: "python3")
-  files "./build/target"
+  files "./build"
 }
 
 dockerPrepare.dependsOn copyDockerfileDependencies
diff --git a/sdks/python/setup.py b/sdks/python/setup.py
index e7eb836..ce94945 100644
--- a/sdks/python/setup.py
+++ b/sdks/python/setup.py
@@ -124,7 +124,7 @@
     'pytz>=2018.3,<=2018.4',
     'pyyaml>=3.12,<4.0.0',
     'pyvcf>=0.6.8,<0.7.0',
-    'typing>=3.6.0,<3.7.0',
+    'typing>=3.6.0,<3.7.0; python_version < "3.5.0"',
     'futures>=3.1.1,<4.0.0',
     'future>=0.16.0,<1.0.0',
     ]
diff --git a/settings.gradle b/settings.gradle
index 80d6250..673c78a 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -160,6 +160,8 @@
 project(":beam-sdks-java-io-synthetic").dir = file("sdks/java/io/synthetic")
 include "beam-sdks-java-javadoc"
 project(":beam-sdks-java-javadoc").dir = file("sdks/java/javadoc")
+include "beam-sdks-java-load-tests"
+project(":beam-sdks-java-load-tests").dir = file("sdks/java/load-tests")
 include "beam-sdks-java-maven-archetypes-examples"
 project(":beam-sdks-java-maven-archetypes-examples").dir = file("sdks/java/maven-archetypes/examples")
 include "beam-sdks-java-maven-archetypes-starter"
diff --git a/website/.github/PULL_REQUEST_TEMPLATE.md b/website/.github/PULL_REQUEST_TEMPLATE.md
index 7862359..ffb5c42 100644
--- a/website/.github/PULL_REQUEST_TEMPLATE.md
+++ b/website/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,3 +1,10 @@
+**Deprecation notice:** The website is being migrated to
+https://github.com/apache/beam/tree/master/website
+
+Please create new pull requests against the above repo.
+
+---
+
 *Please* add a meaningful description for your change here.
 
 Once your pull request has been opened and assigned a number, please edit the
diff --git a/website/README.md b/website/README.md
index 0fe1882..acf79f91 100644
--- a/website/README.md
+++ b/website/README.md
@@ -1,16 +1,24 @@
-# Apache Beam website
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
 
-_**The website source code is currently being migrated from 
-https://github.com/apache/beam-site, but is not yet ready. Please 
-continue to contribute changes at 
-[apache/beam-site](https://github.com/apache/beam-site) according to 
-the [website contribution guide](https://beam.apache.org/contribute/#contributing-to-the-website). 
-You can track migration progress via 
-[[BEAM-4493]](https://issues.apache.org/jira/browse/BEAM-4493).**_
+      http://www.apache.org/licenses/LICENSE-2.0
 
-This is the website for [Apache Beam](https://beam.apache.org/), hosted at:
+    Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+-->
 
-    https://beam.apache.org/
+These are the main sources of the website for Apache Beam, hosted at
+https://beam.apache.org/.
 
 ## About this site
 
@@ -18,55 +26,32 @@
 for additional formatting capabilities, this website uses
 [Twitter Bootstrap](http://getbootstrap.com/).
 
-### Repository Structure
+Documentation generated from source code, such as Javadoc and Pydoc, is stored
+separately on the [beam-site
+repository](https://github.com/apache/beam-site/tree/release-docs).
 
-This repository contains:
+## Active development
 
-1. `src/`: the source of the site, including markdown files containing the bulk of the content
-1. `content/`: html generated from the markdown (which is what is actually hosted on the website)
+Website development requires Docker to be installed if you wish to preview
+changes and run website tests.
 
-## Development Workflow
+The following command builds and serves the website locally:
 
-### Setup
+    $ ./gradlew -PpublishJekyllPort :beam-website:serveWebsite
 
-You need Ruby version >= 2.2.0 to build the project.
+Any changes made locally will trigger a rebuild of the website.
 
-Install [Ruby Gems](https://rubygems.org/pages/download), a package management framework for Ruby.
+Website tests may be run using this command:
 
-Install [Bundler](http://bundler.io/v1.3/rationale.html), which  we use to specify dependencies and ensure
-a consistent environment for building the website, even across multiple developers on different machines:
+    $ ./gradlew :beam-website:testWebsite
 
-    $ gem install bundler
+## Website push
 
-Use Bundler to download the versions of each dependency specified in the website's `Gemfile.lock`,
-including [Jekyll](https://jekyllrb.com/):
-
-    $ bundle install --deployment
-
-This will install a number of gems in a local `./vendor` directory.
-
-### Active development
-
-Launch Jekyll via Bundler in order to guarantee that the appropriate versions of the dependencies are used:
-
-    $ bundle exec jekyll serve
-
-Jekyll will start a webserver on port `4000`. As you make changes to the
-content, Jekyll will rebuild it automatically. This is helpful if you want to see
-how your changes will render in realtime.
-
-In addition, check for dead links and the like by running the tests via:
-
-    $ bundle exec rake test
-
-Running Jekyll may cause the `content/` directory to be generated.
-Merging autogenerated content can get tricky, so regenerating content is
-responsibility of the committer doing the final merge of your PR, and
-your PR should not contain changes to that directory.
-
-Before sending the PR for review, please run:
-
-    $ git checkout -- content
+After a PR is merged, a background Jenkins job will automatically generate and
+push [website
+content](https://github.com/apache/beam/tree/asf-site/website/generated-content)
+to the asf-site branch. This content is later picked up and pushed to
+https://beam.apache.org/.
 
 ## Additional Information
 
@@ -87,19 +72,3 @@
 Make sure that the updated `Gemfile.lock` is included in your pull request. For more information,
 see the Bundler [documentation](http://bundler.io/v1.3/rationale.html).
 
-## Apache License
-
-Except as otherwise noted this software is licensed under the
-[Apache License, Version 2.0](http://www.apache.org/licenses/LICENSE-2.0.html)
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
diff --git a/website/Rakefile b/website/Rakefile
index e814956..00ee3c6 100644
--- a/website/Rakefile
+++ b/website/Rakefile
@@ -5,22 +5,20 @@
 task :test do
   HTMLProofer.check_directory("./generated-content", {
     :typhoeus => {
-      :timeout => 60,
-      :connecttimeout => 40 },
+      :timeout => 120,
+      :connecttimeout => 90 },
     :allow_hash_href => true,
     :check_html => true,
-    :file_ignore => [/javadoc/, /v2/, /pydoc/],
+    :file_ignore => [/v2/],
     :url_ignore => [
-        # Javadocs and Pydocs are only available on asf-site branch
-        /documentation\/sdks\/javadoc/,
-        /documentation\/sdks\/pydoc/,
-
         /jstorm.io/,
         /datatorrent.com/,
         /ai.google/, # https://issues.apache.org/jira/browse/INFRA-16527
         /globenewswire.com/, # https://issues.apache.org/jira/browse/BEAM-5518
-        /www.se-radio.net/ # BEAM-5611: Can fail with rate limit HTTP 508 error
+        /www.se-radio.net/, # BEAM-5611: Can fail with rate limit HTTP 508 error
+        /beam.apache.org\/releases/, # BEAM-4499 remove once publishing is migrated
+        /atrato.io/ # BEAM-5665 atrato.io seems to be down
     ],
-    :parallel => { :in_processes => Etc.nprocessors },
+    :parallel => { :in_processes => 4 },
     }).run
 end
diff --git a/website/_config.yml b/website/_config.yml
index 4a42147..50826fa 100644
--- a/website/_config.yml
+++ b/website/_config.yml
@@ -60,7 +60,7 @@
   toc_levels:     2..6
 
 # The most recent release of Beam.
-release_latest: 2.6.0
+release_latest: 2.7.0
 
 # Plugins are configured in the Gemfile.
 
diff --git a/website/build.gradle b/website/build.gradle
index eb52de7..a0d8d95 100644
--- a/website/build.gradle
+++ b/website/build.gradle
@@ -65,8 +65,12 @@
   ext.containerId = {
     return standardOutput.toString().trim()
   }
+  def extraOptions = ''
+  if (project.hasProperty('publishJekyllPort')) {
+    extraOptions = '-p 127.0.0.1:4000:4000'
+  }
   commandLine '/bin/bash', '-c',
-    "docker create -v $project.rootDir:$dockerWorkDir -u \$(id -u):\$(id -g) $dockerImageTag"
+    "docker create -v $project.rootDir:$dockerWorkDir -u \$(id -u):\$(id -g) $extraOptions $dockerImageTag"
 }
 
 task startDockerContainer(type: Exec) {
@@ -115,6 +119,24 @@
 }
 build.dependsOn buildWebsite
 
+task serveWebsite(type: Exec) {
+  dependsOn startDockerContainer, setupBuildDir
+  finalizedBy stopAndRemoveDockerContainer
+  inputs.files 'Gemfile.lock', '_config.yml'
+  inputs.dir 'src'
+  outputs.dir "$buildDir/.sass-cache"
+  outputs.dir buildContentDir
+  commandLine 'docker', 'exec',
+    "${->startDockerContainer.containerId()}", '/bin/bash', '-c',
+    """cd $dockerWorkDir/build/website && \
+      bundle exec jekyll serve \
+      --config $dockerWorkDir/website/_config.yml \
+      --incremental \
+      --source $dockerWorkDir/website/src \
+      --host 0.0.0.0
+      """
+}
+
 task testWebsite(type: Exec) {
   dependsOn startDockerContainer, buildWebsite
   finalizedBy stopAndRemoveDockerContainer
@@ -199,7 +221,7 @@
   shell "git push ${gitboxUrl} asf-site"
 }
 
-commitWebsite.dependsOn testWebsite
+commitWebsite.dependsOn buildWebsite
 publishWebsite.dependsOn commitWebsite
 
 /*
diff --git a/website/src/.htaccess b/website/src/.htaccess
index 06fc74b..82e9c99 100644
--- a/website/src/.htaccess
+++ b/website/src/.htaccess
@@ -13,3 +13,12 @@
 # * Redirect (R) permanently (301) to https://beam.apache.org/,
 # * Stop processing more rules (L).
 RewriteRule ^(.*)$ https://beam.apache.org/$1 [L,R=301]
+
+# Javadocs / pydocs are available only on the published website, published from
+# https://github.com/apache/beam-site/tree/release-docs
+# They were previously hosted within this repository, and published at the URL
+# path /documentation/sdks/(javadoc|pydoc)/..
+# The following redirect maintains the previously supported URLs.
+RedirectMatch permanent "/documentation/sdks/(javadoc|pydoc)(.*)" "https://beam.apache.org/releases/$1$2"
+# Keep this updated to point to the current release.
+RedirectMatch "/releases/([^/]+)/current(.*)" "https://beam.apache.org/releases/$1/2.6.0$2"
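As a concrete illustration of the first rule above (the version number and class path are only examples), a legacy URL such as

    /documentation/sdks/javadoc/2.6.0/org/apache/beam/sdk/Pipeline.html

is redirected permanently to

    https://beam.apache.org/releases/javadoc/2.6.0/org/apache/beam/sdk/Pipeline.html
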
diff --git a/website/src/_data/authors.yml b/website/src/_data/authors.yml
index 6d34750..1950cae 100644
--- a/website/src/_data/authors.yml
+++ b/website/src/_data/authors.yml
@@ -18,6 +18,9 @@
 altay:
     name: Ahmet Altay
     email: altay@apache.org
+ccy:
+    name: Charles Chen
+    email: ccy@apache.org
 davor:
     name: Davor Bonaci
     email: davor@apache.org
diff --git a/website/src/_includes/section-menu/contribute.html b/website/src/_includes/section-menu/contribute.html
index 9ec08b8..b9bbba3 100644
--- a/website/src/_includes/section-menu/contribute.html
+++ b/website/src/_includes/section-menu/contribute.html
@@ -16,8 +16,8 @@
   <span class="section-nav-list-title">IDE tips</span>
 
   <ul class="section-nav-list">
-    <li><a href="{{ site.baseurl }}/contribute/intellij/">IntelliJ Tips</a></li>
-    <li><a href="{{ site.baseurl }}/contribute/eclipse/">Eclipse Tips</a></li>
+    <li><a href="https://cwiki.apache.org/confluence/display/BEAM/IntelliJ+Tips">IntelliJ Tips</a></li>
+    <li><a href="https://cwiki.apache.org/confluence/display/BEAM/Eclipse+Tips">Eclipse Tips</a></li>
   </ul>
 </li>
 <li>
diff --git a/website/src/_includes/section-menu/sdks.html b/website/src/_includes/section-menu/sdks.html
index 0102b4b..e9a661a 100644
--- a/website/src/_includes/section-menu/sdks.html
+++ b/website/src/_includes/section-menu/sdks.html
@@ -16,7 +16,7 @@
   <span class="section-nav-list-title">Java</span>
   <ul class="section-nav-list">
     <li><a href="{{ site.baseurl }}/documentation/sdks/java/">Java SDK overview</a></li>
-    <li><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/" target="_blank">Java SDK API reference <img src="{{ site.baseurl }}/images/external-link-icon.png"
+    <li><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/" target="_blank">Java SDK API reference <img src="{{ site.baseurl }}/images/external-link-icon.png"
                                                                                                                                    width="14" height="14"
                                                                                                                                    alt="External link."></a>
     </li>
@@ -30,7 +30,7 @@
   <span class="section-nav-list-title">Python</span>
   <ul class="section-nav-list">
     <li><a href="{{ site.baseurl }}/documentation/sdks/python/">Python SDK overview</a></li>
-    <li><a href="{{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/" target="_blank">Python SDK API reference <img src="{{ site.baseurl }}/images/external-link-icon.png"
+    <li><a href="https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/" target="_blank">Python SDK API reference <img src="{{ site.baseurl }}/images/external-link-icon.png"
                                                                                                                                    width="14" height="14"
                                                                                                                                    alt="External link."></a>
     </li>
@@ -64,7 +64,7 @@
       <ul class="section-nav-list">
         <li><a href="{{ site.baseurl }}/documentation/dsls/sql/data-types/">Data types</a></li>
         <li><a href="{{ site.baseurl }}/documentation/dsls/sql/lexical/">Lexical structure</a></li>
-        <li><a href="{{ site.baseurl }}/documentation/dsls/sql/create-table/">CREATE TABLE</a></li>
+        <li><a href="{{ site.baseurl }}/documentation/dsls/sql/create-external-table/">CREATE EXTERNAL TABLE</a></li>
         <li><a href="{{ site.baseurl }}/documentation/dsls/sql/select/">SELECT</a></li>
         <li><a href="{{ site.baseurl }}/documentation/dsls/sql/windowing-and-triggering/">Windowing & Triggering</a></li>
         <li><a href="{{ site.baseurl }}/documentation/dsls/sql/joins/">Joins</a></li>
diff --git a/website/src/_posts/2016-10-20-test-stream.md b/website/src/_posts/2016-10-20-test-stream.md
index 876b4d7d..be940e9 100644
--- a/website/src/_posts/2016-10-20-test-stream.md
+++ b/website/src/_posts/2016-10-20-test-stream.md
@@ -73,7 +73,7 @@
 ## Writing Deterministic Tests to Emulate Nondeterminism
 
 The Beam testing infrastructure provides the
-[PAssert]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/testing/PAssert.html)
+[PAssert](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/testing/PAssert.html)
 methods, which assert properties about the contents of a PCollection from within
 a pipeline. We have expanded this infrastructure to include
 [TestStream](https://github.com/apache/beam/blob/master/sdks/java/core/src/main/java/org/apache/beam/sdk/testing/TestStream.java),
diff --git a/website/src/_posts/2017-03-16-python-sdk-release.md b/website/src/_posts/2017-03-16-python-sdk-release.md
index c56449a..443a00f 100644
--- a/website/src/_posts/2017-03-16-python-sdk-release.md
+++ b/website/src/_posts/2017-03-16-python-sdk-release.md
@@ -31,7 +31,7 @@
 
 #### Try the Apache Beam Python SDK
 
-If you would like to try out the Python SDK, a good place to start is the [Quickstart]({{ site.baseurl }}/get-started/quickstart-py/). After that, you can take a look at additional [examples](https://github.com/apache/beam/tree/v0.6.0/sdks/python/apache_beam/examples), and deep dive into the [API reference]({{ site.baseurl }}/documentation/sdks/pydoc/).
+If you would like to try out the Python SDK, a good place to start is the [Quickstart]({{ site.baseurl }}/get-started/quickstart-py/). After that, you can take a look at additional [examples](https://github.com/apache/beam/tree/v0.6.0/sdks/python/apache_beam/examples), and deep dive into the [API reference](https://beam.apache.org/releases/pydoc/).
 
 Let’s take a look at a quick example together. First, install the `apache-beam` package from PyPI and start your Python interpreter.
 
diff --git a/website/src/_posts/2017-08-04-splittable-do-fn.md b/website/src/_posts/2017-08-04-splittable-do-fn.md
index 64a8363..3922825 100644
--- a/website/src/_posts/2017-08-04-splittable-do-fn.md
+++ b/website/src/_posts/2017-08-04-splittable-do-fn.md
@@ -85,24 +85,24 @@
 ## Beam Source API
 
 Apache Beam historically provides a Source API
-([BoundedSource]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/BoundedSource.html)
+([BoundedSource](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/BoundedSource.html)
 and
-[UnboundedSource]({{ site.baseurl }}/documentation/sdks/javadoc/{{
+[UnboundedSource](https://beam.apache.org/releases/javadoc/{{
 site.release_latest }}/org/apache/beam/sdk/io/UnboundedSource.html)) which does
 not have these limitations and allows development of efficient data sources for
 batch and streaming systems. Pipelines use this API via the
-[`Read.from(Source)`]({{ site.baseurl }}/documentation/sdks/javadoc/{{
+[`Read.from(Source)`](https://beam.apache.org/releases/javadoc/{{
 site.release_latest }}/org/apache/beam/sdk/io/Read.html) built-in `PTransform`.
 
 The Source API is largely similar to that of most other data processing
 frameworks, and allows the system to read data in parallel using multiple
 workers, as well as checkpoint and resume reading from an unbounded data source.
 Additionally, the Beam
-[`BoundedSource`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/BoundedSource.html)
+[`BoundedSource`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/BoundedSource.html)
 API provides advanced features such as progress reporting and [dynamic
 rebalancing]({{ site.baseurl }}/blog/2016/05/18/splitAtFraction-method.html)
 (which together enable autoscaling), and
-[`UnboundedSource`]({{ site.baseurl }}/documentation/sdks/javadoc/{{
+[`UnboundedSource`](https://beam.apache.org/releases/javadoc/{{
 site.release_latest }}/org/apache/beam/sdk/io/UnboundedSource.html) supports
 reporting the source's watermark and backlog *(until SDF, we believed that
 "batch" and "streaming" data sources are fundamentally different and thus
diff --git a/website/src/_posts/2018-06-26-beam-2.5.0.md b/website/src/_posts/2018-06-26-beam-2.5.0.md
index 9ee57a5..fe6d3ba 100644
--- a/website/src/_posts/2018-06-26-beam-2.5.0.md
+++ b/website/src/_posts/2018-06-26-beam-2.5.0.md
@@ -28,7 +28,7 @@
 # New Features / Improvements
 
 ## Go SDK support
-The Go SDK has been officially accepted into the project, after an incubation period and community effort. Go pipelines run on Dataflow runner. More details are [here](https://beam.apache.org/documentation/sdks/go/).
+The Go SDK has been officially accepted into the project, after an incubation period and community effort. Go pipelines run on Dataflow runner. More details are [here]({{ site.baseurl }}/documentation/sdks/go/).
 
 ## Parquet support
 Support for Apache Parquet format was added. It uses Parquet 1.10 release which, thanks to AvroParquerWriter's API changes, allows FileIO.Sink implementation.
diff --git a/website/src/_posts/2018-08-20-review-input-streaming-connectors.md b/website/src/_posts/2018-08-20-review-input-streaming-connectors.md
index d3a9c9a..09da93d 100644
--- a/website/src/_posts/2018-08-20-review-input-streaming-connectors.md
+++ b/website/src/_posts/2018-08-20-review-input-streaming-connectors.md
@@ -54,7 +54,7 @@
    </td>
    <td>Local<br>(Using the <code>file://</code> URI)
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/TextIO.html">TextIO</a>
+   <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/TextIO.html">TextIO</a>
    </td>
    <td><a href="https://spark.apache.org/docs/latest/api/java/org/apache/spark/streaming/StreamingContext.html#textFileStream-java.lang.String-">textFileStream</a><br>(Spark treats most Unix systems as HDFS-compatible, but the location should be accessible from all nodes)
    </td>
@@ -62,7 +62,7 @@
   <tr>
    <td>HDFS<br>(Using the <code>hdfs://</code> URI)
    </td>
-    <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/FileIO.html">FileIO</a> + <a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/hdfs/HadoopFileSystemOptions.html">HadoopFileSystemOptions</a>
+    <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/FileIO.html">FileIO</a> + <a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/hdfs/HadoopFileSystemOptions.html">HadoopFileSystemOptions</a>
    </td>
    <td><a href="https://spark.apache.org/docs/latest/api/java/org/apache/spark/streaming/util/HdfsUtils.html">HdfsUtils</a>
    </td>
@@ -72,7 +72,7 @@
    </td>
    <td>Cloud Storage<br>(Using the <code>gs://</code> URI)
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/FileIO.html">FileIO</a> + <a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/extensions/gcp/options/GcsOptions.html">GcsOptions</a>
+   <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/FileIO.html">FileIO</a> + <a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/extensions/gcp/options/GcsOptions.html">GcsOptions</a>
    </td>
    <td rowspan="2" ><a href="https://spark.apache.org/docs/latest/api/java/org/apache/spark/SparkContext.html#hadoopConfiguration--">hadoopConfiguration</a>
 and <a href="https://spark.apache.org/docs/latest/api/java/org/apache/spark/streaming/StreamingContext.html#textFileStream-java.lang.String-">textFileStream</a>
@@ -81,7 +81,7 @@
   <tr>
    <td>S3<br>(Using the <code>s3://</code> URI)
    </td>
-    <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/FileIO.html">FileIO</a> + <a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/aws/options/S3Options.html">S3Options</a>
+    <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/FileIO.html">FileIO</a> + <a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/aws/options/S3Options.html">S3Options</a>
    </td>
   </tr>
   <tr>
@@ -89,7 +89,7 @@
    </td>
    <td>Kafka
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/kafka/KafkaIO.html">KafkaIO</a>
+   <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/kafka/KafkaIO.html">KafkaIO</a>
    </td>
    <td><a href="https://spark.apache.org/docs/latest/streaming-kafka-0-10-integration.html">spark-streaming-kafka</a>
    </td>
@@ -97,7 +97,7 @@
   <tr>
    <td>Kinesis
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/kinesis/KinesisIO.html">KinesisIO</a>
+   <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/kinesis/KinesisIO.html">KinesisIO</a>
    </td>
    <td><a href="https://spark.apache.org/docs/latest/streaming-kinesis-integration.html">spark-streaming-kinesis</a>
    </td>
@@ -105,7 +105,7 @@
   <tr>
    <td>Cloud Pub/Sub
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.html">PubsubIO</a>
+   <td><a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.html">PubsubIO</a>
    </td>
    <td><a href="https://github.com/apache/bahir/tree/master/streaming-pubsub">spark-streaming-pubsub</a> from <a href="http://bahir.apache.org">Apache Bahir</a>
    </td>
@@ -146,7 +146,7 @@
    </td>
    <td>Local
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.io.textio.html">io.textio</a>
+   <td><a href="https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.io.textio.html">io.textio</a>
    </td>
    <td><a href="http://spark.apache.org/docs/latest/api/python/pyspark.streaming.html#pyspark.streaming.StreamingContext.textFileStream">textFileStream</a>
    </td>
@@ -154,7 +154,7 @@
   <tr>
    <td>HDFS
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.io.hadoopfilesystem.html">io.hadoopfilesystem</a>
+   <td><a href="https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.io.hadoopfilesystem.html">io.hadoopfilesystem</a>
    </td>
    <td><a href="https://spark.apache.org/docs/latest/api/java/org/apache/spark/SparkContext.html#hadoopConfiguration--">hadoopConfiguration</a> (Access through <code>sc._jsc</code> with Py4J)
 and <a href="http://spark.apache.org/docs/latest/api/python/pyspark.streaming.html#pyspark.streaming.StreamingContext.textFileStream">textFileStream</a>
@@ -165,7 +165,7 @@
    </td>
    <td>Google Cloud Storage
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.gcsio.html">io.gcp.gcsio</a>
+   <td><a href="https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.gcsio.html">io.gcp.gcsio</a>
    </td>
    <td rowspan="2" ><a href="http://spark.apache.org/docs/latest/api/python/pyspark.streaming.html#pyspark.streaming.StreamingContext.textFileStream">textFileStream</a>
    </td>
@@ -197,7 +197,7 @@
   <tr>
    <td>Cloud Pub/Sub
    </td>
-   <td><a href="{{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.pubsub.html">io.gcp.pubsub</a>
+   <td><a href="https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.pubsub.html">io.gcp.pubsub</a>
    </td>
    <td>N/A
    </td>
diff --git a/website/src/_posts/2018-10-03-beam-2.7.0.md b/website/src/_posts/2018-10-03-beam-2.7.0.md
new file mode 100644
index 0000000..c515de6
--- /dev/null
+++ b/website/src/_posts/2018-10-03-beam-2.7.0.md
@@ -0,0 +1,76 @@
+---
+layout: post
+title:  "Apache Beam 2.7.0"
+date:   2018-10-03 00:00:01 -0800
+excerpt_separator: <!--more-->
+categories: blog
+authors:
+        - ccy
+
+---
+<!--
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+We are happy to present the new 2.7.0 release of Beam. This release includes both improvements and new functionality.
+See the [download page]({{ site.baseurl }}/get-started/downloads/#270-2018-10-02) for this release.<!--more-->
+For more information on changes in 2.7.0, check out the
+[detailed release notes](https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12319527&version=12343654).
+
+## New Features / Improvements
+
+### New I/Os
+
+* KuduIO
+* Amazon SNS sink
+* Amazon SqsIO
+
+### Dependency Upgrades
+
+* Apache Calcite dependency upgraded to 1.17.0
+* Apache Derby dependency upgraded to 10.14.2.0
+* Apache HTTP components upgraded (see release notes).
+
+### Portability
+
+* Experimental support for running Python pipelines on the local Flink runner
+for simple examples; see the latest status here:
+{{ site.baseurl }}/contribute/portability/#status.
+
+## Miscellaneous Fixes
+
+### I/Os
+
+* KinesisIO: fixed a dependency issue
+
+## List of Contributors
+
+According to git shortlog, the following 72 people contributed
+to the 2.7.0 release. Thank you to all contributors!
+
+Ahmet Altay, Alan Myrvold, Alexey Romanenko, Aljoscha Krettek,
+Andrew Pilloud, Ankit Jhalaria, Ankur Goenka, Anton Kedin, Boyuan
+Zhang, Carl McGraw, Carlos Alonso, cclauss, Chamikara Jayalath,
+Charles Chen, Cory Brzycki, Daniel Oliveira, Dariusz Aniszewski,
+devinduan, Eric Beach, Etienne Chauchot, Eugene Kirpichov, Garrett
+Jones, Gene Peters, Gleb Kanterov, Henning Rohde, Henry Suryawirawan,
+Holden Karau, Huygaa Batsaikhan, Ismaël Mejía, Jason Kuster, Jean-
+Baptiste Onofré, Joachim van der Herten, Jozef Vilcek, jxlewis, Kai
+Jiang, Katarzyna Kucharczyk, Kenn Knowles, Krzysztof Trubalski, Kyle
+Winkelman, Leen Toelen, Luis Enrique Ortíz Ramirez, Lukasz Cwik,
+Łukasz Gajowy, Luke Cwik, Mark Liu, Matthias Feys, Maximilian Michels,
+Melissa Pashniak, Mikhail Gryzykhin, Mikhail Sokolov, mingmxu, Norbert
+Chen, Pablo Estrada, Prateek Chanda, Raghu Angadi, Ravi Pathak, Reuven
+Lax, Robert Bradshaw, Robert Burke, Rui Wang, Ryan Williams, Sindy Li,
+Thomas Weise, Tim Robertson, Tormod Haavi, Udi Meiri, Vaclav Plajt,
+Valentyn Tymofieiev, xiliu, XuMingmin, Yifan Zou, Yueyang Qiu.
diff --git a/website/src/contribute/dependencies.md b/website/src/contribute/dependencies.md
index 99ec6e6..c8c4ccc 100644
--- a/website/src/contribute/dependencies.md
+++ b/website/src/contribute/dependencies.md
@@ -52,16 +52,17 @@
 
 These kinds of urgently required upgrades might not get picked up automatically by the Jenkins job for a few months, so the Beam community has to act to identify such issues and perform the upgrades early.
 
-## JIRA Automation
+## JIRA Issue Automation
 
 In order to track the dependency upgrade process, JIRA tickets will be created per significant outdated dependency based on the report. A bot named *Beam Jira Bot* was created for managing JIRA issues. The Beam community has agreed on the following policies for creating and updating issues.
-* Issues will be named as "Beam Dependency Update Request: <dep_name> <dep_newest_version>".
-* Issues will be created under the component *"dependencies"*
-* Issues will be assigned to the primary owner of the dependencies, who are mentioned in the dependency ownership files. ([Java Dependency Owners](https://github.com/apache/beam/blob/master/ownership/JAVA_DEPENDENCY_OWNERS.yaml) and [Python Dependency Owners](https://github.com/apache/beam/blob/master/ownership/PYTHON_DEPENDENCY_OWNERS.yaml))
-* If more than one owners found for a dependency, the first owner will be picked as the primary owner, the others will be pinged in the issue's description.
-* If no owners found, leave the assignee empty. The component lead is responsible for triaging the issue.
-* Avoid creating duplicate issues. Updating the descriptions of the open issues created by the previous dependency check.
-* The dependency sometimes is not able to be upgraded, the issue should be closed as *"won't fix"*. And, the bot should avoid recreating issues with "won't fix".
+* The title (summary) of each issue will be in the format "Beam Dependency Update Request: <dep_name>", where <dep_name> is the dependency artifact name.
+* Issues will be created under the component *"dependencies"*.
+* Owners of dependencies will be notified by tagging the corresponding JIRA IDs mentioned in the ownership files in the issue description. See [Java Dependency Owners](https://github.com/apache/beam/blob/master/ownership/JAVA_DEPENDENCY_OWNERS.yaml) and [Python Dependency Owners](https://github.com/apache/beam/blob/master/ownership/PYTHON_DEPENDENCY_OWNERS.yaml) for the current owners of Java SDK and Python SDK dependencies, respectively.
+* The automated tool will not create duplicate issues for the same dependency. Instead, when an issue would be created for a given dependency, the tool looks for an existing JIRA and updates its description with the latest information, for example, the current version of the dependency.
+* If a Beam community member determines that a given dependency should not be upgraded, the corresponding JIRA issue can be closed with a fix version specified.
+* The automated tool will reopen a JIRA for a given dependency when one of the following conditions is met:
+  * The next SDK release is for a fix version mentioned in the JIRA.
+  * Six months __and__ three or more minor releases have passed since the JIRA was closed.
 
 ## Upgrading identified outdated dependencies
 
@@ -91,4 +92,4 @@
 
 ## Dependency updates and backwards compatibility 
 
-Beam releases [adhere to](https://beam.apache.org/get-started/downloads/) semantic versioning. Hence, community members should take care when updating dependencies. Minor version updates to dependencies should be backwards compatible in most cases. Some updates to dependencies though may result in backwards incompatible API or functionality changes to Beam. PR reviewers and committers should take care to detect any dependency updates that could potentially introduce backwards incompatible changes to Beam before merging and PRs that update dependencies should include a statement regarding this verification in the form of a PR comment. Dependency updates that result in backwards incompatible changes to non-experimental features of Beam should be held till next major version release of Beam. Any exceptions to this policy should only occur in extreme cases (for example, due to a security vulnerability of an existing dependency that is only fixed in a subsequent major version) and should be discussed in the Beam dev list. Note that backwards incompatible changes to experimental features may be introduced in a minor version release. 
+Beam releases [adhere to]({{ site.baseurl }}/get-started/downloads/) semantic versioning. Hence, community members should take care when updating dependencies. Minor version updates to dependencies should be backwards compatible in most cases, but some updates may result in backwards incompatible API or functionality changes to Beam. PR reviewers and committers should take care to detect any dependency updates that could potentially introduce backwards incompatible changes to Beam before merging, and PRs that update dependencies should include a statement regarding this verification in the form of a PR comment. Dependency updates that result in backwards incompatible changes to non-experimental features of Beam should be held until the next major version release of Beam. Any exceptions to this policy should only occur in extreme cases (for example, due to a security vulnerability of an existing dependency that is only fixed in a subsequent major version) and should be discussed on the Beam dev list. Note that backwards incompatible changes to experimental features may be introduced in a minor version release.
diff --git a/website/src/contribute/eclipse.md b/website/src/contribute/eclipse.md
deleted file mode 100644
index 4eff1f0..0000000
--- a/website/src/contribute/eclipse.md
+++ /dev/null
@@ -1,96 +0,0 @@
----
-layout: section
-title: "Beam Eclipse Tips"
-permalink: /contribute/eclipse/
-section_menu: section-menu/contribute.html
----
-<!--
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-# Eclipse Tips
-
-> These are best-effort community-contributed tips, and are not guaranteed to work with any particular Eclipse setup.
-
-## Eclipse version
-
-Use a recent Eclipse version that includes
-[Buildship](https://projects.eclipse.org/projects/tools.buildship) for Gradle
-integration. Currently we recommend Eclipse Oxygen. Start Eclipse with a fresh
-workspace in a separate directory from your checkout.
-
-## Initial setup
-
-Before setting up Eclipse integration, verify that you can successfully build
-from the commandline by building a sample package:
-
-```
-./gradlew :beam-examples-java:build
-```
-
-If you receive any errors, first verify your environment setup:
-
-1. If running on Mac or Linux, launch Eclipse from a terminal. This is necessary
-   to ensure that Eclipse has proper environment setup from user profile
-   scripts, i.e. .bashrc.
-2. Install [Buildship Gradle
-   Integration](https://marketplace.eclipse.org/content/buildship-gradle-integration).
-   This will allow importing and interacting with the Gradle build.
-3. Open the project import wizard in Eclipse via "File" > "Import".
-4. From the "Import" screen, select "Gradle" > "Existing Gradle Project", and click
-   Next.
-5. From the "Import Gradle Project" screen, fill in the Project root directory
-   with your local git path, and click Finish.
-
-Eclipse will scan the project tree and import each as a separate Package.
-
-Verify that your workspace is correctly configured by invoking
-'beam-runners-direct-java:build' from the "Gradle Tasks" pane. The build should
-succeed with no errors.
-
-## Building
-
-After your Eclipse workspace is properly setup, you will have a "Gradle
-Tasks" window with a set of operations. If you don't see the pane, open it
-from "Window" > "Show View" > Other.. > "Gradle" > "Gradle Tasks".
-
-From the "Gradle Tasks" window, you can build any task registered with Gradle.
-For example, if you are working on Kinesis IO, select 'beam-sdks-java-io-kinesis:build'.
-
-## Checkstyle
-
-Eclipse supports checkstyle within the IDE using the Checkstyle plugin.
-
-1. Install the [Checkstyle
-   plugin](https://marketplace.eclipse.org/content/checkstyle-plug).
-2. Configure the Checkstyle plugin by going to Preferences -> Checkstyle.
-    1. Click "New..."
-    2. Select "External Configuration File" for type
-    3. Click "Browse..." and select
-       `sdks/java/build-tools/src/main/resources/beam/checkstyle.xml`
-    4. Enter "Beam Checks" under "Name:"
-    5. Click "OK", then "OK"
-
-## Code Style
-
-Eclipse supports code styles within the IDE. Use one or both of the following
-to ensure your code style matches the project's checkstyle enforcement.
-
-1. The simplest way to have uniform code style is to use the [Google
-   Java Format plugin](https://github.com/google/google-java-format#eclipse)
-2. You can also configure Eclipse to use `beam-codestyle.xml`
-    1. Go to Preferences -> Java -> Code Style -> Formatter
-    2. Click "Import..." and select
-       `sdks/java/build-tools/src/main/resources/beam/beam-codestyle.xml`
-    3. Click "Apply" and "OK"
-
diff --git a/website/src/contribute/index.md b/website/src/contribute/index.md
index 9390e65..230c3df 100644
--- a/website/src/contribute/index.md
+++ b/website/src/contribute/index.md
@@ -273,12 +273,10 @@
 
 ## Contributing to the website
 
-The Beam website is in the [Beam Site GitHub
-mirror](https://github.com/apache/beam-site) repository in the `asf-site`
-branch (_not_ `master`).  The
-[README](https://github.com/apache/beam-site/blob/asf-site/README.md) there
-explains how to modify different parts of the site. The GitHub workflow is the
-same - make your change and open a pull request.
+The Beam website is in the `/website` directory of the repo. The
+[README](https://github.com/apache/beam/blob/master/website) there explains how
+to modify different parts of the site. The GitHub workflow is the same - make
+your change and open a pull request.
 
 Issues are tracked in the
 [website](https://issues.apache.org/jira/issues/?jql=project%20%3D%20BEAM%20AND%20component%20%3D%20website)
@@ -350,7 +348,7 @@
  - running Performance Tests on runners other than Dataflow and Direct
 - improving the existing Performance Testing Framework and its documentation
 
-See the [documentation](https://beam.apache.org/documentation/io/testing/#i-o-transform-integration-tests) and the [initial proposal](https://docs.google.com/document/d/1dA-5s6OHiP_cz-NRAbwapoKF5MEC1wKps4A5tFbIPKE/edit?usp=sharing)(for file based tests).
+See the [documentation]({{ site.baseurl }}/documentation/io/testing/#i-o-transform-integration-tests) and the [initial proposal](https://docs.google.com/document/d/1dA-5s6OHiP_cz-NRAbwapoKF5MEC1wKps4A5tFbIPKE/edit?usp=sharing) (for file-based tests).
 
 If you're willing to help in this area, tag the following people in PRs: [@chamikaramj](https://github.com/chamikaramj), [@DariuszAniszewski](https://github.com/dariuszaniszewski), [@lgajowy](https://github.com/lgajowy), [@szewi](https://github.com/szewi), [@kkucharc](https://github.com/kkucharc)
 
diff --git a/website/src/contribute/intellij.md b/website/src/contribute/intellij.md
deleted file mode 100644
index e30911e..0000000
--- a/website/src/contribute/intellij.md
+++ /dev/null
@@ -1,94 +0,0 @@
----
-layout: section
-title: "Beam IntelliJ Tips"
-permalink: /contribute/intellij/
-section_menu: section-menu/contribute.html
----
-<!--
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-# IntelliJ Tips
-
-> These are best-effort community-contributed tips, and are not guaranteed to work with any particular IntelliJ setup. 
-
-## Create a working Gradle module
-
-(as of Intellij 2018.1.6)
-
-1. Create an empty IntelliJ project outside of the Beam source tree.
-2. Under Project Structure > Project, select a Project SDK.
-3. Under Project Structure > Modules, click the + sign to add a module and
-   select "Import Module".
-    1. Select the directory containing the Beam source tree.
-    2. Tick the "Import module from external model" button and select Gradle
-       from the list.
-    3. Tick the following boxes.
-       * Create separate module per source set
-       * Use default gradle wrapper
-4. Delegate build actions to Gradle by going to Preferences/Settings > Build, Execution,
-   Deployment > Build Tools > Gradle > Runner and checking "Delegate IDE build/run
-   actions to gradle".
-
-This should result in a working Gradle project. Build the project by executing
-the "build" task in the root Gradle module.
-
-## Checkstyle
-
-IntelliJ supports checkstyle within the IDE using the Checkstyle-IDEA plugin.
-
-Note: Older versions of IntelliJ may not support the Checkstyle file used by Beam.
-
-1. Install the "Checkstyle-IDEA" plugin from the IntelliJ plugin repository
-2. Configure the plugin by going to Settings -> Other Settings -> Checkstyle
-3. Set Checkstyle version to the same as in `/build_rules.gradle` (e.g. 8.7)
-4. Set the "Scan Scope" to "Only Java sources (including tests)"
-5. In the "Configuration File" pane, add a new configuration using the plus icon:
-    1. Set the "Description" to "Beam"
-    2. Select "Use a local Checkstyle file", and point it to
-      `sdks/java/build-tools/src/main/resources/beam/checkstyle.xml` within
-      your repository
-    3. Check the box for "Store relative to project location", and click
-      "Next"
-    4. Configure the `checkstyle.suppressions.file` property value to
-      `suppressions.xml`, and click "Next", then "Finish"
-6. Select "Beam" as the only active configuration file, and click "Apply" and
-   "OK"
-7. Checkstyle will now give warnings in the editor for any Checkstyle
-   violations
-
-You can also scan an entire module by opening the Checkstyle tools window and
-clicking the "Check Module" button. The scan should report no errors.
-
-Note: Selecting "Check Project" may report some errors from the archetype
-modules as they are not configured for Checkstyle validation.
-
-## Code Style
-
-Note: As of release 2.6.0 uniform formatting for Java and Groovy code is automated by the build
-through the [Spotless Gradle plugin](https://github.com/diffplug/spotless/tree/master/plugin-gradle).
-Instead of relying on the IDE, now you can run `./gradlew spotlessApply`
-to reformat changes prior to commit.
-
-IntelliJ supports code styles within the IDE. Use one or both of the following
-to ensure your code style matches the project's checkstyle enforcements.
-
-1. The simplest way to have uniform code style is to use the
-   [Google Java Format
-   plugin](https://plugins.jetbrains.com/plugin/8527-google-java-format)
-2. You can also configure IntelliJ to use `beam-codestyle.xml`
-    1. Go to Settings -> Code Style -> Java
-    2. Click the cogwheel icon next to 'Scheme' and select Import Scheme -> Eclipse XML Profile
-    3. Select `sdks/java/build-tools/src/main/resources/beam/beam-codestyle.xml`
-    4. Click "OK"
-    5. Click "Apply" and "OK"
diff --git a/website/src/contribute/postcommits-policies.md b/website/src/contribute/postcommits-policies.md
index b4e6d35..8e6af72 100644
--- a/website/src/contribute/postcommits-policies.md
+++ b/website/src/contribute/postcommits-policies.md
@@ -49,8 +49,7 @@
 
 ### I found a test failure {#found-failing-test}
 
-1.  Create a [JIRA issue](https://issues.apache.org/jira/issues/?jql=project%20%3D%20BEAM%20AND%20component%20%3D%20test-failures)
-    and assign it to yourself.
+1.  Create a [JIRA issue](https://s.apache.org/beam-test-failure) and assign it to yourself.
 1.  Do high level triage of the failure.
 1.  [Assign the JIRA issue to a relevant person]({{ site.baseurl }}/contribute/postcommits-guides/index.html#find_specialist).
 
diff --git a/website/src/contribute/ptransform-style-guide.md b/website/src/contribute/ptransform-style-guide.md
index da04baf..4cdcb7b 100644
--- a/website/src/contribute/ptransform-style-guide.md
+++ b/website/src/contribute/ptransform-style-guide.md
@@ -202,8 +202,8 @@
 Do:
 
 * Generally, follow the rules of [semantic versioning](http://semver.org/).
-* If the API of the transform is not yet stable, annotate it as `@Experimental` (Java) or `@experimental` ([Python]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.utils.annotations.html)).
-* If the API deprecated, annotate it as `@Deprecated` (Java) or `@deprecated` ([Python]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.utils.annotations.html)).
+* If the API of the transform is not yet stable, annotate it as `@Experimental` (Java) or `@experimental` ([Python](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.utils.annotations.html)).
+* If the API is deprecated, annotate it as `@Deprecated` (Java) or `@deprecated` ([Python](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.utils.annotations.html)); a brief Java annotation sketch follows this list.
 * Pay attention to the stability and versioning of third-party classes exposed by the transform's API: if they are unstable or improperly versioned (do not obey [semantic versioning](http://semver.org/)), it is better to wrap them in your own classes.
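+
+A minimal Java sketch of the annotations above (`FancyIO` is a made-up transform
+used only for illustration; imports such as
+`org.apache.beam.sdk.annotations.Experimental` are omitted for brevity):
+
+```
+// The whole API surface is marked as subject to incompatible changes.
+@Experimental
+public class FancyIO {
+
+  /** @deprecated prefer {@link #readRecords()}. */
+  @Deprecated
+  public static PTransform<PBegin, PCollection<String>> read() {
+    return readRecords();
+  }
+
+  public static PTransform<PBegin, PCollection<String>> readRecords() {
+    return TextIO.read().from("/path/to/input");
+  }
+}
+```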
 
 Do not:
diff --git a/website/src/contribute/release-guide.md b/website/src/contribute/release-guide.md
index cddad39..c184a72 100644
--- a/website/src/contribute/release-guide.md
+++ b/website/src/contribute/release-guide.md
@@ -167,8 +167,24 @@
 
 #### Website development setup
 
-Prepare to update the Beam website by following the [website development
-instructions]({{ site.baseurl }}/contribute/website-contributions/).
+Updating the Beam website requires submitting PRs to both the main `apache/beam`
+repo and the `apache/beam-site` repo. The `apache/beam-site` PR receives the
+reference manuals generated from SDK code, while the `apache/beam` PR updates the
+current release version number.
+
+You should already have set up a local clone of `apache/beam`. Setting up a clone
+of `apache/beam-site` is similar:
+
+    $ git clone -b release-docs https://github.com/apache/beam-site.git
+    $ cd beam-site
+    $ git remote add <GitHub_user> git@github.com:<GitHub_user>/beam-site.git
+    $ git fetch --all
+    $ git checkout -b <my-branch> origin/release-docs
+
+Further instructions on website development for `apache/beam` are available
+[here](https://github.com/apache/beam/blob/master/website). Background
+information about how the website is updated can be found in [Beam-Site
+Automation Reliability](https://s.apache.org/beam-site-automation).
 
 #### Register to PyPI
 
@@ -180,8 +196,9 @@
 
 __Attention__: Only a PMC member has permission to perform this. If you are not a PMC member, please ask for help on the dev@ mailing list.
 
-1. In JIRA, navigate to the [`Beam > Administration > Versions`](https://issues.apache.org/jira/plugins/servlet/project-config/BEAM/versions).
-1. Add a new release: choose the next minor version number compared to the one currently underway, select today’s date as the `Start Date`, and choose `Add`.
+1. In JIRA, navigate to [`Beam > Administration > Versions`](https://issues.apache.org/jira/plugins/servlet/project-config/BEAM/versions).
+1. Add a new release. Choose the next minor version number after the version currently underway, select the release cut date (today’s date) as the `Start Date`, and choose `Add`.
+1. At the end of the release, go to the same page and mark the recently released version as released. Use the `...` menu and choose `Release`.
 
 ### Triage release-blocking issues in JIRA
 
@@ -449,15 +466,15 @@
      When prompted for a description, enter “Apache Beam, version X, release candidate Y”.
   1. Stage source release into dist.apache.org dev [repo](https://dist.apache.org/repos/dist/dev/beam/).
   1. Stage, sign, and hash Python binaries into the dist.apache.org dev repo python directory
-  1. Create a PR to update beam-site, changes includes:
+  1. Create a PR to update beam and beam-site; the changes include:
      * Copy python doc into beam-site
      * Copy java doc into beam-site
-     * Update release version into [_config.yml](https://github.com/apache/beam-site/blob/asf-site/_config.yml).
+     * Update the release version in [_config.yml](https://github.com/apache/beam/blob/master/website/_config.yml).
      
 * Tasks you need to do manually
-  1. Add new release into src/get-started/downloads.md
-  1. Update last release download links in src/get-started/downloads.md
-  1. Update the Pydoc link on this page to point to the new version (in src/documentation/sdks/pydoc/current.md.
+  1. Add the new release to `website/src/get-started/downloads.md`.
+  1. Update the download links for the last release in `website/src/get-started/downloads.md`.
+  1. Update `website/src/.htaccess` to redirect to the new version.
 
 ### Run all steps manually
 
@@ -547,7 +564,6 @@
 
 ```
 pip install tox
-
 ```
 Create the Python SDK documentation using sphinx by running a helper script.
 ```
@@ -555,33 +571,41 @@
 ```
 By default the Pydoc is generated in `sdks/python/target/docs/_build`. Let `${PYDOC_ROOT}` be the absolute path to `_build`.
 
-#### Propose a pull request for website updates
+#### Propose pull requests for website updates
 
-The final step of building the candidate is to propose a website pull request.
+Beam publishes API reference manuals for each release on the website. For Java
+and Python SDKs, that’s Javadoc and PyDoc, respectively. The final step of
+building the candidate is to propose website pull requests that update these
+manuals.
 
-Start by updating `release_latest` version flag in the top-level `_config.yml`, and list the new release in the [Apache Beam Downloads]({{ site.baseurl }}/get-started/downloads/), linking to the source code download and the Release Notes in JIRA.
+Merge the pull requests only after finalizing the release. To avoid invalid
+redirects for the 'current' version, merge these PRs in the order listed. Once
+a PR is merged, the new content is picked up automatically and served on the
+Beam website, usually within an hour.
 
-Beam publishes API reference manual for each release on the website. For Java SDK, that’s Javadoc.
+**PR 1: apache/beam-site**
 
-One of the artifacts created in the release contains the Javadoc for the
-website. To update the website, you must unpack this jar file from the release
-candidate into the source tree of the website.
+This pull request is against the `apache/beam-site` repo, on the `release-docs`
+branch.
 
-Add the new Javadoc to [SDK API Reference page]({{ site.baseurl }}/documentation/sdks/javadoc/) page, as follows:
+* Add the new Javadoc to the [SDK API Reference page](https://beam.apache.org/releases/javadoc/) as follows:
+  * Unpack the Maven artifact `org.apache.beam:beam-sdks-java-javadoc` into some temporary location. Call this `${JAVADOC_TMP}`.
+  * Copy the generated Javadoc into the website repository: `cp -r ${JAVADOC_TMP} javadoc/${RELEASE}`.
+* Add the new Pydoc to the [SDK API Reference page](https://beam.apache.org/releases/pydoc/) as follows:
+  * Copy the generated Pydoc into the website repository: `cp -r ${PYDOC_ROOT} pydoc/${RELEASE}`.
+  * Remove the `.doctrees` directory.
+* Stage files using: `git add --all javadoc/ pydoc/`.
 
-* Unpack the Maven artifact `org.apache.beam:beam-sdks-java-javadoc` into some temporary location. Call this `${JAVADOC_TMP}`.
-* Copy the generated Javadoc into the website repository: `cp -r ${JAVADOC_TMP} src/documentation/sdks/javadoc/${RELEASE}`.
-* Set up the necessary git commands to account for the new and deleted files from the javadoc.
-* Update the Javadoc link on this page to point to the new version (in `src/documentation/sdks/javadoc/current.md`).
+**PR 2: apache/beam**
 
-##### Create Pydoc
-Add the new Pydoc to [SDK API Reference page]({{ site.baseurl }}/documentation/sdks/pydoc/) page, as follows:
+This pull request is against the `apache/beam` repo, on the `master` branch.
 
-* Copy the generated Pydoc into the website repository: `cp -r ${PYDOC_ROOT} src/documentation/sdks/pydoc/${RELEASE}`.
-* Remove `.doctrees` directory.
-* Update the Pydoc link on this page to point to the new version (in `src/documentation/sdks/pydoc/current.md`).
-
-Finally, propose a pull request with these changes. (Don’t merge before finalizing the release.)
+* Update the `release_latest` version flag in `/website/_config.yml`, and list
+  the new release in `/website/src/get-started/downloads.md`, linking to the
+  source code download and the Release Notes in JIRA.
+* Update the `RedirectMatch` rule in
+  [/website/src/.htaccess](https://github.com/apache/beam/blob/master/website/src/.htaccess)
+  to point to the new release. See file history for examples.
 
 #### Build and stage python wheels
 
@@ -595,7 +619,7 @@
 
 1. Maven artifacts deployed to the staging repository of [repository.apache.org](https://repository.apache.org/content/repositories/)
 1. Source distribution deployed to the dev repository of [dist.apache.org](https://dist.apache.org/repos/dist/dev/beam/)
-1. Website pull request proposed to list the [release]({{ site.baseurl }}/get-started/downloads/), publish the [Java API reference manual]({{ site.baseurl }}/documentation/sdks/javadoc/), and publish the [Python API reference manual]({{ site.baseurl }}/documentation/sdks/pydoc/).
+1. Website pull request proposed to list the [release]({{ site.baseurl }}/get-started/downloads/), publish the [Java API reference manual](https://beam.apache.org/releases/javadoc/), and publish the [Python API reference manual](https://beam.apache.org/releases/pydoc/).
 
 You can (optionally) also do additional verification by:
 1. Check that the Python zip file contains the `README.md`, `NOTICE`, and `LICENSE` files.
@@ -958,7 +982,7 @@
 
 ### Merge website pull request
 
-Merge the website pull request to [list the release]({{ site.baseurl }}/get-started/downloads/), publish the [Python API reference manual]({{ site.baseurl }}/documentation/sdks/pydoc/), and the [Java API reference manual]({{ site.baseurl }}/documentation/sdks/javadoc/) created earlier.
+Merge the website pull request to [list the release]({{ site.baseurl }}/get-started/downloads/), publish the [Python API reference manual](https://beam.apache.org/releases/pydoc/), and the [Java API reference manual](https://beam.apache.org/releases/javadoc/) created earlier.
 
 ### Mark the version as released in JIRA
 
@@ -973,7 +997,7 @@
 * Maven artifacts released and indexed in the [Maven Central Repository](https://search.maven.org/#search%7Cga%7C1%7Cg%3A%22org.apache.beam%22)
 * Source distribution available in the release repository of [dist.apache.org](https://dist.apache.org/repos/dist/release/beam/)
 * Source distribution removed from the dev repository of [dist.apache.org](https://dist.apache.org/repos/dist/dev/beam/)
-* Website pull request to [list the release]({{ site.baseurl }}/get-started/downloads/) and publish the [API reference manual]({{ site.baseurl }}/documentation/sdks/javadoc/) merged
+* Website pull request to [list the release]({{ site.baseurl }}/get-started/downloads/) and publish the [API reference manual](https://beam.apache.org/releases/javadoc/) merged
 * Release tagged in the source code repository
 * Release version finalized in JIRA. (Note: Not all committers have administrator access to JIRA. If you end up getting permissions errors ask on the mailing list for assistance.)
 * Release version is listed at reporter.apache.org
diff --git a/website/src/contribute/runner-guide.md b/website/src/contribute/runner-guide.md
index 212ff04..bd70899 100644
--- a/website/src/contribute/runner-guide.md
+++ b/website/src/contribute/runner-guide.md
@@ -340,7 +340,7 @@
 
 **Python**
 
-See the [DoFnRunner pydoc](https://beam.apache.org/documentation/sdks/pydoc/2.0.0/apache_beam.runners.html#apache_beam.runners.common.DoFnRunner).
+See the [DoFnRunner pydoc](https://beam.apache.org/releases/pydoc/2.0.0/apache_beam.runners.html#apache_beam.runners.common.DoFnRunner).
 
 #### Side Inputs
 
@@ -387,7 +387,7 @@
 
 **Python**
 
-In Python, [`SideInputMap`](https://beam.apache.org/documentation/sdks/pydoc/2.0.0/apache_beam.transforms.html#apache_beam.transforms.sideinputs.SideInputMap) maps
+In Python, [`SideInputMap`](https://beam.apache.org/releases/pydoc/2.0.0/apache_beam.transforms.html#apache_beam.transforms.sideinputs.SideInputMap) maps
 windows to side input values. The `WindowMappingFn` manifests as a simple
 function. See
 [sideinputs.py](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/transforms/sideinputs.py).
@@ -443,9 +443,9 @@
 The elements you are processing will be key-value pairs, and you'll need to extract
 the keys. For this reason, the format of key-value pairs is standardized and
 shared across all SDKS. See either
-[`KvCoder`](https://beam.apache.org/documentation/sdks/javadoc/2.0.0/org/apache/beam/sdk/coders/KvCoder.html)
+[`KvCoder`](https://beam.apache.org/releases/javadoc/2.0.0/org/apache/beam/sdk/coders/KvCoder.html)
 in Java or
-[`TupleCoder`](https://beam.apache.org/documentation/sdks/pydoc/2.0.0/apache_beam.coders.html#apache_beam.coders.coders.TupleCoder.key_coder)
+[`TupleCoder`](https://beam.apache.org/releases/pydoc/2.0.0/apache_beam.coders.html#apache_beam.coders.coders.TupleCoder.key_coder)
 in Python for documentation on the binary format.
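+
+For illustration, here is a small Java sketch (not taken from any particular
+runner) of encoding and decoding that standardized key-value format with
+`KvCoder`; the coder classes below are real, the surrounding scaffolding is
+assumed:
+
+```
+KvCoder<String, Integer> kvCoder =
+    KvCoder.of(StringUtf8Coder.of(), VarIntCoder.of());
+
+// Encode a KV into the standardized binary format
+// (encode/decode may throw IOException).
+ByteArrayOutputStream out = new ByteArrayOutputStream();
+kvCoder.encode(KV.of("user42", 7), out);
+
+// Decode it again and pull out the key used for grouping.
+KV<String, Integer> decoded =
+    kvCoder.decode(new ByteArrayInputStream(out.toByteArray()));
+String key = decoded.getKey();
+```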
 
 #### Window Merging
@@ -566,7 +566,7 @@
  * timestamps to associate with each element read
  * `splitAtFraction` for dynamic splitting to enable work stealing, and other
    methods to support it - see the [Beam blog post on dynamic work
-   rebalancing](https://beam.apache.org/blog/2016/05/18/splitAtFraction-method.html)
+   rebalancing]({{ site.baseurl }}/blog/2016/05/18/splitAtFraction-method.html) (a brief sketch follows this list)
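+
+A hedged Java sketch of what dynamic splitting looks like from the API side
+(`options` is assumed to be a `PipelineOptions` instance; exception handling is
+omitted):
+
+```
+BoundedSource<Long> source = CountingSource.upTo(1000);
+BoundedSource.BoundedReader<Long> reader = source.createReader(options);
+if (reader.start()) {
+  // Ask the running reader to give up the last half of its remaining work.
+  // Returns a residual source another worker can read, or null if it cannot split.
+  BoundedSource<Long> residual = reader.splitAtFraction(0.5);
+}
+reader.close();
+```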
 
 The `BoundedSource` does not report a watermark currently. Most of the time, reading
 from a bounded source can be parallelized in ways that result in utterly out-of-order
@@ -610,9 +610,9 @@
 that builds a job specification as it walks the graph of `PTransforms`.
 
 The entry point for this in Java is
-[`Pipeline.traverseTopologically`](https://beam.apache.org/documentation/sdks/javadoc/2.0.0/org/apache/beam/sdk/Pipeline.html#traverseTopologically-org.apache.beam.sdk.Pipeline.PipelineVisitor-)
+[`Pipeline.traverseTopologically`](https://beam.apache.org/releases/javadoc/2.0.0/org/apache/beam/sdk/Pipeline.html#traverseTopologically-org.apache.beam.sdk.Pipeline.PipelineVisitor-)
 and
-[`Pipeline.visit`](https://beam.apache.org/documentation/sdks/pydoc/2.0.0/apache_beam.html#apache_beam.pipeline.Pipeline.visit)
+[`Pipeline.visit`](https://beam.apache.org/releases/pydoc/2.0.0/apache_beam.html#apache_beam.pipeline.Pipeline.visit)
 in Python. See the generated documentation for details.
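+
+A hedged Java sketch of such a visitor (only the traversal hooks are shown;
+the actual translation logic is elided):
+
+```
+pipeline.traverseTopologically(
+    new Pipeline.PipelineVisitor.Defaults() {
+      @Override
+      public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
+        // Descend into composites that the runner does not translate directly.
+        return CompositeBehavior.ENTER_TRANSFORM;
+      }
+
+      @Override
+      public void visitPrimitiveTransform(TransformHierarchy.Node node) {
+        // Emit one step of the job specification per primitive transform.
+        System.out.println("translate: " + node.getFullName());
+      }
+    });
+```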
 
 ### Altering a pipeline
@@ -634,7 +634,7 @@
 of work. In Python, support code is still under development.
 
 All pipeline alteration is done via the
-[`Pipeline.replaceAll(PTransformOverride)`](https://beam.apache.org/documentation/sdks/javadoc/2.0.0/org/apache/beam/sdk/Pipeline.html#replaceAll-java.util.List-)
+[`Pipeline.replaceAll(PTransformOverride)`](https://beam.apache.org/releases/javadoc/2.0.0/org/apache/beam/sdk/Pipeline.html#replaceAll-java.util.List-)
 method. A
 [`PTransformOverride`](https://github.com/apache/beam/blob/master/sdks/java/core/src/main/java/org/apache/beam/sdk/runners/PTransformOverride.java)
 is a pair of a
@@ -682,7 +682,7 @@
 #### Allowing users to pass options to your runner
 
 The mechanism for configuration is
-[`PipelineOptions`](https://beam.apache.org/documentation/sdks/javadoc/2.0.0/org/apache/beam/sdk/options/PipelineOptions.html),
+[`PipelineOptions`](https://beam.apache.org/releases/javadoc/2.0.0/org/apache/beam/sdk/options/PipelineOptions.html),
 an interface that works completely differently than normal Java objects. Forget
 what you know, and follow the rules, and `PipelineOptions` will treat you well.
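+
+A brief sketch of what runner options typically look like (the interface and
+option names below are invented for illustration):
+
+```
+public interface MyRunnerOptions extends PipelineOptions {
+  @Description("Degree of parallelism used by the (hypothetical) MyRunner.")
+  @Default.Integer(4)
+  Integer getMyRunnerParallelism();
+  void setMyRunnerParallelism(Integer value);
+}
+```
+
+Registering the interface with `PipelineOptionsFactory.register(MyRunnerOptions.class)`
+makes the options parseable from command-line arguments.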
 
diff --git a/website/src/contribute/testing.md b/website/src/contribute/testing.md
index 2d847a7..7e21c22 100644
--- a/website/src/contribute/testing.md
+++ b/website/src/contribute/testing.md
@@ -107,13 +107,13 @@
 NeedsRunner tests:
 
 ```
-$ ./gradlew :runners:direct-java:needsRunnerTests
+$ ./gradlew beam-runners-direct-java:needsRunnerTests
 ```
 
 To run a single NeedsRunner test use the `test` property, e.g.
 
 ```
-$ ./gradlew :runners:direct-java:needsRunnerTests --tests org.apache.beam.sdk.transforms.MapElementsTest.testMapBasic
+$ ./gradlew beam-runners-direct-java:needsRunnerTests --tests org.apache.beam.sdk.transforms.MapElementsTest.testMapBasic
 ```
 
 will run the `MapElementsTest.testMapBasic()` test.
@@ -123,7 +123,7 @@
 command:
 
 ```
-$ ./gradlew sdks:java:io:google-cloud-platform:test --tests org.apache.beam.sdk.io.gcp.spanner.SpannerIOWriteTest
+$ ./gradlew beam-sdks-java-io-google-cloud-platform:test --tests org.apache.beam.sdk.io.gcp.spanner.SpannerIOWriteTest
 ```
 
 ### ValidatesRunner
diff --git a/website/src/contribute/website-contributions.md b/website/src/contribute/website-contributions.md
deleted file mode 100644
index 7ec2458..0000000
--- a/website/src/contribute/website-contributions.md
+++ /dev/null
@@ -1,84 +0,0 @@
----
-layout: section
-title: "Beam Website Contributions"
-permalink: /contribute/website-contributions/
-section_menu: section-menu/contribute.html
----
-<!--
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-
-# Website Contributions
-
-Contributions to the website require the same steps as contributions to the
-main repository, so start with the instructions for [Contributing
-Code]({{ site.baseurl }}/contribute/contribution-guide).
-
-The Beam website is in the [Beam Site GitHub
-mirror](https://github.com/apache/beam-site) repository in the `asf-site`
-branch (_not_ `master`).
-
-Issues are tracked in the
-[website](https://issues.apache.org/jira/issues/?jql=project%20%3D%20BEAM%20AND%20component%20%3D%20website)
-component in JIRA.
-
-## One-time Setup
-
-The [README file](https://github.com/apache/beam-site/blob/asf-site/README.md)
-in the website repository has more information on how to set up the required
-dependencies for your development environment.
-
-The general guidelines for cloning a repository can be adjusted to use the
-`asf-site` branch of `beam-site`:
-
-	$ git clone -b asf-site https://github.com/apache/beam-site.git
-	$ cd beam-site
-	$ git remote add <GitHub_user> git@github.com:<GitHub_user>/beam-site.git
-	$ git fetch --all
-	$ git checkout -b <my-branch> origin/asf-site
-
-## Working on your change
-
-While you are working on your pull request, you can test and develop live by
-running the following command in the root folder of the website:
-
-	$ bundle exec jekyll serve --incremental
-
-Jekyll will start a webserver on port 4000. As you make changes to the content,
-Jekyll will rebuild it automatically.
-
-In addition, you can run the tests to valid your links using:
-
-	$ bundle exec rake test
-
-Both of these commands will cause the `content/` directory to be generated.
-Merging autogenerated content can get tricky, so please leave this directory
-out of your commits and pull request by doing:
-
-	$ git checkout -- content
-
-When you are ready, submit a pull request using the [Beam Site GitHub
-mirror](https://github.com/apache/beam-site), including the JIRA issue as
-usual.
-
-During review, committers will patch in your PR, generate the static
-`content/`, and review the changes.
-
-## Committing website changes (committers only)
-
-We have a bot that runs tests and merges changes to the website.
-When the pull request is fully ready to be merged, a committer
-can comment
-
-    @asfgit merge
-
diff --git a/website/src/documentation/dsls/sql/create-table.md b/website/src/documentation/dsls/sql/create-external-table.md
similarity index 84%
rename from website/src/documentation/dsls/sql/create-table.md
rename to website/src/documentation/dsls/sql/create-external-table.md
index cfa1d2d..a6f6e32 100644
--- a/website/src/documentation/dsls/sql/create-table.md
+++ b/website/src/documentation/dsls/sql/create-external-table.md
@@ -1,9 +1,11 @@
 ---
 layout: section
-title: "Beam SQL: CREATE TABLE Statement"
+title: "Beam SQL: CREATE EXTERNAL TABLE Statement"
 section_menu: section-menu/sdks.html
-permalink: /documentation/dsls/sql/create-table/
-redirect_from: /documentation/dsls/sql/statements/create-table/
+permalink: /documentation/dsls/sql/create-external-table/
+redirect_from:
+  - /documentation/dsls/sql/statements/create-table/
+  - /documentation/dsls/sql/create-table/
 ---
 <!--
 Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,20 +21,20 @@
 limitations under the License.
 -->
 
-# CREATE TABLE
+# CREATE EXTERNAL TABLE
 
-Beam SQL's `CREATE TABLE` statement registers a virtual table that maps to an
-[external storage system](https://beam.apache.org/documentation/io/built-in/).
-For some storage systems, `CREATE TABLE` does not create a physical table until
+Beam SQL's `CREATE EXTERNAL TABLE` statement registers a virtual table that maps to an
+[external storage system]({{ site.baseurl }}/documentation/io/built-in/).
+For some storage systems, `CREATE EXTERNAL TABLE` does not create a physical table until
 a write occurs. After the physical table exists, you can access the table with
 the `SELECT`, `JOIN`, and `INSERT INTO` statements.
 
-The `CREATE TABLE` statement includes a schema and extended clauses.
+The `CREATE EXTERNAL TABLE` statement includes a schema and extended clauses.
 
 ## Syntax
 
 ```
-CREATE TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
+CREATE EXTERNAL TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
 TYPE type
 [LOCATION location]
 [TBLPROPERTIES tblProperties]
@@ -48,7 +50,7 @@
     ignores the statement instead of returning an error.
 *   `tableName`: The case sensitive name of the table to create and register,
     specified as an
-    [Identifier](https://beam.apache.org/documentation/dsls/sql/lexical/#identifiers).
+    [Identifier]({{ site.baseurl }}/documentation/dsls/sql/lexical/#identifiers).
     The table name does not need to match the name in the underlying data
     storage system.
 *   `tableElement`: `columnName` `fieldType` `[ NOT NULL ]`
@@ -63,7 +65,7 @@
         *   `ROW<tableElement [, tableElement ]*>`
     *   `NOT NULL`: Optional. Indicates that the column is not nullable.
 *   `type`: The I/O transform that backs the virtual table, specified as an
-    [Identifier](https://beam.apache.org/documentation/dsls/sql/lexical/#identifiers)
+    [Identifier]({{ site.baseurl }}/documentation/dsls/sql/lexical/#identifiers)
     with one of the following values:
     *   `bigquery`
     *   `pubsub`
@@ -71,11 +73,11 @@
     *   `text`
 *   `location`: The I/O specific location of the underlying table, specified as
     a [String
-    Literal](https://beam.apache.org/documentation/dsls/sql/lexical/#string-literals).
+    Literal]({{ site.baseurl }}/documentation/dsls/sql/lexical/#string-literals).
     See the I/O specific sections for `location` format requirements.
 *   `tblProperties`: The I/O specific quoted key value JSON object with extra
     configuration, specified as a [String
-    Literal](https://beam.apache.org/documentation/dsls/sql/lexical/#string-literals).
+    Literal]({{ site.baseurl }}/documentation/dsls/sql/lexical/#string-literals).
     See the I/O specific sections for `tblProperties` format requirements.
 
 ## BigQuery
@@ -83,7 +85,7 @@
 ### Syntax
 
 ```
-CREATE TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
+CREATE EXTERNAL TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
 TYPE bigquery
 LOCATION '[PROJECT_ID]:[DATASET].[TABLE]'
 ```
@@ -183,7 +185,7 @@
 ### Example
 
 ```
-CREATE TABLE users (id INTEGER, username VARCHAR)
+CREATE EXTERNAL TABLE users (id INTEGER, username VARCHAR)
 TYPE bigquery
 LOCATION 'testing-integration:apache.users'
 ```
@@ -193,7 +195,7 @@
 ### Syntax
 
 ```
-CREATE TABLE [ IF NOT EXISTS ] tableName
+CREATE EXTERNAL TABLE [ IF NOT EXISTS ] tableName
   (
    event_timestamp TIMESTAMP,
    attributes MAP<VARCHAR, VARCHAR>,
@@ -212,7 +214,7 @@
         The attribute key is configured by the `timestampAttributeKey` field of
         the `tblProperties` blob. The value of the attribute should conform to
         the [requirements of
-        PubsubIO](https://beam.apache.org/documentation/sdks/javadoc/2.4.0/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.Read.html#withTimestampAttribute-java.lang.String-),
+        PubsubIO](https://beam.apache.org/releases/javadoc/2.4.0/org/apache/beam/sdk/io/gcp/pubsub/PubsubIO.Read.html#withTimestampAttribute-java.lang.String-),
         which is either millis since the Unix epoch or an
         [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) date string.
 *   `attributes`: The user-provided attributes map from the Pub/Sub message;
@@ -263,7 +265,7 @@
 ### Example
 
 ```
-CREATE TABLE locations (event_timestamp TIMESTAMP, attributes MAP<VARCHAR, VARCHAR>, payload ROW<id INTEGER, location VARCHAR>)
+CREATE EXTERNAL TABLE locations (event_timestamp TIMESTAMP, attributes MAP<VARCHAR, VARCHAR>, payload ROW<id INTEGER, location VARCHAR>)
 TYPE pubsub
 LOCATION 'projects/testing-integration/topics/user-location'
 ```
@@ -275,7 +277,7 @@
 ### Syntax
 
 ```
-CREATE TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
+CREATE EXTERNAL TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
 TYPE kafka
 LOCATION 'kafka://localhost:2181/brokers'
 TBLPROPERTIES '{"bootstrap.servers":"localhost:9092", "topics": ["topic1", "topic2"]}'
@@ -313,7 +315,7 @@
 ### Syntax
 
 ```
-CREATE TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
+CREATE EXTERNAL TABLE [ IF NOT EXISTS ] tableName (tableElement [, tableElement ]*)
 TYPE text
 LOCATION '/home/admin/orders'
 TBLPROPERTIES '{"format: "Excel"}'
@@ -345,7 +347,7 @@
 ### Example
 
 ```
-CREATE TABLE orders (id INTEGER, price INTEGER)
+CREATE EXTERNAL TABLE orders (id INTEGER, price INTEGER)
 TYPE text
 LOCATION '/home/admin/orders'
 ```
diff --git a/website/src/documentation/dsls/sql/overview.md b/website/src/documentation/dsls/sql/overview.md
index 7063b16..6be9e43 100644
--- a/website/src/documentation/dsls/sql/overview.md
+++ b/website/src/documentation/dsls/sql/overview.md
@@ -32,9 +32,9 @@
    basic dialect underlying Beam SQL. We have added additional extensions to
    make it easy to leverage Beam's unified batch/streaming model and support
    for complex data types.
- - [SqlTransform]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/extensions/sql/SqlTransform.html): 
+ - [SqlTransform](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/extensions/sql/SqlTransform.html): 
    the interface for creating `PTransforms` from SQL queries.
- - [Row]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/values/Row.html):
+ - [Row](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/values/Row.html):
    the type of elements that Beam SQL operates on. A `PCollection<Row>` plays the role of a table (see the sketch below).
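+
+A short Java sketch of how these pieces fit together (the schema, field names,
+and values below are illustrative only):
+
+```
+Schema schema = Schema.builder()
+    .addStringField("name")
+    .addInt32Field("qty")
+    .build();
+
+PCollection<Row> rows = pipeline.apply(
+    Create.of(
+        Row.withSchema(schema).addValues("apple", 3).build(),
+        Row.withSchema(schema).addValues("carrot", 5).build())
+      .withCoder(RowCoder.of(schema)));
+
+// A single input PCollection is queryable under the table name PCOLLECTION.
+PCollection<Row> filtered = rows.apply(
+    SqlTransform.query("SELECT name, qty FROM PCOLLECTION WHERE qty > 3"));
+```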
 
 The [SQL pipeline walkthrough]({{ site.baseurl
diff --git a/website/src/documentation/dsls/sql/select.md b/website/src/documentation/dsls/sql/select.md
index 24bd728..f3a135f 100644
--- a/website/src/documentation/dsls/sql/select.md
+++ b/website/src/documentation/dsls/sql/select.md
@@ -32,59 +32,684 @@
  - [Joins]({{ site.baseurl}}/documentation/dsls/sql/joins)
  - [Windowing & Triggering]({{ site.baseurl}}/documentation/dsls/sql/windowing-and-triggering/)
 
-Below is a curated grammar of the supported syntax in Beam SQL
+Query statements scan one or more tables or expressions and return the computed
+result rows. This topic describes the syntax for SQL queries in Beam.
+
+## SQL Syntax
+
+    query_statement:
+        [ WITH with_query_name AS ( query_expr ) [, ...] ]
+        query_expr
+
+    query_expr:
+        { select | ( query_expr ) | query_expr set_op query_expr }
+        [ LIMIT count [ OFFSET skip_rows ] ]
+
+    select:
+        SELECT  [{ ALL | DISTINCT }]
+            { [ expression. ]* [ EXCEPT ( column_name [, ...] ) ]
+                [ REPLACE ( expression [ AS ] column_name [, ...] ) ]
+            | expression [ [ AS ] alias ] } [, ...]
+        [ FROM from_item  [, ...] ]
+        [ WHERE bool_expression ]
+        [ GROUP BY { expression [, ...] | ROLLUP ( expression [, ...] ) } ]
+        [ HAVING bool_expression ]
+
+    set_op:
+        UNION { ALL | DISTINCT } | INTERSECT DISTINCT | EXCEPT DISTINCT
+
+    from_item: {
+        table_name [ [ AS ] alias ] |
+        join |
+        ( query_expr ) [ [ AS ] alias ] |
+        with_query_name [ [ AS ] alias ]
+    }
+
+    join:
+        from_item [ join_type ] JOIN from_item
+        [ { ON bool_expression | USING ( join_column [, ...] ) } ]
+
+    join_type:
+        { INNER | CROSS | FULL [OUTER] | LEFT [OUTER] | RIGHT [OUTER] }
+
+Notation:
+
+-   Square brackets "\[ \]" indicate optional clauses.
+-   Parentheses "( )" indicate literal parentheses.
+-   The vertical bar "|" indicates a logical OR.
+-   Curly braces "{ }" enclose a set of options.
+-   A comma followed by an ellipsis within square brackets "\[, ... \]"
+    indicates that the preceding item can repeat in a comma-separated list.
+
+## SELECT list
+
+Syntax:
+
+    SELECT  [{ ALL | DISTINCT }]
+        { [ expression. ]*
+        | expression [ [ AS ] alias ] } [, ...]
+
+The `SELECT` list defines the columns that the query will return. Expressions in
+the `SELECT` list can refer to columns in any of the `from_item`s in its
+corresponding `FROM` clause.
+
+Each item in the `SELECT` list is one of:
+
+-   \*
+-   `expression`
+-   `expression.*`
+
+### SELECT \*
+
+`SELECT *`, often referred to as *select star*, produces one output column for
+each column that is visible after executing the full query.
 
 ```
-query:
-	{
-          select
-      |   query UNION [ ALL ] query
-      |   query MINUS [ ALL ] query
-      |   query INTERSECT [ ALL ] query
-	}
-    [ ORDER BY orderItem [, orderItem ]* LIMIT count [OFFSET offset] ]
+SELECT * FROM (SELECT 'apple' AS fruit, 'carrot' AS vegetable);
 
-orderItem:
-      expression [ ASC | DESC ]
-
-select:
-      SELECT
-          { * | projectItem [, projectItem ]* }
-      FROM tableExpression
-      [ WHERE booleanExpression ]
-      [ GROUP BY { groupItem [, groupItem ]* } ]
-      [ HAVING booleanExpression ]
-
-projectItem:
-      expression [ [ AS ] columnAlias ]
-  |   tableAlias . *
-
-tableExpression:
-      tableReference [, tableReference ]*
-  |   tableExpression [ ( LEFT | RIGHT ) [ OUTER ] ] JOIN tableExpression [ joinCondition ]
-
-booleanExpression:
-    expression [ IS NULL | IS NOT NULL ]
-  | expression [ > | >= | = | < | <= | <> ] expression
-  | booleanExpression [ AND | OR ] booleanExpression
-  | NOT booleanExpression
-  | '(' booleanExpression ')'
-
-joinCondition:
-      ON booleanExpression
-
-tableReference:
-      tableName [ [ AS ] alias ]
-
-values:
-      VALUES expression [, expression ]*
-
-groupItem:
-      expression
-  |   '(' expression [, expression ]* ')'
-  |   HOP '(' expression [, expression ]* ')'
-  |   TUMBLE '(' expression [, expression ]* ')'
-  |   SESSION '(' expression [, expression ]* ')'
-
++-------+-----------+
+| fruit | vegetable |
++-------+-----------+
+| apple | carrot    |
++-------+-----------+
 ```
 
+### SELECT `expression`
+
+Items in a `SELECT` list can be expressions. These expressions evaluate to a
+single value and produce one output column, with an optional explicit `alias`.
+
+If the expression does not have an explicit alias, it receives an implicit alias
+according to the rules for [implicit aliases](#implicit-aliases), if possible.
+Otherwise, the column is anonymous and you cannot refer to it by name elsewhere
+in the query.
+
+### SELECT `expression.*` {#select-expression_1}
+
+An item in a `SELECT` list can also take the form of `expression.*`. This
+produces one output column for each column or top-level field of `expression`.
+The expression must be a table alias.
+
+The following query produces one output column for each column in the table
+`groceries`, aliased as `g`.
+
+```
+WITH groceries AS
+  (SELECT 'milk' AS dairy,
+   'eggs' AS protein,
+   'bread' AS grain)
+SELECT g.*
+FROM groceries AS g;
+
++-------+---------+-------+
+| dairy | protein | grain |
++-------+---------+-------+
+| milk  | eggs    | bread |
++-------+---------+-------+
+```
+
+### SELECT modifiers
+
+You can modify the results returned from a `SELECT` query, as follows.
+
+#### SELECT DISTINCT
+
+A `SELECT DISTINCT` statement discards duplicate rows and returns only the
+remaining rows. `SELECT DISTINCT` cannot return columns of the following types:
+
+-   STRUCT
+-   ARRAY
+
+#### SELECT ALL
+
+A `SELECT ALL` statement returns all rows, including duplicate rows. `SELECT
+ALL` is the default behavior of `SELECT`.
+
+### Aliases
+
+See [Aliases](#aliases_2) for information on syntax and visibility for
+`SELECT` list aliases.
+
+## FROM clause
+
+The `FROM` clause indicates the table or tables from which to retrieve rows, and
+specifies how to join those rows together to produce a single stream of rows for
+processing in the rest of the query.
+
+### Syntax
+
+    from_item: {
+        table_name [ [ AS ] alias ] |
+        join |
+        ( query_expr ) [ [ AS ] alias ] |
+        with_query_name [ [ AS ] alias ]
+    }
+
+#### table\_name
+
+The name (optionally qualified) of an existing table.
+
+    SELECT * FROM Roster;
+    SELECT * FROM beam.Roster;
+
+#### join
+
+See [JOIN Types](#join-types) below and [Joins]({{ site.baseurl }}/documentation/dsls/sql/joins).
+
+#### select {#select_1}
+
+`( select ) [ [ AS ] alias ]` is a table [subquery](#subqueries).
+
+#### with\_query\_name
+
+The query names in a `WITH` clause (see [WITH Clause](#with-clause)) act like
+names of temporary tables that you can reference anywhere in the `FROM` clause.
+In the example below, `subQ1` and `subQ2` are `with_query_names`.
+
+Example:
+
+    WITH
+      subQ1 AS (SELECT * FROM Roster WHERE SchoolID = 52),
+      subQ2 AS (SELECT SchoolID FROM subQ1)
+    SELECT DISTINCT * FROM subQ2;
+
+The `WITH` clause hides any permanent tables with the same name for the duration
+of the query, unless you qualify the table name, e.g. `beam.Roster`.
+
+### Subqueries
+
+A subquery is a query that appears inside another statement, and is written
+inside parentheses. These are also referred to as "sub-SELECTs" or "nested
+SELECTs". The full `SELECT` syntax is valid in subqueries.
+
+There are two types of subquery:
+
+-   Expression subqueries, which you can use in a query wherever expressions
+    are valid. Expression subqueries return a single value.
+-   Table subqueries, which you can use only in a `FROM` clause. The outer query
+    treats the result of the subquery as a table.
+
+Note that there must be parentheses around both types of subqueries.
+
+Example:
+
+```
+SELECT AVG ( PointsScored )
+FROM
+( SELECT PointsScored
+  FROM Stats
+  WHERE SchoolID = 77 )
+```
+
+Optionally, a table subquery can have an alias.
+
+Example:
+
+```
+SELECT r.LastName
+FROM
+( SELECT * FROM Roster) AS r;
+```
+
+### Aliases {#aliases_1}
+
+See [Aliases](#aliases_2) for information on syntax and visibility for
+`FROM` clause aliases.
+
+## JOIN types
+
+Also see [Joins]({{ site.baseurl }}/documentation/dsls/sql/joins).
+
+### Syntax {#syntax_1}
+
+    join:
+        from_item [ join_type ] JOIN from_item
+        [ ON bool_expression | USING ( join_column [, ...] ) ]
+
+    join_type:
+        { INNER | CROSS | FULL [OUTER] | LEFT [OUTER] | RIGHT [OUTER] }
+
+The `JOIN` clause merges two `from_item`s so that the `SELECT` clause can query
+them as one source. The `join_type` and `ON` or `USING` clause (a "join
+condition") specify how to combine and discard rows from the two `from_item`s to
+form a single source.
+
+All `JOIN` clauses require a `join_type`.
+
+A `JOIN` clause requires a join condition unless one of the following conditions
+is true:
+
+-   `join_type` is `CROSS`.
+-   One or both of the `from_item`s is not a table, e.g. an `array_path` or
+    `field_path`.
+
+### \[INNER\] JOIN
+
+An `INNER JOIN`, or simply `JOIN`, effectively calculates the Cartesian product
+of the two `from_item`s and discards all rows that do not meet the join
+condition. "Effectively" means that it is possible to implement an `INNER JOIN`
+without actually calculating the Cartesian product.
+
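+For example, the following query (the same join used in the `WHERE` clause
+examples below) keeps only the combined rows whose `SchoolID` values match in
+both `Roster` and `TeamMascot`:
+
+```
+SELECT * FROM Roster INNER JOIN TeamMascot
+ON Roster.SchoolID = TeamMascot.SchoolID;
+```
+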
+### CROSS JOIN
+
+`CROSS JOIN` is generally not yet supported.
+
+### FULL \[OUTER\] JOIN
+
+A `FULL OUTER JOIN` (or simply `FULL JOIN`) returns all fields for all rows in
+both `from_item`s that meet the join condition.
+
+`FULL` indicates that *all rows* from both `from_item`s are returned, even if
+they do not meet the join condition. For streaming jobs, this applies to all
+rows that are not late according to the default trigger and that belong to the
+same window, if a non-global window is applied.
+
+`OUTER` indicates that if a given row from one `from_item` does not join to any
+row in the other `from_item`, the row will return with NULLs for all columns
+from the other `from_item`.
+
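+As an illustrative sketch, the following query returns every row of `Roster`
+and every row of `PlayerStats`; rows without a matching `LastName` on the
+other side are padded with NULLs:
+
+```
+SELECT * FROM Roster FULL OUTER JOIN PlayerStats
+ON Roster.LastName = PlayerStats.LastName;
+```
+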
+Also see [Joins]({{ site.baseurl }}/documentation/dsls/sql/joins).
+
+### LEFT \[OUTER\] JOIN
+
+The result of a `LEFT OUTER JOIN` (or simply `LEFT JOIN`) for two `from_item`s
+always retains all rows of the left `from_item` in the `JOIN` clause, even if no
+rows in the right `from_item` satisfy the join predicate.
+
+`LEFT` indicates that all rows from the *left* `from_item` are returned; if a
+given row from the left `from_item` does not join to any row in the *right*
+`from_item`, the row will return with NULLs for all columns from the right
+`from_item`. Rows from the right `from_item` that do not join to any row in the
+left `from_item` are discarded.
+
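+For example, this sketch keeps every row of `Roster`, with NULLs for the
+`PlayerStats` columns whenever a `LastName` has no match in `PlayerStats`:
+
+```
+SELECT * FROM Roster LEFT OUTER JOIN PlayerStats
+ON Roster.LastName = PlayerStats.LastName;
+```
+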
+### RIGHT \[OUTER\] JOIN
+
+The result of a `RIGHT OUTER JOIN` (or simply `RIGHT JOIN`) is similar and
+symmetric to that of `LEFT OUTER JOIN`.
+
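+The mirror-image sketch retains every row of `PlayerStats` instead, padding
+unmatched `Roster` columns with NULLs:
+
+```
+SELECT * FROM Roster RIGHT OUTER JOIN PlayerStats
+ON Roster.LastName = PlayerStats.LastName;
+```
+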
+### ON clause
+
+The `ON` clause contains a `bool_expression`. A combined row (the result of
+joining two rows) meets the join condition if `bool_expression` returns TRUE.
+
+Example:
+
+```
+SELECT * FROM Roster INNER JOIN PlayerStats
+ON Roster.LastName = PlayerStats.LastName;
+```
+
+### USING clause
+
+The `USING` clause requires a `column_list` of one or more columns which occur
+in both input tables. It performs an equality comparison on that column, and the
+rows meet the join condition if the equality comparison returns TRUE.
+
+In most cases, a statement with the `USING` keyword is equivalent to using the
+`ON` keyword. For example, the statement:
+
+```
+SELECT FirstName
+FROM Roster INNER JOIN PlayerStats
+USING (LastName);
+```
+
+is equivalent to:
+
+```
+SELECT FirstName
+FROM Roster INNER JOIN PlayerStats
+ON Roster.LastName = PlayerStats.LastName;
+```
+
+The results from queries with `USING` do differ from queries that use `ON` when
+you use `SELECT *`. To illustrate this, consider the query:
+
+```
+SELECT * FROM Roster INNER JOIN PlayerStats
+USING (LastName);
+```
+
+This statement returns the rows from `Roster` and `PlayerStats` where
+`Roster.LastName` is the same as `PlayerStats.LastName`. The results include a
+single `LastName` column.
+
+By contrast, consider the following query:
+
+```
+SELECT * FROM Roster INNER JOIN PlayerStats
+ON Roster.LastName = PlayerStats.LastName;
+```
+
+This statement returns the rows from `Roster` and `PlayerStats` where
+`Roster.LastName` is the same as `PlayerStats.LastName`. The results include two
+`LastName` columns; one from `Roster` and one from `PlayerStats`.
+
+### Sequences of JOINs
+
+The `FROM` clause can contain multiple `JOIN` clauses in sequence.
+
+Example:
+
+```
+SELECT * FROM a LEFT JOIN b ON TRUE LEFT JOIN c ON TRUE;
+```
+
+where `a`, `b`, and `c` are any `from_item`s. JOINs are bound from left to
+right, but you can insert parentheses to group them in a different order.
+
+## WHERE clause
+
+### Syntax {#syntax_2}
+
+```
+WHERE bool_expression
+```
+
+The `WHERE` clause filters out rows by evaluating each row against
+`bool_expression`, and discards all rows that do not return TRUE (that is, rows
+that return FALSE or NULL).
+
+Example:
+
+```
+SELECT * FROM Roster
+WHERE SchoolID = 52;
+```
+
+The `bool_expression` can contain multiple sub-conditions.
+
+Example:
+
+```
+SELECT * FROM Roster
+WHERE LastName LIKE 'Mc%' OR LastName LIKE 'Mac%';
+```
+
+You cannot reference column aliases from the `SELECT` list in the `WHERE`
+clause.
+
+Expressions in an `INNER JOIN` have an equivalent expression in the `WHERE`
+clause. For example, a query using `INNER JOIN` and `ON` has an equivalent
+expression using `CROSS JOIN` and `WHERE`.
+
+Example - this query:
+
+```
+SELECT * FROM Roster INNER JOIN TeamMascot
+ON Roster.SchoolID = TeamMascot.SchoolID;
+```
+
+is equivalent to:
+
+```
+SELECT * FROM Roster CROSS JOIN TeamMascot
+WHERE Roster.SchoolID = TeamMascot.SchoolID;
+```
+
+## GROUP BY clause
+
+Also see [Windowing & Triggering]({{ site.baseurl }}/documentation/dsls/sql/windowing-and-triggering/).
+
+### Syntax {#syntax_3}
+
+    GROUP BY { expression [, ...] | ROLLUP ( expression [, ...] ) }
+
+The `GROUP BY` clause groups together rows in a table with non-distinct values
+for the `expression` in the `GROUP BY` clause. For multiple rows in the source
+table with non-distinct values for `expression`, the `GROUP BY` clause produces
+a single combined row. `GROUP BY` is commonly used when aggregate functions are
+present in the `SELECT` list, or to eliminate redundancy in the output.
+
+Example:
+
+```
+SELECT SUM(PointsScored), LastName
+FROM PlayerStats
+GROUP BY LastName;
+```
+
+## HAVING clause
+
+### Syntax {#syntax_4}
+
+```
+HAVING bool_expression
+```
+
+The `HAVING` clause is similar to the `WHERE` clause: it filters out rows that
+do not return TRUE when they are evaluated against the `bool_expression`.
+
+As with the `WHERE` clause, the `bool_expression` can be any expression that
+returns a boolean, and can contain multiple sub-conditions.
+
+The `HAVING` clause differs from the `WHERE` clause in that:
+
+-   The `HAVING` clause requires `GROUP BY` or aggregation to be present in the
+    query.
+-   The `HAVING` clause occurs after `GROUP BY` and aggregation.
+    This means that the `HAVING` clause is evaluated once for every
+    aggregated row in the result set. This differs from the `WHERE` clause,
+    which is evaluated before `GROUP BY` and aggregation.
+
+The `HAVING` clause can reference columns available via the `FROM` clause, as
+well as `SELECT` list aliases. Expressions referenced in the `HAVING` clause
+must either appear in the `GROUP BY` clause or they must be the result of an
+aggregate function:
+
+```
+SELECT LastName
+FROM Roster
+GROUP BY LastName
+HAVING SUM(PointsScored) > 15;
+```
+
+## Set operators
+
+### Syntax {#syntax_6}
+
+    UNION { ALL | DISTINCT } | INTERSECT DISTINCT | EXCEPT DISTINCT
+
+Set operators combine results from two or more input queries into a single
+result set. You must specify `ALL` or `DISTINCT`; if you specify `ALL`, all
+rows are retained, and if you specify `DISTINCT`, duplicate rows are discarded.
+
+If a given row R appears exactly m times in the first input query and n times in
+the second input query (m >= 0, n >= 0):
+
+-   For `UNION ALL`, R appears exactly m + n times in the result.
+-   For `UNION DISTINCT`, the `DISTINCT` is computed after the `UNION` is
+    computed, so R appears exactly one time.
+-   For `INTERSECT DISTINCT`, the `DISTINCT` is computed after the result above
+    is computed.
+-   For `EXCEPT DISTINCT`, row R appears once in the output if m > 0 and
+    n = 0.
+-   If there are more than two input queries, the above operations generalize
+    and the output is the same as if the inputs were combined incrementally from
+    left to right.
+
+The following rules apply:
+
+-   For set operations other than `UNION ALL`, all column types must support
+    equality comparison.
+-   The input queries on each side of the operator must return the same number
+    of columns.
+-   The operators pair the columns returned by each input query according to the
+    columns' positions in their respective `SELECT` lists. That is, the first
+    column in the first input query is paired with the first column in the
+    second input query.
+-   The result set always uses the column names from the first input query.
+-   The result set always uses the supertypes of input types in corresponding
+    columns, so paired columns must also have either the same data type or a
+    common supertype.
+-   You must use parentheses to separate different set operations; for this
+    purpose, set operations such as `UNION ALL` and `UNION DISTINCT` are
+    different. If the statement only repeats the same set operation, parentheses
+    are not necessary.
+
+Examples:
+
+```
+query1 UNION ALL (query2 UNION DISTINCT query3)
+query1 UNION ALL query2 UNION ALL query3
+```
+
+Invalid:
+
+    query1 UNION ALL query2 UNION DISTINCT query3
+    query1 UNION ALL query2 INTERSECT ALL query3;  // INVALID.
+
+### UNION
+
+The `UNION` operator combines the result sets of two or more input queries by
+pairing columns from the result set of each query and vertically concatenating
+them.
+
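+For example, this sketch reuses the columns from the `WITH` clause example
+below to concatenate school IDs and opponent IDs; `UNION DISTINCT` would
+additionally remove duplicate IDs from the combined result:
+
+```
+SELECT SchoolID FROM Roster
+UNION ALL
+SELECT OpponentID FROM PlayerStats;
+```
+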
+### INTERSECT
+
+The `INTERSECT` operator returns rows that are found in the result sets of both
+the left and right input queries. Unlike `EXCEPT`, the positioning of the input
+queries (to the left vs. right of the `INTERSECT` operator) does not matter.
+
+### EXCEPT
+
+The `EXCEPT` operator returns rows from the left input query that are not
+present in the right input query.
+
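+For example, this sketch returns each `SchoolID` value that appears in
+`Roster` but not in `TeamMascot`:
+
+```
+SELECT SchoolID FROM Roster
+EXCEPT DISTINCT
+SELECT SchoolID FROM TeamMascot;
+```
+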
+## LIMIT clause and OFFSET clause
+
+### Syntax {#syntax_7}
+
+```
+LIMIT count [ OFFSET skip_rows ]
+```
+
+`LIMIT` specifies a non-negative `count` of type INTEGER, and no more than `count`
+rows will be returned. `LIMIT` `0` returns 0 rows. If there is a set operation,
+`LIMIT` is applied after the set operation is evaluated.
+
+`OFFSET` specifies a non-negative `skip_rows` of type INTEGER, and only rows from
+that offset in the table will be considered.
+
+These clauses accept only literal or parameter values.
+
+Which rows are returned by `LIMIT` and `OFFSET` is unspecified.
+
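+For example, the following sketch skips the first 2 rows produced for `Roster`
+and then returns at most 10 of the remaining rows:
+
+```
+SELECT LastName
+FROM Roster
+LIMIT 10 OFFSET 2;
+```
+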
+## WITH clause
+
+The `WITH` clause contains one or more named subqueries which execute every time
+a subsequent `SELECT` statement references them. Any clause or subquery can
+reference subqueries you define in the `WITH` clause. This includes any `SELECT`
+statements on either side of a set operator, such as `UNION`.
+
+Example:
+
+```
+WITH subQ1 AS (SELECT SchoolID FROM Roster),
+     subQ2 AS (SELECT OpponentID FROM PlayerStats)
+SELECT * FROM subQ1
+UNION ALL
+SELECT * FROM subQ2;
+```
+
+## Aliases {#aliases_2}
+
+An alias is a temporary name given to a table, column, or expression present in
+a query. You can introduce explicit aliases in the `SELECT` list or `FROM`
+clause, or Beam will infer an implicit alias for some expressions.
+Expressions with neither an explicit nor implicit alias are anonymous and the
+query cannot reference them by name.
+
+### Explicit alias syntax
+
+You can introduce explicit aliases in either the `FROM` clause or the `SELECT`
+list.
+
+In a `FROM` clause, you can introduce explicit aliases for any item, including
+tables, arrays, subqueries, and `UNNEST` clauses, using `[AS] alias`. The `AS`
+keyword is optional.
+
+Example:
+
+```
+SELECT s.FirstName, s2.SongName
+FROM Singers AS s JOIN Songs AS s2 ON s.SingerID = s2.SingerID;
+```
+
+You can introduce explicit aliases for any expression in the `SELECT` list using
+`[AS] alias`. The `AS` keyword is optional.
+
+Example:
+
+```
+SELECT s.FirstName AS name, LOWER(s.FirstName) AS lname
+FROM Singers s;
+```
+
+### Explicit alias visibility
+
+After you introduce an explicit alias in a query, there are restrictions on
+where else in the query you can reference that alias. These restrictions on
+alias visibility are the result of Beam's name scoping rules.
+
+#### FROM clause aliases
+
+Beam processes aliases in a `FROM` clause from left to right, and aliases
+are visible only to subsequent `JOIN` clauses.
+
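+For example, in the sketch below (taken from the explicit alias example above),
+the alias `s` is introduced for `Singers` in the `FROM` clause and is visible
+in the subsequent `JOIN` condition:
+
+```
+SELECT s.FirstName, s2.SongName
+FROM Singers AS s JOIN Songs AS s2 ON s.SingerID = s2.SingerID;
+```
+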
+### Ambiguous aliases
+
+Beam provides an error if a name is ambiguous, meaning it can resolve to
+more than one unique object.
+
+Examples:
+
+This query contains column names that conflict between tables, since both
+`Singers` and `Songs` have a column named `SingerID`:
+
+```
+SELECT SingerID
+FROM Singers, Songs;
+```
+
+### Implicit aliases
+
+In the `SELECT` list, if there is an expression that does not have an explicit
+alias, Beam assigns an implicit alias according to the following rules.
+There can be multiple columns with the same alias in the `SELECT` list.
+
+-   For identifiers, the alias is the identifier. For example, `SELECT abc`
+    implies `AS abc`.
+-   For path expressions, the alias is the last identifier in the path. For
+    example, `SELECT abc.def.ghi` implies `AS ghi`.
+-   For field access using the "dot" member field access operator, the alias is
+    the field name. For example, `SELECT (struct_function()).fname` implies `AS
+    fname`.
+
+In all other cases, there is no implicit alias, so the column is anonymous and
+cannot be referenced by name. The data from that column will still be returned
+and the displayed query results may have a generated label for that column, but
+the label cannot be used like an alias.
+
+In a `FROM` clause, `from_item`s are not required to have an alias. The
+following rules apply:
+
+If there is an expression that does not have an explicit alias, Beam assigns
+an implicit alias in these cases:
+
+-   For identifiers, the alias is the identifier. For example, `FROM abc`
+    implies `AS abc`.
+-   For path expressions, the alias is the last identifier in the path. For
+    example, `FROM abc.def.ghi` implies `AS ghi`.
+
+Table subqueries do not have implicit aliases.
+
+`FROM UNNEST(x)` does not have an implicit alias.
+
+> Portions of this page are modifications based on
+> [work](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax)
+> created and
+> [shared by Google](https://developers.google.com/terms/site-policies)
+> and used according to terms described in the [Creative Commons 3.0
+> Attribution License](http://creativecommons.org/licenses/by/3.0/).
diff --git a/website/src/documentation/dsls/sql/shell.md b/website/src/documentation/dsls/sql/shell.md
index 6f9c32c..4ef670a 100644
--- a/website/src/documentation/dsls/sql/shell.md
+++ b/website/src/documentation/dsls/sql/shell.md
@@ -59,17 +59,17 @@
 
 ## Declaring Tables
 
-Before reading data from a source or writing data to a destination, you must declare a virtual table using the `CREATE TABLE` statement. For example, if you have a local CSV file `"test-file.csv"` in the current folder, you can create a table with the following statement:
+Before reading data from a source or writing data to a destination, you must declare a virtual table using the `CREATE EXTERNAL TABLE` statement. For example, if you have a local CSV file `"test-file.csv"` in the current folder, you can create a table with the following statement:
 
 ```
-0: BeamSQL> CREATE TABLE csv_file (field1 VARCHAR, field2 INTEGER) TYPE text LOCATION 'test-file.csv';
+0: BeamSQL> CREATE EXTERNAL TABLE csv_file (field1 VARCHAR, field2 INTEGER) TYPE text LOCATION 'test-file.csv';
 
 No rows affected (0.042 seconds)
 ```
 
-The `CREATE TABLE` statement registers the CSV file as a table in Beam SQL and specifies the table's schema. This statement does not directly create a persistent physical table; it only describes the source/sink to Beam SQL so that you can use the table in the queries that read data and write data.
+The `CREATE EXTERNAL TABLE` statement registers the CSV file as a table in Beam SQL and specifies the table's schema. This statement does not directly create a persistent physical table; it only describes the source/sink to Beam SQL so that you can use the table in the queries that read data and write data.
 
-_For more information about `CREATE TABLE` syntax and supported table types, see the [CREATE TABLE reference page]({{ site.baseurl }}/documentation/dsls/sql/create-table/)._
+_For more information about `CREATE EXTERNAL TABLE` syntax and supported table types, see the [CREATE EXTERNAL TABLE reference page]({{ site.baseurl }}/documentation/dsls/sql/create-external-table/)._
 
 ## Reading and Writing Data
 
diff --git a/website/src/documentation/dsls/sql/walkthrough.md b/website/src/documentation/dsls/sql/walkthrough.md
index 57fa8fb..8b8cec7 100644
--- a/website/src/documentation/dsls/sql/walkthrough.md
+++ b/website/src/documentation/dsls/sql/walkthrough.md
@@ -27,10 +27,9 @@
 Before applying a SQL query to a `PCollection`, the data in the collection must
 be in `Row` format. A `Row` represents a single, immutable record in a Beam SQL
 `PCollection`. The names and types of the fields/columns in the row are defined
-by its associated [Schema]({{ site.baseurl }}/documentation/sdks/javadoc/{{
+by its associated [Schema](https://beam.apache.org/releases/javadoc/{{
 site.release_latest }}/index.html?org/apache/beam/sdk/schemas/Schema.html).
-You can use the [Schema.builder()]({{ site.baseurl
-}}/documentation/sdks/javadoc/{{ site.release_latest
+You can use the [Schema.builder()](https://beam.apache.org/releases/javadoc/{{ site.release_latest
 }}/index.html?org/apache/beam/sdk/schemas/Schema.html) to create
 `Schemas`. See [Data
 Types]({{ site.baseurl }}/documentation/dsls/sql/data-types) for more details on supported primitive data types.
@@ -111,7 +110,7 @@
 
 ## SqlTransform
 
-[`SqlTransform.query(queryString)`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/extensions/sql/SqlTransform.html) method is the only API to create a `PTransform`
+[`SqlTransform.query(queryString)`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/extensions/sql/SqlTransform.html) method is the only API to create a `PTransform`
 from a string representation of the SQL query. You can apply this `PTransform`
 to either a single `PCollection` or a `PCollectionTuple` which holds multiple
 `PCollections`:
diff --git a/website/src/documentation/io/testing.md b/website/src/documentation/io/testing.md
index 7e212bc..abfa3a6 100644
--- a/website/src/documentation/io/testing.md
+++ b/website/src/documentation/io/testing.md
@@ -97,7 +97,7 @@
 
 ### Implementing unit tests {#implementing-unit-tests}
 
-A general guide to writing Unit Tests for all transforms can be found in the [PTransform Style Guide](https://beam.apache.org/contribute/ptransform-style-guide/#testing ). We have expanded on a few important points below.
+A general guide to writing Unit Tests for all transforms can be found in the [PTransform Style Guide]({{ site.baseurl }}/contribute/ptransform-style-guide/#testing). We have expanded on a few important points below.
 
 If you are using the `Source` API, make sure to exhaustively unit-test your code. A minor implementation error can lead to data corruption or data loss (such as skipping or duplicating records) that can be hard for your users to detect. Also look into using <span class="language-java">`SourceTestUtils`</span><span class="language-py">`source_test_utils`</span> - it is a key piece of testing `Source` implementations.
 
@@ -164,13 +164,13 @@
 
 You won’t need to invoke PerfKit Benchmarker directly. Run `./gradlew performanceTest` task in project's root directory, passing kubernetes scripts of your choice (located in .test_infra/kubernetes directory). It will setup PerfKitBenchmarker for you.  
 
-Example run with the [Direct](https://beam.apache.org/documentation/runners/direct/) runner:
+Example run with the [Direct]({{ site.baseurl }}/documentation/runners/direct/) runner:
 ```
 ./gradlew performanceTest -DpkbLocation="/Users/me/PerfKitBenchmarker/pkb.py" -DintegrationTestPipelineOptions='["--numberOfRecords=1000"]' -DitModule=sdks/java/io/jdbc/ -DintegrationTest=org.apache.beam.sdk.io.jdbc.JdbcIOIT -DkubernetesScripts="/Users/me/beam/.test-infra/kubernetes/postgres/postgres-service-for-local-dev.yml" -DbeamITOptions="/Users/me/beam/.test-infra/kubernetes/postgres/pkb-config-local.yml" -DintegrationTestRunner=direct
 ```
 
 
-Example run with the [Google Cloud Dataflow](https://beam.apache.org/documentation/runners/dataflow/) runner:
+Example run with the [Google Cloud Dataflow]({{ site.baseurl }}/documentation/runners/dataflow/) runner:
 ```
 ./gradlew performanceTest -DpkbLocation="/Users/me/PerfKitBenchmarker/pkb.py" -DintegrationTestPipelineOptions='["--numberOfRecords=1000", "--project=GOOGLE_CLOUD_PROJECT", "--tempRoot=GOOGLE_STORAGE_BUCKET"]' -DitModule=sdks/java/io/jdbc/ -DintegrationTest=org.apache.beam.sdk.io.jdbc.JdbcIOIT -DkubernetesScripts="/Users/me/beam/.test-infra/kubernetes/postgres/postgres-service-for-local-dev.yml" -DbeamITOptions="/Users/me/beam/.test-infra/kubernetes/postgres/pkb-config-local.yml" -DintegrationTestRunner=dataflow
 ```
diff --git a/website/src/documentation/pipelines/test-your-pipeline.md b/website/src/documentation/pipelines/test-your-pipeline.md
index e783561..1306150 100644
--- a/website/src/documentation/pipelines/test-your-pipeline.md
+++ b/website/src/documentation/pipelines/test-your-pipeline.md
@@ -174,7 +174,7 @@
 You can use the `Create` transform to create a `PCollection` out of a standard in-memory collection class, such as Java `List`. See [Creating a PCollection]({{ site.baseurl }}/documentation/programming-guide/#creating-a-pcollection) for more information.
 
 ### PAssert
-[PAssert]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/PAssert.html) is a class included in the Beam Java SDK  that is an assertion on the contents of a `PCollection`. You can use `PAssert`to verify that a `PCollection` contains a specific set of expected elements.
+[PAssert](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/PAssert.html) is a class included in the Beam Java SDK that is an assertion on the contents of a `PCollection`. You can use `PAssert` to verify that a `PCollection` contains a specific set of expected elements.
 
 For a given `PCollection`, you can use `PAssert` to verify the contents as follows:
 
@@ -200,7 +200,7 @@
 </dependency>
 ```
 
-For more information on how these classes work, see the [org.apache.beam.sdk.testing]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/package-summary.html) package documentation.
+For more information on how these classes work, see the [org.apache.beam.sdk.testing](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/package-summary.html) package documentation.
 
 ### An Example Test for a Composite Transform
 
diff --git a/website/src/documentation/programming-guide.md b/website/src/documentation/programming-guide.md
index 9eb8db5..f7b1996 100644
--- a/website/src/documentation/programming-guide.md
+++ b/website/src/documentation/programming-guide.md
@@ -106,7 +106,7 @@
 
 The `Pipeline` abstraction encapsulates all the data and steps in your data
 processing task. Your Beam driver program typically starts by constructing a
-<span class="language-java">[Pipeline]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/Pipeline.html)</span>
+<span class="language-java">[Pipeline](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/Pipeline.html)</span>
 <span class="language-py">[Pipeline](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/pipeline.py)</span>
 object, and then using that object as the basis for creating the pipeline's data
 sets as `PCollection`s and its operations as `Transform`s.
@@ -234,7 +234,7 @@
 
 ## 3. PCollections {#pcollections}
 
-The <span class="language-java">[PCollection]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/values/PCollection.html)</span>
+The <span class="language-java">[PCollection](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/values/PCollection.html)</span>
 <span class="language-py">`PCollection`</span> abstraction represents a
 potentially distributed, multi-element data set. You can think of a
 `PCollection` as "pipeline" data; Beam transforms use `PCollection` objects as
@@ -924,7 +924,7 @@
 
 #### 4.2.4. Combine {#combine}
 
-<span class="language-java">[`Combine`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Combine.html)</span>
+<span class="language-java">[`Combine`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Combine.html)</span>
 <span class="language-py">[`Combine`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/transforms/core.py)</span>
 is a Beam transform for combining collections of elements or values in your
 data. `Combine` has variants that work on entire `PCollection`s, and some that
@@ -1153,7 +1153,7 @@
 
 #### 4.2.5. Flatten {#flatten}
 
-<span class="language-java">[`Flatten`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Flatten.html)</span>
+<span class="language-java">[`Flatten`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Flatten.html)</span>
 <span class="language-py">[`Flatten`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/transforms/core.py)</span> and
 is a Beam transform for `PCollection` objects that store the same data type.
 `Flatten` merges multiple `PCollection` objects into a single logical
@@ -1202,7 +1202,7 @@
 
 #### 4.2.6. Partition {#partition}
 
-<span class="language-java">[`Partition`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Partition.html)</span>
+<span class="language-java">[`Partition`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Partition.html)</span>
 <span class="language-py">[`Partition`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/transforms/core.py)</span>
 is a Beam transform for `PCollection` objects that store the same data
 type. `Partition` splits a single `PCollection` into a fixed number of smaller
@@ -1587,8 +1587,8 @@
 
 The Beam SDK comes packed with many useful composite transforms. See the API
 reference pages for a list of transforms:
-  * [Pre-written Beam transforms for Java]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/package-summary.html)
-  * [Pre-written Beam transforms for Python]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.transforms.html)
+  * [Pre-written Beam transforms for Java](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/package-summary.html)
+  * [Pre-written Beam transforms for Python](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.transforms.html)
 
 #### 4.6.1. An example composite transform {#composite-transform-example}
 
@@ -2164,7 +2164,7 @@
 To use windowing with fixed data sets, you can assign your own timestamps to
 each element. To assign timestamps to elements, use a `ParDo` transform with a
 `DoFn` that outputs each element with a new timestamp (for example, the
-[WithTimestamps]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/WithTimestamps.html)
+[WithTimestamps](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/WithTimestamps.html)
 transform in the Beam SDK for Java).
 
 To illustrate how windowing with a bounded `PCollection` can affect how your
diff --git a/website/src/documentation/runners/dataflow.md b/website/src/documentation/runners/dataflow.md
index 99e2b6d..1cd28ba 100644
--- a/website/src/documentation/runners/dataflow.md
+++ b/website/src/documentation/runners/dataflow.md
@@ -203,8 +203,8 @@
 </table>
 
 See the reference documentation for the
-<span class="language-java">[DataflowPipelineOptions]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/runners/dataflow/options/DataflowPipelineOptions.html)</span>
-<span class="language-py">[`PipelineOptions`]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.options.pipeline_options.html#apache_beam.options.pipeline_options.PipelineOptions)</span>
+<span class="language-java">[DataflowPipelineOptions](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/runners/dataflow/options/DataflowPipelineOptions.html)</span>
+<span class="language-py">[`PipelineOptions`](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.options.pipeline_options.html#apache_beam.options.pipeline_options.PipelineOptions)</span>
 interface (and any subinterfaces) for additional pipeline configuration options.
 
 ## Additional information and caveats {#additional-info}
diff --git a/website/src/documentation/runners/direct.md b/website/src/documentation/runners/direct.md
index ce2e8d3..f61619f 100644
--- a/website/src/documentation/runners/direct.md
+++ b/website/src/documentation/runners/direct.md
@@ -40,11 +40,11 @@
 Here are some resources with information about how to test your pipelines.
 <ul>
   <!-- Java specific links -->
-  <li class="language-java"><a href="{{ site.baseurl }}/blog/2016/10/20/test-stream.html">Testing Unbounded Pipelines in Apache Beam</a> talks about the use of Java classes <a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/PAssert.html">PAssert</a> and <a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/TestStream.html">TestStream</a> to test your pipelines.</li>
-  <li class="language-java">The <a href="{{ site.baseurl }}/get-started/wordcount-example/#testing-your-pipeline-with-asserts">Apache Beam WordCount Walkthrough</a> contains an example of logging and testing a pipeline with <a href="{{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/PAssert.html">PAssert</a>.</li>
+  <li class="language-java"><a href="{{ site.baseurl }}/blog/2016/10/20/test-stream.html">Testing Unbounded Pipelines in Apache Beam</a> talks about the use of Java classes <a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/PAssert.html">PAssert</a> and <a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/TestStream.html">TestStream</a> to test your pipelines.</li>
+  <li class="language-java">The <a href="{{ site.baseurl }}/get-started/wordcount-example/#testing-your-pipeline-with-asserts">Apache Beam WordCount Walkthrough</a> contains an example of logging and testing a pipeline with <a href="https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/testing/PAssert.html">PAssert</a>.</li>
 
   <!-- Python specific links -->
-  <li class="language-py">The <a href="{{ site.baseurl }}/get-started/wordcount-example/#testing-your-pipeline-with-asserts">Apache Beam WordCount Walkthrough</a> contains an example of logging and testing a pipeline with <a href="{{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.testing.util.html#apache_beam.testing.util.assert_that">assert_that</a>.</li>
+  <li class="language-py">The <a href="{{ site.baseurl }}/get-started/wordcount-example/#testing-your-pipeline-with-asserts">Apache Beam WordCount Walkthrough</a> contains an example of logging and testing a pipeline with <a href="https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.testing.util.html#apache_beam.testing.util.assert_that">assert_that</a>.</li>
 </ul>
 
 ## Direct Runner prerequisites and setup
@@ -68,15 +68,15 @@
 When executing your pipeline from the command-line, set `runner` to `direct` or `DirectRunner`. The default values for the other pipeline options are generally sufficient.
 
 See the reference documentation for the
-<span class="language-java">[`DirectOptions`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/runners/direct/DirectOptions.html)</span>
-<span class="language-py">[`DirectOptions`]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.options.pipeline_options.html#apache_beam.options.pipeline_options.DirectOptions)</span>
+<span class="language-java">[`DirectOptions`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/runners/direct/DirectOptions.html)</span>
+<span class="language-py">[`DirectOptions`](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.options.pipeline_options.html#apache_beam.options.pipeline_options.DirectOptions)</span>
 interface for defaults and additional pipeline configuration options.
 
 ## Additional information and caveats
 
 ### Memory considerations
 
-Local execution is limited by the memory available in your local environment. It is highly recommended that you run your pipeline with data sets small enough to fit in local memory. You can create a small in-memory data set using a <span class="language-java">[`Create`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Create.html)</span><span class="language-py">[`Create`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/transforms/core.py)</span> transform, or you can use a <span class="language-java">[`Read`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/io/Read.html)</span><span class="language-py">[`Read`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/io/iobase.py)</span> transform to work with small local or remote files.
+Local execution is limited by the memory available in your local environment. It is highly recommended that you run your pipeline with data sets small enough to fit in local memory. You can create a small in-memory data set using a <span class="language-java">[`Create`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/transforms/Create.html)</span><span class="language-py">[`Create`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/transforms/core.py)</span> transform, or you can use a <span class="language-java">[`Read`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/sdk/io/Read.html)</span><span class="language-py">[`Read`](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/io/iobase.py)</span> transform to work with small local or remote files.
 
 ### Streaming execution
 
diff --git a/website/src/documentation/runners/flink.md b/website/src/documentation/runners/flink.md
index a2cac75..ccd9df8 100644
--- a/website/src/documentation/runners/flink.md
+++ b/website/src/documentation/runners/flink.md
@@ -177,7 +177,7 @@
 </tr>
 </table>
 
-See the reference documentation for the  <span class="language-java">[FlinkPipelineOptions]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/runners/flink/FlinkPipelineOptions.html)</span><span class="language-py">[PipelineOptions](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/options/pipeline_options.py)</span> interface (and its subinterfaces) for the complete list of pipeline configuration options.
+See the reference documentation for the  <span class="language-java">[FlinkPipelineOptions](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/index.html?org/apache/beam/runners/flink/FlinkPipelineOptions.html)</span><span class="language-py">[PipelineOptions](https://github.com/apache/beam/blob/master/sdks/python/apache_beam/options/pipeline_options.py)</span> interface (and its subinterfaces) for the complete list of pipeline configuration options.
 
 ## Additional information and caveats
 
diff --git a/website/src/documentation/sdks/java.md b/website/src/documentation/sdks/java.md
index 0348d26..b007808 100644
--- a/website/src/documentation/sdks/java.md
+++ b/website/src/documentation/sdks/java.md
@@ -27,7 +27,7 @@
 
 Get started with the [Beam Programming Model]({{ site.baseurl }}/documentation/programming-guide/) to learn the basic concepts that apply to all SDKs in Beam.
 
-See the [Java API Reference]({{ site.baseurl }}/documentation/sdks/javadoc/) for more information on individual APIs.
+See the [Java API Reference](https://beam.apache.org/releases/javadoc/) for more information on individual APIs.
 
 
 ## Supported Features
diff --git a/website/src/documentation/sdks/python.md b/website/src/documentation/sdks/python.md
index ac11006..ae19ee6 100644
--- a/website/src/documentation/sdks/python.md
+++ b/website/src/documentation/sdks/python.md
@@ -25,7 +25,7 @@
 
 Get started with the [Beam Python SDK quickstart]({{ site.baseurl }}/get-started/quickstart-py) to set up your Python development environment, get the Beam SDK for Python, and run an example pipeline. Then, read through the [Beam programming guide]({{ site.baseurl }}/documentation/programming-guide) to learn the basic concepts that apply to all SDKs in Beam.
 
-See the [Python API reference]({{ site.baseurl }}/documentation/sdks/pydoc/) for more information on individual APIs.
+See the [Python API reference](https://beam.apache.org/releases/pydoc/) for more information on individual APIs.
 
 ## Python streaming pipelines
 
diff --git a/website/src/get-started/downloads.md b/website/src/get-started/downloads.md
index 608774f..34d02e1 100644
--- a/website/src/get-started/downloads.md
+++ b/website/src/get-started/downloads.md
@@ -71,7 +71,7 @@
 * minor version for new functionality added in a backward-compatible manner
 * incremental version for forward-compatible bug fixes
 
-Please note that APIs marked [`@Experimental`]({{ site.baseurl }}/documentation/sdks/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/annotations/Experimental.html)
+Please note that APIs marked [`@Experimental`](https://beam.apache.org/releases/javadoc/{{ site.release_latest }}/org/apache/beam/sdk/annotations/Experimental.html)
 may change at any point and are not guaranteed to remain compatible across versions.
 
 Additionally, any API may change before the first stable release, i.e., between
@@ -79,6 +79,13 @@
 
 ## Releases
 
+### 2.7.0 (2018-10-02)
+Official [source code download](https://dist.apache.org/repos/dist/release/beam/2.7.0/apache-beam-2.7.0-source-release.zip)
+[SHA-512](https://dist.apache.org/repos/dist/release/beam/2.7.0/apache-beam-2.7.0-source-release.zip.sha512)
+[signature](https://dist.apache.org/repos/dist/release/beam/2.7.0/apache-beam-2.7.0-source-release.zip.asc).
+
+[Release notes](https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12319527&version=12343654).
+
 ### 2.6.0 (2018-08-08)
 Official [source code download](https://archive.apache.org/dist/beam/2.6.0/apache-beam-2.6.0-source-release.zip)
 [SHA-512](https://archive.apache.org/dist/beam/2.6.0/apache-beam-2.6.0-source-release.zip.sha512)
diff --git a/website/src/get-started/quickstart-java.md b/website/src/get-started/quickstart-java.md
index 12e98d9..35bb74d 100644
--- a/website/src/get-started/quickstart-java.md
+++ b/website/src/get-started/quickstart-java.md
@@ -162,7 +162,7 @@
 
 {:.runner-dataflow}
 ```
-Make sure you complete the setup steps at https://beam.apache.org/documentation/runners/dataflow/#setup
+Make sure you complete the setup steps at {{ site.baseurl }}/documentation/runners/dataflow/#setup
 
 $ mvn compile exec:java -Dexec.mainClass=org.apache.beam.examples.WordCount \
      -Dexec.args="--runner=DataflowRunner --project=<your-gcp-project> \
@@ -213,7 +213,7 @@
 
 {:.runner-dataflow}
 ```
-Make sure you complete the setup steps at https://beam.apache.org/documentation/runners/dataflow/#setup
+Make sure you complete the setup steps at {{ site.baseurl }}/documentation/runners/dataflow/#setup
 
 PS> mvn compile exec:java -D exec.mainClass=org.apache.beam.examples.WordCount `
  -D exec.args="--runner=DataflowRunner --project=<your-gcp-project> `
@@ -363,7 +363,7 @@
 ## Next Steps
 
 * Learn more about the [Beam SDK for Java]({{ site.baseurl }}/documentation/sdks/java/)
-  and look through the [Java SDK API reference]({{ site.baseurl }}/documentation/sdks/javadoc).
+  and look through the [Java SDK API reference](https://beam.apache.org/releases/javadoc).
 * Walk through these WordCount examples in the [WordCount Example Walkthrough]({{ site.baseurl }}/get-started/wordcount-example).
 * Dive in to some of our favorite [articles and presentations]({{ site.baseurl }}/documentation/resources).
 * Join the Beam [users@]({{ site.baseurl }}/community/contact-us) mailing list.
diff --git a/website/src/get-started/quickstart-py.md b/website/src/get-started/quickstart-py.md
index b199c5e..f3d5d1b 100644
--- a/website/src/get-started/quickstart-py.md
+++ b/website/src/get-started/quickstart-py.md
@@ -190,7 +190,7 @@
 {:.runner-dataflow}
 ```
 # As part of the initial setup, install Google Cloud Platform specific extra components. Make sure you
-# complete the setup steps at https://beam.apache.org/documentation/runners/dataflow/#setup
+# complete the setup steps at {{ site.baseurl }}/documentation/runners/dataflow/#setup
 pip install apache-beam[gcp]
 python -m apache_beam.examples.wordcount --input gs://dataflow-samples/shakespeare/kinglear.txt \
                                          --output gs://<your-gcs-bucket>/counts \
@@ -207,7 +207,7 @@
 ## Next Steps
 
 * Learn more about the [Beam SDK for Python]({{ site.baseurl }}/documentation/sdks/python/)
-  and look through the [Python SDK API reference]({{ site.baseurl }}/documentation/sdks/pydoc).
+  and look through the [Python SDK API reference](https://beam.apache.org/releases/pydoc).
 * Walk through these WordCount examples in the [WordCount Example Walkthrough]({{ site.baseurl }}/get-started/wordcount-example).
 * Dive in to some of our favorite [articles and presentations]({{ site.baseurl }}/documentation/resources).
 * Join the Beam [users@]({{ site.baseurl }}/community/contact-us) mailing list.
diff --git a/website/src/get-started/wordcount-example.md b/website/src/get-started/wordcount-example.md
index 684f5ac..539d9d7 100644
--- a/website/src/get-started/wordcount-example.md
+++ b/website/src/get-started/wordcount-example.md
@@ -1148,7 +1148,7 @@
 dataset and all of the data can be processed together. For bounded datasets,
 the question to ask is "Do I have all of the data?" If data continuously
 arrives (such as an endless stream of game scores in the
-[Mobile gaming example](https://beam.apache.org/get-started/mobile-gaming-example/),
+[Mobile gaming example]({{ site.baseurl }}/get-started/mobile-gaming-example/)),
 it is an unbounded dataset. An unbounded dataset is never available for
 processing at any one time, so the data must be processed using a streaming
 pipeline that runs continuously. The dataset will only be complete up to a
@@ -1390,7 +1390,7 @@
 
 This example uses an unbounded dataset as input. The code reads Pub/Sub
 messages from a Pub/Sub subscription or topic using
-[`beam.io.ReadStringsFromPubSub`]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.pubsub.html#apache_beam.io.gcp.pubsub.ReadStringsFromPubSub).
+[`beam.io.ReadStringsFromPubSub`](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.pubsub.html#apache_beam.io.gcp.pubsub.ReadStringsFromPubSub).
 
 ```java
   // This example is not currently available for the Beam SDK for Java.
@@ -1416,7 +1416,7 @@
 
 This example uses an unbounded `PCollection` and streams the results to
 Google Pub/Sub. The code formats the results and writes them to a Pub/Sub topic
-using [`beam.io.WriteStringsToPubSub`]({{ site.baseurl }}/documentation/sdks/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.pubsub.html#apache_beam.io.gcp.pubsub.WriteStringsToPubSub).
+using [`beam.io.WriteStringsToPubSub`](https://beam.apache.org/releases/pydoc/{{ site.release_latest }}/apache_beam.io.gcp.pubsub.html#apache_beam.io.gcp.pubsub.WriteStringsToPubSub).
 
 ```java
   // This example is not currently available for the Beam SDK for Java.