HIVE-27630: Iceberg: Fast forward branch. (#4627). (Ayush Saxena, reviewed by Denys Kuzmenko, zhangbutao)
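
Usage sketch (table and branch names come from the qtests added below; per the analyzer change, the single-argument form defaults the source branch to 'main'):

    -- fast-forward branch test01 to the current state of branch test1
    alter table ice01 execute fast-forward 'test01' 'test1';

    -- source branch defaults to 'main': fast-forward main to branch test1
    alter table ice01 execute fast-forward 'test1';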

diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
index 19d25b6..2946bb9 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
@@ -791,6 +791,12 @@ public void executeOperation(org.apache.hadoop.hive.ql.metadata.Table hmsTable,
             hmsTable.getTableName(), setSnapshotVersionSpec.getSnapshotId());
         IcebergTableUtil.setCurrentSnapshot(icebergTable, setSnapshotVersionSpec.getSnapshotId());
         break;
+      case FAST_FORWARD:
+        AlterTableExecuteSpec.FastForwardSpec fastForwardSpec =
+            (AlterTableExecuteSpec.FastForwardSpec) executeSpec.getOperationParams();
+        IcebergTableUtil.fastForwardBranch(icebergTable, fastForwardSpec.getSourceBranch(),
+            fastForwardSpec.getTargetBranch());
+        break;
       default:
         throw new UnsupportedOperationException(
             String.format("Operation type %s is not supported", executeSpec.getOperationType().name()));
diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java
index f837cb8..6e76d23 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/IcebergTableUtil.java
@@ -242,4 +242,15 @@ public static void setCurrentSnapshot(Table table, Long value) {
     manageSnapshots.setCurrentSnapshot(value);
     manageSnapshots.commit();
   }
+
+  /**
+   * Fast-forwards the source branch to the current state of the target branch.
+   * @param table the iceberg table
+   * @param sourceBranch the branch to be fast-forwarded
+   * @param targetBranch the branch to fast-forward the source branch to
+   */
+  public static void fastForwardBranch(Table table, String sourceBranch, String targetBranch) {
+    LOG.debug("Fast-forwarding iceberg table {} branch {} to branch {}", table.name(), sourceBranch, targetBranch);
+    table.manageSnapshots().fastForwardBranch(sourceBranch, targetBranch).commit();
+  }
 }
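
Note: fastForwardBranch delegates to Iceberg's ManageSnapshots.fastForwardBranch(source, target).commit(), which only succeeds when the current snapshot of the source branch is an ancestor of the target branch. A minimal sketch of the failure mode, using the table and branches from the negative qtest below:

    -- both 'test1' and 'main' have received new snapshots, so test1 is no
    -- longer an ancestor of main and the fast-forward is rejected:
    alter table ice01 execute fast-forward 'test1' 'main';
    -- FAILED: ... Cannot fast-forward: test1 is not an ancestor of main
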
diff --git a/iceberg/iceberg-handler/src/test/queries/negative/fastforward_iceberg_branch_conflict.q b/iceberg/iceberg-handler/src/test/queries/negative/fastforward_iceberg_branch_conflict.q
new file mode 100644
index 0000000..e8ce5b5
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/queries/negative/fastforward_iceberg_branch_conflict.q
@@ -0,0 +1,25 @@
+-- SORT_QUERY_RESULTS
+set hive.explain.user=false;
+set hive.fetch.task.conversion=more;
+
+create external table ice01(a int, b string, c int) stored by iceberg stored as orc tblproperties ('format-version'='2');
+
+insert into ice01 values (1, 'one', 50), (2, 'two', 51), (111, 'one', 55);
+
+select * from ice01;
+
+-- create a branch named test1
+alter table ice01 create branch test1;
+
+select * from default.ice01.branch_test1;
+
+-- insert into main branch
+insert into ice01 values (10, 'ten', 53), (11, 'eleven', 52), (12, 'twelve', 56);
+select * from ice01;
+
+-- insert into the test1 (source) branch so it diverges from main
+insert into default.ice01.branch_test1 values(15, 'five', 89);
+select * from ice01;
+
+-- fast-forward test1 to main; this must fail because test1 is no longer an ancestor of main
+alter table ice01 execute fast-forward 'test1' 'main';
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/queries/positive/fastforward_iceberg_branch.q b/iceberg/iceberg-handler/src/test/queries/positive/fastforward_iceberg_branch.q
new file mode 100644
index 0000000..ff0451e
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/queries/positive/fastforward_iceberg_branch.q
@@ -0,0 +1,39 @@
+-- SORT_QUERY_RESULTS
+set hive.explain.user=false;
+set hive.fetch.task.conversion=more;
+
+create external table ice01(id int) stored by iceberg stored as orc tblproperties ('format-version'='2');
+
+insert into ice01 values (1), (2), (3), (4);
+
+select * from ice01;
+
+-- create a branch named test1
+alter table ice01 create branch test1;
+select * from default.ice01.branch_test1;
+
+-- create a branch named test01
+alter table ice01 create branch test01;
+
+-- insert into test1 branch
+insert into default.ice01.branch_test1 values (11), (21), (31), (41);
+select * from default.ice01.branch_test1;
+
+explain alter table ice01 execute fast-forward 'test1';
+alter table ice01 execute fast-forward 'test1';
+select * from ice01;
+
+-- fast-forward the test01 branch to test1
+explain alter table ice01 execute fast-forward 'test01' 'test1';
+alter table ice01 execute fast-forward 'test01' 'test1';
+select * from default.ice01.branch_test01;
+
+-- create another branch test2
+alter table ice01 create branch test2;
+
+-- insert values into the test2 branch
+insert into default.ice01.branch_test2 values (12), (22), (32), (42);
+
+-- fast-forward the main branch
+alter table ice01 execute fast-forward 'main' 'test2';
+select * from ice01;
\ No newline at end of file
diff --git a/iceberg/iceberg-handler/src/test/results/negative/fastforward_iceberg_branch_conflict.q.out b/iceberg/iceberg-handler/src/test/results/negative/fastforward_iceberg_branch_conflict.q.out
new file mode 100644
index 0000000..65e418f
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/results/negative/fastforward_iceberg_branch_conflict.q.out
@@ -0,0 +1,92 @@
+PREHOOK: query: create external table ice01(a int, b string, c int) stored by iceberg stored as orc tblproperties ('format-version'='2')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ice01
+POSTHOOK: query: create external table ice01(a int, b string, c int) stored by iceberg stored as orc tblproperties ('format-version'='2')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ice01
+PREHOOK: query: insert into ice01 values (1, 'one', 50), (2, 'two', 51), (111, 'one', 55)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice01
+POSTHOOK: query: insert into ice01 values (1, 'one', 50), (2, 'two', 51), (111, 'one', 55)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice01
+PREHOOK: query: select * from ice01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from ice01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1	one	50
+111	one	55
+2	two	51
+PREHOOK: query: alter table ice01 create branch test1
+PREHOOK: type: ALTERTABLE_CREATEBRANCH
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 create branch test1
+POSTHOOK: type: ALTERTABLE_CREATEBRANCH
+POSTHOOK: Input: default@ice01
+PREHOOK: query: select * from default.ice01.branch_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from default.ice01.branch_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1	one	50
+111	one	55
+2	two	51
+PREHOOK: query: insert into ice01 values (10, 'ten', 53), (11, 'eleven', 52), (12, 'twelve', 56)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice01
+POSTHOOK: query: insert into ice01 values (10, 'ten', 53), (11, 'eleven', 52), (12, 'twelve', 56)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice01
+PREHOOK: query: select * from ice01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from ice01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1	one	50
+10	ten	53
+11	eleven	52
+111	one	55
+12	twelve	56
+2	two	51
+PREHOOK: query: insert into default.ice01.branch_test1 values(15, 'five', 89)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice01
+POSTHOOK: query: insert into default.ice01.branch_test1 values(15, 'five', 89)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice01
+PREHOOK: query: select * from ice01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from ice01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1	one	50
+10	ten	53
+11	eleven	52
+111	one	55
+12	twelve	56
+2	two	51
+PREHOOK: query: alter table ice01 execute fast-forward 'test1' 'main'
+PREHOOK: type: ALTERTABLE_EXECUTE
+PREHOOK: Input: default@ice01
+FAILED: Execution Error, return code 40000 from org.apache.hadoop.hive.ql.ddl.DDLTask. Cannot fast-forward: test1 is not an ancestor of main
diff --git a/iceberg/iceberg-handler/src/test/results/positive/fastforward_iceberg_branch.q.out b/iceberg/iceberg-handler/src/test/results/positive/fastforward_iceberg_branch.q.out
new file mode 100644
index 0000000..be23fc7
--- /dev/null
+++ b/iceberg/iceberg-handler/src/test/results/positive/fastforward_iceberg_branch.q.out
@@ -0,0 +1,190 @@
+PREHOOK: query: create external table ice01(id int) stored by iceberg stored as orc tblproperties ('format-version'='2')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@ice01
+POSTHOOK: query: create external table ice01(id int) stored by iceberg stored as orc tblproperties ('format-version'='2')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@ice01
+PREHOOK: query: insert into ice01 values (1), (2), (3), (4)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice01
+POSTHOOK: query: insert into ice01 values (1), (2), (3), (4)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice01
+PREHOOK: query: select * from ice01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from ice01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+2
+3
+4
+PREHOOK: query: alter table ice01 create branch test1
+PREHOOK: type: ALTERTABLE_CREATEBRANCH
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 create branch test1
+POSTHOOK: type: ALTERTABLE_CREATEBRANCH
+POSTHOOK: Input: default@ice01
+PREHOOK: query: select * from default.ice01.branch_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from default.ice01.branch_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+2
+3
+4
+PREHOOK: query: alter table ice01 create branch test01
+PREHOOK: type: ALTERTABLE_CREATEBRANCH
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 create branch test01
+POSTHOOK: type: ALTERTABLE_CREATEBRANCH
+POSTHOOK: Input: default@ice01
+PREHOOK: query: insert into default.ice01.branch_test1 values (11), (21), (31), (41)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice01
+POSTHOOK: query: insert into default.ice01.branch_test1 values (11), (21), (31), (41)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice01
+PREHOOK: query: select * from default.ice01.branch_test1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from default.ice01.branch_test1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+11
+2
+21
+3
+31
+4
+41
+PREHOOK: query: explain alter table ice01 execute fast-forward 'test1'
+PREHOOK: type: ALTERTABLE_EXECUTE
+PREHOOK: Input: default@ice01
+POSTHOOK: query: explain alter table ice01 execute fast-forward 'test1'
+POSTHOOK: type: ALTERTABLE_EXECUTE
+POSTHOOK: Input: default@ice01
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Execute operation
+      table name: default.ice01
+      spec: AlterTableExecuteSpec{operationType=FAST_FORWARD, operationParams=FastForwardSpec{sourceBranch=main, targetBranch=test1}}
+
+PREHOOK: query: alter table ice01 execute fast-forward 'test1'
+PREHOOK: type: ALTERTABLE_EXECUTE
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 execute fast-forward 'test1'
+POSTHOOK: type: ALTERTABLE_EXECUTE
+POSTHOOK: Input: default@ice01
+PREHOOK: query: select * from ice01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from ice01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+11
+2
+21
+3
+31
+4
+41
+PREHOOK: query: explain alter table ice01 execute fast-forward 'test01' 'test1'
+PREHOOK: type: ALTERTABLE_EXECUTE
+PREHOOK: Input: default@ice01
+POSTHOOK: query: explain alter table ice01 execute fast-forward 'test01' 'test1'
+POSTHOOK: type: ALTERTABLE_EXECUTE
+POSTHOOK: Input: default@ice01
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Execute operation
+      table name: default.ice01
+      spec: AlterTableExecuteSpec{operationType=FAST_FORWARD, operationParams=FastForwardSpec{sourceBranch=test01, targetBranch=test1}}
+
+PREHOOK: query: alter table ice01 execute fast-forward 'test01' 'test1'
+PREHOOK: type: ALTERTABLE_EXECUTE
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 execute fast-forward 'test01' 'test1'
+POSTHOOK: type: ALTERTABLE_EXECUTE
+POSTHOOK: Input: default@ice01
+PREHOOK: query: select * from default.ice01.branch_test01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from default.ice01.branch_test01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+11
+2
+21
+3
+31
+4
+41
+PREHOOK: query: alter table ice01 create branch test2
+PREHOOK: type: ALTERTABLE_CREATEBRANCH
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 create branch test2
+POSTHOOK: type: ALTERTABLE_CREATEBRANCH
+POSTHOOK: Input: default@ice01
+PREHOOK: query: insert into default.ice01.branch_test2 values (12), (22), (32), (42)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@ice01
+POSTHOOK: query: insert into default.ice01.branch_test2 values (12), (22), (32), (42)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@ice01
+PREHOOK: query: alter table ice01 execute fast-forward 'main' 'test2'
+PREHOOK: type: ALTERTABLE_EXECUTE
+PREHOOK: Input: default@ice01
+POSTHOOK: query: alter table ice01 execute fast-forward 'main' 'test2'
+POSTHOOK: type: ALTERTABLE_EXECUTE
+POSTHOOK: Input: default@ice01
+PREHOOK: query: select * from ice01
+PREHOOK: type: QUERY
+PREHOOK: Input: default@ice01
+PREHOOK: Output: hdfs://### HDFS PATH ###
+POSTHOOK: query: select * from ice01
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@ice01
+POSTHOOK: Output: hdfs://### HDFS PATH ###
+1
+11
+12
+2
+21
+22
+3
+31
+32
+4
+41
+42
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
index ddb2285..2fbba1c 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/AlterClauseParser.g
@@ -479,6 +479,8 @@
     -> ^(TOK_ALTERTABLE_EXECUTE KW_EXPIRE_SNAPSHOTS $expireParam)
     | KW_EXECUTE KW_SET_CURRENT_SNAPSHOT LPAREN (snapshotParam=Number) RPAREN
     -> ^(TOK_ALTERTABLE_EXECUTE KW_SET_CURRENT_SNAPSHOT $snapshotParam)
+    | KW_EXECUTE KW_FAST_FORWARD sourceBranch=StringLiteral (targetBranch=StringLiteral)?
+    -> ^(TOK_ALTERTABLE_EXECUTE KW_FAST_FORWARD $sourceBranch $targetBranch?)
     ;
 
 alterStatementSuffixDropBranch
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
index 4b5326b..0f7e4e5 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
@@ -397,6 +397,7 @@
 KW_RETAIN: 'RETAIN';
 KW_RETENTION: 'RETENTION';
 KW_TAG: 'TAG';
+KW_FAST_FORWARD: 'FAST-FORWARD';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
diff --git a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
index 880baf1..31b63e6 100644
--- a/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
+++ b/parser/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
@@ -986,6 +986,7 @@
     | KW_SET_CURRENT_SNAPSHOT
     | KW_BRANCH | KW_SNAPSHOTS | KW_RETAIN | KW_RETENTION
     | KW_TAG
+    | KW_FAST_FORWARD
 ;
 
 //The following SQL2011 reserved keywords are used as function name only, but not as identifiers.
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java
index 591a924..227253d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/execute/AlterTableExecuteAnalyzer.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.ddl.table.execute;
 
-import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hive.common.TableName;
 import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.common.type.TimestampTZUtil;
@@ -35,6 +34,7 @@
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec;
 import org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.ExpireSnapshotsSpec;
+import org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.FastForwardSpec;
 import org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.RollbackSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
@@ -45,6 +45,7 @@
 import java.util.regex.Pattern;
 
 import static org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.ExecuteOperationType.EXPIRE_SNAPSHOT;
+import static org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.ExecuteOperationType.FAST_FORWARD;
 import static org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.ExecuteOperationType.ROLLBACK;
 import static org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.ExecuteOperationType.SET_CURRENT_SNAPSHOT;
 import static org.apache.hadoop.hive.ql.parse.AlterTableExecuteSpec.RollbackSpec.RollbackType.TIME;
@@ -107,6 +108,22 @@ protected void analyzeCommand(TableName tableName, Map<String, String> partition
           new AlterTableExecuteSpec(SET_CURRENT_SNAPSHOT,
               new AlterTableExecuteSpec.SetCurrentSnapshotSpec(Long.valueOf(child.getText())));
       desc = new AlterTableExecuteDesc(tableName, partitionSpec, spec);
+    } else if (HiveParser.KW_FAST_FORWARD == executeCommandType.getType()) {
+      String branchName;
+      String targetBranchName;
+      ASTNode child1 = (ASTNode) command.getChild(1);
+      if (command.getChildCount() == 2) {
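+        // single-argument form: fast-forward the 'main' branch to the given branch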
+        branchName = "main";
+        targetBranchName = PlanUtils.stripQuotes(child1.getText());
+      } else {
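+        // two-argument form: explicit source and target branch names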
+        ASTNode child2 = (ASTNode) command.getChild(2);
+        branchName = PlanUtils.stripQuotes(child1.getText());
+        targetBranchName = PlanUtils.stripQuotes(child2.getText());
+      }
+
+      AlterTableExecuteSpec spec =
+          new AlterTableExecuteSpec(FAST_FORWARD, new FastForwardSpec(branchName, targetBranchName));
+      desc = new AlterTableExecuteDesc(tableName, partitionSpec, spec);
     }
 
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableExecuteSpec.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableExecuteSpec.java
index bb6fc87..692f73b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableExecuteSpec.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/AlterTableExecuteSpec.java
@@ -36,7 +36,8 @@ public class AlterTableExecuteSpec<T> {
   public enum ExecuteOperationType {
     ROLLBACK,
     EXPIRE_SNAPSHOT,
-    SET_CURRENT_SNAPSHOT
+    SET_CURRENT_SNAPSHOT,
+    FAST_FORWARD
   }
 
   private final ExecuteOperationType operationType;
@@ -161,4 +162,35 @@ public String toString() {
       return MoreObjects.toStringHelper(this).add("snapshotId", snapshotId).toString();
     }
   }
+
+  /**
+   * Value object class that stores the fast-forward operation specific parameters.
+   * <ul>
+   *   <li>source branch: the branch to be fast-forwarded</li>
+   *   <li>target branch: the branch to which the source branch is fast-forwarded</li>
+   * </ul>
+   */
+  public static class FastForwardSpec {
+    private final String sourceBranch;
+    private final String targetBranch;
+
+    public FastForwardSpec(String sourceBranch, String targetBranch) {
+      this.sourceBranch = sourceBranch;
+      this.targetBranch = targetBranch;
+    }
+
+    public String getSourceBranch() {
+      return sourceBranch;
+    }
+
+    public String getTargetBranch() {
+      return targetBranch;
+    }
+
+    @Override
+    public String toString() {
+      return MoreObjects.toStringHelper(this).add("sourceBranch", sourceBranch)
+          .add("targetBranch", targetBranch).toString();
+    }
+  }
 }
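
For reference, FastForwardSpec's toString shows up in the explain plan of the new statement; from the positive qtest above:

    explain alter table ice01 execute fast-forward 'test01' 'test1';
    -- spec: AlterTableExecuteSpec{operationType=FAST_FORWARD, operationParams=FastForwardSpec{sourceBranch=test01, targetBranch=test1}}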