Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo into window_function

Conflicts:
	tajo-core/src/main/java/org/apache/tajo/engine/parser/HiveQLAnalyzer.java
diff --git a/.travis.yml b/.travis.yml
index e509f80..e334a77 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -28,6 +28,12 @@
   - $HOME/.m2
   - $HOME/local
 
+notifications:
+  email:
+  - issues@tajo.apache.org
+  irc: "chat.freenode.net#tajo"
+
+
 install: ./dev-support/travis-install-dependencies.sh
 
-script: mvn clean install -Phcatalog-0.12.0 -q | grep -v 'INFO:' 
+script: mvn clean install -Phcatalog-0.12.0 | grep "^Tests run:\|Running\|Results :"
diff --git a/CHANGES b/CHANGES
index 6f0ad06..3dd67a9 100644
--- a/CHANGES
+++ b/CHANGES
@@ -1,8 +1,15 @@
-Tajo Change Log
+Tajo Change Log 
 
 Release 0.9.0 - unreleased
 
-  NEW FEATURES
+  NEW FEATURES 
+
+    TAJO-847: Supporting MariaDB-based Store, which is compatible with MySQL.
+    (Jinhang Choi via jihoon)
+
+    TAJO-860: Implements TRUNCATE table. (Hyoungjun Kim via hyunsik)
+
+    TAJO-849: Add Parquet storage to HCatalogStore. (jaehwa)
 
     TAJO-494: Extend TajoClient to run a query with a plan context serialized 
     as the JSON form. (jihoon)
@@ -13,8 +20,36 @@
 
     TAJO-791: Implements ADD_DAYS() function. (Hyoungjun Kim via hyunsik)
 
+    TAJO-762: Implements current date/time function (Hyoungjun Kim via hyunsik)
+
   IMPROVEMENT
 
+    TAJO-911: Refactoring Mysql/Maria Catalog Store. (DaeMyung Kang via hyunsik)
+
+    TAJO-853: Refactoring FilterPushDown for OUTER JOIN.
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-840: Improve query result print with counting empty table. (jaehwa)
+
+    TAJO-844: JDBC should be support getTime, getDate, and getTimestamp. 
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-843: implements COALESCE for BOOLEAN, DATE, TIME, TIMESTAMP. 
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-854: Supports INSERT INTO with UNION. (Hyoungjun Kim via jihoon) 
+
+    TAJO-793: CLI should be able to exit when single query is failed.
+    (Hyoungjun Kim via jinho)
+
+    TAJO-846: Clean up the task history in woker. (jinho)
+
+    TAJO-842: NULL handling in JDBC. (Hyoungjun Kim via jinho)
+
+    TAJO-699: Create a table using LIKE. (Prafulla T via hyunsik)
+
+    TAJO-825: Datetime type refactoring. (Hyoungjun Kim via jihoon)
+
     TAJO-811: add simple fifo scheduler support. (jinho)
 
     TAJO-801: Multiple distinct should be supported. (Hyoungjun Kim via hyunsik)
@@ -45,6 +80,84 @@
 
   BUG FIXES
 
+    TAJO-902: Unicode delimiter does not work correctly. (jinho)
+
+    TAJO-905: When to_date() parses some date without day, the result will be 
+    wrong. (hyunsik)
+
+    TAJO-898: Left outer join with union returns empty result. 
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-897: PartitionedTableRewriter is repeated several times with same 
+    table. (Hyoungjun Kim via hyunsik)
+
+    TAJO-891: Complex join conditions with UNION or inline should be supported.
+    (hyunsik)
+    
+    TAJO-899: Nested now() has different value for each task. (Hyoungjun Kim 
+    via hyunsik)
+
+    TAJO-894: Left outer join with partitioned large table and small table 
+    returns empty result. (Hyoungjun Kim via hyunsik)
+
+    TAJO-867: OUTER JOIN with empty result subquery produces a wrong result.
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-881: JOIN with union query occurs NPE. (Hyoungjun Kim via hyunsik)
+
+    TAJO-884: complex join conditions should be supported in ON clause.
+    (hyunsik)
+
+    TAJO-874: Sometimes InvalidOperationException occurs when aggregates
+    TableStat. (Hyoungjun Kim via hyunsik) 
+
+    TAJO-866: COUNT DISTINCT with other aggregation function throws
+    ClassCastException. (Hyoungjun Kim via hyunsik)
+
+    TAJO-882: CLI hangs when a error occurs in the GlobalPlanner.
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-850: OUTER JOIN does not properly handle a NULL.
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-879: Some data is missing in the case of BROADCAST JOIN and
+    multi-column partition. (Hyoungjun Kim via jaehwa)
+
+    TAJO-848: PreLogicalPlanVerifier::visitInsert need to find smaller
+    expressions than target columns for a partitioned table. (jaehwa)
+
+    TAJO-880: NULL in CASE clause occurs Exception. (Hyoungjun Kim via hyunsik)
+
+    TAJO-862: Restore failure of dumped relations. (jihoon)
+
+    TAJO-861: tajo-dump script are not executable. (jinho)
+
+    TAJO-839: If all tables participate in the BROADCAST JOIN, there is some 
+    missing data. (Hyoungjun Kim via jihoon)
+
+    TAJO-868: TestDateTimeFunctions unit test is occasionally failed. (hyunsik)
+
+    TAJO-863: Column order mismatched in the JOIN query with asterisk selection. 
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-851: Timestamp type test of TestSQLExpression::testCastFromTable fails 
+    in jenkins CI test. (Hyoungjun Kim via hyunsik)
+
+    TAJO-830: Some filter conditions with a SUBQUERY are removed by optimizer.
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-819: KillQuery does not work for running query on TajoWorker. (jaehwa)
+
+    TAJO-808: Fix pre-commit build failure. (jinho)
+ 
+    TAJO-827: SUM() overflow in the case of INT4. (Hyoungjun Kim via hyunsik)
+
+    TAJO-832: NPE occurs when Exception's message is null in Task. 
+    (Hyoungjun Kim via hyunsik)
+
+    TAJO-833: NPE occurs when using the column as a alias name in the multiple 
+    DISTINCT. (Hyoungjun Kim via hyunsik)
+
     TAJO-821: IllegalStateException occurs when a NettyClientBase object is created 
     within single thread. (hyoungjunkim via jinho)
 
@@ -61,7 +174,8 @@
     TAJO-800: CLI's meta command should be aware "TABLE_NAME" style. 
     (Hyoungjun Kim via hyunsik)
 
-    TAJO-795: PlannerUtil::joinJoinKeyForEachTable need to handle theta-join. (jaehwa)
+    TAJO-795: PlannerUtil::joinJoinKeyForEachTable need to handle theta-join.
+    (jaehwa)
 
     TAJO-792: Insert table error with database name. 
     (Hyoungjun Kim via hyunsik)
@@ -82,6 +196,15 @@
 
   TASKS
 
+    TAJO-890: Redirect stdout of maven test to /dev/null in Travis CI 
+    script. (hyunsik)
+
+    TAJO-887: Eliminate HiveQL support feature. (hyunsik)
+
+    TAJO-886: Add IRC page to community section in site. (hyunsik)
+
+    TAJO-859: Update site for new committer Alvin Henrick. (hyunsik)
+
     TAJO-817: tajo-core should not skip deploy. (hyunsik)
 
     TAJO-814: Set up Travis CI builds. (hyunsik)
@@ -93,6 +216,8 @@
 
     TAJO-605: Rename Options to KeyValueList. (jinho)
 
+    TAJO-834: Add Travis notification to issues@tajo.a.o and IRC. hyunsik)
+
   SUB TASKS
 
     TAJO-783: Remove yarn-related code from tajo-core. (hyunsik)
diff --git a/dev-support/test-patch.sh b/dev-support/test-patch.sh
index 1a5c247..1978e36 100644
--- a/dev-support/test-patch.sh
+++ b/dev-support/test-patch.sh
@@ -299,7 +299,7 @@
 }
 
 ###############################################################################
-buildTrunk () {
+preBuildTrunk () {
   echo ""
   echo ""
   echo "======================================================================"
@@ -324,6 +324,31 @@
 }
 
 ###############################################################################
+buildTrunk () {
+  echo ""
+  echo ""
+  echo "======================================================================"
+  echo "======================================================================"
+  echo " Build patch to verify the stability and javac, javadoc warnings"
+  echo "======================================================================"
+  echo "======================================================================"
+  echo ""
+  echo ""
+  echo "Compiling $(pwd)"
+  echo "$MVN clean test -DskipTests $TAJO_MVN_OPTS > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
+  $MVN clean test -DskipTests $TAJO_MVN_OPTS > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+  if [[ $? != 0 ]] ; then
+    echo "Trunk compilation is broken?"
+    cleanupAndExit 1
+  fi
+
+  echo ""
+  echo "Generating Javadocs"
+  echo "$MVN test javadoc:javadoc -DskipTests $TAJO_MVN_OPTS > $PATCH_DIR/trunkJavadocsWarnings.txt 2>&1"
+  $MVN test javadoc:javadoc -DskipTests $TAJO_MVN_OPTS > $PATCH_DIR/trunkJavadocWarnings.txt 2>&1
+}
+
+###############################################################################
 ### Check for @author tags in the patch
 checkAuthor () {
   echo ""
@@ -853,6 +878,7 @@
     exit 100
   fi
 fi
+preBuildTrunk
 downloadPatch
 verifyPatch
 PLEVEL=$?
@@ -860,6 +886,13 @@
   submitJiraComment 1
   cleanupAndExit 1
 fi
+applyPatch $PLEVEL
+APPLY_PATCH_RET=$?
+(( RESULT = RESULT + $APPLY_PATCH_RET ))
+if [[ $APPLY_PATCH_RET != 0 ]] ; then
+  submitJiraComment 1
+  cleanupAndExit 1
+fi
 buildTrunk
 checkAuthor
 
@@ -868,13 +901,6 @@
 fi
 checkTests
 (( RESULT = RESULT + $? ))
-applyPatch $PLEVEL
-APPLY_PATCH_RET=$?
-(( RESULT = RESULT + $APPLY_PATCH_RET ))
-if [[ $APPLY_PATCH_RET != 0 ]] ; then
-  submitJiraComment 1
-  cleanupAndExit 1
-fi
 checkJavacWarnings
 JAVAC_RET=$?
 #2 is returned if the code could not compile
diff --git a/tajo-algebra/src/main/java/org/apache/tajo/algebra/CreateTable.java b/tajo-algebra/src/main/java/org/apache/tajo/algebra/CreateTable.java
index f60b571..c74677d 100644
--- a/tajo-algebra/src/main/java/org/apache/tajo/algebra/CreateTable.java
+++ b/tajo-algebra/src/main/java/org/apache/tajo/algebra/CreateTable.java
@@ -47,6 +47,8 @@
   private PartitionMethodDescExpr partition;
   @Expose @SerializedName("IfNotExists")
   private boolean ifNotExists;
+  @Expose @SerializedName("LikeParentTable")
+  private String likeParentTable;
 
   public CreateTable(final String tableName, boolean ifNotExists) {
     super(OpType.CreateTable);
@@ -147,6 +149,15 @@
     return ifNotExists;
   }
 
+  public void setLikeParentTable(String parentTable)  {
+    this.likeParentTable = parentTable;
+  }
+
+  public String getLikeParentTableName()  {
+    return likeParentTable;
+  }
+
+
   @Override
   public int hashCode() {
     return Objects.hashCode(
diff --git a/tajo-algebra/src/main/java/org/apache/tajo/algebra/OpType.java b/tajo-algebra/src/main/java/org/apache/tajo/algebra/OpType.java
index c5a90a2..bc6e89b 100644
--- a/tajo-algebra/src/main/java/org/apache/tajo/algebra/OpType.java
+++ b/tajo-algebra/src/main/java/org/apache/tajo/algebra/OpType.java
@@ -50,6 +50,7 @@
   DropTable(DropTable.class),
   AlterTablespace(AlterTablespace.class),
   AlterTable(AlterTable.class),
+  TruncateTable(TruncateTable.class),
 
   // Insert or Update
   Insert(Insert.class),
diff --git a/tajo-algebra/src/main/java/org/apache/tajo/algebra/TruncateTable.java b/tajo-algebra/src/main/java/org/apache/tajo/algebra/TruncateTable.java
new file mode 100644
index 0000000..ae6efa7
--- /dev/null
+++ b/tajo-algebra/src/main/java/org/apache/tajo/algebra/TruncateTable.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.algebra;
+
+import com.google.gson.annotations.Expose;
+import com.google.gson.annotations.SerializedName;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class TruncateTable extends Expr {
+  @Expose
+  @SerializedName("TableNames")
+  private List<String> tableNames;
+
+  public TruncateTable(final List<String> tableNames) {
+    super(OpType.TruncateTable);
+    this.tableNames = tableNames;
+  }
+
+  @Override
+  public int hashCode() {
+    return tableNames.hashCode();
+  }
+
+  public List<String> getTableNames() {
+    return tableNames;
+  }
+
+  @Override
+  boolean equalsTo(Expr expr) {
+    TruncateTable another = (TruncateTable) expr;
+    return Arrays.equals(tableNames.toArray(new String[]{}), another.tableNames.toArray(new String[]{}));
+  }
+}
diff --git a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/CatalogUtil.java b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/CatalogUtil.java
index 0933fc7..ae00084 100644
--- a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/CatalogUtil.java
+++ b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/CatalogUtil.java
@@ -658,6 +658,14 @@
         if (Type.TEXT_ARRAY.getNumber() <= argTypeNumber && argTypeNumber <= exitingTypeNumber) {
           flag = true;
         }
+      } else if (givenType == Type.BOOLEAN && (definedType == Type.BOOLEAN || definedType == Type.BOOLEAN_ARRAY)) {
+        flag = true;
+      } else if (givenType == Type.DATE && (definedType == Type.DATE || definedType == Type.DATE_ARRAY)) {
+        flag = true;
+      } else if (givenType == Type.TIME && (definedType == Type.TIME || definedType == Type.TIME_ARRAY)) {
+        flag = true;
+      } else if (givenType == Type.TIMESTAMP && (definedType == Type.TIMESTAMP || definedType == Type.TIMESTAMP_ARRAY)) {
+        flag = true;
       }
     }
     return flag;
@@ -707,7 +715,7 @@
   static final String [] RESERVED_KEYWORDS = {
       "AS", "ALL", "AND", "ANY", "ASYMMETRIC", "ASC",
       "BOTH",
-      "CASE", "CAST", "CREATE", "CROSS",
+      "CASE", "CAST", "CREATE", "CROSS", "CURRENT_DATE", "CURRENT_TIME", "CURRENT_TIMESTAMP",
       "DESC", "DISTINCT",
       "END", "ELSE", "EXCEPT",
       "FALSE", "FULL", "FROM",
diff --git a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/Schema.java b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/Schema.java
index b2dde3d..35d2fe9 100644
--- a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/Schema.java
+++ b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/Schema.java
@@ -230,7 +230,7 @@
       return true;
     }
     if (fieldsByName.containsKey(name)) {
-      if (fieldsByName.size() > 1) {
+      if (fieldsByName.get(name).size() > 1) {
         throw new RuntimeException("Ambiguous Column name");
       }
       return true;
diff --git a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
index 01316bc..c481276 100644
--- a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
+++ b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/StatisticsUtil.java
@@ -65,15 +65,19 @@
             continue;
           }
 
-          agg.setNumDistVals(agg.getNumDistValues() + cs.getNumDistValues());
-          agg.setNumNulls(agg.getNumNulls() + cs.getNumNulls());
-          if (!cs.minIsNotSet() && (agg.minIsNotSet() ||
-              agg.getMinValue().compareTo(cs.getMinValue()) > 0)) {
-            agg.setMinValue(cs.getMinValue());
-          }
-          if (!cs.maxIsNotSet() && (agg.maxIsNotSet() ||
-              agg.getMaxValue().compareTo(cs.getMaxValue()) < 0)) {
-            agg.setMaxValue(stats.getColumnStats().get(i).getMaxValue());
+          try {
+            agg.setNumDistVals(agg.getNumDistValues() + cs.getNumDistValues());
+            agg.setNumNulls(agg.getNumNulls() + cs.getNumNulls());
+            if (!cs.minIsNotSet() && (agg.minIsNotSet() ||
+                agg.getMinValue().compareTo(cs.getMinValue()) > 0)) {
+              agg.setMinValue(cs.getMinValue());
+            }
+            if (!cs.maxIsNotSet() && (agg.maxIsNotSet() ||
+                agg.getMaxValue().compareTo(cs.getMaxValue()) < 0)) {
+              agg.setMaxValue(stats.getColumnStats().get(i).getMaxValue());
+            }
+          } catch (Exception e) {
+            LOG.warn(e.getMessage(), e);
           }
         }
       }
@@ -117,15 +121,19 @@
             LOG.warn("ERROR: One of column stats is NULL (expected column: " + css[i].getColumn() + ")");
             continue;
           }
-          css[i].setNumDistVals(css[i].getNumDistValues() + cs.getNumDistValues());
-          css[i].setNumNulls(css[i].getNumNulls() + cs.getNumNulls());
-          if (!cs.minIsNotSet() && (css[i].minIsNotSet() ||
-              css[i].getMinValue().compareTo(cs.getMinValue()) > 0)) {
-            css[i].setMinValue(cs.getMinValue());
-          }
-          if (!cs.maxIsNotSet() && (css[i].maxIsNotSet() ||
-              css[i].getMaxValue().compareTo(cs.getMaxValue()) < 0)) {
-            css[i].setMaxValue(ts.getColumnStats().get(i).getMaxValue());
+          try {
+            css[i].setNumDistVals(css[i].getNumDistValues() + cs.getNumDistValues());
+            css[i].setNumNulls(css[i].getNumNulls() + cs.getNumNulls());
+            if (!cs.minIsNotSet() && (css[i].minIsNotSet() ||
+                css[i].getMinValue().compareTo(cs.getMinValue()) > 0)) {
+              css[i].setMinValue(cs.getMinValue());
+            }
+            if (!cs.maxIsNotSet() && (css[i].maxIsNotSet() ||
+                css[i].getMaxValue().compareTo(cs.getMaxValue()) < 0)) {
+              css[i].setMaxValue(ts.getColumnStats().get(i).getMaxValue());
+            }
+          } catch (Exception e) {
+            LOG.warn(e.getMessage(), e);
           }
         }
       }
diff --git a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/TableStats.java b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/TableStats.java
index dda8cd3..c04545c 100644
--- a/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/TableStats.java
+++ b/tajo-catalog/tajo-catalog-common/src/main/java/org/apache/tajo/catalog/statistics/TableStats.java
@@ -47,6 +47,10 @@
   @Expose private List<ColumnStats> columnStatses = null; // repeated
 
   public TableStats() {
+    reset();
+  }
+
+  public void reset() {
     numRows = 0l;
     numBytes = 0l;
     numBlocks = 0;
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml
index a3cb99b..2c939d4 100644
--- a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/pom.xml
@@ -138,6 +138,8 @@
       </activation>
       <properties>
         <hive.version>0.12.0</hive.version>
+        <parquet.version>1.4.2</parquet.version>
+        <parquet.format.version>2.0.0</parquet.format.version>
       </properties>
       <dependencies>
         <dependency>
@@ -304,6 +306,11 @@
             </exclusion>
           </exclusions>
         </dependency>
+        <dependency>
+          <groupId>com.twitter</groupId>
+          <artifactId>parquet-hive-bundle</artifactId>
+          <version>${parquet.version}</version>
+        </dependency>
       </dependencies>
     </profile>
     <profile>
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
index 7924af1..3008ed9 100644
--- a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogStore.java
@@ -537,7 +537,14 @@
           table.getParameters().remove(StorageConstants.SEQUENCEFILE_NULL);
         }
       } else {
-        throw new CatalogException(new NotImplementedException(tableDesc.getMeta().getStoreType().name()));
+        if (tableDesc.getMeta().getStoreType().equals(CatalogProtos.StoreType.PARQUET)) {
+          sd.setInputFormat(parquet.hive.DeprecatedParquetInputFormat.class.getName());
+          sd.setOutputFormat(parquet.hive.DeprecatedParquetOutputFormat.class.getName());
+          sd.getSerdeInfo().setSerializationLib(parquet.hive.serde.ParquetHiveSerDe.class.getName());
+        } else {
+          throw new CatalogException(new NotImplementedException(tableDesc.getMeta().getStoreType
+              ().name()));
+        }
       }
 
       sd.setSortCols(new ArrayList<Order>());
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
index 98aa7c5..9e60768 100644
--- a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/main/java/org/apache/tajo/catalog/store/HCatalogUtil.java
@@ -30,6 +30,7 @@
 import org.apache.tajo.catalog.exception.CatalogException;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.common.TajoDataTypes;
+import parquet.hadoop.mapred.DeprecatedParquetOutputFormat;
 
 public class HCatalogUtil {
   protected final Log LOG = LogFactory.getLog(getClass());
@@ -128,7 +129,9 @@
     } else if(outputFormatClass.equals(HiveSequenceFileOutputFormat.class.getSimpleName())) {
       return CatalogProtos.StoreType.SEQUENCEFILE.name();
     } else if(outputFormatClass.equals(RCFileOutputFormat.class.getSimpleName())) {
-        return CatalogProtos.StoreType.RCFILE.name();
+      return CatalogProtos.StoreType.RCFILE.name();
+    } else if(outputFormatClass.equals(DeprecatedParquetOutputFormat.class.getSimpleName())) {
+      return CatalogProtos.StoreType.PARQUET.name();
     } else {
       throw new CatalogException("Not supported file output format. - file output format:" + fileFormat);
     }
diff --git a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/test/java/org/apache/tajo/catalog/store/TestHCatalogStore.java b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/test/java/org/apache/tajo/catalog/store/TestHCatalogStore.java
index 729184a..a507b08 100644
--- a/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/test/java/org/apache/tajo/catalog/store/TestHCatalogStore.java
+++ b/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/src/test/java/org/apache/tajo/catalog/store/TestHCatalogStore.java
@@ -369,4 +369,34 @@
     store.dropTable(DB_NAME, REGION);
   }
 
+
+  @Test
+  public void testTableUsingParquet() throws Exception {
+    TableMeta meta = new TableMeta(CatalogProtos.StoreType.PARQUET, new KeyValueSet());
+
+    org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
+    schema.addColumn("c_custkey", TajoDataTypes.Type.INT4);
+    schema.addColumn("c_name", TajoDataTypes.Type.TEXT);
+    schema.addColumn("c_address", TajoDataTypes.Type.TEXT);
+    schema.addColumn("c_nationkey", TajoDataTypes.Type.INT4);
+    schema.addColumn("c_phone", TajoDataTypes.Type.TEXT);
+    schema.addColumn("c_acctbal", TajoDataTypes.Type.FLOAT8);
+    schema.addColumn("c_mktsegment", TajoDataTypes.Type.TEXT);
+    schema.addColumn("c_comment", TajoDataTypes.Type.TEXT);
+
+    TableDesc table = new TableDesc(CatalogUtil.buildFQName(DB_NAME, CUSTOMER), schema, meta,
+        new Path(warehousePath, new Path(DB_NAME, CUSTOMER)));
+    store.createTable(table.getProto());
+    assertTrue(store.existTable(DB_NAME, CUSTOMER));
+
+    TableDesc table1 = new TableDesc(store.getTable(DB_NAME, CUSTOMER));
+    assertEquals(table.getName(), table1.getName());
+    assertEquals(table.getPath(), table1.getPath());
+    assertEquals(table.getSchema().size(), table1.getSchema().size());
+    for (int i = 0; i < table.getSchema().size(); i++) {
+      assertEquals(table.getSchema().getColumn(i).getSimpleName(), table1.getSchema().getColumn(i).getSimpleName());
+    }
+
+    store.dropTable(DB_NAME, CUSTOMER);
+  }
 }
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/CatalogServer.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/CatalogServer.java
index c0f6d36..4e391aa 100644
--- a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/CatalogServer.java
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/CatalogServer.java
@@ -864,10 +864,12 @@
        * to implement compareTo so decided to take the shortcut.
        *
        * */
-      for (FunctionDescProto existing : functions.get(signature)) {
-        if (existing.getParameterTypesList() != null &&
+      if (functions.containsKey(signature)) {
+        for (FunctionDescProto existing : functions.get(signature)) {
+          if (existing.getParameterTypesList() != null &&
               CatalogUtil.isMatchedFunction(existing.getParameterTypesList(), params)) {
-          return existing;
+            return existing;
+          }
         }
       }
       return null;
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractDBStore.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractDBStore.java
index 0d22486..bcf6774 100644
--- a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractDBStore.java
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractDBStore.java
@@ -152,9 +152,20 @@
     return catalogUri;
   }
 
+  protected boolean isConnValid(int timeout) throws CatalogException {
+    boolean isValid = false;
+
+    try {
+      isValid = conn.isValid(timeout);
+    } catch (SQLException e) {
+      e.printStackTrace();
+    }
+    return isValid;
+  }
+
   public Connection getConnection() {
     try {
-      boolean isValid = conn.isValid(100);
+      boolean isValid = isConnValid(100);
       if (!isValid) {
         CatalogUtil.closeQuietly(conn);
         conn = createConnection(conf);
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractMySQLMariaDBStore.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractMySQLMariaDBStore.java
new file mode 100644
index 0000000..6d0876f
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/AbstractMySQLMariaDBStore.java
@@ -0,0 +1,297 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *
+ */
+package org.apache.tajo.catalog.store;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.tajo.catalog.CatalogUtil;
+import org.apache.tajo.catalog.exception.CatalogException;
+import org.apache.tajo.exception.InternalException;
+
+import java.sql.*;
+import java.util.HashMap;
+import java.util.Map;
+
+public abstract class AbstractMySQLMariaDBStore extends AbstractDBStore  {
+  protected String getCatalogDriverName(){
+    throw new CatalogException("you can't use this directly.");
+  }
+
+  public AbstractMySQLMariaDBStore(final Configuration conf) throws InternalException {
+    super(conf);
+  }
+
+  @Override
+  public int getDriverVersion() {
+    throw new CatalogException("you can't use this directly.");
+  }
+
+  protected Connection createConnection(Configuration conf) throws SQLException {
+    Connection con = DriverManager.getConnection(getCatalogUri(), this.connectionId,
+        this.connectionPassword);
+    //TODO con.setAutoCommit(false);
+    return con;
+  }
+
+  @Override
+  protected boolean isConnValid(int timeout) throws CatalogException {
+    boolean isValid = false;
+
+    try {
+      isValid = super.isConnValid(timeout);
+    } catch (NullPointerException e) {
+      LOG.info("Conn abortion when checking isValid; retrieve false to create another Conn.");
+    }
+    return isValid;
+  }
+
+  @Override
+  public String readSchemaFile(String filename) throws CatalogException {
+      return super.readSchemaFile(filename);
+  }
+
+  // TODO - DDL and index statements should be renamed
+  @Override
+  protected void createBaseTable() throws CatalogException {
+    Statement stmt = null;
+    Connection conn = null;
+
+    try {
+      conn = getConnection();
+      stmt = conn.createStatement();
+
+
+      // META
+      if (!baseTableMaps.get(TB_META)) {
+        String sql = super.readSchemaFile("common/meta.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql.toString());
+        }
+
+        stmt.executeUpdate(sql.toString());
+        LOG.info("Table '" + TB_META + " is created.");
+        baseTableMaps.put(TB_META, true);
+      }
+
+      // TABLE SPACES
+      if (!baseTableMaps.get(TB_SPACES)) {
+        String sql = readSchemaFile("tablespaces.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql);
+        }
+
+        stmt.executeUpdate(sql);
+
+        LOG.info("Table '" + TB_SPACES + "' is created.");
+        baseTableMaps.put(TB_SPACES, true);
+      }
+
+      // DATABASES
+      if (!baseTableMaps.get(TB_DATABASES)) {
+        String sql = readSchemaFile("databases.sql");
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql);
+        }
+        LOG.info("Table '" + TB_DATABASES + "' is created.");
+        baseTableMaps.put(TB_DATABASES, true);
+        stmt.executeUpdate(sql);
+      }
+
+      // TABLES
+      if (!baseTableMaps.get(TB_TABLES)) {
+        String sql = readSchemaFile("tables.sql");
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql);
+        }
+        stmt.executeUpdate(sql);
+        LOG.info("Table '" + TB_TABLES + "' is created.");
+        baseTableMaps.put(TB_TABLES, true);
+      }
+
+      // COLUMNS
+      if (!baseTableMaps.get(TB_COLUMNS)) {
+        String sql = readSchemaFile("columns.sql");
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql);
+        }
+
+        stmt.executeUpdate(sql.toString());
+        LOG.info("Table '" + TB_COLUMNS + " is created.");
+        baseTableMaps.put(TB_COLUMNS, true);
+      }
+
+      // OPTIONS
+      if (!baseTableMaps.get(TB_OPTIONS)) {
+        String sql = readSchemaFile("table_properties.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql.toString());
+        }
+
+        stmt.executeUpdate(sql.toString());
+        LOG.info("Table '" + TB_OPTIONS + " is created.");
+        baseTableMaps.put(TB_OPTIONS, true);
+      }
+
+      // INDEXES
+      if (!baseTableMaps.get(TB_INDEXES)) {
+        String sql = readSchemaFile("indexes.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql.toString());
+        }
+
+        stmt.executeUpdate(sql.toString());
+        LOG.info("Table '" + TB_INDEXES + "' is created.");
+        baseTableMaps.put(TB_INDEXES, true);
+      }
+
+      if (!baseTableMaps.get(TB_STATISTICS)) {
+        String sql = readSchemaFile("stats.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql.toString());
+        }
+
+        stmt.executeUpdate(sql.toString());
+        LOG.info("Table '" + TB_STATISTICS + "' is created.");
+        baseTableMaps.put(TB_STATISTICS, true);
+      }
+
+      // PARTITION_METHODS
+      if (!baseTableMaps.get(TB_PARTITION_METHODS)) {
+        String sql = readSchemaFile("partition_methods.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql);
+        }
+
+        stmt.executeUpdate(sql);
+        LOG.info("Table '" + TB_PARTITION_METHODS + "' is created.");
+        baseTableMaps.put(TB_PARTITION_METHODS, true);
+      }
+
+      // PARTITIONS
+      if (!baseTableMaps.get(TB_PARTTIONS)) {
+        String sql = readSchemaFile("partitions.sql");
+
+        if (LOG.isDebugEnabled()) {
+          LOG.debug(sql.toString());
+        }
+
+        stmt.executeUpdate(sql.toString());
+        LOG.info("Table '" + TB_PARTTIONS + "' is created.");
+        baseTableMaps.put(TB_PARTTIONS, true);
+      }
+
+      insertSchemaVersion();
+
+    } catch (SQLException se) {
+      throw new CatalogException("failed to create base tables for MySQL catalog store", se);
+    } finally {
+      CatalogUtil.closeQuietly(stmt);
+    }
+  }
+
+  @Override
+  protected void dropBaseTable() throws CatalogException {
+    Connection conn = null;
+    Statement stmt = null;
+    Map<String, Boolean> droppedTable = new HashMap<String, Boolean>();
+
+    try {
+      conn = getConnection();
+      stmt = conn.createStatement();
+      StringBuilder sql = new StringBuilder();
+
+      for(Map.Entry<String, Boolean> entry : baseTableMaps.entrySet()) {
+        if(entry.getValue() && !entry.getKey().equals(TB_TABLES)) {
+          sql.delete(0, sql.length());
+          sql.append("DROP TABLE ").append(entry.getKey());
+          stmt.addBatch(sql.toString());
+          droppedTable.put(entry.getKey(), true);
+        }
+      }
+      if(baseTableMaps.get(TB_TABLES)) {
+        sql.delete(0, sql.length());
+        sql.append("DROP TABLE ").append(TB_TABLES);
+        stmt.addBatch(sql.toString());
+        droppedTable.put(TB_TABLES, true);
+      }
+      stmt.executeBatch();
+
+      for(String tableName : droppedTable.keySet()) {
+        LOG.info("Table '" + tableName + "' is dropped");
+      }
+    } catch (SQLException se) {
+      throw new CatalogException(se);
+    } finally {
+      CatalogUtil.closeQuietly(stmt);
+    }
+  }
+
+  @Override
+  protected boolean isInitialized() throws CatalogException {
+    Connection conn;
+    ResultSet res = null;
+
+    try {
+      conn = getConnection();
+      res = conn.getMetaData().getTables(null, null, null,
+          new String[]{"TABLE"});
+
+      baseTableMaps.put(TB_META, false);
+      baseTableMaps.put(TB_SPACES, false);
+      baseTableMaps.put(TB_DATABASES, false);
+      baseTableMaps.put(TB_TABLES, false);
+      baseTableMaps.put(TB_COLUMNS, false);
+      baseTableMaps.put(TB_OPTIONS, false);
+      baseTableMaps.put(TB_STATISTICS, false);
+      baseTableMaps.put(TB_INDEXES, false);
+      baseTableMaps.put(TB_PARTITION_METHODS, false);
+      baseTableMaps.put(TB_PARTTIONS, false);
+
+      if (res.wasNull())
+        return false;
+
+      while (res.next()) {
+        // if my.cnf has lower_case_table_names = 1,
+        // TABLE_NAME returns lower case even it created by upper case.
+        baseTableMaps.put(res.getString("TABLE_NAME").toUpperCase(), true);
+      }
+
+      for(Map.Entry<String, Boolean> entry : baseTableMaps.entrySet()) {
+        if (!entry.getValue()) {
+          return false;
+        }
+      }
+
+    } catch(SQLException se) {
+      throw new CatalogException(se);
+    } finally {
+      CatalogUtil.closeQuietly(res);
+    }
+
+    return  true;
+  }
+}
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MariaDBStore.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MariaDBStore.java
new file mode 100644
index 0000000..8cb3858
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MariaDBStore.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ *
+ */
+package org.apache.tajo.catalog.store;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.tajo.catalog.exception.CatalogException;
+import org.apache.tajo.exception.InternalException;
+
+public class MariaDBStore extends AbstractMySQLMariaDBStore {
+  /** 2014-06-09: First versioning */
+  private static final int MARIADB_CATALOG_STORE_VERSION = 2;
+
+  private static final String CATALOG_DRIVER = "org.mariadb.jdbc.Driver";
+
+  @Override
+  protected String getCatalogDriverName(){
+    return CATALOG_DRIVER;
+  }
+
+  public MariaDBStore(final Configuration conf) throws InternalException {
+    super(conf);
+  }
+
+  @Override
+  public int getDriverVersion() {
+    return MARIADB_CATALOG_STORE_VERSION;
+  }
+
+  @Override
+  public String readSchemaFile(String filename) throws CatalogException {
+    return super.readSchemaFile("mariadb/" + filename);
+  }
+}
diff --git a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MySQLStore.java b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MySQLStore.java
index 849afc8..cedc0fe 100644
--- a/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MySQLStore.java
+++ b/tajo-catalog/tajo-catalog-server/src/main/java/org/apache/tajo/catalog/store/MySQLStore.java
@@ -22,21 +22,17 @@
 package org.apache.tajo.catalog.store;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.tajo.catalog.CatalogUtil;
 import org.apache.tajo.catalog.exception.CatalogException;
 import org.apache.tajo.exception.InternalException;
 
-import java.sql.*;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MySQLStore extends AbstractDBStore  {
+public class MySQLStore extends AbstractMySQLMariaDBStore  {
   /** 2014-03-20: First versioning */
   private static final int MYSQL_CATALOG_STORE_VERSION_2 = 2;
   /** Before 2013-03-20 */
   private static final int MYSQL_CATALOG_STORE_VERSION_1 = 1;
 
   private static final String CATALOG_DRIVER = "com.mysql.jdbc.Driver";
+  @Override
   protected String getCatalogDriverName(){
     return CATALOG_DRIVER;
   }
@@ -50,242 +46,8 @@
     return MYSQL_CATALOG_STORE_VERSION_2;
   }
 
-  protected Connection createConnection(Configuration conf) throws SQLException {
-    Connection con = DriverManager.getConnection(getCatalogUri(), this.connectionId,
-        this.connectionPassword);
-    //TODO con.setAutoCommit(false);
-    return con;
-  }
-
   @Override
   public String readSchemaFile(String filename) throws CatalogException {
     return super.readSchemaFile("mysql/" + filename);
   }
-
-  // TODO - DDL and index statements should be renamed
-  @Override
-  protected void createBaseTable() throws CatalogException {
-    Statement stmt = null;
-    Connection conn = null;
-
-    try {
-      conn = getConnection();
-      stmt = conn.createStatement();
-
-
-      // META
-      if (!baseTableMaps.get(TB_META)) {
-        String sql = super.readSchemaFile("common/meta.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql.toString());
-        }
-
-        stmt.executeUpdate(sql.toString());
-        LOG.info("Table '" + TB_META + " is created.");
-        baseTableMaps.put(TB_META, true);
-      }
-
-      // TABLE SPACES
-      if (!baseTableMaps.get(TB_SPACES)) {
-        String sql = readSchemaFile("tablespaces.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql);
-        }
-
-        stmt.executeUpdate(sql);
-
-        LOG.info("Table '" + TB_SPACES + "' is created.");
-        baseTableMaps.put(TB_SPACES, true);
-      }
-
-      // DATABASES
-      if (!baseTableMaps.get(TB_DATABASES)) {
-        String sql = readSchemaFile("databases.sql");
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql);
-        }
-        LOG.info("Table '" + TB_DATABASES + "' is created.");
-        baseTableMaps.put(TB_DATABASES, true);
-        stmt.executeUpdate(sql);
-      }
-
-      // TABLES
-      if (!baseTableMaps.get(TB_TABLES)) {
-        String sql = readSchemaFile("tables.sql");
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql);
-        }
-        stmt.executeUpdate(sql);
-        LOG.info("Table '" + TB_TABLES + "' is created.");
-        baseTableMaps.put(TB_TABLES, true);
-      }
-
-      // COLUMNS
-      if (!baseTableMaps.get(TB_COLUMNS)) {
-        String sql = readSchemaFile("columns.sql");
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql);
-        }
-
-        stmt.executeUpdate(sql.toString());
-        LOG.info("Table '" + TB_COLUMNS + " is created.");
-        baseTableMaps.put(TB_COLUMNS, true);
-      }
-
-      // OPTIONS
-      if (!baseTableMaps.get(TB_OPTIONS)) {
-        String sql = readSchemaFile("table_properties.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql.toString());
-        }
-
-        stmt.executeUpdate(sql.toString());
-        LOG.info("Table '" + TB_OPTIONS + " is created.");
-        baseTableMaps.put(TB_OPTIONS, true);
-      }
-
-      // INDEXES
-      if (!baseTableMaps.get(TB_INDEXES)) {
-        String sql = readSchemaFile("indexes.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql.toString());
-        }
-
-        stmt.executeUpdate(sql.toString());
-        LOG.info("Table '" + TB_INDEXES + "' is created.");
-        baseTableMaps.put(TB_INDEXES, true);
-      }
-
-      if (!baseTableMaps.get(TB_STATISTICS)) {
-        String sql = readSchemaFile("stats.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql.toString());
-        }
-
-        stmt.executeUpdate(sql.toString());
-        LOG.info("Table '" + TB_STATISTICS + "' is created.");
-        baseTableMaps.put(TB_STATISTICS, true);
-      }
-
-      // PARTITION_METHODS
-      if (!baseTableMaps.get(TB_PARTITION_METHODS)) {
-        String sql = readSchemaFile("partition_methods.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql);
-        }
-
-        stmt.executeUpdate(sql);
-        LOG.info("Table '" + TB_PARTITION_METHODS + "' is created.");
-        baseTableMaps.put(TB_PARTITION_METHODS, true);
-      }
-
-      // PARTITIONS
-      if (!baseTableMaps.get(TB_PARTTIONS)) {
-        String sql = readSchemaFile("partitions.sql");
-
-        if (LOG.isDebugEnabled()) {
-          LOG.debug(sql.toString());
-        }
-
-        stmt.executeUpdate(sql.toString());
-        LOG.info("Table '" + TB_PARTTIONS + "' is created.");
-        baseTableMaps.put(TB_PARTTIONS, true);
-      }
-
-      insertSchemaVersion();
-
-    } catch (SQLException se) {
-      throw new CatalogException("failed to create base tables for MySQL catalog store", se);
-    } finally {
-      CatalogUtil.closeQuietly(stmt);
-    }
-  }
-
-  @Override
-  protected void dropBaseTable() throws CatalogException {
-    Connection conn = null;
-    Statement stmt = null;
-    Map<String, Boolean> droppedTable = new HashMap<String, Boolean>();
-
-    try {
-      conn = getConnection();
-      stmt = conn.createStatement();
-      StringBuilder sql = new StringBuilder();
-
-      for(Map.Entry<String, Boolean> entry : baseTableMaps.entrySet()) {
-        if(entry.getValue() && !entry.getKey().equals(TB_TABLES)) {
-          sql.delete(0, sql.length());
-          sql.append("DROP TABLE ").append(entry.getKey());
-          stmt.addBatch(sql.toString());
-          droppedTable.put(entry.getKey(), true);
-        }
-      }
-      if(baseTableMaps.get(TB_TABLES)) {
-        sql.delete(0, sql.length());
-        sql.append("DROP TABLE ").append(TB_TABLES);
-        stmt.addBatch(sql.toString());
-        droppedTable.put(TB_TABLES, true);
-      }
-      stmt.executeBatch();
-
-      for(String tableName : droppedTable.keySet()) {
-        LOG.info("Table '" + tableName + "' is dropped");
-      }
-    } catch (SQLException se) {
-      throw new CatalogException(se);
-    } finally {
-      CatalogUtil.closeQuietly(stmt);
-    }
-  }
-
-  @Override
-  protected boolean isInitialized() throws CatalogException {
-    Connection conn;
-    ResultSet res = null;
-
-    try {
-      conn = getConnection();
-      res = conn.getMetaData().getTables(null, null, null,
-          new String[]{"TABLE"});
-
-      baseTableMaps.put(TB_META, false);
-      baseTableMaps.put(TB_SPACES, false);
-      baseTableMaps.put(TB_DATABASES, false);
-      baseTableMaps.put(TB_TABLES, false);
-      baseTableMaps.put(TB_COLUMNS, false);
-      baseTableMaps.put(TB_OPTIONS, false);
-      baseTableMaps.put(TB_STATISTICS, false);
-      baseTableMaps.put(TB_INDEXES, false);
-      baseTableMaps.put(TB_PARTITION_METHODS, false);
-      baseTableMaps.put(TB_PARTTIONS, false);
-
-      if (res.wasNull())
-        return false;
-
-      while (res.next()) {
-        // if my.cnf has lower_case_table_names = 1,
-        // TABLE_NAME returns lower case even it created by upper case.
-        baseTableMaps.put(res.getString("TABLE_NAME").toUpperCase(), true);
-      }
-
-      for(Map.Entry<String, Boolean> entry : baseTableMaps.entrySet()) {
-        if (!entry.getValue()) {
-          return false;
-        }
-      }
-
-    } catch(SQLException se) {
-      throw new CatalogException(se);
-    } finally {
-      CatalogUtil.closeQuietly(res);
-    }
-
-    return  true;
-  }
 }
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/columns.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/columns.sql
new file mode 100644
index 0000000..51e2a1b
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/columns.sql
@@ -0,0 +1,9 @@
+CREATE TABLE COLUMNS (
+  TID INT NOT NULL,
+  COLUMN_NAME VARCHAR(255) NOT NULL,
+  ORDINAL_POSITION INT NOT NULL,
+  DATA_TYPE CHAR(16),
+  TYPE_LENGTH INTEGER,
+  PRIMARY KEY (TID, COLUMN_NAME),
+  FOREIGN KEY (TID) REFERENCES TABLES (TID) ON DELETE CASCADE
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/databases.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/databases.sql
new file mode 100644
index 0000000..e07e916
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/databases.sql
@@ -0,0 +1,7 @@
+CREATE TABLE DATABASES_ (
+  DB_ID INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
+  DB_NAME VARCHAR(128) NOT NULL UNIQUE,
+  SPACE_ID INT NOT NULL,
+  FOREIGN KEY (SPACE_ID) REFERENCES TABLESPACES (SPACE_ID),
+  UNIQUE INDEX IDX_NAME (DB_NAME)
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/indexes.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/indexes.sql
new file mode 100644
index 0000000..62feb36
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/indexes.sql
@@ -0,0 +1,16 @@
+CREATE TABLE INDEXES (
+  DB_ID INT NOT NULL,
+  TID INT NOT NULL,
+  INDEX_NAME VARCHAR(128) NOT NULL,
+  COLUMN_NAME VARCHAR(128) NOT NULL,
+  DATA_TYPE VARCHAR(128) NOT NULL,
+  INDEX_TYPE CHAR(32) NOT NULL,
+  IS_UNIQUE BOOLEAN NOT NULL,
+  IS_CLUSTERED BOOLEAN NOT NULL,
+  IS_ASCENDING BOOLEAN NOT NULL,
+  PRIMARY KEY (DB_ID, INDEX_NAME),
+  FOREIGN KEY (DB_ID) REFERENCES DATABASES_ (DB_ID) ON DELETE CASCADE,
+  FOREIGN KEY (TID) REFERENCES TABLES (TID) ON DELETE CASCADE,
+  UNIQUE INDEX IDX_DB_ID_NAME (DB_ID, INDEX_NAME),
+  INDEX IDX_TID_COLUMN_NAME (TID, COLUMN_NAME)
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/partition_methods.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/partition_methods.sql
new file mode 100644
index 0000000..060c4c8
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/partition_methods.sql
@@ -0,0 +1,7 @@
+CREATE TABLE PARTITION_METHODS (
+  TID INT NOT NULL PRIMARY KEY,
+  PARTITION_TYPE VARCHAR(10) NOT NULL,
+  EXPRESSION VARCHAR(1024) NOT NULL,
+  EXPRESSION_SCHEMA VARCHAR(1024) NOT NULL,
+  FOREIGN KEY (TID) REFERENCES TABLES (TID) ON DELETE CASCADE
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/partitions.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/partitions.sql
new file mode 100644
index 0000000..428f5a4
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/partitions.sql
@@ -0,0 +1,12 @@
+CREATE TABLE PARTITIONS (
+  PID INT NOT NULL PRIMARY KEY,
+  TID INT NOT NULL,
+  PARTITION_NAME VARCHAR(128),
+  ORDINAL_POSITION INT NOT NULL,
+  PARTITION_VALUE VARCHAR(1024),
+  PATH VARCHAR(4096),
+  FOREIGN KEY (TID) REFERENCES TABLES (TID) ON DELETE CASCADE,
+  CONSTRAINT C_PARTITION_UNIQUE UNIQUE (TID, PARTITION_NAME),
+  INDEX IDX_TID (TID),
+  UNIQUE INDEX IDX_TID_NAME (TID, PARTITION_NAME)
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/stats.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/stats.sql
new file mode 100644
index 0000000..bba8ee7
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/stats.sql
@@ -0,0 +1,6 @@
+CREATE TABLE STATS (
+  TID INT NOT NULL PRIMARY KEY,
+  NUM_ROWS BIGINT,
+  NUM_BYTES BIGINT,
+  FOREIGN KEY (TID) REFERENCES TABLES (TID) ON DELETE CASCADE
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/table_properties.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/table_properties.sql
new file mode 100644
index 0000000..78e281e
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/table_properties.sql
@@ -0,0 +1,7 @@
+CREATE TABLE OPTIONS (
+  TID INT NOT NULL,
+  KEY_ VARCHAR(255) NOT NULL,
+  VALUE_ VARCHAR(255) NOT NULL,
+  PRIMARY KEY (TID, KEY_),
+  FOREIGN KEY (TID) REFERENCES TABLES (TID) ON DELETE CASCADE
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/tables.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/tables.sql
new file mode 100644
index 0000000..e7297f7
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/tables.sql
@@ -0,0 +1,11 @@
+CREATE TABLE TABLES (
+  TID INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
+  DB_ID INT NOT NULL,
+  TABLE_NAME VARCHAR(128) NOT NULL,
+  TABLE_TYPE VARCHAR(128) NOT NULL,
+  PATH VARCHAR(4096),
+  STORE_TYPE CHAR(16),
+  FOREIGN KEY (DB_ID) REFERENCES DATABASES_ (DB_ID),
+  INDEX IDX_DB_ID (DB_ID),
+  UNIQUE INDEX IDX_TABLE_ID (DB_ID, TABLE_NAME)
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/tablespaces.sql b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/tablespaces.sql
new file mode 100644
index 0000000..f2e2299
--- /dev/null
+++ b/tajo-catalog/tajo-catalog-server/src/main/resources/schemas/mariadb/tablespaces.sql
@@ -0,0 +1,7 @@
+CREATE TABLE TABLESPACES (
+  SPACE_ID INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
+  SPACE_NAME VARCHAR(128) NOT NULL UNIQUE,
+  SPACE_HANDLER VARCHAR (1024) DEFAULT 'HDFS',
+  SPACE_URI VARCHAR (4096) NOT NULL,
+  UNIQUE INDEX IDX_NAME (SPACE_NAME)
+)
\ No newline at end of file
diff --git a/tajo-catalog/tajo-catalog-server/src/test/java/org/apache/tajo/catalog/TestCatalog.java b/tajo-catalog/tajo-catalog-server/src/test/java/org/apache/tajo/catalog/TestCatalog.java
index 453a54d..4039ff2 100644
--- a/tajo-catalog/tajo-catalog-server/src/test/java/org/apache/tajo/catalog/TestCatalog.java
+++ b/tajo-catalog/tajo-catalog-server/src/test/java/org/apache/tajo/catalog/TestCatalog.java
@@ -31,6 +31,7 @@
 import org.apache.tajo.catalog.proto.CatalogProtos.StoreType;
 import org.apache.tajo.catalog.store.DerbyStore;
 import org.apache.tajo.catalog.store.MySQLStore;
+import org.apache.tajo.catalog.store.MariaDBStore;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.conf.TajoConf;
@@ -85,8 +86,8 @@
     conf.set(CATALOG_URI, catalogURI);
     conf.setVar(TajoConf.ConfVars.CATALOG_ADDRESS, "127.0.0.1:0");
 
-    // MySQLStore requires password
-    if (driverClass.equals(MySQLStore.class.getCanonicalName())) {
+    // MySQLStore/MariaDB requires password
+    if (driverClass.equals(MySQLStore.class.getCanonicalName()) || driverClass.equals(MariaDBStore.class.getCanonicalName())) {
       if (connectionId == null) {
         throw new CatalogException(String.format("%s driver requires %s", driverClass, CatalogConstants.CONNECTION_ID));
       }
diff --git a/tajo-client/src/main/java/org/apache/tajo/cli/DefaultTajoCliOutputFormatter.java b/tajo-client/src/main/java/org/apache/tajo/cli/DefaultTajoCliOutputFormatter.java
index dd1f911..62736b5 100644
--- a/tajo-client/src/main/java/org/apache/tajo/cli/DefaultTajoCliOutputFormatter.java
+++ b/tajo-client/src/main/java/org/apache/tajo/cli/DefaultTajoCliOutputFormatter.java
@@ -24,6 +24,7 @@
 import org.apache.tajo.catalog.statistics.TableStats;
 import org.apache.tajo.client.QueryStatus;
 import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.conf.TajoConf.ConfVars;
 import org.apache.tajo.util.FileUtil;
 
 import java.io.InputStream;
@@ -36,6 +37,7 @@
   private int printPauseRecords;
   private boolean printPause;
   private boolean printErrorTrace;
+  private String nullChar;
 
   @Override
   public void init(TajoConf tajoConf) {
@@ -44,6 +46,7 @@
     this.printPause = tajoConf.getBoolVar(TajoConf.ConfVars.CLI_PRINT_PAUSE);
     this.printPauseRecords = tajoConf.getIntVar(TajoConf.ConfVars.CLI_PRINT_PAUSE_NUM_RECORDS);
     this.printErrorTrace = tajoConf.getBoolVar(TajoConf.ConfVars.CLI_PRINT_ERROR_TRACE);
+    this.nullChar = tajoConf.getVar(ConfVars.CLI_NULL_CHAR);
   }
 
   @Override
@@ -90,9 +93,9 @@
     while (res.next()) {
       for (int i = 1; i <= numOfColumns; i++) {
         if (i > 1) sout.print(",  ");
-        String columnValue = res.getObject(i).toString();
+        String columnValue = res.getString(i);
         if(res.wasNull()){
-          sout.print("null");
+          sout.print(nullChar);
         } else {
           sout.print(columnValue);
         }
@@ -125,6 +128,7 @@
   @Override
   public void printNoResult(PrintWriter sout) {
     sout.println("(0 rows)");
+    sout.flush();
   }
 
   @Override
@@ -138,6 +142,7 @@
   @Override
   public void printMessage(PrintWriter sout, String message) {
     sout.println(message);
+    sout.flush();
   }
 
   @Override
@@ -146,16 +151,19 @@
     if (printErrorTrace) {
       sout.println(ExceptionUtils.getStackTrace(t));
     }
+    sout.flush();
   }
 
   @Override
   public void printErrorMessage(PrintWriter sout, String message) {
     sout.println(parseErrorMessage(message));
+    sout.flush();
   }
 
   @Override
   public void printKilledMessage(PrintWriter sout, QueryId queryId) {
     sout.println(TajoCli.KILL_PREFIX + queryId);
+    sout.flush();
   }
 
   @Override
@@ -168,6 +176,7 @@
     if (printErrorTrace && status.getErrorTrace() != null && !status.getErrorTrace().isEmpty()) {
       sout.println(status.getErrorTrace());
     }
+    sout.flush();
   }
 
   public static String parseErrorMessage(String message) {
diff --git a/tajo-client/src/main/java/org/apache/tajo/cli/DescTableCommand.java b/tajo-client/src/main/java/org/apache/tajo/cli/DescTableCommand.java
index 6bda7c9..d8023f2 100644
--- a/tajo-client/src/main/java/org/apache/tajo/cli/DescTableCommand.java
+++ b/tajo-client/src/main/java/org/apache/tajo/cli/DescTableCommand.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo.cli;
 
+import org.apache.commons.lang.CharUtils;
+import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.catalog.TableDesc;
 import org.apache.tajo.catalog.partition.PartitionMethodDesc;
@@ -84,8 +86,22 @@
     }
     sb.append("Options: \n");
     for(Map.Entry<String, String> entry : desc.getMeta().toMap().entrySet()){
+
+      /*
+      *  Checks whether the character is ASCII 7 bit printable.
+      *  For example, a printable unicode '\u007c' become the character ‘|’.
+      *
+      *  Control-chars : ctrl-a(\u0001), tab(\u0009) ..
+      *  Printable-chars : '|'(\u007c), ','(\u002c) ..
+      * */
+
+      String value = entry.getValue();
+      String unescaped = StringEscapeUtils.unescapeJava(value);
+      if (unescaped.length() == 1 && CharUtils.isAsciiPrintable(unescaped.charAt(0))) {
+        value = unescaped;
+      }
       sb.append("\t").append("'").append(entry.getKey()).append("'").append("=")
-          .append("'").append(entry.getValue()).append("'").append("\n");
+          .append("'").append(value).append("'").append("\n");
     }
     sb.append("\n");
     sb.append("schema: \n");
diff --git a/tajo-client/src/main/java/org/apache/tajo/cli/ExecExternalShellCommand.java b/tajo-client/src/main/java/org/apache/tajo/cli/ExecExternalShellCommand.java
index 2a9805f..628f8ba 100644
--- a/tajo-client/src/main/java/org/apache/tajo/cli/ExecExternalShellCommand.java
+++ b/tajo-client/src/main/java/org/apache/tajo/cli/ExecExternalShellCommand.java
@@ -54,16 +54,22 @@
 
     CountDownLatch latch = new CountDownLatch(2);
     Process process = Runtime.getRuntime().exec(execCommand);
-    InputStreamConsoleWriter inWriter = new InputStreamConsoleWriter(process.getInputStream(), sout, "", latch);
-    InputStreamConsoleWriter errWriter = new InputStreamConsoleWriter(process.getErrorStream(), sout, "ERROR: ", latch);
+    try {
+      InputStreamConsoleWriter inWriter = new InputStreamConsoleWriter(process.getInputStream(), sout, "", latch);
+      InputStreamConsoleWriter errWriter = new InputStreamConsoleWriter(process.getErrorStream(), sout, "ERROR: ", latch);
 
-    inWriter.start();
-    errWriter.start();
+      inWriter.start();
+      errWriter.start();
 
-    int processResult = process.waitFor();
-    latch.await();
-    if (processResult != 0) {
-      throw new IOException("ERROR: Failed with exit code = " + processResult);
+      int processResult = process.waitFor();
+      latch.await();
+      if (processResult != 0) {
+        throw new IOException("ERROR: Failed with exit code = " + processResult);
+      }
+    } finally {
+      org.apache.commons.io.IOUtils.closeQuietly(process.getInputStream());
+      org.apache.commons.io.IOUtils.closeQuietly(process.getOutputStream());
+      org.apache.commons.io.IOUtils.closeQuietly(process.getErrorStream());
     }
   }
 
diff --git a/tajo-client/src/main/java/org/apache/tajo/cli/SetCommand.java b/tajo-client/src/main/java/org/apache/tajo/cli/SetCommand.java
index 8b31d39..c694507 100644
--- a/tajo-client/src/main/java/org/apache/tajo/cli/SetCommand.java
+++ b/tajo-client/src/main/java/org/apache/tajo/cli/SetCommand.java
@@ -44,6 +44,7 @@
       Map<String, String> variables = new HashMap<String, String>();
       variables.put(cmd[1], cmd[2]);
       client.updateSessionVariables(variables);
+      context.setVariable(cmd[1], cmd[2]);
     } else {
       context.getOutput().println("usage: \\set [[NAME] VALUE]");
     }
diff --git a/tajo-client/src/main/java/org/apache/tajo/cli/SimpleParser.java b/tajo-client/src/main/java/org/apache/tajo/cli/SimpleParser.java
index afb8a59..0beaec8 100644
--- a/tajo-client/src/main/java/org/apache/tajo/cli/SimpleParser.java
+++ b/tajo-client/src/main/java/org/apache/tajo/cli/SimpleParser.java
@@ -148,6 +148,7 @@
       } else if (isStatementContinue() || isStatementStart(chars[idx])) {
         if (!isStatementContinue()) { // TOK_START -> STATEMENT
           state = ParsingState.STATEMENT;
+          rawAppender.append("\n");
         }
 
         while (!isTerminateState(state) && idx < chars.length) {
diff --git a/tajo-client/src/main/java/org/apache/tajo/cli/TajoCli.java b/tajo-client/src/main/java/org/apache/tajo/cli/TajoCli.java
index e0ca62a..1fc27dc 100644
--- a/tajo-client/src/main/java/org/apache/tajo/cli/TajoCli.java
+++ b/tajo-client/src/main/java/org/apache/tajo/cli/TajoCli.java
@@ -64,6 +64,8 @@
 
   private TajoCliOutputFormatter outputFormatter;
 
+  private boolean wasError = false;
+
   private static final Class [] registeredCommands = {
       DescTableCommand.class,
       DescFunctionCommand.class,
@@ -117,6 +119,15 @@
     public TajoConf getConf() {
       return conf;
     }
+
+    public void setVariable(String key, String value) {
+      conf.set(key, value);
+      try {
+        initFormatter();
+      } catch (Exception e) {
+        System.err.println(ERROR_PREFIX + e.getMessage());
+      }
+    }
   }
 
   public TajoCli(TajoConf c, String [] args, InputStream in, OutputStream out) throws Exception {
@@ -125,10 +136,7 @@
     this.reader = new ConsoleReader(sin, out);
     this.reader.setExpandEvents(false);
     this.sout = new PrintWriter(reader.getOutput());
-    Class formatterClass = conf.getClass(ConfVars.CLI_OUTPUT_FORMATTER_CLASS.varname,
-        DefaultTajoCliOutputFormatter.class);
-    this.outputFormatter = (TajoCliOutputFormatter)formatterClass.newInstance();
-    this.outputFormatter.init(conf);
+    initFormatter();
 
     CommandLineParser parser = new PosixParser();
     CommandLine cmd = parser.parse(options, args);
@@ -217,6 +225,15 @@
     addShutdownHook();
   }
 
+  private void initFormatter() throws Exception {
+    Class formatterClass = conf.getClass(ConfVars.CLI_OUTPUT_FORMATTER_CLASS.varname,
+        DefaultTajoCliOutputFormatter.class);
+    if (outputFormatter == null || !outputFormatter.getClass().equals(formatterClass)) {
+      outputFormatter = (TajoCliOutputFormatter)formatterClass.newInstance();
+    }
+    outputFormatter.init(conf);
+  }
+
   public TajoCliContext getContext() {
     return context;
   }
@@ -300,7 +317,7 @@
       if (line.equals("")) {
         continue;
       }
-
+      wasError = false;
       if (line.startsWith("{")) {
         executeJsonQuery(line);
       } else {
@@ -325,6 +342,10 @@
       } else {
         executeQuery(parsedResult.getStatement());
       }
+
+      if (wasError && context.getConf().getBoolVar(ConfVars.CLI_ERROR_STOP)) {
+        break;
+      }
     }
   }
 
@@ -336,6 +357,7 @@
       TajoShellCommand invoked = commands.get(arguments[0]);
       if (invoked == null) {
         printInvalidCommand(arguments[0]);
+        wasError = true;
         return -1;
       }
 
@@ -343,13 +365,19 @@
         invoked.invoke(arguments);
       } catch (IllegalArgumentException ige) {
         outputFormatter.printErrorMessage(sout, ige);
+        wasError = true;
         return -1;
       } catch (Exception e) {
         outputFormatter.printErrorMessage(sout, e);
+        wasError = true;
         return -1;
       } finally {
         context.getOutput().flush();
       }
+
+      if (wasError && context.getConf().getBoolVar(ConfVars.CLI_ERROR_STOP)) {
+        break;
+      }
     }
 
     return 0;
@@ -360,6 +388,7 @@
     ClientProtos.SubmitQueryResponse response = client.executeQueryWithJson(json);
     if (response == null) {
       outputFormatter.printErrorMessage(sout, "response is null");
+      wasError = true;
     } else if (response.getResultCode() == ClientProtos.ResultCode.OK) {
       if (response.getIsForwarded()) {
         QueryId queryId = new QueryId(response.getQueryId());
@@ -367,6 +396,7 @@
       } else {
         if (!response.hasTableDesc() && !response.hasResultSet()) {
           outputFormatter.printMessage(sout, "OK");
+          wasError = true;
         } else {
           localQueryCompleted(response, startTime);
         }
@@ -374,6 +404,7 @@
     } else {
       if (response.hasErrorMessage()) {
         outputFormatter.printErrorMessage(sout, response.getErrorMessage());
+        wasError = true;
       }
     }
   }
@@ -383,6 +414,7 @@
     ClientProtos.SubmitQueryResponse response = client.executeQuery(statement);
     if (response == null) {
       outputFormatter.printErrorMessage(sout, "response is null");
+      wasError = true;
     } else if (response.getResultCode() == ClientProtos.ResultCode.OK) {
       if (response.getIsForwarded()) {
         QueryId queryId = new QueryId(response.getQueryId());
@@ -397,6 +429,7 @@
     } else {
       if (response.hasErrorMessage()) {
         outputFormatter.printErrorMessage(sout, response.getErrorMessage());
+        wasError = true;
       }
     }
   }
@@ -418,6 +451,7 @@
       }
     } catch (Throwable t) {
       outputFormatter.printErrorMessage(sout, t);
+      wasError = true;
     } finally {
       if (res != null) {
         try {
@@ -466,8 +500,10 @@
 
       if (status.getState() == QueryState.QUERY_ERROR || status.getState() == QueryState.QUERY_FAILED) {
         outputFormatter.printErrorMessage(sout, status);
+        wasError = true;
       } else if (status.getState() == QueryState.QUERY_KILLED) {
         outputFormatter.printKilledMessage(sout, queryId);
+        wasError = true;
       } else {
         if (status.getState() == QueryState.QUERY_SUCCEEDED) {
           float responseTime = ((float)(status.getFinishTime() - status.getSubmitTime()) / 1000.0f);
@@ -484,6 +520,7 @@
       }
     } catch (Throwable t) {
       outputFormatter.printErrorMessage(sout, t);
+      wasError = true;
     } finally {
       if (res != null) {
         try {
@@ -499,6 +536,7 @@
   }
 
   public int executeScript(String script) throws Exception {
+    wasError = false;
     List<ParsedResult> results = SimpleParser.parseScript(script);
     executeParsedResults(results);
     return 0;
diff --git a/tajo-client/src/main/java/org/apache/tajo/client/TajoAdmin.java b/tajo-client/src/main/java/org/apache/tajo/client/TajoAdmin.java
index 25b91a4..ad42675 100644
--- a/tajo-client/src/main/java/org/apache/tajo/client/TajoAdmin.java
+++ b/tajo-client/src/main/java/org/apache/tajo/client/TajoAdmin.java
@@ -22,6 +22,7 @@
 import org.apache.commons.cli.*;
 import org.apache.commons.lang.StringUtils;
 import org.apache.tajo.QueryId;
+import org.apache.tajo.TajoProtos;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.ipc.ClientProtos.BriefQueryInfo;
 import org.apache.tajo.ipc.ClientProtos.WorkerResourceInfo;
@@ -395,11 +396,13 @@
 
   public void processKill(Writer writer, String queryIdStr)
       throws IOException, ServiceException {
-    boolean killedSuccessfully = tajoClient.killQuery(TajoIdUtils.parseQueryId(queryIdStr));
-    if (killedSuccessfully) {
+    QueryStatus status = tajoClient.killQuery(TajoIdUtils.parseQueryId(queryIdStr));
+    if (status.getState() == TajoProtos.QueryState.QUERY_KILLED) {
       writer.write(queryIdStr + " is killed successfully.\n");
+    } else if (status.getState() == TajoProtos.QueryState.QUERY_KILL_WAIT) {
+      writer.write(queryIdStr + " will be finished after a while.\n");
     } else {
-      writer.write("killing query is failed.");
+      writer.write("ERROR:" + status.getErrorMessage());
     }
   }
 
diff --git a/tajo-client/src/main/java/org/apache/tajo/client/TajoClient.java b/tajo-client/src/main/java/org/apache/tajo/client/TajoClient.java
index 7d84592..333c8d6 100644
--- a/tajo-client/src/main/java/org/apache/tajo/client/TajoClient.java
+++ b/tajo-client/src/main/java/org/apache/tajo/client/TajoClient.java
@@ -34,6 +34,7 @@
 import org.apache.tajo.cli.InvalidClientSessionException;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.conf.TajoConf.ConfVars;
+import org.apache.tajo.ipc.ClientProtos;
 import org.apache.tajo.ipc.ClientProtos.*;
 import org.apache.tajo.ipc.QueryMasterClientProtocol;
 import org.apache.tajo.ipc.QueryMasterClientProtocol.QueryMasterClientProtocolService;
@@ -346,6 +347,9 @@
       throws ServiceException, IOException {
     SubmitQueryResponse response = executeQuery(sql);
 
+    if (response.getResultCode() == ClientProtos.ResultCode.ERROR) {
+      throw new ServiceException(response.getErrorTrace());
+    }
     QueryId queryId = new QueryId(response.getQueryId());
     if (response.getIsForwarded()) {
       if (queryId.equals(QueryIdFactory.NULL_QUERY_ID)) {
@@ -369,7 +373,9 @@
 
   public ResultSet executeJsonQueryAndGetResult(final String json) throws ServiceException, IOException {
     SubmitQueryResponse response = executeQueryWithJson(json);
-
+    if (response.getResultCode() == ClientProtos.ResultCode.ERROR) {
+      throw new ServiceException(response.getErrorTrace());
+    }
     QueryId queryId = new QueryId(response.getQueryId());
     if (response.getIsForwarded()) {
       if (queryId.equals(QueryIdFactory.NULL_QUERY_ID)) {
@@ -854,7 +860,7 @@
     }.withRetries();
   }
 
-  public boolean killQuery(final QueryId queryId)
+  public QueryStatus killQuery(final QueryId queryId)
       throws ServiceException, IOException {
 
     QueryStatus status = getQueryStatus(queryId);
@@ -874,7 +880,9 @@
 
       long currentTimeMillis = System.currentTimeMillis();
       long timeKillIssued = currentTimeMillis;
-      while ((currentTimeMillis < timeKillIssued + 10000L) && (status.getState() != QueryState.QUERY_KILLED)) {
+      while ((currentTimeMillis < timeKillIssued + 10000L)
+          && ((status.getState() != QueryState.QUERY_KILLED)
+          || (status.getState() == QueryState.QUERY_KILL_WAIT))) {
         try {
           Thread.sleep(100L);
         } catch(InterruptedException ie) {
@@ -883,13 +891,13 @@
         currentTimeMillis = System.currentTimeMillis();
         status = getQueryStatus(queryId);
       }
-      return status.getState() == QueryState.QUERY_KILLED;
+
     } catch(Exception e) {
       LOG.debug("Error when checking for application status", e);
-      return false;
     } finally {
       connPool.releaseConnection(tmClient);
     }
+    return status;
   }
 
   public List<CatalogProtos.FunctionDescProto> getFunctions(final String functionName) throws ServiceException {
diff --git a/tajo-client/src/main/java/org/apache/tajo/client/TajoDump.java b/tajo-client/src/main/java/org/apache/tajo/client/TajoDump.java
index d1d93b2..7628d9d 100644
--- a/tajo-client/src/main/java/org/apache/tajo/client/TajoDump.java
+++ b/tajo-client/src/main/java/org/apache/tajo/client/TajoDump.java
@@ -139,10 +139,10 @@
     writer.write("--\n");
     writer.write("-- Tajo database dump\n");
     if (includeUSerName) {
-      writer.write("--\nDump user: " + userInfo.getUserName() + "\n");
+      writer.write("--\n-- Dump user: " + userInfo.getUserName() + "\n");
     }
     if (includeDate) {
-      writer.write("\n-- Dump date: " + toDateString() + "\n");
+      writer.write("--\n-- Dump date: " + toDateString() + "\n");
     }
     writer.write("--\n");
     writer.write("\n");
diff --git a/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSet.java b/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSet.java
index 336c782..8595970 100644
--- a/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSet.java
+++ b/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSet.java
@@ -87,6 +87,9 @@
         this.totalRow = desc.getStats() != null ? desc.getStats().getNumRows() : INFINITE_ROW_NUM;
       }
 
+      if (totalRow == 0) {
+        totalRow = INFINITE_ROW_NUM;
+      }
 
       List<FileFragment> frags = getFragments(desc.getPath());
       scanner = new MergeScanner(conf, desc.getSchema(), desc.getMeta(), frags);
diff --git a/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSetBase.java b/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSetBase.java
index 4c307b3..d189c78 100644
--- a/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSetBase.java
+++ b/tajo-client/src/main/java/org/apache/tajo/jdbc/TajoResultSetBase.java
@@ -20,9 +20,11 @@
 
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.common.TajoDataTypes;
-import org.apache.tajo.datum.Datum;
-import org.apache.tajo.datum.NullDatum;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.datum.*;
 import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.io.IOException;
 import java.io.InputStream;
@@ -32,6 +34,7 @@
 import java.sql.*;
 import java.util.Calendar;
 import java.util.Map;
+import java.util.TimeZone;
 
 public abstract class TajoResultSetBase implements ResultSet {
   protected int curRow;
@@ -64,6 +67,9 @@
   public boolean getBoolean(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return false;
+    }
     return datum.asBool();
   }
 
@@ -71,6 +77,9 @@
   public boolean getBoolean(String colName) throws SQLException {
     Datum datum = cur.get(findColumn(colName));
     handleNull(datum);
+    if (wasNull) {
+      return false;
+    }
     return datum.asBool();
   }
 
@@ -78,6 +87,9 @@
   public byte getByte(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asByte();
   }
 
@@ -85,6 +97,9 @@
   public byte getByte(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asByte();
   }
 
@@ -92,6 +107,9 @@
   public byte[] getBytes(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
     return datum.asByteArray();
   }
 
@@ -99,6 +117,9 @@
   public byte[] getBytes(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
     return datum.asByteArray();
   }
 
@@ -106,6 +127,9 @@
   public double getDouble(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return 0.0d;
+    }
     return datum.asFloat8();
   }
 
@@ -113,6 +137,9 @@
   public double getDouble(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return 0.0d;
+    }
     return datum.asFloat8();
   }
 
@@ -120,6 +147,9 @@
   public float getFloat(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return 0.0f;
+    }
     return datum.asFloat4();
   }
 
@@ -127,6 +157,9 @@
   public float getFloat(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return 0.0f;
+    }
     return datum.asFloat4();
   }
 
@@ -134,6 +167,9 @@
   public int getInt(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asInt4();
   }
 
@@ -141,6 +177,9 @@
   public int getInt(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asInt4();
   }
 
@@ -148,6 +187,9 @@
   public long getLong(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asInt8();
   }
 
@@ -155,6 +197,9 @@
   public long getLong(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asInt8();
   }
 
@@ -163,6 +208,9 @@
     Datum d = cur.get(fieldId - 1);
     handleNull(d);
 
+    if (wasNull) {
+      return null;
+    }
     TajoDataTypes.Type dataType = schema.getColumn(fieldId - 1).getDataType().getType();
 
     switch(dataType) {
@@ -173,24 +221,35 @@
       case INT8: return d.asInt8();
       case TEXT:
       case CHAR:
-      case DATE:
       case VARCHAR:  return d.asChars();
       case FLOAT4:  return d.asFloat4();
       case FLOAT8:  return d.asFloat8();
       case NUMERIC:  return d.asFloat8();
+      case DATE: {
+        return getDate((DateDatum)d, TajoConf.getCurrentTimeZone());
+      }
+      case TIME: {
+        return getTime((TimeDatum)d, TajoConf.getCurrentTimeZone());
+      }
+      case TIMESTAMP: {
+        return getTimestamp((TimestampDatum) d, TajoConf.getCurrentTimeZone());
+      }
       default: return d.asChars();
     }
   }
 
   @Override
   public Object getObject(String name) throws SQLException {
-    return getObject(findColumn(name));
+    return getObject(findColumn(name) + 1);
   }
 
   @Override
   public short getShort(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asInt2();
   }
 
@@ -198,21 +257,167 @@
   public short getShort(String name) throws SQLException {
     Datum datum = cur.get(findColumn(name));
     handleNull(datum);
+    if (wasNull) {
+      return 0;
+    }
     return datum.asInt2();
   }
 
   @Override
   public String getString(int fieldId) throws SQLException {
     Datum datum = cur.get(fieldId - 1);
-    handleNull(datum);
-    return datum.asChars();
+    return getString(datum, fieldId);
   }
 
   @Override
   public String getString(String name) throws SQLException {
-    Datum datum = cur.get(findColumn(name));
+    int id = findColumn(name);
+    Datum datum = cur.get(id);
+    return getString(datum, id + 1);
+  }
+
+  private String getString(Datum datum, int fieldId) throws SQLException {
     handleNull(datum);
-    return datum.asChars();
+
+    if (wasNull) {
+      return null;
+    }
+
+    TajoDataTypes.Type dataType = datum.type();
+
+    switch(dataType) {
+      case BOOLEAN:
+        return String.valueOf(datum.asBool());
+      case TIME: {
+        return ((TimeDatum)datum).asChars(TajoConf.getCurrentTimeZone(), false);
+      }
+      case TIMESTAMP: {
+        return ((TimestampDatum)datum).asChars(TajoConf.getCurrentTimeZone(), false);
+      }
+      default :
+        return datum.asChars();
+    }
+  }
+
+  @Override
+  public Date getDate(int fieldId) throws SQLException {
+    Datum datum = cur.get(fieldId - 1);
+    handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
+
+    return getDate((DateDatum)datum, TajoConf.getCurrentTimeZone());
+  }
+
+  @Override
+  public Date getDate(String name) throws SQLException {
+    return getDate(findColumn(name) + 1);
+  }
+
+  @Override
+  public Date getDate(int fieldId, Calendar x) throws SQLException {
+    Datum datum = cur.get(fieldId - 1);
+    handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
+
+    return getDate((DateDatum)datum, x.getTimeZone());
+  }
+
+  @Override
+  public Date getDate(String name, Calendar x) throws SQLException {
+    return getDate(findColumn(name) + 1, x);
+  }
+
+  private Date getDate(DateDatum datum, TimeZone tz) {
+    TimeMeta tm = datum.toTimeMeta();
+    if (tz != null) {
+      DateTimeUtil.toUserTimezone(tm, tz);
+    }
+    return new Date(DateTimeUtil.julianTimeToJavaTime(DateTimeUtil.toJulianTimestamp(tm)));
+  }
+
+  @Override
+  public Time getTime(int fieldId) throws SQLException {
+    Datum datum = cur.get(fieldId - 1);
+    handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
+
+    return getTime((TimeDatum)datum, TajoConf.getCurrentTimeZone());
+
+  }
+
+  @Override
+  public Time getTime(String name) throws SQLException {
+    return getTime(findColumn(name) + 1);
+  }
+
+  @Override
+  public Time getTime(int fieldId, Calendar x) throws SQLException {
+    Datum datum = cur.get(fieldId - 1);
+    handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
+
+    return getTime((TimeDatum)datum, x.getTimeZone());
+  }
+
+  @Override
+  public Time getTime(String name, Calendar x) throws SQLException {
+    return getTime(findColumn(name) + 1, x);
+  }
+
+  private Time getTime(TimeDatum datum, TimeZone tz) {
+    TimeMeta tm = datum.toTimeMeta();
+    if (tz != null) {
+      DateTimeUtil.toUserTimezone(tm, tz);
+    }
+    return new Time(DateTimeUtil.toJavaTime(tm.hours, tm.minutes, tm.secs, tm.fsecs));
+  }
+
+  @Override
+  public Timestamp getTimestamp(int fieldId) throws SQLException {
+    Datum datum = cur.get(fieldId - 1);
+    handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
+
+    return getTimestamp((TimestampDatum)datum, TajoConf.getCurrentTimeZone());
+  }
+
+  @Override
+  public Timestamp getTimestamp(String name) throws SQLException {
+    return getTimestamp(findColumn(name) + 1);
+  }
+
+  @Override
+  public Timestamp getTimestamp(int fieldId, Calendar x) throws SQLException {
+    Datum datum = cur.get(fieldId - 1);
+    handleNull(datum);
+    if (wasNull) {
+      return null;
+    }
+
+    return getTimestamp((TimestampDatum)datum, x.getTimeZone());
+  }
+
+  @Override
+  public Timestamp getTimestamp(String name, Calendar x) throws SQLException {
+    return getTimestamp(findColumn(name) + 1, x);
+  }
+
+  private Timestamp getTimestamp(TimestampDatum datum, TimeZone tz) {
+    TimeMeta tm = datum.toTimeMeta();
+    if (tz != null) {
+      DateTimeUtil.toUserTimezone(tm, tz);
+    }
+    return new Timestamp(DateTimeUtil.julianTimeToJavaTime(DateTimeUtil.toJulianTimestamp(tm)));
   }
 
   @Override
@@ -353,36 +558,6 @@
   }
 
   @Override
-  public Date getDate(int index) throws SQLException {
-    Object obj = getObject(index);
-    if (obj == null) {
-      return null;
-    }
-
-    try {
-      return Date.valueOf((String) obj);
-    } catch (Exception e) {
-      throw new SQLException("Cannot convert column " + index
-          + " to date: " + e.toString());
-    }
-  }
-
-  @Override
-  public Date getDate(String name) throws SQLException {
-    return getDate(findColumn(name));
-  }
-
-  @Override
-  public Date getDate(int index, Calendar x) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getDate not supported");
-  }
-
-  @Override
-  public Date getDate(String name, Calendar x) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getDate not supported");
-  }
-
-  @Override
   public int getFetchDirection() throws SQLException {
     return ResultSet.FETCH_FORWARD;
   }
@@ -497,46 +672,6 @@
   }
 
   @Override
-  public Time getTime(int index) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTime not supported");
-  }
-
-  @Override
-  public Time getTime(String name) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTime not supported");
-  }
-
-  @Override
-  public Time getTime(int index, Calendar x) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTime not supported");
-  }
-
-  @Override
-  public Time getTime(String name, Calendar x) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTime not supported");
-  }
-
-  @Override
-  public Timestamp getTimestamp(int index) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTimestamp not supported");
-  }
-
-  @Override
-  public Timestamp getTimestamp(String name) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTimestamp not supported");
-  }
-
-  @Override
-  public Timestamp getTimestamp(int index, Calendar x) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTimestamp not supported");
-  }
-
-  @Override
-  public Timestamp getTimestamp(String name, Calendar x) throws SQLException {
-    throw new SQLFeatureNotSupportedException("getTimestamp not supported");
-  }
-
-  @Override
   public int getType() throws SQLException {
     return ResultSet.TYPE_FORWARD_ONLY;
   }
diff --git a/tajo-common/src/main/java/org/apache/tajo/ExecutionBlockId.java b/tajo-common/src/main/java/org/apache/tajo/ExecutionBlockId.java
index 1ccb357..b6020e0 100644
--- a/tajo-common/src/main/java/org/apache/tajo/ExecutionBlockId.java
+++ b/tajo-common/src/main/java/org/apache/tajo/ExecutionBlockId.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo;
 
+import com.google.common.base.Objects;
+
 public class ExecutionBlockId implements Comparable<ExecutionBlockId> {
   public static final String EB_ID_PREFIX = "eb";
   private QueryId queryId;
@@ -63,7 +65,7 @@
 
   @Override
   public int hashCode() {
-    return toString().hashCode();
+    return Objects.hashCode(queryId, id);
   }
 
   public TajoIdProtos.ExecutionBlockIdProto getProto() {
diff --git a/tajo-common/src/main/java/org/apache/tajo/QueryId.java b/tajo-common/src/main/java/org/apache/tajo/QueryId.java
index d9d5f73..85882c1 100644
--- a/tajo-common/src/main/java/org/apache/tajo/QueryId.java
+++ b/tajo-common/src/main/java/org/apache/tajo/QueryId.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo;
 
+import com.google.common.base.Objects;
+
 public class QueryId implements Comparable<QueryId> {
   public static final String SEPARATOR = "_";
   public static final String QUERY_ID_PREFIX = "q";
@@ -63,7 +65,7 @@
 
   @Override
   public int hashCode() {
-    return toString().hashCode();
+    return Objects.hashCode(id, seq);
   }
 
   @Override
diff --git a/tajo-common/src/main/java/org/apache/tajo/QueryUnitAttemptId.java b/tajo-common/src/main/java/org/apache/tajo/QueryUnitAttemptId.java
index 98ba5d1..a9fd68b 100644
--- a/tajo-common/src/main/java/org/apache/tajo/QueryUnitAttemptId.java
+++ b/tajo-common/src/main/java/org/apache/tajo/QueryUnitAttemptId.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo;
 
+import com.google.common.base.Objects;
+
 public class QueryUnitAttemptId implements Comparable<QueryUnitAttemptId> {
   public static final String QUA_ID_PREFIX = "ta";
 
@@ -78,7 +80,7 @@
 
   @Override
   public int hashCode() {
-    return toString().hashCode();
+    return Objects.hashCode(queryUnitId, id);
   }
 
   @Override
diff --git a/tajo-common/src/main/java/org/apache/tajo/QueryUnitId.java b/tajo-common/src/main/java/org/apache/tajo/QueryUnitId.java
index 21addf9..da0479b 100644
--- a/tajo-common/src/main/java/org/apache/tajo/QueryUnitId.java
+++ b/tajo-common/src/main/java/org/apache/tajo/QueryUnitId.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo;
 
+import com.google.common.base.Objects;
+
 public class QueryUnitId implements Comparable<QueryUnitId> {
   public static final String QU_ID_PREFIX = "t";
 
@@ -66,15 +68,15 @@
     if (this == obj) {
       return true;
     }
-    if(!(obj instanceof QueryUnitId)) {
+    if (!(obj instanceof QueryUnitId)) {
       return false;
     }
-    return compareTo((QueryUnitId)obj) == 0;
+    return compareTo((QueryUnitId) obj) == 0;
   }
 
   @Override
   public int hashCode() {
-    return toString().hashCode();
+    return Objects.hashCode(executionBlockId, id);
   }
 
   @Override
diff --git a/tajo-common/src/main/java/org/apache/tajo/conf/TajoConf.java b/tajo-common/src/main/java/org/apache/tajo/conf/TajoConf.java
index 6580525..6298d27 100644
--- a/tajo-common/src/main/java/org/apache/tajo/conf/TajoConf.java
+++ b/tajo-common/src/main/java/org/apache/tajo/conf/TajoConf.java
@@ -25,14 +25,24 @@
 import org.apache.tajo.TajoConstants;
 import org.apache.tajo.util.NetUtils;
 import org.apache.tajo.util.TUtil;
+import org.apache.tajo.util.datetime.DateTimeConstants;
 
 import java.io.IOException;
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.util.Map;
+import java.util.TimeZone;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 public class TajoConf extends Configuration {
 
+  private static TimeZone CURRENT_TIMEZONE;
+  private static int DATE_ORDER = -1;
+  private static final ReentrantReadWriteLock confLock = new ReentrantReadWriteLock();
+  private static final Lock writeLock = confLock.writeLock();
+  private static final Lock readLock = confLock.readLock();
+
   static {
     Configuration.addDefaultResource("catalog-default.xml");
     Configuration.addDefaultResource("catalog-site.xml");
@@ -40,6 +50,8 @@
     Configuration.addDefaultResource("storage-site.xml");
     Configuration.addDefaultResource("tajo-default.xml");
     Configuration.addDefaultResource("tajo-site.xml");
+
+    confStaticInit();
   }
 
   private static final String EMPTY_VALUE = "";
@@ -59,6 +71,68 @@
     addResource(path);
   }
 
+  private static void confStaticInit() {
+    TimeZone.setDefault(getCurrentTimeZone());
+    getDateOrder();
+  }
+
+  public static TimeZone getCurrentTimeZone() {
+    writeLock.lock();
+    try {
+      if (CURRENT_TIMEZONE == null) {
+        TajoConf tajoConf = new TajoConf();
+        CURRENT_TIMEZONE = TimeZone.getTimeZone(tajoConf.getVar(ConfVars.TAJO_TIMEZONE));
+      }
+      return CURRENT_TIMEZONE;
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  public static TimeZone setCurrentTimeZone(TimeZone timeZone) {
+    readLock.lock();
+    try {
+      TimeZone oldTimeZone = CURRENT_TIMEZONE;
+      CURRENT_TIMEZONE = timeZone;
+      return oldTimeZone;
+    } finally {
+      readLock.unlock();
+    }
+  }
+
+  public static int getDateOrder() {
+    writeLock.lock();
+    try {
+      if (DATE_ORDER < 0) {
+        TajoConf tajoConf = new TajoConf();
+        String dateOrder = tajoConf.getVar(ConfVars.TAJO_DATE_ORDER);
+        if ("YMD".equals(dateOrder)) {
+          DATE_ORDER = DateTimeConstants.DATEORDER_YMD;
+        } else if ("DMY".equals(dateOrder)) {
+          DATE_ORDER = DateTimeConstants.DATEORDER_DMY;
+        } else if ("MDY".equals(dateOrder)) {
+          DATE_ORDER = DateTimeConstants.DATEORDER_MDY;
+        } else {
+          DATE_ORDER = DateTimeConstants.DATEORDER_YMD;
+        }
+      }
+      return DATE_ORDER;
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  public static int setDateOrder(int dateOrder) {
+    readLock.lock();
+    try {
+      int oldDateOrder = DATE_ORDER;
+      DATE_ORDER = dateOrder;
+      return oldDateOrder;
+    } finally {
+      readLock.unlock();
+    }
+  }
+
   public static enum ConfVars {
 
     //////////////////////////////////
@@ -225,11 +299,6 @@
     GEOIP_DATA("tajo.function.geoip-database-location", ""),
 
     //////////////////////////////////
-    // Hive Configuration
-    //////////////////////////////////
-    HIVE_QUERY_MODE("tajo.hive.query.mode", false),
-
-    //////////////////////////////////
     // Task Configuration
     TASK_DEFAULT_MEMORY("tajo.task.memory-slot-mb.default", 512),
     TASK_DEFAULT_DISK("tajo.task.disk-slot.default", 0.5f),
@@ -249,7 +318,28 @@
     CLI_PRINT_PAUSE_NUM_RECORDS("tajo.cli.print.pause.num.records", 100),
     CLI_PRINT_PAUSE("tajo.cli.print.pause", true),
     CLI_PRINT_ERROR_TRACE("tajo.cli.print.error.trace", true),
-    CLI_OUTPUT_FORMATTER_CLASS("tajo.cli.output.formatter", "org.apache.tajo.cli.DefaultTajoCliOutputFormatter");
+    CLI_OUTPUT_FORMATTER_CLASS("tajo.cli.output.formatter", "org.apache.tajo.cli.DefaultTajoCliOutputFormatter"),
+    CLI_NULL_CHAR("tajo.cli.nullchar", ""),
+    CLI_ERROR_STOP("tajo.cli.error.stop", false),
+
+    //TIME & DATE
+    TAJO_TIMEZONE("tajo.timezone", System.getProperty("user.timezone")),
+    TAJO_DATE_ORDER("tajo.date.order", "YMD"),
+
+    //PLANNER
+    PLANNER_USE_FILTER_PUSHDOWN("tajo.planner.use.filter.pushdown", true),
+
+    // FILE FORMAT
+    CSVFILE_NULL("tajo.csvfile.null", "\\\\N"),
+
+    //OPTIMIZER
+    OPTIMIZER_JOIN_ENABLE("tajo.optimizer.join.enable", true),
+
+    // DEBUG OPTION
+    TAJO_DEBUG("tajo.debug", false),
+
+    // ONLY FOR TESTCASE
+    TESTCASE_MIN_TASK_NUM("tajo.testcase.min.task.num", -1)
     ;
 
     public final String varname;
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/CharDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/CharDatum.java
index 444ca66..e6c4d94 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/CharDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/CharDatum.java
@@ -141,7 +141,7 @@
         return datum;
 
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
   
@@ -156,7 +156,7 @@
         return -1;
 
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 }
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/DateDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/DateDatum.java
index ad68303..b26ef84 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/DateDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/DateDatum.java
@@ -19,81 +19,84 @@
 package org.apache.tajo.datum;
 
 import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.exception.InvalidCastException;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.Bytes;
-import org.joda.time.*;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
+import org.apache.tajo.util.datetime.DateTimeConstants.DateStyle;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 public class DateDatum extends Datum {
   public static final int SIZE = 4;
-  /** ISO 8601/SQL standard format - ex) 1997-12-17 */
-  public static final String DEFAULT_FORMAT_STRING = "yyyy-MM-dd";
-  private static final DateTimeFormatter DEFAULT_FORMATTER = DateTimeFormat.forPattern(DEFAULT_FORMAT_STRING);
-  private final LocalDate date;
+
+  private int year;
+  private int monthOfYear;
+  private int dayOfMonth;
 
   public DateDatum(int value) {
     super(TajoDataTypes.Type.DATE);
-    date = decode(value);
+    TimeMeta tm = new TimeMeta();
+    DateTimeUtil.j2date(value, tm);
+
+    year = tm.years;
+    monthOfYear = tm.monthOfYear;
+    dayOfMonth = tm.dayOfMonth;
   }
 
-  public DateDatum(int year, int month, int day) {
-    super(TajoDataTypes.Type.DATE);
-    date = new LocalDate(year, month, day);
-  }
-
-  public DateDatum(String dateStr) {
-    super(TajoDataTypes.Type.DATE);
-    this.date = LocalDate.parse(dateStr, DEFAULT_FORMATTER);
-  }
-
-  public DateDatum(LocalDate date) {
-    super(TajoDataTypes.Type.DATE);
-    this.date = date;
-  }
-
-  public LocalDate getDate() {
-    //LocalDate is immutable
-    return date;
-  }
-
-  public DateDatum(byte [] bytes) {
-    this(Bytes.toInt(bytes));
+  public TimeMeta toTimeMeta() {
+    TimeMeta tm = new TimeMeta();
+    DateTimeUtil.j2date(DateTimeUtil.date2j(year, monthOfYear, dayOfMonth), tm);
+    return tm;
   }
 
   public int getCenturyOfEra() {
-    return date.getCenturyOfEra();
+    TimeMeta tm = toTimeMeta();
+    return tm.getCenturyOfEra();
   }
 
   public int getYear() {
-    return date.getYear();
-  }
-
-  public int getMonthOfYear() {
-    return date.getMonthOfYear();
+    TimeMeta tm = toTimeMeta();
+    return tm.years;
   }
 
   public int getWeekyear() {
-    return date.getWeekyear();
+    TimeMeta tm = toTimeMeta();
+    return tm.getWeekyear();
   }
 
-  public int getWeekOfWeekyear() {
-    return date.getWeekOfWeekyear();
-  }
-
-  public int getDayOfWeek() {
-    return date.getDayOfWeek();
-  }
-
-  public int getDayOfMonth() {
-    return date.getDayOfMonth();
+  public int getMonthOfYear() {
+    TimeMeta tm = toTimeMeta();
+    return tm.monthOfYear;
   }
 
   public int getDayOfYear() {
-    return date.getDayOfYear();
+    TimeMeta tm = toTimeMeta();
+    return tm.getDayOfYear();
   }
 
+  public int getDayOfWeek() {
+    TimeMeta tm = toTimeMeta();
+    return tm.getDayOfWeek();
+  }
+
+  public int getISODayOfWeek() {
+    TimeMeta tm = toTimeMeta();
+    return tm.getISODayOfWeek();
+  }
+
+  public int getWeekOfYear() {
+    TimeMeta tm = toTimeMeta();
+    return tm.getWeekOfYear();
+  }
+
+  public int getDayOfMonth() {
+    TimeMeta tm = toTimeMeta();
+    return tm.dayOfMonth;
+  }
+
+
   public String toString() {
     return asChars();
   }
@@ -104,19 +107,31 @@
       case INT4:
       case INT8:
       case FLOAT4:
-      case FLOAT8:
-        return new DateDatum(date.plusDays(datum.asInt2()));
-      case INTERVAL:
-        IntervalDatum interval = (IntervalDatum)datum;
-        LocalDate localDate;
+      case FLOAT8: {
+        TimeMeta tm = toTimeMeta();
+        tm.plusDays(datum.asInt4());
+        return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
+      }
+      case INTERVAL: {
+        IntervalDatum interval = (IntervalDatum) datum;
+        TimeMeta tm = toTimeMeta();
+        tm.plusMillis(interval.getMilliSeconds());
         if (interval.getMonths() > 0) {
-          localDate = date.plusMonths(interval.getMonths());
-        } else {
-          localDate = date;
+          tm.plusMonths(interval.getMonths());
         }
-        return new TimestampDatum(localDate.toDateTimeAtStartOfDay().getMillis() + interval.getMilliSeconds());
-      case TIME:
-        return new TimestampDatum(createDateTime(date, ((TimeDatum)datum).getTime(), true));
+        DateTimeUtil.toUTCTimezone(tm);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+      }
+      case TIME: {
+        TimeMeta tm1 = toTimeMeta();
+
+        TimeMeta tm2 = ((TimeDatum)datum).toTimeMeta();
+        DateTimeUtil.toUserTimezone(tm2);     //TimeDatum is UTC
+
+        tm1.plusTime(DateTimeUtil.toTime(tm2));
+        DateTimeUtil.toUTCTimezone(tm1);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm1));
+      }
       default:
         throw new InvalidOperationException(datum.type());
     }
@@ -128,68 +143,51 @@
       case INT4:
       case INT8:
       case FLOAT4:
-      case FLOAT8:
-        return new DateDatum(date.minusDays(datum.asInt2()));
-      case INTERVAL:
-        IntervalDatum interval = (IntervalDatum)datum;
-        LocalDate localDate;
+      case FLOAT8: {
+        TimeMeta tm = toTimeMeta();
+        tm.plusDays(0 - datum.asInt4());
+        return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
+      }
+      case INTERVAL: {
+        IntervalDatum interval = (IntervalDatum) datum;
+        TimeMeta tm = toTimeMeta();
         if (interval.getMonths() > 0) {
-          localDate = date.minusMonths(interval.getMonths());
-        } else {
-          localDate = date;
+          tm.plusMonths(0 - interval.getMonths());
         }
-        return new TimestampDatum(localDate.toDateTimeAtStartOfDay().getMillis() - interval.getMilliSeconds());
-      case TIME:
-        return new TimestampDatum(createDateTime(date, ((TimeDatum)datum).getTime(), false));
-      case DATE:
-        return new Int4Datum(Days.daysBetween(((DateDatum)datum).date, date).getDays());
+        tm.plusMillis(0 - interval.getMilliSeconds());
+        DateTimeUtil.toUTCTimezone(tm);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+      }
+      case TIME: {
+        TimeMeta tm1 = toTimeMeta();
+
+        TimeMeta tm2 = ((TimeDatum)datum).toTimeMeta();
+        DateTimeUtil.toUserTimezone(tm2);     //TimeDatum is UTC
+
+        tm1.plusTime(0 - DateTimeUtil.toTime(tm2));
+        DateTimeUtil.toUTCTimezone(tm1);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm1));
+      }
+      case DATE: {
+        TimeMeta tm1 = toTimeMeta();
+        TimeMeta tm2 = ((DateDatum) datum).toTimeMeta();
+
+        int day1 = DateTimeUtil.date2j(tm1.years, tm1.monthOfYear, tm1.dayOfMonth);
+        int day2 = DateTimeUtil.date2j(tm2.years, tm2.monthOfYear, tm2.dayOfMonth);
+        return new Int4Datum(day1 - day2);
+      }
       default:
         throw new InvalidOperationException(datum.type());
     }
   }
 
-  public static DateTime createDateTime(LocalDate date, LocalTime time, boolean plus) {
-    //TODO create too many temporary instance. This must be improved.
-    DateTime dateTime = new DateTime(date.toDate().getTime());
-    if (plus) {
-      return dateTime
-                .plusHours(time.getHourOfDay())
-                .plusMinutes(time.getMinuteOfHour())
-                .plusSeconds(time.getSecondOfMinute())
-                .plusMillis(time.getMillisOfSecond());
-    } else {
-      return dateTime
-                .minusHours(time.getHourOfDay())
-                .minusMinutes(time.getMinuteOfHour())
-                .minusSeconds(time.getSecondOfMinute())
-                .minusMillis(time.getMillisOfSecond());
-    }
-  }
-
   @Override
   public int asInt4() {
     return encode();
   }
 
-  private static LocalDate decode(int val) {
-    int year = (val >> 16);
-    int monthOfYear = (0xFFFF & val) >> 8;
-    int dayOfMonth = (0x00FF & val);
-    return new LocalDate(year, monthOfYear, dayOfMonth);
-  }
-
-  /**
-   *   Year     MonthOfYear   DayOfMonth
-   *  31-16       15-8          7 - 0
-   *
-   * 0xFF 0xFF    0xFF          0xFF
-   */
   private int encode() {
-    int instance = 0;
-    instance |= (date.getYear() << 16); // 1970 ~ : 2 bytes
-    instance |= (date.getMonthOfYear() << 8); // 1 - 12 : 1 byte
-    instance |= (date.getDayOfMonth()); // 0 - 31 : 1 byte
-    return instance;
+    return DateTimeUtil.date2j(year, monthOfYear, dayOfMonth);
   }
 
   @Override
@@ -209,11 +207,13 @@
 
   @Override
   public String asChars() {
-    return date.toString(DEFAULT_FORMATTER);
+    TimeMeta tm = toTimeMeta();
+    return DateTimeUtil.encodeDate(tm, DateStyle.ISO_DATES);
   }
 
   public String toChars(String format) {
-    return date.toString(format);
+    TimeMeta tm = toTimeMeta();
+    return DateTimeFormat.to_char(tm, format);
   }
 
   @Override
@@ -228,8 +228,8 @@
 
   @Override
   public Datum equalsTo(Datum datum) {
-    if (datum.type() == TajoDataTypes.Type.TIME) {
-      return DatumFactory.createBool(date.equals(((DateDatum) datum).date));
+    if (datum.type() == Type.DATE) {
+      return DatumFactory.createBool(equals(datum));
     } else if (datum.isNull()) {
       return datum;
     } else {
@@ -240,18 +240,28 @@
   @Override
   public int compareTo(Datum datum) {
     if (datum.type() == TajoDataTypes.Type.DATE) {
-      return date.compareTo(((DateDatum)datum).date);
+      DateDatum another = (DateDatum) datum;
+      int compareResult = (year < another.year) ? -1 : ((year == another.year) ? 0 : 1);
+      if (compareResult != 0) {
+        return compareResult;
+      }
+      compareResult = (monthOfYear < another.monthOfYear) ? -1 : ((monthOfYear == another.monthOfYear) ? 0 : 1);
+      if (compareResult != 0) {
+        return compareResult;
+      }
+
+      return (dayOfMonth < another.dayOfMonth) ? -1 : ((dayOfMonth == another.dayOfMonth) ? 0 : 1);
     } else if (datum instanceof NullDatum || datum.isNull()) {
       return -1;
     } else {
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
   public boolean equals(Object obj) {
     if (obj instanceof DateDatum) {
       DateDatum another = (DateDatum) obj;
-      return date.isEqual(another.date);
+      return year == another.year && monthOfYear == another.monthOfYear && dayOfMonth == another.dayOfMonth;
     } else {
       return false;
     }
@@ -259,6 +269,11 @@
 
   @Override
   public int hashCode() {
-    return date.hashCode();
+    int total = 157;
+    total = 23 * total + year;
+    total = 23 * total + monthOfYear;
+    total = 23 * total + dayOfMonth;
+
+    return total;
   }
 }
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/DatumFactory.java b/tajo-common/src/main/java/org/apache/tajo/datum/DatumFactory.java
index 5ce36e5..7d6ef18 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/DatumFactory.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/DatumFactory.java
@@ -23,9 +23,9 @@
 import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.exception.InvalidCastException;
 import org.apache.tajo.util.Bytes;
-import org.joda.time.DateTime;
-import org.joda.time.LocalDate;
-import org.joda.time.LocalTime;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.io.IOException;
 
@@ -96,7 +96,7 @@
     case TIME:
       return createTime(value);
     case TIMESTAMP:
-      return createTimeStamp(value);
+      return createTimestamp(value);
     case INTERVAL:
       return createInterval(value);
     case BLOB:
@@ -128,11 +128,11 @@
       case TEXT:
         return createText(bytes);
       case DATE:
-        return new DateDatum(bytes);
+        return new DateDatum(Bytes.toInt(bytes));
       case TIME:
-        return new TimeDatum(bytes);
+        return new TimeDatum(Bytes.toLong(bytes));
       case TIMESTAMP:
-        return new TimestampDatum(bytes);
+        return new TimestampDatum(Bytes.toLong(bytes));
       case BIT:
         return createBit(bytes[0]);
       case BLOB:
@@ -170,7 +170,7 @@
     case INT8:
       return new Int8Datum(val);
     case TIMESTAMP:
-      return createTimeStampFromMillis(val);
+      return new TimestampDatum(val);
     case TIME:
       return createTime(val); 
     default:
@@ -266,8 +266,13 @@
     return new DateDatum(instance);
   }
 
+  public static DateDatum createDate(int year, int month, int day) {
+    return new DateDatum(DateTimeUtil.date2j(year, month, day));
+  }
+
   public static DateDatum createDate(String dateStr) {
-    return new DateDatum(LocalDate.parse(dateStr));
+    TimeMeta tm = DateTimeUtil.decodeDateTime(dateStr);
+    return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
   }
 
   public static TimeDatum createTime(long instance) {
@@ -275,19 +280,20 @@
   }
 
   public static TimeDatum createTime(String dateStr) {
-    return new TimeDatum(LocalTime.parse(dateStr));
+    TimeMeta tm = DateTimeUtil.decodeDateTime(dateStr);
+    return new TimeDatum(DateTimeUtil.toTime(tm));
   }
 
-  public static TimestampDatum createTimeStamp(int unixTime) {
-    return new TimestampDatum(unixTime);
+  public static TimestampDatum createTimestmpDatumWithJavaMillis(long millis) {
+    return new TimestampDatum(DateTimeUtil.javaTimeToJulianTime(millis));
   }
 
-  public static TimestampDatum createTimeStampFromMillis(long millis) {
-    return new TimestampDatum(new DateTime(millis));
+  public static TimestampDatum createTimestmpDatumWithUnixTime(int unixTime) {
+    return createTimestmpDatumWithJavaMillis(unixTime * 1000L);
   }
 
-  public static TimestampDatum createTimeStamp(String timeStamp) {
-    return new TimestampDatum(timeStamp);
+  public static TimestampDatum createTimestamp(String datetimeStr) {
+    return new TimestampDatum(DateTimeUtil.toJulianTimestamp(datetimeStr));
   }
 
   public static IntervalDatum createInterval(String intervalStr) {
@@ -301,7 +307,7 @@
     case INT8:
       return new DateDatum(datum.asInt4());
     case TEXT:
-      return new DateDatum(datum.asChars());
+      return createDate(datum.asChars());
     case DATE:
       return (DateDatum) datum;
     default:
@@ -314,7 +320,9 @@
     case INT8:
       return new TimeDatum(datum.asInt8());
     case TEXT:
-      return new TimeDatum(datum.asChars());
+      TimeMeta tm = DateTimeFormat.parseDateTime(datum.asChars(), "HH24:MI:SS.MS");
+      DateTimeUtil.toUTCTimezone(tm);
+      return new TimeDatum(DateTimeUtil.toTime(tm));
     case TIME:
       return (TimeDatum) datum;
     default:
@@ -325,7 +333,11 @@
   public static TimestampDatum createTimestamp(Datum datum) {
     switch (datum.type()) {
       case TEXT:
-        return new TimestampDatum(datum.asChars());
+        long timestamp = DateTimeUtil.toJulianTimestamp(datum.asChars());
+        TimeMeta tm = new TimeMeta();
+        DateTimeUtil.toJulianTimeMeta(timestamp, tm);
+        DateTimeUtil.toUTCTimezone(tm);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
       case TIMESTAMP:
         return (TimestampDatum) datum;
       default:
@@ -375,7 +387,24 @@
     case FLOAT8:
       return DatumFactory.createFloat8(operandDatum.asFloat8());
     case TEXT:
-      return DatumFactory.createText(operandDatum.asTextBytes());
+      switch (operandDatum.type()) {
+        case TIMESTAMP: {
+          TimestampDatum timestampDatum = (TimestampDatum)operandDatum;
+          TimeMeta tm = timestampDatum.toTimeMeta();
+          DateTimeUtil.toUserTimezone(tm);
+          TimestampDatum convertedTimestampDatum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+          return DatumFactory.createText(convertedTimestampDatum.asTextBytes());
+        }
+        case TIME: {
+          TimeDatum timeDatum = (TimeDatum)operandDatum;
+          TimeMeta tm = timeDatum.toTimeMeta();
+          DateTimeUtil.toUserTimezone(tm);
+          TimeDatum convertedTimeDatum = new TimeDatum(DateTimeUtil.toTime(tm));
+          return DatumFactory.createText(convertedTimeDatum.asTextBytes());
+        }
+        default:
+          return DatumFactory.createText(operandDatum.asTextBytes());
+      }
     case DATE:
       return DatumFactory.createDate(operandDatum);
     case TIME:
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/Float4Datum.java b/tajo-common/src/main/java/org/apache/tajo/datum/Float4Datum.java
index 35dca00..e24bce4 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/Float4Datum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/Float4Datum.java
@@ -23,6 +23,8 @@
 import org.apache.tajo.exception.InvalidCastException;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.NumberUtil;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.nio.ByteBuffer;
 
@@ -133,7 +135,7 @@
       case NULL_TYPE:
         return datum;
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -193,7 +195,7 @@
       case NULL_TYPE:
         return -1;
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -211,7 +213,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val + datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().plusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -233,7 +238,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val - datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().minusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(0 - asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -260,7 +268,7 @@
     case NULL_TYPE:
       return datum;
     default:
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/Float8Datum.java b/tajo-common/src/main/java/org/apache/tajo/datum/Float8Datum.java
index b7d8cf4..0542148 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/Float8Datum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/Float8Datum.java
@@ -23,6 +23,8 @@
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.Bytes;
 import org.apache.tajo.util.NumberUtil;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.nio.ByteBuffer;
 
@@ -122,7 +124,7 @@
     case NULL_TYPE:
       return datum;
     default:
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -182,7 +184,7 @@
       case NULL_TYPE:
         return -1;
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -200,7 +202,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val + datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().plusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -222,7 +227,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val - datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().minusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(0 - asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -249,7 +257,7 @@
     case NULL_TYPE:
       return datum;
     default:
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/Int2Datum.java b/tajo-common/src/main/java/org/apache/tajo/datum/Int2Datum.java
index 731ccdc..38cf019 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/Int2Datum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/Int2Datum.java
@@ -22,6 +22,8 @@
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.NumberUtil;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.nio.ByteBuffer;
 
@@ -124,7 +126,7 @@
     case NULL_TYPE:
       return datum;
     default:
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -184,7 +186,7 @@
       case NULL_TYPE:
         return -1;
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -202,7 +204,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val + datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().plusDays(asInt2()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(asInt2());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -224,7 +229,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val - datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().minusDays(asInt2()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(0 - asInt2());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/Int4Datum.java b/tajo-common/src/main/java/org/apache/tajo/datum/Int4Datum.java
index 5b60e9e..d26b6b2 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/Int4Datum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/Int4Datum.java
@@ -22,6 +22,8 @@
 import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.NumberUtil;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.nio.ByteBuffer;
 
@@ -128,7 +130,7 @@
     case NULL_TYPE:
       return datum;
     default:
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -188,7 +190,7 @@
       case NULL_TYPE:
         return -1;
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -206,7 +208,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val + datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().plusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -228,7 +233,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val - datum.asFloat8());
     case DATE:
-        return new DateDatum(((DateDatum)datum).getDate().minusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(0 - asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -255,7 +263,7 @@
     case NULL_TYPE:
       return datum;
     default:
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/Int8Datum.java b/tajo-common/src/main/java/org/apache/tajo/datum/Int8Datum.java
index 0367f9c..46a1353 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/Int8Datum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/Int8Datum.java
@@ -23,6 +23,8 @@
 import org.apache.tajo.exception.InvalidCastException;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.NumberUtil;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.nio.ByteBuffer;
 
@@ -135,7 +137,7 @@
       case NULL_TYPE:
         return datum;
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -196,7 +198,7 @@
         return -1;
 
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
@@ -214,7 +216,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val + datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().plusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
@@ -236,7 +241,10 @@
     case FLOAT8:
       return DatumFactory.createFloat8(val - datum.asFloat8());
     case DATE:
-      return new DateDatum(((DateDatum)datum).getDate().minusDays(asInt4()));
+      DateDatum dateDatum = (DateDatum)datum;
+      TimeMeta tm = dateDatum.toTimeMeta();
+      tm.plusDays(0 - asInt4());
+      return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
     case NULL_TYPE:
       return datum;
     default:
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/IntervalDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/IntervalDatum.java
index b6904a9..28a0a47 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/IntervalDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/IntervalDatum.java
@@ -21,9 +21,8 @@
 import com.google.common.base.Objects;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.exception.InvalidOperationException;
-import org.joda.time.DateTime;
-import org.joda.time.LocalDate;
-import org.joda.time.LocalTime;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.text.DecimalFormat;
 import java.util.HashMap;
@@ -97,7 +96,6 @@
 
       int length = intervalStr.getBytes().length;
 
-      int start = 0;
       StringBuilder digitChars = new StringBuilder();
       StringBuilder unitChars = new StringBuilder();
       for (int i = 0; i < length; i++) {
@@ -231,28 +229,32 @@
       case INTERVAL:
         IntervalDatum other = (IntervalDatum) datum;
         return new IntervalDatum(months + other.months, millieconds + other.millieconds);
-      case DATE:
-        LocalDate date = ((DateDatum)datum).getDate();
-        LocalDate localDate;
-        if (months > 0) {
-          localDate = date.plusMonths(months);
-        } else {
-          localDate = date;
+      case DATE: {
+        DateDatum dateDatum = (DateDatum) datum;
+        TimeMeta tm = dateDatum.toTimeMeta();
+        tm.plusMillis(getMilliSeconds());
+        if (getMonths() > 0) {
+          tm.plusMonths(getMonths());
         }
-        return new TimestampDatum(localDate.toDateTimeAtStartOfDay().getMillis() + millieconds);
-      case TIME:
-        LocalTime localTime = ((TimeDatum)datum).getTime();
-        localTime = localTime.plusMillis((int) millieconds);
-        return new TimeDatum(localTime);
-      case TIMESTAMP:
-        DateTime dateTime = ((TimestampDatum) datum).getDateTime();
+        DateTimeUtil.toUTCTimezone(tm);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+      }
+      case TIME: {
+        TimeMeta tm = ((TimeDatum) datum).toTimeMeta();
+        tm.plusMillis(millieconds);
+        return new TimeDatum(DateTimeUtil.toTime(tm));
+      }
+      case TIMESTAMP: {
+        TimeMeta tm = new TimeMeta();
+        DateTimeUtil.toJulianTimeMeta(((TimestampDatum) datum).asInt8(), tm);
         if (months > 0) {
-          dateTime = dateTime.plusMonths(months);
+          tm.plusMonths(months);
         }
         if (millieconds > 0) {
-          dateTime = dateTime.plusMillis((int) millieconds);
+          tm.plusMillis(millieconds);
         }
-        return new TimestampDatum(dateTime);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+      }
       default:
         throw new InvalidOperationException(datum.type());
     }
@@ -403,7 +405,7 @@
     } else if (datum instanceof NullDatum || datum.isNull()) {
       return -1;
     } else {
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/NullDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/NullDatum.java
index 9842490..a4f79d7 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/NullDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/NullDatum.java
@@ -18,6 +18,7 @@
 
 package org.apache.tajo.datum;
 
+import org.apache.tajo.common.TajoDataTypes.DataType;
 import org.apache.tajo.exception.InvalidCastException;
 
 import static org.apache.tajo.common.TajoDataTypes.Type;
@@ -25,9 +26,11 @@
 public class NullDatum extends Datum {
   private static NullDatum instance;
   private static final byte [] EMPTY_BYTES = new byte[0];
+  private static final DataType NULL_DATA_TYPE;
 
   static {
     instance = new NullDatum();
+    NULL_DATA_TYPE = DataType.newBuilder().setType(Type.NULL_TYPE).build();
   }
 
   private NullDatum() {
@@ -38,6 +41,10 @@
     return instance;
   }
 
+  public static DataType getDataType() {
+    return NULL_DATA_TYPE;
+  }
+
   @Override
   public boolean isNull() {
     return true;
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/TextDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/TextDatum.java
index 313b905..49f09f6 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/TextDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/TextDatum.java
@@ -110,7 +110,7 @@
         return -1;
 
       default:
-        throw new InvalidOperationException();
+        throw new InvalidOperationException(datum.type());
     }
   }
 
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/TimeDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/TimeDatum.java
index 70f62c2..37e5e78 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/TimeDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/TimeDatum.java
@@ -22,65 +22,47 @@
 import org.apache.tajo.exception.InvalidCastException;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.Bytes;
-import org.joda.time.DateTime;
-import org.joda.time.LocalTime;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
+import org.apache.tajo.util.datetime.DateTimeConstants.DateStyle;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
+
+import java.util.TimeZone;
 
 public class TimeDatum extends Datum {
   public static final int SIZE = 8;
-  /** ISO 8601/SQL standard format - ex) 07:37:16-08 */
-  public static final String DEFAULT_FORMAT_STRING = "HH:mm:ss";
-  private static final DateTimeFormatter DEFAULT_FORMATTER = DateTimeFormat.forPattern(DEFAULT_FORMAT_STRING);
-  private final LocalTime time;
+  private final long time;
 
-  public TimeDatum(long value) {
-    super(TajoDataTypes.Type.TIME);
-    time = new LocalTime(value);
-  }
-
-  public TimeDatum(int hour, int minute, int second) {
-    super(TajoDataTypes.Type.TIME);
-    time = new LocalTime(hour, minute, second);
-  }
-
-  public TimeDatum(int hour, int minute, int second, int millis) {
-    super(TajoDataTypes.Type.TIME);
-    time = new LocalTime(hour, minute, second, millis);
-  }
-
-  public TimeDatum(String timeStr) {
-    super(TajoDataTypes.Type.TIME);
-    time = LocalTime.parse(timeStr, DEFAULT_FORMATTER);
-  }
-
-  public TimeDatum(LocalTime time) {
+  public TimeDatum(long time) {
     super(TajoDataTypes.Type.TIME);
     this.time = time;
   }
 
-  public TimeDatum(byte [] bytes) {
-    this(Bytes.toLong(bytes));
+  public TimeMeta toTimeMeta() {
+    TimeMeta tm = new TimeMeta();
+    DateTimeUtil.date2j(time, tm);
+
+    return tm;
   }
 
   public int getHourOfDay() {
-    return time.getHourOfDay();
+    TimeMeta tm = toTimeMeta();
+    return tm.hours;
   }
 
   public int getMinuteOfHour() {
-    return time.getMinuteOfHour();
+    TimeMeta tm = toTimeMeta();
+    return tm.minutes;
   }
 
   public int getSecondOfMinute() {
-    return time.getSecondOfMinute();
-  }
-
-  public int getMillisOfDay() {
-    return time.getMillisOfDay();
+    TimeMeta tm = new TimeMeta();
+    DateTimeUtil.date2j(time, tm);
+    return tm.secs;
   }
 
   public int getMillisOfSecond() {
-    return time.getMillisOfSecond();
+    TimeMeta tm = toTimeMeta();
+    return tm.fsecs / 1000;
   }
 
   public String toString() {
@@ -94,7 +76,7 @@
 
   @Override
   public long asInt8() {
-    return time.toDateTimeToday().getMillis();
+    return time;
   }
 
   @Override
@@ -109,11 +91,21 @@
 
   @Override
   public String asChars() {
-    return time.toString(DEFAULT_FORMATTER);
+    TimeMeta tm = toTimeMeta();
+    return DateTimeUtil.encodeTime(tm, DateStyle.ISO_DATES);
   }
 
-  public String toChars(String format) {
-    return time.toString(format);
+  public String asChars(TimeZone timeZone, boolean includeTimeZone) {
+    TimeMeta tm = toTimeMeta();
+    DateTimeUtil.toUserTimezone(tm, timeZone);
+    if (includeTimeZone) {
+      tm.timeZone = timeZone.getRawOffset() / 1000;
+    }
+    return DateTimeUtil.encodeTime(tm, DateStyle.ISO_DATES);
+  }
+
+  public String toString(TimeZone timeZone, boolean includeTimeZone) {
+    return asChars(timeZone, includeTimeZone);
   }
 
   @Override
@@ -128,12 +120,23 @@
 
   public Datum plus(Datum datum) {
     switch(datum.type()) {
-      case INTERVAL:
+      case INTERVAL: {
         IntervalDatum interval = ((IntervalDatum)datum);
-        return new TimeDatum(time.plusMillis((int)interval.getMilliSeconds()));
-      case DATE:
-        DateTime dateTime = DateDatum.createDateTime(((DateDatum)datum).getDate(), time, true);
-        return new TimestampDatum(dateTime);
+        TimeMeta tm = toTimeMeta();
+        tm.plusMillis(interval.getMilliSeconds());
+        return new TimeDatum(DateTimeUtil.toTime(tm));
+      }
+      case DATE: {
+        TimeMeta tm = toTimeMeta();
+        DateTimeUtil.toUserTimezone(tm);     //TimeDatum is UTC
+
+        DateDatum dateDatum = (DateDatum) datum;
+        TimeMeta dateTm = dateDatum.toTimeMeta();
+        dateTm.plusTime(DateTimeUtil.toTime(tm));
+
+        DateTimeUtil.toUTCTimezone(dateTm);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(dateTm));
+      }
       default:
         throw new InvalidOperationException(datum.type());
     }
@@ -141,12 +144,17 @@
 
   public Datum minus(Datum datum) {
     switch(datum.type()) {
-      case INTERVAL:
+      case INTERVAL: {
         IntervalDatum interval = ((IntervalDatum)datum);
-        return new TimeDatum(time.minusMillis((int)interval.getMilliSeconds()));
+        TimeMeta tm = toTimeMeta();
+        tm.plusMillis(0 - interval.getMilliSeconds());
+        return new TimeDatum(DateTimeUtil.toTime(tm));
+      }
       case TIME:
-        return new IntervalDatum(
-            time.toDateTimeToday().getMillis() - ((TimeDatum)datum).getTime().toDateTimeToday().getMillis() );
+        TimeMeta tm1 = toTimeMeta();
+        TimeMeta tm2 = ((TimeDatum)datum).toTimeMeta();
+
+        return new IntervalDatum((DateTimeUtil.toTime(tm1) - DateTimeUtil.toTime(tm2))/1000);
       default:
         throw new InvalidOperationException(datum.type());
     }
@@ -155,29 +163,30 @@
   @Override
   public Datum equalsTo(Datum datum) {
     if (datum.type() == TajoDataTypes.Type.TIME) {
-      return DatumFactory.createBool(time.equals(((TimeDatum) datum).time));
+      return DatumFactory.createBool(time == (((TimeDatum) datum).time));
     } else if (datum.isNull()) {
       return datum;
     } else {
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
   @Override
   public int compareTo(Datum datum) {
     if (datum.type() == TajoDataTypes.Type.TIME) {
-      return time.compareTo(((TimeDatum)datum).time);
+      TimeDatum another = (TimeDatum)datum;
+      return (time < another.time) ? -1 : ((time == another.time) ? 0 : 1);
     } else if (datum instanceof NullDatum || datum.isNull()) {
       return -1;
     } else {
-      throw new InvalidOperationException();
+      throw new InvalidOperationException(datum.type());
     }
   }
 
   public boolean equals(Object obj) {
     if (obj instanceof TimeDatum) {
       TimeDatum another = (TimeDatum) obj;
-      return time.isEqual(another.time);
+      return time == another.time;
     } else {
       return false;
     }
@@ -185,10 +194,7 @@
 
   @Override
   public int hashCode() {
-    return time.hashCode();
+    return (int)(time ^ (time >>> 32));
   }
 
-  public LocalTime getTime() {
-    return time;
-  }
 }
diff --git a/tajo-common/src/main/java/org/apache/tajo/datum/TimestampDatum.java b/tajo-common/src/main/java/org/apache/tajo/datum/TimestampDatum.java
index 879428b..62b0861 100644
--- a/tajo-common/src/main/java/org/apache/tajo/datum/TimestampDatum.java
+++ b/tajo-common/src/main/java/org/apache/tajo/datum/TimestampDatum.java
@@ -19,121 +19,180 @@
 package org.apache.tajo.datum;
 
 import com.google.common.base.Objects;
-import org.apache.commons.lang.StringUtils;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.exception.InvalidOperationException;
 import org.apache.tajo.util.Bytes;
-import org.joda.time.DateTime;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
+import org.apache.tajo.util.datetime.DateTimeConstants.DateStyle;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
+
+import java.util.TimeZone;
 
 public class TimestampDatum extends Datum {
   public static final int SIZE = 8;
-  /** ISO 8601/SQL standard format - ex) 1997-12-17 07:37:16-08 */
-  public static final String DEFAULT_FORMAT_STRING = "yyyy-MM-dd HH:mm:ss";
-  public static final String FRACTION_FORMAT_STRING = "yyyy-MM-dd HH:mm:ss.SSS";
-  private static final DateTimeFormatter DEFAULT_FORMATTER = DateTimeFormat.forPattern(DEFAULT_FORMAT_STRING);
-  private static final DateTimeFormatter FRACTION_FORMATTER = DateTimeFormat.forPattern(FRACTION_FORMAT_STRING);
-  private final DateTime dateTime;
 
-  public TimestampDatum(int timestamp) {
-    super(TajoDataTypes.Type.TIMESTAMP);
-    dateTime = new DateTime((long)timestamp * 1000);
-  }
+  private long timestamp;
 
+  /**
+   *
+   * @param timestamp UTC based
+   */
   public TimestampDatum(long timestamp) {
     super(TajoDataTypes.Type.TIMESTAMP);
-    dateTime = new DateTime(timestamp);
+    this.timestamp = timestamp;
   }
 
-  public TimestampDatum(DateTime dateTime) {
-    super(TajoDataTypes.Type.TIMESTAMP);
-    this.dateTime = dateTime;
+  /**
+   * It's the same value to asInt8().
+   * @return The Timestamp
+   */
+  public long getTimestamp() {
+    return timestamp;
   }
 
-  TimestampDatum(byte [] bytes) {
-    super(TajoDataTypes.Type.TIMESTAMP);
-    this.dateTime = new DateTime(Bytes.toLong(bytes));
+  public int getEpoch() {
+    return DateTimeUtil.julianTimeToEpoch(timestamp);
   }
 
-  public TimestampDatum(String datetime) {
-    super(TajoDataTypes.Type.TIMESTAMP);
-
-    DateTime tmpDateTime = null;
-    try {
-      tmpDateTime = DateTime.parse(datetime, DEFAULT_FORMATTER);
-    } catch (IllegalArgumentException e) {
-      tmpDateTime = DateTime.parse(datetime, FRACTION_FORMATTER);
-    }
-    this.dateTime = tmpDateTime;
+  public long getJavaTimestamp() {
+    return DateTimeUtil.julianTimeToJavaTime(timestamp);
   }
 
-  public int getUnixTime() {
-    return (int) (dateTime.getMillis() / 1000);
-  }
-
-  public long getMillis() {
-    return dateTime.getMillis();
-  }
-
-  public DateTime getDateTime() {
-    return dateTime;
-  }
 
   public int getCenturyOfEra() {
-    return dateTime.getCenturyOfEra();
-  }
-
-  public int getEra() {
-    return dateTime.getEra();
+    TimeMeta tm = toTimeMeta();
+    return tm.getCenturyOfEra();
   }
 
   public int getYear() {
-    return dateTime.getYear();
+    TimeMeta tm = toTimeMeta();
+    return tm.years;
   }
 
   public int getMonthOfYear() {
-    return dateTime.getMonthOfYear();
-  }
-
-  public int getDayOfWeek() {
-    return dateTime.getDayOfWeek();
+    TimeMeta tm = toTimeMeta();
+    return tm.monthOfYear;
   }
 
   public int getDayOfYear() {
-    return dateTime.getDayOfYear();
+    TimeMeta tm = toTimeMeta();
+    return tm.getDayOfYear();
+  }
+
+  public int getDayOfWeek() {
+    TimeMeta tm = toTimeMeta();
+    return tm.getDayOfYear();
+  }
+
+  public int getWeekOfYear() {
+    TimeMeta tm = toTimeMeta();
+    return tm.getWeekOfYear();
   }
 
   public int getDayOfMonth() {
-    return dateTime.getDayOfMonth();
+    TimeMeta tm = toTimeMeta();
+    return tm.dayOfMonth;
   }
 
   public int getHourOfDay() {
-    return dateTime.getHourOfDay();
+    TimeMeta tm = toTimeMeta();
+    return tm.hours;
   }
 
   public int getMinuteOfHour() {
-    return dateTime.getMinuteOfHour();
-  }
-
-  public int getSecondOfDay() {
-    return dateTime.getSecondOfDay();
+    TimeMeta tm = toTimeMeta();
+    return tm.minutes;
   }
 
   public int getSecondOfMinute() {
-    return dateTime.getSecondOfMinute();
+    TimeMeta tm = toTimeMeta();
+    return tm.secs;
   }
 
   public int getMillisOfSecond() {
-    return dateTime.getMillisOfSecond();
+    TimeMeta tm = toTimeMeta();
+    return tm.fsecs / 1000;
   }
 
-  public int getWeekyear() {
-    return dateTime.getWeekyear();
+  public int getUnixTime() {
+    return (int)(DateTimeUtil.julianTimeToJavaTime(timestamp) / 1000);
   }
 
-  public int getWeekOfWeekyear() {
-    return dateTime.getWeekOfWeekyear();
+  public String toString() {
+    return asChars();
+  }
+
+  public String asChars(TimeZone timeZone, boolean includeTimeZone) {
+    TimeMeta tm = toTimeMeta();
+    DateTimeUtil.toUserTimezone(tm, timeZone);
+    if (includeTimeZone) {
+      tm.timeZone = timeZone.getRawOffset() / 1000;
+    }
+    return  DateTimeUtil.encodeDateTime(tm, DateStyle.ISO_DATES);
+  }
+
+  public String toString(TimeZone timeZone, boolean includeTimeZone) {
+    return asChars(timeZone, includeTimeZone);
+  }
+
+  @Override
+  public long asInt8() {
+    return timestamp;
+  }
+
+  @Override
+  public String asChars() {
+    TimeMeta tm = toTimeMeta();
+    return DateTimeUtil.encodeDateTime(tm, DateStyle.ISO_DATES);
+  }
+
+  public String toChars(String format) {
+    TimeMeta tm = toTimeMeta();
+
+    return DateTimeFormat.to_char(tm, format);
+  }
+
+  @Override
+  public int size() {
+    return SIZE;
+  }
+
+  @Override
+  public byte [] asByteArray() {
+    return Bytes.toBytes(timestamp);
+  }
+
+  @Override
+  public Datum equalsTo(Datum datum) {
+    if (datum.type() == TajoDataTypes.Type.TIME) {
+      return timestamp == datum.asInt8() ? BooleanDatum.TRUE : BooleanDatum.FALSE;
+    } else if (datum.isNull()) {
+      return datum;
+    } else {
+      throw new InvalidOperationException(datum.type());
+    }
+  }
+
+  @Override
+  public int compareTo(Datum datum) {
+    if (datum.type() == TajoDataTypes.Type.TIMESTAMP) {
+      TimestampDatum another = (TimestampDatum) datum;
+      return (timestamp < another.timestamp) ? -1 : ((timestamp > another.timestamp) ? 1 : 0);
+    } else if (datum.isNull()) {
+      return -1;
+    } else {
+      throw new InvalidOperationException(datum.type());
+    }
+  }
+
+  public boolean equals(Object obj) {
+    if (obj instanceof TimestampDatum) {
+      TimestampDatum another = (TimestampDatum) obj;
+      return timestamp == another.timestamp;
+    } else {
+      return false;
+    }
   }
 
   @Override
@@ -141,16 +200,17 @@
     if (datum.type() == TajoDataTypes.Type.INTERVAL) {
       IntervalDatum interval = (IntervalDatum)datum;
 
-      DateTime plusDateTime = null;
+      TimeMeta tm = new TimeMeta();
+      DateTimeUtil.toJulianTimeMeta(timestamp, tm);
+
       if (interval.getMonths() > 0) {
-        plusDateTime = dateTime.plusMonths(interval.getMonths());
-      } else {
-        plusDateTime = dateTime;
+        tm.plusMonths(interval.getMonths());
       }
       if (interval.getMilliSeconds() > 0) {
-        plusDateTime = plusDateTime.plusMillis((int) interval.getMilliSeconds());
+        tm.plusMillis(interval.getMilliSeconds());
       }
-      return new TimestampDatum(plusDateTime);
+
+      return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
 
     } else {
       throw new InvalidOperationException(datum.type());
@@ -163,89 +223,32 @@
       case INTERVAL:
         IntervalDatum interval = (IntervalDatum)datum;
 
-        DateTime minusDateTime = null;
+        TimeMeta tm = new TimeMeta();
+        DateTimeUtil.toJulianTimeMeta(timestamp, tm);
+
         if (interval.getMonths() > 0) {
-          minusDateTime = dateTime.minusMonths(interval.getMonths());
-        } else {
-          minusDateTime = dateTime;
+          tm.plusMonths(0 - interval.getMonths());
         }
         if (interval.getMilliSeconds() > 0) {
-          minusDateTime = minusDateTime.minusMillis((int)interval.getMilliSeconds());
+          tm.plusMillis(0 - interval.getMilliSeconds());
         }
-        return new TimestampDatum(minusDateTime);
+        return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
       case TIMESTAMP:
-        return new IntervalDatum(dateTime.getMillis() - ((TimestampDatum)datum).dateTime.getMillis());
+        return new IntervalDatum((timestamp - ((TimestampDatum)datum).timestamp) / 1000);
       default:
         throw new InvalidOperationException(datum.type());
     }
   }
 
   @Override
-  public long asInt8() {
-    return dateTime.getMillis();
-  }
-
-  public String toString() {
-    return asChars();
-  }
-
-  @Override
-  public String asChars() {
-    if (getMillisOfSecond() > 0) {
-      return StringUtils.stripEnd(dateTime.toString(FRACTION_FORMATTER), "0");
-    } else {
-      return dateTime.toString(DEFAULT_FORMATTER);
-    }
-  }
-
-  public String toChars(DateTimeFormatter format) {
-    return dateTime.toString(format);
-  }
-
-  @Override
-  public int size() {
-    return SIZE;
-  }
-
-  @Override
-  public byte [] asByteArray() {
-    return Bytes.toBytes(dateTime.getMillis());
-  }
-
-  @Override
-  public Datum equalsTo(Datum datum) {
-    if (datum.type() == TajoDataTypes.Type.TIME) {
-      return DatumFactory.createBool(dateTime.equals(((TimestampDatum) datum).dateTime));
-    } else if (datum.isNull()) {
-      return datum;
-    } else {
-      throw new InvalidOperationException();
-    }
-  }
-
-  @Override
-  public int compareTo(Datum datum) {
-    if (datum.type() == TajoDataTypes.Type.TIMESTAMP) {
-      return dateTime.compareTo(((TimestampDatum)datum).dateTime);
-    } else if (datum instanceof NullDatum || datum.isNull()) {
-      return -1;
-    } else {
-      throw new InvalidOperationException();
-    }
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (obj instanceof TimestampDatum) {
-      TimestampDatum another = (TimestampDatum) obj;
-      return dateTime.isEqual(another.dateTime);
-    } else {
-      return false;
-    }
-  }
-
-  @Override
   public int hashCode(){
-     return Objects.hashCode(dateTime);
+     return Objects.hashCode(timestamp);
+  }
+
+  public TimeMeta toTimeMeta() {
+    TimeMeta tm = new TimeMeta();
+    DateTimeUtil.toJulianTimeMeta(timestamp, tm);
+
+    return tm;
   }
 }
diff --git a/tajo-common/src/main/java/org/apache/tajo/exception/ValueOutOfRangeException.java b/tajo-common/src/main/java/org/apache/tajo/exception/ValueOutOfRangeException.java
new file mode 100644
index 0000000..4cf6b56
--- /dev/null
+++ b/tajo-common/src/main/java/org/apache/tajo/exception/ValueOutOfRangeException.java
@@ -0,0 +1,25 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.exception;
+
+public class ValueOutOfRangeException extends RuntimeException {
+  public ValueOutOfRangeException(String message) {
+    super(message);
+  }
+}
diff --git a/tajo-common/src/main/java/org/apache/tajo/json/DatumAdapter.java b/tajo-common/src/main/java/org/apache/tajo/json/DatumAdapter.java
index 2a162a9..9e88acb 100644
--- a/tajo-common/src/main/java/org/apache/tajo/json/DatumAdapter.java
+++ b/tajo-common/src/main/java/org/apache/tajo/json/DatumAdapter.java
@@ -21,7 +21,6 @@
 import com.google.gson.*;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.datum.*;
-import org.joda.time.DateTime;
 
 import java.lang.reflect.Type;
 
@@ -39,7 +38,7 @@
     case TIME:
       return new TimeDatum(jsonObject.get("value").getAsLong());
     case TIMESTAMP:
-      return new TimestampDatum(new DateTime(jsonObject.get("value").getAsLong()));
+      return new TimestampDatum(jsonObject.get("value").getAsLong());
     case INTERVAL:
       String[] values = jsonObject.get("value").getAsString().split(",");
 
@@ -62,7 +61,7 @@
       jsonObj.addProperty("value", src.asInt8());
       break;
     case TIMESTAMP:
-      jsonObj.addProperty("value", ((TimestampDatum)src).getMillis());
+      jsonObj.addProperty("value", src.asInt8());
       break;
     case INTERVAL:
       IntervalDatum interval = (IntervalDatum)src;
diff --git a/tajo-common/src/main/java/org/apache/tajo/util/StringUtils.java b/tajo-common/src/main/java/org/apache/tajo/util/StringUtils.java
index ed9014d..41ea153 100644
--- a/tajo-common/src/main/java/org/apache/tajo/util/StringUtils.java
+++ b/tajo-common/src/main/java/org/apache/tajo/util/StringUtils.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo.util;
 
+import org.apache.commons.lang.CharUtils;
+import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.commons.lang.SystemUtils;
 import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.SignalLogger;
@@ -165,4 +167,17 @@
           }
         }, SHUTDOWN_HOOK_PRIORITY);
   }
+
+  public static String unicodeEscapedDelimiter(String value) {
+    try {
+      String delimiter = StringEscapeUtils.unescapeJava(value);
+      return unicodeEscapedDelimiter(delimiter.charAt(0));
+    } catch (Throwable e) {
+    }
+    return value;
+  }
+
+  public static String unicodeEscapedDelimiter(char c) {
+    return CharUtils.unicodeEscaped(c);
+  }
 }
diff --git a/tajo-common/src/main/java/org/apache/tajo/util/TimeStampUtil.java b/tajo-common/src/main/java/org/apache/tajo/util/TimeStampUtil.java
deleted file mode 100644
index aa1be8b..0000000
--- a/tajo-common/src/main/java/org/apache/tajo/util/TimeStampUtil.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.tajo.util;
-
-
-import org.apache.tajo.datum.Int8Datum;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeFieldType;
-import org.joda.time.DateTimeZone;
-
-public class TimeStampUtil {
-
-    public static long getDay(DateTime dateTime) {
-        return convertToMicroSeconds(dateTime.withTimeAtStartOfDay());
-    }
-
-    public static long getHour(DateTime dateTime) {
-        return convertToMicroSeconds(dateTime.withTime(dateTime.get(DateTimeFieldType.hourOfDay()), 0, 0, 0));
-    }
-
-    public static long getMinute(DateTime dateTime) {
-        return convertToMicroSeconds(dateTime.withTime(dateTime.get(DateTimeFieldType.hourOfDay()),
-            dateTime.get(DateTimeFieldType.minuteOfHour()), 0, 0));
-    }
-
-    public static long getSecond(DateTime dateTime) {
-        return convertToMicroSeconds(dateTime.withTime(dateTime.get(DateTimeFieldType.hourOfDay()),
-            dateTime.get(DateTimeFieldType.minuteOfHour()), dateTime.get(DateTimeFieldType.secondOfMinute()), 0));
-    }
-
-    public static long getMonth(DateTime dateTime) {
-        return convertToMicroSeconds(dateTime.withTimeAtStartOfDay().withDate(dateTime.getYear(),
-            dateTime.getMonthOfYear(),1));
-    }
-
-    public static long getDayOfWeek(DateTime dateTime,int week) {
-        return convertToMicroSeconds(dateTime.withTimeAtStartOfDay().withDayOfWeek(week));
-    }
-
-    public static long getYear (DateTime dateTime) {
-        return convertToMicroSeconds(dateTime.withTimeAtStartOfDay().withDate(dateTime.getYear(), 1, 1));
-    }
-
-    public static DateTime getUTCDateTime(Int8Datum int8Datum){
-        return new DateTime(int8Datum.asInt8()/1000, DateTimeZone.UTC);
-    }
-
-    public static long convertToMicroSeconds(DateTime dateTime) {
-        return  dateTime.getMillis() * 1000;
-    }
-}
diff --git a/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeConstants.java b/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeConstants.java
new file mode 100644
index 0000000..353a500
--- /dev/null
+++ b/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeConstants.java
@@ -0,0 +1,631 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util.datetime;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class DateTimeConstants {
+  public enum DateStyle {
+    XSO_DATES,
+    ISO_DATES,
+    SQL_DATES
+  };
+
+  public static final int[][]	DAY_OF_MONTH = {
+    {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 0},
+    {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 0}
+  };
+
+  /** assumes leap year every four years */
+  public static final double DAYS_PER_YEAR = 365.25;
+  public static final int MONTHS_PER_YEAR = 12;
+
+
+  // DAYS_PER_MONTH is very imprecise.  The more accurate value is
+  // 365.2425/12 = 30.436875, or '30 days_full 10:29:06'.  Right now we only
+  // return an integral number of days_full, but someday perhaps we should
+  // also return a 'time' value to be used as well.	ISO 8601 suggests
+  // 0 days_full.
+
+  /** assumes exactly 30 days_full per month */
+  public static final int DAYS_PER_MONTH	= 30;
+  /** assume no daylight savings time changes */
+  public static final int HOURS_PER_DAY = 24;
+
+  // This doesn't adjust for uneven daylight savings time intervals or leap
+  // seconds, and it crudely estimates leap years.  A more accurate value
+  // for days_full per years is 365.2422.
+
+  /** avoid floating-point computation */
+  public static final int SECS_PER_YEAR	= 36525 * 864;
+  public static final int SECS_PER_DAY = 86400;
+  public static final int SECS_PER_HOUR	= 3600;
+  public static final int SECS_PER_MINUTE = 60;
+  public static final int MINS_PER_HOUR	= 60;
+
+  public static final long MSECS_PER_DAY = 86400000L;
+  public static final long MSECS_PER_SEC = 1000L;
+
+  public static final long USECS_PER_DAY = 86400000000L;
+  public static final long USECS_PER_HOUR	= 3600000000L;
+  public static final long USECS_PER_MINUTE = 60000000L;
+  public static final long USECS_PER_SEC = 1000000L;
+
+  public static final int JULIAN_MINYEAR = -4713;
+  public static final int JULIAN_MINMONTH = 11;
+  public static final int JULIAN_MINDAY = 24;
+  public static final int JULIAN_MAXYEAR = 5874898;
+
+  /** == DateTimeUtil.toJulianDate(JULIAN_MAXYEAR, 1, 1) */
+  public static final int JULIAN_MAX = 2147483494;
+
+  // Julian-date equivalents of Day 0 in Unix and Postgres reckoning
+  /** == DateTimeUtil.toJulianDate(1970, 1, 1) */
+  public static final int UNIX_EPOCH_JDATE =     2440588;
+  /** == DateTimeUtil.toJulianDate(2000, 1, 1) */
+  public static final int POSTGRES_EPOCH_JDATE = 2451545;
+  /** == (POSTGRES_EPOCH_JDATE * SECS_PER_DAY) - (UNIX_EPOCH_JDATE * SECS_PER_DAY); */
+  public static final long SECS_DIFFERENCE_BETWEEN_JULIAN_AND_UNIXTIME = 946684800;
+
+  public static final int MAX_TZDISP_HOUR	=	15;	/* maximum allowed hour part */
+  public static final int TZDISP_LIMIT =	((MAX_TZDISP_HOUR + 1) * SECS_PER_HOUR);
+
+  public static final int INTERVAL_FULL_RANGE = 0x7FFF;
+
+  public static final String DAGO =			"ago";
+  public static final String DCURRENT =		"current";
+  public static final String EPOCH =			"epoch";
+  public static final String INVALID =			"invalid";
+  public static final String EARLY =			"-infinity";
+  public static final String LATE	=		"infinity";
+  public static final String NOW =				"now";
+  public static final String TODAY	=		"today";
+  public static final String TOMORROW	=	"tomorrow";
+  public static final String YESTERDAY =		"yesterday";
+  public static final String ZULU	 =		"zulu";
+
+  public static final String DMICROSEC	=	"usecond";
+  public static final String DMILLISEC =		"msecond";
+  public static final String DSECOND	=		"second";
+  public static final String DMINUTE =			"minute";
+  public static final String DHOUR	=		"hour";
+  public static final String DDAY	 =		"day";
+  public static final String DWEEK	=		"week";
+  public static final String DMONTH	=		"month";
+  public static final String DQUARTER	=	"quarter";
+  public static final String DYEAR		=	"year";
+  public static final String DDECADE	=		"decade";
+  public static final String DCENTURY	=	"century";
+  public static final String DMILLENNIUM	=	"millennium";
+  public static final String DA_D		=	"ad";
+  public static final String DB_C		=	"bc";
+  public static final String DTIMEZONE	=	"timezone";
+
+  public static final int DATEORDER_YMD = 0;
+  public static final int DATEORDER_DMY = 1;
+  public static final int DATEORDER_MDY = 2;
+
+  public static enum TokenField {
+    DECIMAL(0),
+
+    AM(0), PM(1), HR24(2),
+
+    AD(0), BC(1),
+
+    DTK_NUMBER(0),
+    DTK_STRING(1),
+
+    DTK_DATE(2),
+    DTK_TIME(3),
+    DTK_TZ(4),
+    DTK_AGO(5),
+
+    DTK_SPECIAL(6),
+    DTK_INVALID(7),
+    DTK_CURRENT(8),
+    DTK_EARLY(9),
+    DTK_LATE(10),
+    DTK_EPOCH(11),
+    DTK_NOW(12),
+    DTK_YESTERDAY(13),
+    DTK_TODAY(14),
+    DTK_TOMORROW(15),
+    DTK_ZULU(16),
+
+    DTK_DELTA(17),
+    DTK_SECOND(18),
+    DTK_MINUTE(19),
+    DTK_HOUR(20),
+    DTK_DAY(21),
+    DTK_WEEK(22),
+    DTK_MONTH(23),
+    DTK_QUARTER(24),
+    DTK_YEAR(25),
+    DTK_DECADE(26),
+    DTK_CENTURY(27),
+    DTK_MILLENNIUM(28),
+    DTK_MILLISEC(29),
+    DTK_MICROSEC(30),
+    DTK_JULIAN(31),
+
+    DTK_DOW(32),
+    DTK_DOY(33),
+    DTK_TZ_HOUR(34),
+    DTK_TZ_MINUTE(35),
+    DTK_ISOYEAR(36),
+    DTK_ISODOW(37),
+
+    RESERV(0),
+    MONTH(1),
+    YEAR(2),
+    DAY(3),
+    JULIAN(4),
+    TZ(5),
+    DTZ(6),
+    DTZMOD(7),
+    IGNORE_DTF(8),
+    AMPM(9),
+    HOUR(10),
+    MINUTE(11),
+    SECOND(12),
+    MILLISECOND(13),
+    MICROSECOND(14),
+    DOY(15),
+    DOW(16),
+    UNITS(17),
+    ADBC(18),
+    /* these are only for relative dates */
+    AGO(19),
+    ABS_BEFORE(20),
+    ABS_AFTER(21),
+    /* generic fields to help with parsing */
+    ISODATE(22),
+    ISOTIME(23),
+    /* these are only for parsing intervals */
+    WEEK(24),
+    DECADE(25),
+    CENTURY(26),
+    MILLENNIUM(27),
+    /* reserved for unrecognized string values */
+    UNKNOWN_FIELD(28);
+
+    int value;
+    TokenField(int value) {
+      this.value = value;
+    }
+
+    public int getValue() {
+      return value;
+    }
+  }
+
+  static Object datetktbl[][] = {
+    /*	text, token, lexval */
+    {"-infinity", TokenField.RESERV, TokenField.DTK_EARLY}, /* "-infinity" reserved for "early time" */
+    {"acsst", TokenField.DTZ, 42},	/* Cent. Australia */
+    {"acst", TokenField.DTZ, -16},		/* Atlantic/Porto Acre */
+    {"act", TokenField.TZ, -20},		/* Atlantic/Porto Acre */
+    {DA_D, TokenField.ADBC, TokenField.AD},			/* "ad" for years >= 0 */
+    {"adt", TokenField.DTZ, -12},		/* Atlantic Daylight Time */
+    {"aesst", TokenField.DTZ, 44},	/* E. Australia */
+    {"aest", TokenField.TZ, 40},		/* Australia Eastern Std Time */
+    {"aft", TokenField.TZ, 18},		/* Kabul */
+    {"ahst", TokenField.TZ, -40},		/* Alaska-Hawaii Std Time */
+    {"akdt", TokenField.DTZ, -32},		/* Alaska Daylight Time */
+    {"akst", TokenField.DTZ, -36},		/* Alaska Standard Time */
+    {"allballs", TokenField.RESERV, TokenField.DTK_ZULU},		/* 00:00:00 */
+    {"almst", TokenField.TZ, 28},		/* Almaty Savings Time */
+    {"almt", TokenField.TZ, 24},		/* Almaty Time */
+    {"am", TokenField.AMPM, TokenField.AM},
+    {"amst", TokenField.DTZ, 20},		/* Armenia Summer Time (Yerevan) */
+    {"amt", TokenField.TZ, 16},		/* Armenia Time (Yerevan) */
+    {"anast", TokenField.DTZ, 52},	/* Anadyr Summer Time (Russia) */
+    {"anat", TokenField.TZ, 48},		/* Anadyr Time (Russia) */
+    {"apr", TokenField.MONTH, 4},
+    {"april", TokenField.MONTH, 4},
+    {"art", TokenField.TZ, -12},		/* Argentina Time */
+    {"ast", TokenField.TZ, -16},		/* Atlantic Std Time (Canada) */
+    {"at", TokenField.IGNORE_DTF, 0},		/* "at" (throwaway) */
+    {"aug", TokenField.MONTH, 8},
+    {"august", TokenField.MONTH, 8},
+    {"awsst", TokenField.DTZ, 36},	/* W. Australia */
+    {"awst", TokenField.TZ, 32},		/* W. Australia */
+    {"awt", TokenField.DTZ, -12},
+    {"azost", TokenField.DTZ, 0},		/* Azores Summer Time */
+    {"azot", TokenField.TZ, -4},		/* Azores Time */
+    {"azst", TokenField.DTZ, 20},		/* Azerbaijan Summer Time */
+    {"azt", TokenField.TZ, 16},		/* Azerbaijan Time */
+    {DB_C, TokenField.ADBC, TokenField.BC},			/* "bc" for years < 0 */
+    {"bdst", TokenField.TZ, 8},		/* British Double Summer Time */
+    {"bdt", TokenField.TZ, 24},		/* Dacca */
+    {"bnt", TokenField.TZ, 32},		/* Brunei Darussalam Time */
+    {"bort", TokenField.TZ, 32},		/* Borneo Time (Indonesia) */
+    {"bot", TokenField.TZ, -16},		/* Bolivia Time */
+    {"bra", TokenField.TZ, -12},		/* Brazil Time */
+    {"bst", TokenField.DTZ, 4},		/* British Summer Time */
+    {"bt", TokenField.TZ, 12},		/* Baghdad Time */
+    {"btt", TokenField.TZ, 24},		/* Bhutan Time */
+    {"cadt", TokenField.DTZ, 42},		/* Central Australian DST */
+    {"cast", TokenField.TZ, 38},		/* Central Australian ST */
+    {"cat", TokenField.TZ, -40},		/* Central Alaska Time */
+    {"cct", TokenField.TZ, 32},		/* China Coast Time */
+    {"cdt", TokenField.DTZ, -20},		/* Central Daylight Time */
+    {"cest", TokenField.DTZ, 8},		/* Central European Dayl.Time */
+    {"cet", TokenField.TZ, 4},		/* Central European Time */
+    {"cetdst", TokenField.DTZ, 8},	/* Central European Dayl.Time */
+    {"chadt", TokenField.DTZ, 55},	/* Chatham Island Daylight Time (13:45) */
+    {"chast", TokenField.TZ, 51},		/* Chatham Island Time (12:45) */
+    {"ckt", TokenField.TZ, 48},		/* Cook Islands Time */
+    {"clst", TokenField.DTZ, -12},		/* Chile Summer Time */
+    {"clt", TokenField.TZ, -16},		/* Chile Time */
+    {"cot", TokenField.TZ, -20},		/* Columbia Time */
+    {"cst", TokenField.TZ, -24},		/* Central Standard Time */
+    {DCURRENT, TokenField.RESERV, TokenField.DTK_CURRENT},	/* "current" is always now */
+    {"cvt", TokenField.TZ, 28},		/* Christmas Island Time (Indian Ocean) */
+    {"cxt", TokenField.TZ, 28},		/* Christmas Island Time (Indian Ocean) */
+    {"d", TokenField.UNITS, TokenField.DTK_DAY},		/* "day of month" for ISO input */
+    {"davt", TokenField.TZ, 28},		/* Davis Time (Antarctica) */
+    {"ddut", TokenField.TZ, 40},		/* Dumont-d'Urville Time (Antarctica) */
+    {"dec", TokenField.MONTH, 12},
+    {"december", TokenField.MONTH, 12},
+    {"dnt", TokenField.TZ, 4},		/* Dansk Normal Tid */
+    {"dow", TokenField.RESERV, TokenField.DTK_DOW},	/* day of week */
+    {"doy", TokenField.RESERV, TokenField.DTK_DOY},	/* day of year */
+    {"dst", TokenField.DTZMOD, 6},
+    {"easst", TokenField.DTZ, -20},	/* Easter Island Summer Time */
+    {"east", TokenField.TZ, -24},		/* Easter Island Time */
+    {"eat", TokenField.TZ, 12},		/* East Africa Time */
+    {"edt", TokenField.DTZ, -16},		/* Eastern Daylight Time */
+    {"eest", TokenField.DTZ, 12},		/* Eastern Europe Summer Time */
+    {"eet", TokenField.TZ, 8},		/* East. Europe, USSR Zone 1 */
+    {"eetdst", TokenField.DTZ, 12},	/* Eastern Europe Daylight Time */
+    {"egst", TokenField.DTZ, 0},		/* East Greenland Summer Time */
+    {"egt", TokenField.TZ, -4},		/* East Greenland Time */
+    {EPOCH, TokenField.RESERV, TokenField.DTK_EPOCH}, /* "epoch" reserved for system epoch time */
+    {"est", TokenField.TZ, -20},		/* Eastern Standard Time */
+    {"feb", TokenField.MONTH, 2},
+    {"february", TokenField.MONTH, 2},
+    {"fjst", TokenField.DTZ, -52},		/* Fiji Summer Time (13 hour offset!) */
+    {"fjt", TokenField.TZ, -48},		/* Fiji Time */
+    {"fkst", TokenField.DTZ, -12},		/* Falkland Islands Summer Time */
+    {"fkt", TokenField.TZ, -8},		/* Falkland Islands Time */
+    {"fri", TokenField.DOW, 5},
+    {"friday", TokenField.DOW, 5},
+    {"fst", TokenField.TZ, 4},		/* French Summer Time */
+    {"fwt", TokenField.DTZ, 8},		/* French Winter Time  */
+    {"galt", TokenField.TZ, -24},		/* Galapagos Time */
+    {"gamt", TokenField.TZ, -36},		/* Gambier Time */
+    {"gest", TokenField.DTZ, 20},		/* Georgia Summer Time */
+    {"get", TokenField.TZ, 16},		/* Georgia Time */
+    {"gft", TokenField.TZ, -12},		/* French Guiana Time */
+    {"gilt", TokenField.TZ, 48},		/* Gilbert Islands Time */
+    {"gmt", TokenField.TZ, 0},		/* Greenwish Mean Time */
+    {"gst", TokenField.TZ, 40},		/* Guam Std Time, USSR Zone 9 */
+    {"gyt", TokenField.TZ, -16},		/* Guyana Time */
+    {"h", TokenField.UNITS, TokenField.DTK_HOUR},		/* "hour" */
+    {"hdt", TokenField.DTZ, -36},		/* Hawaii/Alaska Daylight Time */
+    {"hkt", TokenField.TZ, 32},		/* Hong Kong Time */
+    {"hst", TokenField.TZ, -40},		/* Hawaii Std Time */
+    {"ict", TokenField.TZ, 28},		/* Indochina Time */
+    {"idle", TokenField.TZ, 48},		/* Intl. Date Line, East */
+    {"idlw", TokenField.TZ, -48},		/* Intl. Date Line, West */
+    {LATE, TokenField.RESERV, TokenField.DTK_LATE},	/* "infinity" reserved for "late time" */
+    {INVALID, TokenField.RESERV, TokenField.DTK_INVALID},		/* "invalid" reserved for bad time */
+    {"iot", TokenField.TZ, 20},		/* Indian Chagos Time */
+    {"irkst", TokenField.DTZ, 36},	/* Irkutsk Summer Time */
+    {"irkt", TokenField.TZ, 32},		/* Irkutsk Time */
+    {"irt", TokenField.TZ, 14},		/* Iran Time */
+    {"isodow", TokenField.RESERV, TokenField.DTK_ISODOW},		/* ISO day of week, Sunday == 7 */
+    {"ist", TokenField.TZ, 8},		/* Israel */
+    {"it", TokenField.TZ, 14},		/* Iran Time */
+    {"j", TokenField.UNITS, TokenField.DTK_JULIAN},
+    {"jan", TokenField.MONTH, 1},
+    {"january", TokenField.MONTH, 1},
+    {"javt", TokenField.TZ, 28},		/* Java Time (07:00? see JT) */
+    {"jayt", TokenField.TZ, 36},		/* Jayapura Time (Indonesia) */
+    {"jd", TokenField.UNITS, TokenField.DTK_JULIAN},
+    {"jst", TokenField.TZ, 36},		/* Japan Std Time,USSR Zone 8 */
+    {"jt", TokenField.TZ, 30},		/* Java Time (07:30? see JAVT) */
+    {"jul", TokenField.MONTH, 7},
+    {"julian", TokenField.UNITS, TokenField.DTK_JULIAN},
+    {"july", TokenField.MONTH, 7},
+    {"jun", TokenField.MONTH, 6},
+    {"june", TokenField.MONTH, 6},
+    {"kdt", TokenField.DTZ, 40},		/* Korea Daylight Time */
+    {"kgst", TokenField.DTZ, 24},		/* Kyrgyzstan Summer Time */
+    {"kgt", TokenField.TZ, 20},		/* Kyrgyzstan Time */
+    {"kost", TokenField.TZ, 48},		/* Kosrae Time */
+    {"krast", TokenField.DTZ, 28},	/* Krasnoyarsk Summer Time */
+    {"krat", TokenField.TZ, 32},		/* Krasnoyarsk Standard Time */
+    {"kst", TokenField.TZ, 36},		/* Korea Standard Time */
+    {"lhdt", TokenField.DTZ, 44},		/* Lord Howe Daylight Time, Australia */
+    {"lhst", TokenField.TZ, 42},		/* Lord Howe Standard Time, Australia */
+    {"ligt", TokenField.TZ, 40},		/* From Melbourne, Australia */
+    {"lint", TokenField.TZ, 56},		/* Line Islands Time (Kiribati; +14 hours!) */
+    {"lkt", TokenField.TZ, 24},		/* Lanka Time */
+    {"m", TokenField.UNITS, TokenField.DTK_MONTH},	/* "month" for ISO input */
+    {"magst", TokenField.DTZ, 48},	/* Magadan Summer Time */
+    {"magt", TokenField.TZ, 44},		/* Magadan Time */
+    {"mar", TokenField.MONTH, 3},
+    {"march", TokenField.MONTH, 3},
+    {"mart", TokenField.TZ, -38},		/* Marquesas Time */
+    {"mawt", TokenField.TZ, 24},		/* Mawson, Antarctica */
+    {"may", TokenField.MONTH, 5},
+    {"mdt", TokenField.DTZ, -24},		/* Mountain Daylight Time */
+    {"mest", TokenField.DTZ, 8},		/* Middle Europe Summer Time */
+    {"met", TokenField.TZ, 4},		/* Middle Europe Time */
+    {"metdst", TokenField.DTZ, 8},	/* Middle Europe Daylight Time */
+    {"mewt", TokenField.TZ, 4},		/* Middle Europe Winter Time */
+    {"mez", TokenField.TZ, 4},		/* Middle Europe Zone */
+    {"mht", TokenField.TZ, 48},		/* Kwajalein */
+    {"mm", TokenField.UNITS, TokenField.DTK_MINUTE},	/* "minute" for ISO input */
+    {"mmt", TokenField.TZ, 26},		/* Myannar Time */
+    {"mon", TokenField.DOW, 1},
+    {"monday", TokenField.DOW, 1},
+    {"mpt", TokenField.TZ, 40},		/* North Mariana Islands Time */
+    {"msd", TokenField.DTZ, 16},		/* Moscow Summer Time */
+    {"msk", TokenField.TZ, 12},		/* Moscow Time */
+    {"mst", TokenField.TZ, -28},		/* Mountain Standard Time */
+    {"mt", TokenField.TZ, 34},		/* Moluccas Time */
+    {"mut", TokenField.TZ, 16},		/* Mauritius Island Time */
+    {"mvt", TokenField.TZ, 20},		/* Maldives Island Time */
+    {"myt", TokenField.TZ, 32},		/* Malaysia Time */
+    {"nct", TokenField.TZ, 44},		/* New Caledonia Time */
+    {"ndt", TokenField.DTZ, -10},		/* Nfld. Daylight Time */
+    {"nft", TokenField.TZ, -14},		/* Newfoundland Standard Time */
+    {"nor", TokenField.TZ, 4},		/* Norway Standard Time */
+    {"nov", TokenField.MONTH, 11},
+    {"november", TokenField.MONTH, 11},
+    {"novst", TokenField.DTZ, 28},	/* Novosibirsk Summer Time */
+    {"novt", TokenField.TZ, 24},		/* Novosibirsk Standard Time */
+    {NOW, TokenField.RESERV, TokenField.DTK_NOW},		/* current transaction time */
+    {"npt", TokenField.TZ, 23},		/* Nepal Standard Time (GMT-5:45) */
+    {"nst", TokenField.TZ, -14},		/* Nfld. Standard Time */
+    {"nt", TokenField.TZ, -44},		/* Nome Time */
+    {"nut", TokenField.TZ, -44},		/* Niue Time */
+    {"nzdt", TokenField.DTZ, 52},		/* New Zealand Daylight Time */
+    {"nzst", TokenField.TZ, 48},		/* New Zealand Standard Time */
+    {"nzt", TokenField.TZ, 48},		/* New Zealand Time */
+    {"oct", TokenField.MONTH, 10},
+    {"october", TokenField.MONTH, 10},
+    {"omsst", TokenField.DTZ, 28},	/* Omsk Summer Time */
+    {"omst", TokenField.TZ, 24},		/* Omsk Time */
+    {"on", TokenField.IGNORE_DTF, 0},		/* "on" (throwaway) */
+    {"pdt", TokenField.DTZ, -28},		/* Pacific Daylight Time */
+    {"pet", TokenField.TZ, -20},		/* Peru Time */
+    {"petst", TokenField.DTZ, 52},	/* Petropavlovsk-Kamchatski Summer Time */
+    {"pett", TokenField.TZ, 48},		/* Petropavlovsk-Kamchatski Time */
+    {"pgt", TokenField.TZ, 40},		/* Papua New Guinea Time */
+    {"phot", TokenField.TZ, 52},		/* Phoenix Islands (Kiribati) Time */
+    {"pht", TokenField.TZ, 32},		/* Philippine Time */
+    {"pkt", TokenField.TZ, 20},		/* Pakistan Time */
+    {"pm", TokenField.AMPM, TokenField.PM},
+    {"pmdt", TokenField.DTZ, -8},		/* Pierre & Miquelon Daylight Time */
+    {"pont", TokenField.TZ, 44},		/* Ponape Time (Micronesia) */
+    {"pst", TokenField.TZ, -32},		/* Pacific Standard Time */
+    {"pwt", TokenField.TZ, 36},		/* Palau Time */
+    {"pyst", TokenField.DTZ, -12},		/* Paraguay Summer Time */
+    {"pyt", TokenField.TZ, -16},		/* Paraguay Time */
+    {"ret", TokenField.DTZ, 16},		/* Reunion Island Time */
+    {"s", TokenField.UNITS, TokenField.DTK_SECOND},	/* "seconds" for ISO input */
+    {"sadt", TokenField.DTZ, 42},		/* S. Australian Dayl. Time */
+    {"sast", TokenField.TZ, 38},		/* South Australian Std Time */
+    {"sat", TokenField.DOW, 6},
+    {"saturday", TokenField.DOW, 6},
+    {"sct", TokenField.DTZ, 16},		/* Mahe Island Time */
+    {"sep", TokenField.MONTH, 9},
+    {"sept", TokenField.MONTH, 9},
+    {"september", TokenField.MONTH, 9},
+    {"set", TokenField.TZ, -4},		/* Seychelles Time ?? */
+    {"sst", TokenField.DTZ, 8},		/* Swedish Summer Time */
+    {"sun", TokenField.DOW, 0},
+    {"sunday", TokenField.DOW, 0},
+    {"swt", TokenField.TZ, 4},		/* Swedish Winter Time */
+    {"t", TokenField.ISOTIME, TokenField.DTK_TIME},	/* Filler for ISO time fields */
+    {"tft", TokenField.TZ, 20},		/* Kerguelen Time */
+    {"that", TokenField.TZ, -40},		/* Tahiti Time */
+    {"thu", TokenField.DOW, 4},
+    {"thur", TokenField.DOW, 4},
+    {"thurs", TokenField.DOW, 4},
+    {"thursday", TokenField.DOW, 4},
+    {"tjt", TokenField.TZ, 20},		/* Tajikistan Time */
+    {"tkt", TokenField.TZ, -40},		/* Tokelau Time */
+    {"tmt", TokenField.TZ, 20},		/* Turkmenistan Time */
+    {TODAY, TokenField.RESERV, TokenField.DTK_TODAY}, /* midnight */
+    {TOMORROW, TokenField.RESERV, TokenField.DTK_TOMORROW},	/* tomorrow midnight */
+    {"truk", TokenField.TZ, 40},		/* Truk Time */
+    {"tue", TokenField.DOW, 2},
+    {"tues", TokenField.DOW, 2},
+    {"tuesday", TokenField.DOW, 2},
+    {"tvt", TokenField.TZ, 48},		/* Tuvalu Time */
+    {"ulast", TokenField.DTZ, 36},	/* Ulan Bator Summer Time */
+    {"ulat", TokenField.TZ, 32},		/* Ulan Bator Time */
+    {"undefined", TokenField.RESERV, TokenField.DTK_INVALID}, /* pre-v6.1 invalid time */
+    {"ut", TokenField.TZ, 0},
+    {"utc", TokenField.TZ, 0},
+    {"uyst", TokenField.DTZ, -8},		/* Uruguay Summer Time */
+    {"uyt", TokenField.TZ, -12},		/* Uruguay Time */
+    {"uzst", TokenField.DTZ, 24},		/* Uzbekistan Summer Time */
+    {"uzt", TokenField.TZ, 20},		/* Uzbekistan Time */
+    {"vet", TokenField.TZ, -16},		/* Venezuela Time */
+    {"vlast", TokenField.DTZ, 44},	/* Vladivostok Summer Time */
+    {"vlat", TokenField.TZ, 40},		/* Vladivostok Time */
+    {"vut", TokenField.TZ, 44},		/* Vanuata Time */
+    {"wadt", TokenField.DTZ, 32},		/* West Australian DST */
+    {"wakt", TokenField.TZ, 48},		/* Wake Time */
+    {"wast", TokenField.TZ, 28},		/* West Australian Std Time */
+    {"wat", TokenField.TZ, -4},		/* West Africa Time */
+    {"wdt", TokenField.DTZ, 36},		/* West Australian DST */
+    {"wed", TokenField.DOW, 3},
+    {"wednesday", TokenField.DOW, 3},
+    {"weds", TokenField.DOW, 3},
+    {"west", TokenField.DTZ, 4},		/* Western Europe Summer Time */
+    {"wet", TokenField.TZ, 0},		/* Western Europe */
+    {"wetdst", TokenField.DTZ, 4},	/* Western Europe Daylight Savings Time */
+    {"wft", TokenField.TZ, 48},		/* Wallis and Futuna Time */
+    {"wgst", TokenField.DTZ, -8},		/* West Greenland Summer Time */
+    {"wgt", TokenField.TZ, -12},		/* West Greenland Time */
+    {"wst", TokenField.TZ, 32},		/* West Australian Standard Time */
+    {"y", TokenField.UNITS, TokenField.DTK_YEAR},		/* "year" for ISO input */
+    {"yakst", TokenField.DTZ, 40},	/* Yakutsk Summer Time */
+    {"yakt", TokenField.TZ, 36},		/* Yakutsk Time */
+    {"yapt", TokenField.TZ, 40},		/* Yap Time (Micronesia) */
+    {"ydt", TokenField.DTZ, -32},		/* Yukon Daylight Time */
+    {"yekst", TokenField.DTZ, 24},	/* Yekaterinburg Summer Time */
+    {"yekt", TokenField.TZ, 20},		/* Yekaterinburg Time */
+    {YESTERDAY, TokenField.RESERV, TokenField.DTK_YESTERDAY}, /* yesterday midnight */
+    {"yst", TokenField.TZ, -36},		/* Yukon Standard Time */
+    {"z", TokenField.TZ, 0},			/* time zone tag per ISO-8601 */
+    {"zp4", TokenField.TZ, -16},		/* UTC +4  hours. */
+    {"zp5", TokenField.TZ, -20},		/* UTC +5  hours. */
+    {"zp6", TokenField.TZ, -24},		/* UTC +6  hours. */
+    {ZULU, TokenField.TZ, 0},			/* UTC */
+  };
+
+  static Object[][] deltatktbl = {
+	  /* text, token, lexval */
+    {"@", TokenField.IGNORE_DTF, 0},		/* postgres relative prefix */
+    {DAGO, TokenField.AGO, 0},				/* "ago" indicates negative time offset */
+    {"c", TokenField.UNITS, TokenField.DTK_CENTURY},	/* "century" relative */
+    {"cent", TokenField.UNITS, TokenField.DTK_CENTURY},		/* "century" relative */
+    {"centuries", TokenField.UNITS, TokenField.DTK_CENTURY},	/* "centuries" relative */
+    {DCENTURY, TokenField.UNITS, TokenField.DTK_CENTURY},		/* "century" relative */
+    {"d", TokenField.UNITS, TokenField.DTK_DAY},		/* "day" relative */
+    {DDAY, TokenField.UNITS, TokenField.DTK_DAY},		/* "day" relative */
+    {"days_full", TokenField.UNITS, TokenField.DTK_DAY},	/* "days_full" relative */
+    {"dec", TokenField.UNITS, TokenField.DTK_DECADE}, /* "decade" relative */
+    {DDECADE, TokenField.UNITS, TokenField.DTK_DECADE},		/* "decade" relative */
+    {"decades", TokenField.UNITS, TokenField.DTK_DECADE},		/* "decades" relative */
+    {"decs", TokenField.UNITS, TokenField.DTK_DECADE},	/* "decades" relative */
+    {"h", TokenField.UNITS, TokenField.DTK_HOUR},		/* "hour" relative */
+    {DHOUR, TokenField.UNITS, TokenField.DTK_HOUR},	/* "hour" relative */
+    {"hours", TokenField.UNITS, TokenField.DTK_HOUR}, /* "hours" relative */
+    {"hr", TokenField.UNITS, TokenField.DTK_HOUR},	/* "hour" relative */
+    {"hrs", TokenField.UNITS, TokenField.DTK_HOUR},	/* "hours" relative */
+    {INVALID, TokenField.RESERV, TokenField.DTK_INVALID},		/* reserved for invalid time */
+    {"m", TokenField.UNITS, TokenField.DTK_MINUTE},	/* "minute" relative */
+    {"microsecon", TokenField.UNITS, TokenField.DTK_MICROSEC},		/* "microsecond" relative */
+    {"mil", TokenField.UNITS, TokenField.DTK_MILLENNIUM},		/* "millennium" relative */
+    {"millennia", TokenField.UNITS, TokenField.DTK_MILLENNIUM},		/* "millennia" relative */
+    {DMILLENNIUM, TokenField.UNITS, TokenField.DTK_MILLENNIUM},		/* "millennium" relative */
+    {"millisecon", TokenField.UNITS, TokenField.DTK_MILLISEC},		/* relative */
+    {"mils", TokenField.UNITS, TokenField.DTK_MILLENNIUM},	/* "millennia" relative */
+    {"min", TokenField.UNITS, TokenField.DTK_MINUTE}, /* "minute" relative */
+    {"mins", TokenField.UNITS, TokenField.DTK_MINUTE},	/* "minutes" relative */
+    {DMINUTE, TokenField.UNITS, TokenField.DTK_MINUTE},		/* "minute" relative */
+    {"minutes", TokenField.UNITS, TokenField.DTK_MINUTE},		/* "minutes" relative */
+    {"mon", TokenField.UNITS, TokenField.DTK_MONTH},	/* "months_short" relative */
+    {"mons", TokenField.UNITS, TokenField.DTK_MONTH}, /* "months_short" relative */
+    {DMONTH, TokenField.UNITS, TokenField.DTK_MONTH}, /* "month" relative */
+    {"months_short", TokenField.UNITS, TokenField.DTK_MONTH},
+    {"ms", TokenField.UNITS, TokenField.DTK_MILLISEC},
+    {"msec", TokenField.UNITS, TokenField.DTK_MILLISEC},
+    {DMILLISEC, TokenField.UNITS, TokenField.DTK_MILLISEC},
+    {"mseconds", TokenField.UNITS, TokenField.DTK_MILLISEC},
+    {"msecs", TokenField.UNITS, TokenField.DTK_MILLISEC},
+    {"qtr", TokenField.UNITS, TokenField.DTK_QUARTER},	/* "quarter" relative */
+    {DQUARTER, TokenField.UNITS, TokenField.DTK_QUARTER},		/* "quarter" relative */
+    {"s", TokenField.UNITS, TokenField.DTK_SECOND},
+    {"sec", TokenField.UNITS, TokenField.DTK_SECOND},
+    {DSECOND, TokenField.UNITS, TokenField.DTK_SECOND},
+    {"seconds", TokenField.UNITS, TokenField.DTK_SECOND},
+    {"secs", TokenField.UNITS, TokenField.DTK_SECOND},
+    {DTIMEZONE, TokenField.UNITS, TokenField.DTK_TZ}, /* "timezone" time offset */
+    {"timezone_h", TokenField.UNITS, TokenField.DTK_TZ_HOUR}, /* timezone hour units */
+    {"timezone_m", TokenField.UNITS, TokenField.DTK_TZ_MINUTE},		/* timezone minutes units */
+    {"undefined", TokenField.RESERV, TokenField.DTK_INVALID}, /* pre-v6.1 invalid time */
+    {"us", TokenField.UNITS, TokenField.DTK_MICROSEC},	/* "microsecond" relative */
+    {"usec", TokenField.UNITS, TokenField.DTK_MICROSEC},		/* "microsecond" relative */
+    {DMICROSEC, TokenField.UNITS, TokenField.DTK_MICROSEC},	/* "microsecond" relative */
+    {"useconds", TokenField.UNITS, TokenField.DTK_MICROSEC},	/* "microseconds" relative */
+    {"usecs", TokenField.UNITS, TokenField.DTK_MICROSEC},		/* "microseconds" relative */
+    {"w", TokenField.UNITS, TokenField.DTK_WEEK},		/* "week" relative */
+    {DWEEK, TokenField.UNITS, TokenField.DTK_WEEK},	/* "week" relative */
+    {"weeks", TokenField.UNITS, TokenField.DTK_WEEK}, /* "weeks" relative */
+    {"y", TokenField.UNITS, TokenField.DTK_YEAR},		/* "year" relative */
+    {DYEAR, TokenField.UNITS, TokenField.DTK_YEAR},	/* "year" relative */
+    {"years", TokenField.UNITS, TokenField.DTK_YEAR}, /* "years" relative */
+    {"yr", TokenField.UNITS, TokenField.DTK_YEAR},	/* "year" relative */
+    {"yrs", TokenField.UNITS, TokenField.DTK_YEAR},	/* "years" relative */
+  };
+
+  public static class DateToken {
+    String key;
+    TokenField type;
+    int value;
+    TokenField valueType;
+
+    public String getKey() {
+      return key;
+    }
+
+    public TokenField getType() {
+      return type;
+    }
+
+    public int getValue() {
+      return value;
+    }
+
+    public TokenField getValueType() {
+      return valueType;
+    }
+  }
+  public static Map<String, DateToken> dateTokenMap = new HashMap<String, DateToken>();
+
+  static {
+    for (Object[] eachToken: datetktbl) {
+      DateToken dateToken = new DateToken();
+      dateToken.key = eachToken[0].toString();
+      dateToken.type = (TokenField)eachToken[1];
+      if (eachToken[2] instanceof TokenField) {
+        dateToken.valueType = (TokenField)eachToken[2];
+        dateToken.value = dateToken.valueType.getValue();
+      } else {
+        dateToken.valueType = TokenField.DECIMAL;
+        dateToken.value = ((Integer)eachToken[2]).intValue();
+      }
+      dateTokenMap.put(dateToken.key, dateToken);
+    }
+
+    for (Object[] eachToken: deltatktbl) {
+      DateToken dateToken = new DateToken();
+      dateToken.key = eachToken[0].toString();
+      dateToken.type = (TokenField)eachToken[1];
+      if (eachToken[2] instanceof TokenField) {
+        dateToken.valueType = (TokenField)eachToken[2];
+        dateToken.value = dateToken.valueType.getValue();
+      } else {
+        dateToken.valueType = TokenField.DECIMAL;
+        dateToken.value = ((Integer)eachToken[2]).intValue();
+      }
+      dateTokenMap.put(dateToken.key, dateToken);
+    }
+  }
+
+  public static int INTERVAL_MASK(TokenField t) { return (1 << (t.getValue())); }
+  public static int DTK_M(TokenField t) { return (0x01 << (t.getValue())); }
+
+  public static final int DTK_ALL_SECS_M = (DTK_M(TokenField.SECOND) |
+        DTK_M(TokenField.MILLISECOND) |
+        DTK_M(TokenField.MICROSECOND));
+  public static final int DTK_DATE_M = (DTK_M(TokenField.YEAR) | DTK_M(TokenField.MONTH) | DTK_M(TokenField.DAY));
+  public static final int DTK_TIME_M = (DTK_M(TokenField.HOUR) | DTK_M(TokenField.MINUTE) | DTK_M(TokenField.SECOND));
+}
diff --git a/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeFormat.java b/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeFormat.java
new file mode 100644
index 0000000..fa5b458
--- /dev/null
+++ b/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeFormat.java
@@ -0,0 +1,2148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util.datetime;
+
+import org.apache.tajo.datum.TimestampDatum;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * This class originated from src/backend/utils/adt/formatting.c of PostgreSQL
+ */
+public class DateTimeFormat {
+  /* ----------
+   * Full months_short
+   * ----------
+   */
+  static final String[] months_full = {
+    "January", "February", "March", "April", "May", "June", "July",
+        "August", "September", "October", "November", "December", null
+  };
+
+  static String[] days_short = {
+    "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat", null
+  };
+
+  static String[] months_short = {"Jan", "Feb", "Mar", "Apr", "May", "Jun",
+      "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", null};
+
+  static String[] days_full = {"Sunday", "Monday", "Tuesday", "Wednesday",
+      "Thursday", "Friday", "Saturday", null};
+
+  static int[][] ysum = {
+    {0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365},
+    {0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366}
+  };
+
+
+  /**
+   *  AD / BC
+   * ----------
+   * There is no 0 AD.  Years go from 1 BC to 1 AD, so we make it
+   * positive and map year == -1 to year zero, and shift all negative
+   * years up one.  For interval years, we just return the year.
+   * @param year
+   * @param is_interval
+   * @return
+   */
+  static int ADJUST_YEAR(int year, boolean is_interval)	{
+    return ((is_interval) ? (year) : ((year) <= 0 ? -((year) - 1) : (year)));
+  }
+
+  static final String A_D_STR	= "A.D.";
+  static final String a_d_STR	= "a.d.";
+  static final String AD_STR = "AD";
+  static final String ad_STR = "ad";
+
+  static final String B_C_STR	= "B.C.";
+  static final String b_c_STR	= "b.c.";
+  static final String BC_STR = "BC";
+  static final String bc_STR = "bc";
+
+  /**
+   * AD / BC strings for seq_search.
+   *
+   * These are given in two variants, a long form with periods and a standard
+   * form without.
+   *
+   * The array is laid out such that matches for AD have an even index, and
+   * matches for BC have an odd index.  So the boolean value for BC is given by
+   * taking the array index of the match, modulo 2.
+   */
+  static final String[] adbc_strings = {ad_STR, bc_STR, AD_STR, BC_STR, null};
+  static final String[] adbc_strings_long = {a_d_STR, b_c_STR, A_D_STR, B_C_STR, null};
+
+  /**
+   * ----------
+   * AM / PM
+   * ----------
+   */
+  static final String A_M_STR	= "A.M.";
+  static final String a_m_STR	= "a.m.";
+  static final String AM_STR = "AM";
+  static final String am_STR = "am";
+
+  static final String P_M_STR	= "P.M.";
+  static final String p_m_STR	= "p.m.";
+  static final String PM_STR = "PM";
+  static final String pm_STR = "pm";
+
+  /**
+   * AM / PM strings for seq_search.
+   *
+   * These are given in two variants, a long form with periods and a standard
+   * form without.
+   *
+   * The array is laid out such that matches for AM have an even index, and
+   * matches for PM have an odd index.  So the boolean value for PM is given by
+   * taking the array index of the match, modulo 2.
+   */
+  static final String[] ampm_strings = {am_STR, pm_STR, AM_STR, PM_STR, null};
+  static final String[] ampm_strings_long = {a_m_STR, p_m_STR, A_M_STR, P_M_STR, null};
+
+  /**
+   * ----------
+   * Months in roman-numeral
+   * (Must be in reverse order for seq_search (in FROM_CHAR), because
+   *	'VIII' must have higher precedence than 'V')
+   * ----------
+   */
+  static final String[] rm_months_upper =
+    {"XII", "XI", "X", "IX", "VIII", "VII", "VI", "V", "IV", "III", "II", "I", null};
+
+  static final String[] rm_months_lower =
+    {"xii", "xi", "x", "ix", "viii", "vii", "vi", "v", "iv", "iii", "ii", "i", null};
+
+  /**
+   * ----------
+   * Roman numbers
+   * ----------
+   */
+  static final String[] rm1 = {"I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX", null};
+  static final String[] rm10 = {"X", "XX", "XXX", "XL", "L", "LX", "LXX", "LXXX", "XC", null};
+  static final String[] rm100 = {"C", "CC", "CCC", "CD", "D", "DC", "DCC", "DCCC", "CM", null};
+
+  /**
+   * ----------
+   * Ordinal postfixes
+   * ----------
+   */
+  static final String[] numTH = {"ST", "ND", "RD", "TH", null};
+  static final String[] numth = {"st", "nd", "rd", "th", null};
+
+  /**
+   * ----------
+   * Flags & Options:
+   * ----------
+   */
+  static final int ONE_UPPER = 1;		/* Name */
+  static final int ALL_UPPER = 2;		/* NAME */
+  static final int ALL_LOWER = 3;		/* name */
+
+  static final int MAX_MONTH_LEN = 9;
+  static final int MAX_MON_LEN = 3;
+  static final int MAX_DAY_LEN = 9;
+  static final int MAX_DY_LEN	= 3;
+  static final int MAX_RM_LEN	= 4;
+
+  static final int DCH_S_FM =	0x01;
+  static final int DCH_S_TH	= 0x02;
+  static final int DCH_S_th	= 0x04;
+  static final int DCH_S_SP	= 0x08;
+  static final int DCH_S_TM	= 0x10;
+
+  static final int NODE_TYPE_END = 1;
+  static final int NODE_TYPE_ACTION = 2;
+  static final int NODE_TYPE_CHAR = 3;
+
+  static final int SUFFTYPE_PREFIX = 1;
+  static final int SUFFTYPE_POSTFIX = 2;
+
+  static final int CLOCK_24_HOUR = 0;
+  static final int CLOCK_12_HOUR = 1;
+
+  static final int MONTHS_PER_YEAR = 12;
+  static final int HOURS_PER_DAY = 24;
+
+  /**
+   * ----------
+   * Maximal length of one node
+   * ----------
+   */
+  static final int DCH_MAX_ITEM_SIZ	= 9;		/* max julian day		*/
+  static final int NUM_MAX_ITEM_SIZ	= 8;		/* roman number (RN has 15 chars)	*/
+
+  enum FORMAT_TYPE {
+    DCH_TYPE, NUM_TYPE
+  }
+
+  /**
+   * ----------
+   * Suffixes definition for DATE-TIME TO/FROM CHAR
+   * ----------
+   */
+  static KeySuffix[] DCH_suff = {
+    new KeySuffix("FM", 2, DCH_S_FM, SUFFTYPE_PREFIX),
+    new KeySuffix("fm", 2, DCH_S_FM, SUFFTYPE_PREFIX),
+    new KeySuffix("TM", 2, DCH_S_TM, SUFFTYPE_PREFIX),
+    new KeySuffix("tm", 2, DCH_S_TM, SUFFTYPE_PREFIX),
+    new KeySuffix("TH", 2, DCH_S_TH, SUFFTYPE_POSTFIX),
+    new KeySuffix("th", 2, DCH_S_th, SUFFTYPE_POSTFIX),
+    new KeySuffix("SP", 2, DCH_S_SP, SUFFTYPE_POSTFIX),
+  };
+
+  /**
+   * ----------
+   * Format-pictures (KeyWord).
+   *
+   * The KeyWord field; alphabetic sorted, *BUT* strings alike is sorted
+   *		  complicated -to-> easy:
+   *
+   *	(example: "DDD","DD","Day","D" )
+   *
+   * (this specific sort needs the algorithm for sequential search for strings,
+   * which not has exact end; -> How keyword is in "HH12blabla" ? - "HH"
+   * or "HH12"? You must first try "HH12", because "HH" is in string, but
+   * it is not good.
+   *
+   * (!)
+   *	 - Position for the keyword is similar as position in the enum DCH/NUM_poz.
+   * (!)
+   *
+   * For fast search is used the 'int index[]', index is ascii table from position
+   * 32 (' ') to 126 (~), in this index is DCH_ / NUM_ enums for each ASCII
+   * position or -1 if char is not used in the KeyWord. Search example for
+   * string "MM":
+   *	1)	see in index to index['M' - 32],
+   *	2)	take keywords position (enum DCH_MI) from index
+   *	3)	run sequential search in keywords[] from this position
+   *
+   * ----------
+   */
+  enum DCH_poz {
+    DCH_A_D(0),
+    DCH_A_M(1),
+    DCH_AD(2),
+    DCH_AM(3),
+    DCH_B_C(4),
+    DCH_BC(5),
+    DCH_CC(6),
+    DCH_DAY(7),
+    DCH_DDD(8),
+    DCH_DD(9),
+
+    DCH_DY(10),
+    DCH_Day(11),
+    DCH_Dy(12),
+    DCH_D(13),
+    DCH_FX(14),						/* global suffix */
+    DCH_HH24(15),
+    DCH_HH12(16),
+    DCH_HH(17),
+    DCH_IDDD(18),
+    DCH_ID(19),
+
+    DCH_IW(20),
+    DCH_IYYY(21),
+    DCH_IYY(22),
+    DCH_IY(23),
+    DCH_I(24),
+    DCH_J(25),
+    DCH_MI(26),
+    DCH_MM(27),
+    DCH_MONTH(28),
+    DCH_MON(29),
+
+    DCH_MS(30),
+    DCH_Month(31),
+    DCH_Mon(32),
+    DCH_P_M(33),
+    DCH_PM(34),
+    DCH_Q(35),
+    DCH_RM(36),
+    DCH_SSSS(37),
+    DCH_SS(38),
+    DCH_TZ(39),
+
+    DCH_US(40),
+    DCH_WW(41),
+    DCH_W(42),
+    DCH_Y_YYY(43),
+    DCH_YYYY(44),
+    DCH_YYY(45),
+    DCH_YY(46),
+    DCH_Y(47),
+    DCH_a_d(48),
+    DCH_a_m(49),
+
+    DCH_ad(50),
+    DCH_am(51),
+    DCH_b_c(52),
+    DCH_bc(53),
+    DCH_cc(54),
+    DCH_day(55),
+    DCH_ddd(56),
+    DCH_dd(57),
+    DCH_dy(58),
+    DCH_d(59),
+
+    DCH_fx(60),
+    DCH_hh24(61),
+    DCH_hh12(62),
+    DCH_hh(63),
+    DCH_iddd(64),
+    DCH_id(65),
+    DCH_iw(66),
+    DCH_iyyy(67),
+    DCH_iyy(68),
+    DCH_iy(69),
+
+    DCH_i(70),
+    DCH_j(71),
+    DCH_mi(72),
+    DCH_mm(73),
+    DCH_month(74),
+    DCH_mon(75),
+    DCH_ms(76),
+    DCH_p_m(77),
+    DCH_pm(78),
+    DCH_q(79),
+
+    DCH_rm(80),
+    DCH_ssss(89),
+    DCH_ss(90),
+    DCH_tz(91),
+    DCH_us(92),
+    DCH_ww(93),
+    DCH_w(94),
+    DCH_y_yyy(95),
+    DCH_yyyy(96),
+    DCH_yyy(97),
+    DCH_yy(98),
+    DCH_y(99),
+    _DCH_last_(Integer.MAX_VALUE);
+
+    int value;
+    DCH_poz(int value) {
+      this.value = value;
+    }
+
+    public int getValue() {
+      return value;
+    }
+  }
+
+  /**
+   * ----------
+   * FromCharDateMode
+   * ----------
+   *
+   * This value is used to nominate one of several distinct (and mutually
+   * exclusive) date conventions that a keyword can belong to.
+   */
+  enum FromCharDateMode
+  {
+    FROM_CHAR_DATE_NONE,	/* Value does not affect date mode. */
+    FROM_CHAR_DATE_GREGORIAN,	/* Gregorian (day, month, year) style date */
+    FROM_CHAR_DATE_ISOWEEK		/* ISO 8601 week date */
+  }
+
+  /**
+   * ----------
+   * KeyWords for DATE-TIME version
+   * ----------
+   */
+  static final Object[][] DCH_keywordValues = {
+      /*	name, len, id, is_digit, date_mode */
+      {"A.D.", 4, DCH_poz.DCH_A_D, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* A */
+      {"A.M.", 4, DCH_poz.DCH_A_M, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"AD", 2, DCH_poz.DCH_AD, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"AM", 2, DCH_poz.DCH_AM, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"B.C.", 4, DCH_poz.DCH_B_C, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* B */
+      {"BC", 2, DCH_poz.DCH_BC, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"CC", 2, DCH_poz.DCH_CC, true, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* C */
+      {"DAY", 3, DCH_poz.DCH_DAY, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* D */
+      {"DDD", 3, DCH_poz.DCH_DDD, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"DD", 2, DCH_poz.DCH_DD, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"DY", 2, DCH_poz.DCH_DY, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"Day", 3, DCH_poz.DCH_Day, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"Dy", 2, DCH_poz.DCH_Dy, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"D", 1, DCH_poz.DCH_D, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"FX", 2, DCH_poz.DCH_FX, false, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* F */
+      {"HH24", 4, DCH_poz.DCH_HH24, true, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* H */
+      {"HH12", 4, DCH_poz.DCH_HH12, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"HH", 2, DCH_poz.DCH_HH, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"IDDD", 4, DCH_poz.DCH_IDDD, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},		/* I */
+      {"ID", 2, DCH_poz.DCH_ID, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"IW", 2, DCH_poz.DCH_IW, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"IYYY", 4, DCH_poz.DCH_IYYY, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"IYY", 3, DCH_poz.DCH_IYY, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"IY", 2, DCH_poz.DCH_IY, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"I", 1, DCH_poz.DCH_I, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"J", 1, DCH_poz.DCH_J, true, FromCharDateMode.FROM_CHAR_DATE_NONE}, /* J */
+      {"MI", 2, DCH_poz.DCH_MI, true, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* M */
+      {"MM", 2, DCH_poz.DCH_MM, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"MONTH", 5, DCH_poz.DCH_MONTH, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"MON", 3, DCH_poz.DCH_MON, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"MS", 2, DCH_poz.DCH_MS, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"Month", 5, DCH_poz.DCH_Month, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"Mon", 3, DCH_poz.DCH_Mon, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"P.M.", 4, DCH_poz.DCH_P_M, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* P */
+      {"PM", 2, DCH_poz.DCH_PM, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"Q", 1, DCH_poz.DCH_Q, true, FromCharDateMode.FROM_CHAR_DATE_NONE}, /* Q */
+      {"RM", 2, DCH_poz.DCH_RM, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN}, /* R */
+      {"SSSS", 4, DCH_poz.DCH_SSSS, true, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* S */
+      {"SS", 2, DCH_poz.DCH_SS, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"TZ", 2, DCH_poz.DCH_TZ, false, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* T */
+      {"US", 2, DCH_poz.DCH_US, true, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* U */
+      {"WW", 2, DCH_poz.DCH_WW, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},	/* W */
+      {"W", 1, DCH_poz.DCH_W, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"Y,YYY", 5, DCH_poz.DCH_Y_YYY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},	/* Y */
+      {"YYYY", 4, DCH_poz.DCH_YYYY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"YYY", 3, DCH_poz.DCH_YYY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"YY", 2, DCH_poz.DCH_YY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"Y", 1, DCH_poz.DCH_Y, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"a.d.", 4, DCH_poz.DCH_a_d, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* a */
+      {"a.m.", 4, DCH_poz.DCH_a_m, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"ad", 2, DCH_poz.DCH_ad, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"am", 2, DCH_poz.DCH_am, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"b.c.", 4, DCH_poz.DCH_b_c, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* b */
+      {"bc", 2, DCH_poz.DCH_bc, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"cc", 2, DCH_poz.DCH_CC, true, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* c */
+      {"day", 3, DCH_poz.DCH_day, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* d */
+      {"ddd", 3, DCH_poz.DCH_DDD, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"dd", 2, DCH_poz.DCH_DD, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"dy", 2, DCH_poz.DCH_dy, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"d", 1, DCH_poz.DCH_D, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"fx", 2, DCH_poz.DCH_FX, false, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* f */
+      {"hh24", 4, DCH_poz.DCH_HH24, true, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* h */
+      {"hh12", 4, DCH_poz.DCH_HH12, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"hh", 2, DCH_poz.DCH_HH, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"iddd", 4, DCH_poz.DCH_IDDD, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},		/* i */
+      {"id", 2, DCH_poz.DCH_ID, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"iw", 2, DCH_poz.DCH_IW, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"iyyy", 4, DCH_poz.DCH_IYYY, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"iyy", 3, DCH_poz.DCH_IYY, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"iy", 2, DCH_poz.DCH_IY, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"i", 1, DCH_poz.DCH_I, true, FromCharDateMode.FROM_CHAR_DATE_ISOWEEK},
+      {"j", 1, DCH_poz.DCH_J, true, FromCharDateMode.FROM_CHAR_DATE_NONE}, /* j */
+      {"mi", 2, DCH_poz.DCH_MI, true, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* m */
+      {"mm", 2, DCH_poz.DCH_MM, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"month", 5, DCH_poz.DCH_month, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"mon", 3, DCH_poz.DCH_mon, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"ms", 2, DCH_poz.DCH_MS, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"p.m.", 4, DCH_poz.DCH_p_m, false, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* p */
+      {"pm", 2, DCH_poz.DCH_pm, false, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"q", 1, DCH_poz.DCH_Q, true, FromCharDateMode.FROM_CHAR_DATE_NONE}, /* q */
+      {"rm", 2, DCH_poz.DCH_rm, false, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN}, /* r */
+      {"ssss", 4, DCH_poz.DCH_SSSS, true, FromCharDateMode.FROM_CHAR_DATE_NONE},	/* s */
+      {"ss", 2, DCH_poz.DCH_SS, true, FromCharDateMode.FROM_CHAR_DATE_NONE},
+      {"tz", 2, DCH_poz.DCH_tz, false, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* t */
+      {"us", 2, DCH_poz.DCH_US, true, FromCharDateMode.FROM_CHAR_DATE_NONE},		/* u */
+      {"ww", 2, DCH_poz.DCH_WW, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},	/* w */
+      {"w", 1, DCH_poz.DCH_W, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"y,yyy", 5, DCH_poz.DCH_Y_YYY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},	/* y */
+      {"yyyy", 4, DCH_poz.DCH_YYYY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"yyy", 3, DCH_poz.DCH_YYY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"yy", 2, DCH_poz.DCH_YY, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN},
+      {"y", 1, DCH_poz.DCH_Y, true, FromCharDateMode.FROM_CHAR_DATE_GREGORIAN}
+  };
+
+  static final KeyWord[] DCH_keywords = new KeyWord[DCH_keywordValues.length];
+
+  static Map<Character, Integer> DCH_index = new HashMap<Character, Integer>();
+
+  static {
+    int index = 0;
+    for(Object[] eachKeywordValue: DCH_keywordValues) {
+      KeyWord keyword = new KeyWord();
+      keyword.name = (String)eachKeywordValue[0];
+      keyword.len = ((Integer)eachKeywordValue[1]).intValue();
+      keyword.id = ((DCH_poz)eachKeywordValue[2]).getValue();
+      keyword.idType = ((DCH_poz)eachKeywordValue[2]);
+      keyword.is_digit = ((Boolean)eachKeywordValue[3]).booleanValue();
+      keyword.date_mode = (FromCharDateMode)eachKeywordValue[4];
+
+      Character c = new Character(keyword.name.charAt(0));
+      Integer pos = DCH_index.get(c);
+      if (pos == null) {
+        DCH_index.put(c, index);
+      }
+      DCH_keywords[index++] = keyword;
+    }
+  }
+
+  /**
+   * ----------
+   * Format parser structs
+   * ----------
+   */
+  static class KeySuffix {
+    String name;			/* suffix string		*/
+    int len;			    /* suffix length		*/
+    int id;				    /* used in node->suffix */
+    int type;			    /* prefix / postfix			*/
+
+    public KeySuffix(String name, int len, int id, int type) {
+      this.name = name;
+      this.len = len;
+      this.id = id;
+      this.type = type;
+    }
+  }
+
+  static class KeyWord {
+    String name;
+    int len;
+    int id;
+    DCH_poz idType;
+    boolean is_digit;
+    FromCharDateMode date_mode;
+  }
+
+  static class FormatNode {
+    int type;			  /* node type			*/
+    KeyWord key;		/* if node type is KEYWORD	*/
+    char character;	/* if node type is CHAR		*/
+    int suffix;			/* keyword suffix		*/
+  }
+
+  static class TmFromChar {
+    FromCharDateMode mode = FromCharDateMode.FROM_CHAR_DATE_NONE;
+    int	hh;
+    int pm;
+    int mi;
+    int ss;
+    int ssss;
+    int d;				/* stored as 1-7, Sunday = 1, 0 means missing */
+    int dd;
+    int ddd;
+    int mm;
+    int ms;
+    int year;
+    int bc;
+    int ww;
+    int w;
+    int cc;
+    int j;
+    int us;
+    int yysz;			/* is it YY or YYYY ? */
+    int clock;		/* 12 or 24 hour clock? */
+  }
+  static Map<String, FormatNode[]> formatNodeCache = new HashMap<String, FormatNode[]>();
+
+ /**
+  * ----------
+  * Skip TM / th in FROM_CHAR
+  * ----------
+  */
+  static int SKIP_THth(int suf)	{
+    return (S_THth(suf) != 0 ? 2 : 0);
+  }
+
+  /**
+   * ----------
+   * Suffix tests
+   * ----------
+   */
+  static int S_THth(int s) {
+    return ((((s) & DCH_S_TH) != 0 || ((s) & DCH_S_th) != 0) ? 1 : 0);
+  }
+  static int S_TH(int s) {
+    return (((s) & DCH_S_TH) != 0 ? 1 : 0);
+  }
+  static int S_th(int s) {
+    return (((s) & DCH_S_th) != 0 ? 1 : 0);
+  }
+  static int S_TH_TYPE(int s) {
+    return (((s) & DCH_S_TH) != 0 ? TH_UPPER : TH_LOWER);
+  }
+
+  static final int TH_UPPER	=	1;
+  static final int TH_LOWER = 2;
+
+  /* Oracle toggles FM behavior, we don't; see docs. */
+  static int S_FM(int s) {
+    return (((s) & DCH_S_FM) != 0 ? 1 : 0);
+  }
+  static int S_SP(int s) {
+    return (((s) & DCH_S_SP) != 0 ? 1 : 0);
+  }
+  static int S_TM(int s) {
+    return (((s) & DCH_S_TM) != 0 ? 1 : 0);
+  }
+
+  public static TimeMeta parseDateTime(String dateText, String formatText) {
+    TimeMeta tm = new TimeMeta();
+
+    //TODO consider TimeZone
+    doToTimestamp(dateText, formatText, tm);
+
+    // when we parse some date without day like '2014-04', we should set day to 1.
+    if (tm.dayOfMonth == 0) {
+      tm.dayOfMonth = 1;
+    }
+
+    if (tm.dayOfYear > 0 && tm.dayOfMonth > 0) {
+      tm.dayOfYear = 0;
+    }
+
+    return tm;
+  }
+
+  /**
+   * Make Timestamp from date_str which is formatted at argument 'fmt'
+   * ( toTimestamp is reverse to_char() )
+   * @param dateText
+   * @param formatText
+   * @return
+   */
+  public static TimestampDatum toTimestamp(String dateText, String formatText) {
+    TimeMeta tm = parseDateTime(dateText, formatText);
+
+    return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+  }
+
+  /**
+   * Parse the 'dateText' according to 'formatText', return results as a TimeMeta tm
+   * and fractional seconds.
+   *
+   * We parse 'formatText' into a list of FormatNodes, which is then passed to
+   * DCH_from_char to populate a TmFromChar with the parsed contents of
+   * 'dateText'.
+   *
+   * The TmFromChar is then analysed and converted into the final results in struct 'tm'.
+   *
+   * This function does very little error checking, e.g.
+   * to_timestamp('20096040','YYYYMMDD') works
+   * @param dateText
+   * @param formatText
+   * @param tm
+   */
+  static void doToTimestamp(String dateText, String formatText, TimeMeta tm) {
+    TmFromChar tmfc = new TmFromChar();
+    int formatLength = formatText.length();
+
+    if (formatLength > 0) {
+      FormatNode[] formatNodes;
+      synchronized(formatNodeCache) {
+        formatNodes = formatNodeCache.get(formatText);
+      }
+
+      if (formatNodes == null) {
+        formatNodes = new FormatNode[formatLength + 1];
+        for (int i = 0; i < formatNodes.length; i++) {
+          formatNodes[i] = new FormatNode();
+        }
+        parseFormat(formatNodes, formatText, FORMAT_TYPE.DCH_TYPE);
+        formatNodes[formatLength].type = NODE_TYPE_END;	/* Paranoia? */
+
+        synchronized(formatNodeCache) {
+          formatNodeCache.put(formatText, formatNodes);
+        }
+      }
+      DCH_from_char(formatNodes, dateText, tmfc);
+    }
+
+    /*
+     * Convert values that user define for FROM_CHAR (to_date/to_timestamp) to
+     * standard 'tm'
+     */
+    if (tmfc.ssss != 0) {
+      int x = tmfc.ssss;
+
+      tm.hours = x / DateTimeConstants.SECS_PER_HOUR;
+      x %= DateTimeConstants.SECS_PER_HOUR;
+      tm.minutes = x / DateTimeConstants.SECS_PER_MINUTE;
+      x %= DateTimeConstants.SECS_PER_MINUTE;
+      tm.secs = x;
+    }
+
+    if (tmfc.ss != 0) {
+      tm.secs = tmfc.ss;
+    }
+    if (tmfc.mi != 0) {
+      tm.minutes = tmfc.mi;
+    }
+    if (tmfc.hh != 0) {
+      tm.hours = tmfc.hh;
+    }
+
+    if (tmfc.clock == CLOCK_12_HOUR) {
+      if (tm.hours < 1 || tm.hours > HOURS_PER_DAY / 2) {
+        throw new IllegalArgumentException(
+            "hour \"" + tm.hours + "\" is invalid for the 12-hour clock, " +
+                "Use the 24-hour clock, or give an hour between 1 and 12.");
+      }
+      if (tmfc.pm != 0 && tm.hours < HOURS_PER_DAY / 2) {
+        tm.hours += HOURS_PER_DAY / 2;
+      } else if (tmfc.pm == 0 && tm.hours == HOURS_PER_DAY / 2) {
+        tm.hours = 0;
+      }
+    }
+
+    if (tmfc.year != 0) {
+      /*
+       * If CC and YY (or Y) are provided, use YY as 2 low-order digits for
+       * the year in the given century.  Keep in mind that the 21st century
+       * AD runs from 2001-2100, not 2000-2099; 6th century BC runs from
+       * 600BC to 501BC.
+       */
+      if (tmfc.cc != 0 && tmfc.yysz <= 2) {
+        if (tmfc.bc != 0) {
+          tmfc.cc = -tmfc.cc;
+        }
+        tm.years = tmfc.year % 100;
+        if (tm.years != 0) {
+          if (tmfc.cc >= 0) {
+            tm.years += (tmfc.cc - 1) * 100;
+          } else {
+            tm.years = (tmfc.cc + 1) * 100 - tm.years + 1;
+          }
+        } else {
+				  /* find century year for dates ending in "00" */
+          tm.years = tmfc.cc * 100 + ((tmfc.cc >= 0) ? 0 : 1);
+        }
+      } else {
+			  /* If a 4-digit year is provided, we use that and ignore CC. */
+        tm.years = tmfc.year;
+        if (tmfc.bc != 0 && tm.years > 0) {
+          tm.years = -(tm.years - 1);
+        }
+      }
+    }
+    else if (tmfc.cc != 0) {			/* use first year of century */
+      if (tmfc.bc != 0) {
+        tmfc.cc = -tmfc.cc;
+      }
+      if (tmfc.cc >= 0) {
+			  /* +1 becuase 21st century started in 2001 */
+        tm.years = (tmfc.cc - 1) * 100 + 1;
+      } else {
+			  /* +1 because year == 599 is 600 BC */
+        tm.years = tmfc.cc * 100 + 1;
+      }
+    }
+
+    if (tmfc.j != 0) {
+      DateTimeUtil.j2date(tmfc.j, tm);
+    }
+    if (tmfc.ww != 0) {
+      if (tmfc.mode == FromCharDateMode.FROM_CHAR_DATE_ISOWEEK) {
+        /*
+         * If tmfc.d is not set, then the date is left at the beginning of
+         * the ISO week (Monday).
+         */
+        if (tmfc.d != 0) {
+          DateTimeUtil.isoweekdate2date(tmfc.ww, tmfc.d, tm);
+        } else {
+          DateTimeUtil.isoweek2date(tmfc.ww, tm);
+        }
+      } else {
+        tmfc.ddd = (tmfc.ww - 1) * 7 + 1;
+      }
+    }
+
+    if (tmfc.w != 0) {
+      tmfc.dd = (tmfc.w - 1) * 7 + 1;
+    }
+    if (tmfc.d != 0) {
+      //tm.tm_wday = tmfc.d - 1;		/* convert to native numbering */
+    }
+    if (tmfc.dd != 0) {
+      tm.dayOfMonth = tmfc.dd;
+    }
+    if (tmfc.ddd != 0) {
+      tm.dayOfYear = tmfc.ddd;
+    }
+    if (tmfc.mm != 0) {
+      tm.monthOfYear = tmfc.mm;
+    }
+    if (tmfc.ddd != 0 && (tm.monthOfYear <= 1 || tm.dayOfMonth <= 1)) {
+      /*
+       * The month and day field have not been set, so we use the
+       * day-of-year field to populate them.	Depending on the date mode,
+       * this field may be interpreted as a Gregorian day-of-year, or an ISO
+       * week date day-of-year.
+       */
+      if (tm.years == 0 && tmfc.bc == 0) {
+        throw new IllegalArgumentException("cannot calculate day of year without year information");
+      }
+      if (tmfc.mode == FromCharDateMode.FROM_CHAR_DATE_ISOWEEK) {
+        /* zeroth day of the ISO year, in Julian */
+        int j0 = DateTimeUtil.isoweek2j(tm.years, 1) - 1;
+        DateTimeUtil.j2date(j0 + tmfc.ddd, tm);
+      } else {
+        int	i;
+
+        boolean leap = DateTimeUtil.isLeapYear(tm.years);
+        int[] y = ysum[leap ? 1 : 0];
+
+        for (i = 1; i <= MONTHS_PER_YEAR; i++) {
+          if (tmfc.ddd < y[i])
+            break;
+        }
+        if (tm.monthOfYear <= 1) {
+          tm.monthOfYear = i;
+        }
+
+        if (tm.dayOfMonth <= 1) {
+          tm.dayOfMonth = tmfc.ddd - y[i - 1];
+        }
+        tm.dayOfYear = 0;
+      }
+    }
+
+    if (tmfc.ms != 0) {
+      tm.fsecs += tmfc.ms * 1000;
+    }
+    if (tmfc.us != 0) {
+      tm.fsecs += tmfc.us;
+    }
+  }
+
+  /**
+   * Format parser, search small keywords and keyword's suffixes, and make
+   * format-node tree.
+   *
+   * for DATE-TIME & NUMBER version
+   * @param node
+   * @param str
+   * @param ver
+   */
+  static void parseFormat(FormatNode[] node, String str, FORMAT_TYPE ver) {
+    KeySuffix  s;
+    boolean	node_set = false;
+    int suffix;
+    int last = 0;
+
+    int nodeIndex = 0;
+    int charIdx = 0;
+    char[] chars = str.toCharArray();
+
+    while (charIdx < chars.length) {
+      suffix = 0;
+
+      // Prefix
+      if (ver == FORMAT_TYPE.DCH_TYPE && (s = suff_search(chars, charIdx, SUFFTYPE_PREFIX)) != null) {
+        suffix |= s.id;
+        if (s.len > 0) {
+          charIdx += s.len;
+        }
+      }
+
+      // Keyword
+      if (charIdx < chars.length && (node[nodeIndex].key = index_seq_search(chars, charIdx)) != null) {
+        node[nodeIndex].type = NODE_TYPE_ACTION;
+        node[nodeIndex].suffix = 0;
+        node_set = true;
+        if (node[nodeIndex].key.len > 0) {
+          charIdx += node[nodeIndex].key.len;
+        }
+
+        // NUM version: Prepare global NUMDesc struct
+        if (ver == FORMAT_TYPE.NUM_TYPE) {
+          //NUMDesc_prepare(Num, node);
+        }
+
+         // Postfix
+        if (ver == FORMAT_TYPE.DCH_TYPE && charIdx < chars.length  && (s = suff_search(chars, charIdx, SUFFTYPE_POSTFIX)) != null) {
+          suffix |= s.id;
+          if (s.len > 0) {
+            charIdx += s.len;
+          }
+        }
+      } else if (charIdx < chars.length) {
+        // Special characters '\' and '"'
+        if (chars[charIdx] == '"' && last != '\\') {
+          int			x = 0;
+
+          while (charIdx < chars.length ) {
+            charIdx++;
+            if (chars[charIdx] == '"' && x != '\\') {
+              charIdx++;
+              break;
+            } else if (chars[charIdx] == '\\' && x != '\\') {
+              x = '\\';
+              continue;
+            }
+            node[nodeIndex].type = NODE_TYPE_CHAR;
+            node[nodeIndex].character = chars[charIdx];
+            node[nodeIndex].key = null;
+            node[nodeIndex].suffix = 0;
+            nodeIndex++;
+            x = chars[charIdx];
+          }
+          node_set = false;
+          suffix = 0;
+          last = 0;
+        } else if (charIdx < chars.length - 1 && chars[charIdx] == '\\' && last != '\\' && chars[charIdx + 1] == '"') {
+          last = chars[charIdx];
+          charIdx++;
+        } else if (charIdx < chars.length) {
+          node[nodeIndex].type = NODE_TYPE_CHAR;
+          node[nodeIndex].character = chars[charIdx];
+          node[nodeIndex].key = null;
+          node_set = true;
+          last = 0;
+          charIdx++;
+        }
+      }
+
+		  // end
+      if (node_set) {
+        if (node[nodeIndex].type == NODE_TYPE_ACTION) {
+          node[nodeIndex].suffix = suffix;
+        }
+        nodeIndex++;
+        node[nodeIndex].suffix = 0;
+        node_set = false;
+      }
+    }
+
+    node[nodeIndex].type = NODE_TYPE_END;
+    node[nodeIndex].suffix = 0;
+  }
+
+  /**
+   * Process a string as denoted by a list of FormatNodes.
+   * The TmFromChar struct pointed to by 'out' is populated with the results.
+   *
+   * Note: we currently don't have any to_interval() function, so there
+   * is no need here for INVALID_FOR_INTERVAL checks.
+   * @param nodes
+   * @param dateText
+   * @param out
+   */
+  static void DCH_from_char(FormatNode[] nodes, String dateText, TmFromChar out) {
+    int	len;
+    AtomicInteger value = new AtomicInteger();
+    boolean	fx_mode = false;
+
+    char[] chars = dateText.toCharArray();
+    int charIdx = 0;
+    int nodeIdx = 0;
+    for (; nodeIdx < nodes.length; nodeIdx++) {
+      FormatNode node = nodes[nodeIdx];
+      if (node.type == NODE_TYPE_END || charIdx >= chars.length) {
+        break;
+      }
+      if (node.type != NODE_TYPE_ACTION) {
+        charIdx++;
+			  /* Ignore spaces when not in FX (fixed width) mode */
+        if (Character.isSpaceChar(node.character) && !fx_mode) {
+          while (charIdx < chars.length && Character.isSpaceChar(chars[charIdx])) {
+            charIdx++;
+          }
+        }
+        continue;
+      }
+
+      from_char_set_mode(out, node.key.date_mode);
+
+      switch (node.key.idType) {
+        case DCH_FX:
+          fx_mode = true;
+          break;
+        case DCH_A_M:
+        case DCH_P_M:
+        case DCH_a_m:
+        case DCH_p_m:
+          value.set(out.pm);
+          charIdx += from_char_seq_search(value, dateText, charIdx, ampm_strings_long, ALL_UPPER, node.key.len, node);
+          assertOutValue(out.pm, value.get() % 2, node);
+          out.pm = value.get() % 2;
+          out.clock = CLOCK_12_HOUR;
+          break;
+        case DCH_AM:
+        case DCH_PM:
+        case DCH_am:
+        case DCH_pm:
+          value.set(out.pm);
+          charIdx += from_char_seq_search(value, dateText, charIdx, ampm_strings, ALL_UPPER, node.key.len, node);
+          assertOutValue(out.pm, value.get() % 2, node);
+          out.pm = value.get() % 2;
+          out.clock = CLOCK_12_HOUR;
+          break;
+        case DCH_HH:
+        case DCH_HH12:
+          value.set(out.hh);
+          charIdx += from_char_parse_int_len(value, dateText, charIdx, 2, nodes, nodeIdx);
+          out.hh = value.get();
+          out.clock = CLOCK_12_HOUR;
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_HH24:
+          value.set(out.hh);
+          charIdx += from_char_parse_int_len(value, dateText, charIdx, 2, nodes, nodeIdx);
+          out.hh = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_MI:
+          value.set(out.mi);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.mi = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_SS:
+          value.set(out.ss);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.ss = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_MS:		/* millisecond */
+          value.set(out.ms);
+          len = from_char_parse_int_len(value, dateText, charIdx, 3, nodes, nodeIdx);
+          charIdx += len;
+          out.ms = value.get();
+          /*
+           * 25 is 0.25 and 250 is 0.25 too; 025 is 0.025 and not 0.25
+           */
+          out.ms *= len == 1 ? 100 :
+              len == 2 ? 10 : 1;
+
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_US:		/* microsecond */
+          value.set(out.us);
+          len = from_char_parse_int_len(value, dateText, charIdx, 6, nodes, nodeIdx);
+          charIdx += len;
+          out.us = value.get();
+          out.us *= len == 1 ? 100000 :
+              len == 2 ? 10000 :
+                  len == 3 ? 1000 :
+                      len == 4 ? 100 :
+                          len == 5 ? 10 : 1;
+
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_SSSS:
+          value.set(out.ssss);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.ssss = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_tz:
+        case DCH_TZ:
+          throw new IllegalArgumentException("\"TZ\"/\"tz\" format patterns are not supported in to_date");
+        case DCH_A_D:
+        case DCH_B_C:
+        case DCH_a_d:
+        case DCH_b_c:
+          value.set(out.bc);
+          charIdx += from_char_seq_search(value, dateText, charIdx, adbc_strings_long, ALL_UPPER, node.key.len, node);
+          assertOutValue(out.bc, value.get() % 2, node);
+          out.bc = value.get() % 2;
+          break;
+        case DCH_AD:
+        case DCH_BC:
+        case DCH_ad:
+        case DCH_bc:
+          value.set(out.bc);
+          charIdx += from_char_seq_search(value, dateText, charIdx, adbc_strings, ALL_UPPER, node.key.len, node);
+          assertOutValue(out.bc, value.get() % 2, node);
+          out.bc = value.get() % 2;
+          break;
+        case DCH_MONTH:
+        case DCH_Month:
+        case DCH_month:
+          value.set(out.mm);
+          charIdx += from_char_seq_search(value, dateText, charIdx, months_full, ONE_UPPER, MAX_MONTH_LEN, node);
+          assertOutValue(out.mm, value.get() + 1, node);
+          out.mm = value.get() + 1;
+          break;
+        case DCH_MON:
+        case DCH_Mon:
+        case DCH_mon:
+          value.set(out.mm);
+          charIdx += from_char_seq_search(value, dateText, charIdx, months_short, ONE_UPPER, MAX_MON_LEN, node);
+          assertOutValue(out.mm, value.get() + 1, node);
+          out.mm = value.get() + 1;
+          break;
+        case DCH_MM:
+          value.set(out.mm);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.mm = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_DAY:
+        case DCH_Day:
+        case DCH_day:
+          value.set(out.d);
+          charIdx += from_char_seq_search(value, dateText, charIdx, days_full, ONE_UPPER, MAX_DAY_LEN, node);
+          assertOutValue(out.d, value.get(), node);
+          out.d = value.get();
+          out.d++;
+          break;
+        case DCH_DY:
+        case DCH_Dy:
+        case DCH_dy:
+          value.set(out.d);
+          charIdx += from_char_seq_search(value, dateText, charIdx, days_full, ONE_UPPER, MAX_DY_LEN, node);
+          assertOutValue(out.d, value.get(), node);
+          out.d = value.get();
+          out.d++;
+          break;
+        case DCH_DDD:
+          value.set(out.ddd);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.ddd = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_IDDD:
+          value.set(out.ddd);
+          charIdx += from_char_parse_int_len(value, dateText, charIdx, 3, nodes, nodeIdx);
+          out.ddd = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_DD:
+          value.set(out.dd);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.dd = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_D:
+          value.set(out.d);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.d = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_ID:
+          value.set(out.d);
+          charIdx += from_char_parse_int_len(value, dateText, charIdx, 1, nodes, nodeIdx);
+          out.d = value.get();
+				  /* Shift numbering to match Gregorian where Sunday = 1 */
+          if (++out.d > 7) {
+            out.d = 1;
+          }
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_WW:
+        case DCH_IW:
+          value.set(out.ww);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.ww = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_Q:
+          /*
+           * We ignore 'Q' when converting to date because it is unclear
+           * which date in the quarter to use, and some people specify
+           * both quarter and month, so if it was honored it might
+           * conflict with the supplied month. That is also why we don't
+           * throw an error.
+           *
+           * We still parse the source string for an integer, but it
+           * isn't stored anywhere in 'out'.
+           */
+          charIdx += from_char_parse_int(null, dateText, charIdx, nodes, nodeIdx);
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_CC:
+          value.set(out.cc);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.cc = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_Y_YYY: {
+            int commaIndex = dateText.indexOf(",", charIdx);
+            if (commaIndex <= 0) {
+              throw new IllegalArgumentException("invalid input string for \"Y,YYY\"");
+            }
+            int millenia = Integer.parseInt(dateText.substring(charIdx, commaIndex));
+            int years = Integer.parseInt(dateText.substring(commaIndex + 1, commaIndex + 1 + 3));
+            years += (millenia * 1000);
+            assertOutValue(out.year, years, node);
+            out.year = years;
+            out.yysz = 4;
+            charIdx += strdigits_len(dateText, charIdx) + 4 + SKIP_THth(node.suffix);
+          }
+          break;
+        case DCH_YYYY:
+        case DCH_IYYY:
+          value.set(out.year);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.year = value.get();
+          out.yysz = 4;
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_YYY:
+        case DCH_IYY: {
+            int retVal = from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+            charIdx += retVal;
+            out.year = value.get();
+            if (retVal < 4) {
+              out.year = adjust_partial_year_to_2020(out.year);
+            }
+            out.yysz = 3;
+            charIdx += SKIP_THth(node.suffix);
+            }
+          break;
+        case DCH_YY:
+        case DCH_IY: {
+            value.set(out.year);
+            int retVal = from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+            charIdx += retVal;
+            out.year = value.get();
+            if (retVal < 4) {
+              out.year = adjust_partial_year_to_2020(out.year);
+            }
+            out.yysz = 2;
+            charIdx += SKIP_THth(node.suffix);
+          }
+          break;
+        case DCH_Y:
+        case DCH_I:
+          value.set(out.year);
+          int retVal = from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          charIdx += retVal;
+          out.year = value.get();
+          if (retVal < 4) {
+            out.year = adjust_partial_year_to_2020(out.year);
+          }
+          out.yysz = 1;
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_RM:
+          value.set(out.mm);
+          charIdx += from_char_seq_search(value, dateText, charIdx, rm_months_upper, ALL_UPPER, MAX_RM_LEN, node);
+          assertOutValue(out.mm, MONTHS_PER_YEAR - value.get(), node);
+          out.mm = MONTHS_PER_YEAR - value.get();
+          break;
+        case DCH_rm:
+          value.set(out.mm);
+          charIdx += from_char_seq_search(value, dateText, charIdx, rm_months_lower, ALL_LOWER, MAX_RM_LEN, node);
+          assertOutValue(out.mm, MONTHS_PER_YEAR - value.get(), node);
+          out.mm = MONTHS_PER_YEAR - value.get();
+          break;
+        case DCH_W:
+          value.set(out.w);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.w = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+        case DCH_J:
+          value.set(out.j);
+          charIdx += from_char_parse_int(value, dateText, charIdx, nodes, nodeIdx);
+          out.j = value.get();
+          charIdx += SKIP_THth(node.suffix);
+          break;
+      }
+    }
+  }
+
+  static KeySuffix suff_search(char[] chars, int startIdx, int type) {
+    for (KeySuffix eachSuffix: DCH_suff) {
+      if (eachSuffix.type != type) {
+        continue;
+      }
+
+      if (strncmp(chars, startIdx, eachSuffix.name, eachSuffix.len)) {
+        return eachSuffix;
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Fast sequential search, use index for data selection which
+   * go to seq. cycle (it is very fast for unwanted strings)
+   * (can't be used binary search in format parsing)
+   * @param chars
+   * @param startIdx
+   * @return
+   */
+  static KeyWord index_seq_search(char[] chars, int startIdx) {
+    if (KeyWord_INDEX_FILTER(chars[startIdx]) == 0) {
+      return null;
+    }
+
+    Integer pos = DCH_index.get(chars[startIdx]);
+
+    if (pos != null) {
+      KeyWord keyword = DCH_keywords[pos];
+      do {
+        if (strncmp(chars, startIdx, keyword.name, keyword.len)) {
+          return keyword;
+        }
+        pos++;
+        if (pos >=  DCH_keywords.length) {
+          return null;
+        }
+        keyword = DCH_keywords[pos];
+      } while (chars[startIdx] == keyword.name.charAt(0));
+    }
+    return null;
+  }
+
+  static boolean strncmp(char[] chars, int startIdx, String str, int len) {
+    if (chars.length - startIdx < len) {
+      return false;
+    }
+
+    int index = startIdx;
+    for (int i = 0; i < len; i++, index++) {
+      if (chars[index] != str.charAt(i)) {
+        return false;
+      }
+    }
+
+    return true;
+  }
+
+  static int KeyWord_INDEX_FILTER(char c)	{
+    return ((c) <= ' ' || (c) >= '~' ? 0 : 1);
+  }
+
+  /**
+   * Set the date mode of a from-char conversion.
+   *
+   * Puke if the date mode has already been set, and the caller attempts to set
+   * it to a conflicting mode.
+   * @param tmfc
+   * @param mode
+   */
+  static void from_char_set_mode(TmFromChar tmfc, FromCharDateMode mode) {
+    if (mode != FromCharDateMode.FROM_CHAR_DATE_NONE) {
+      if (tmfc.mode == FromCharDateMode.FROM_CHAR_DATE_NONE) {
+        tmfc.mode = mode;
+      } else if (tmfc.mode != mode) {
+        throw new IllegalArgumentException("invalid combination of date conventions: " +
+                "Do not mix Gregorian and ISO week date " +
+                "conventions in a formatting template.");
+      }
+    }
+  }
+
+  /**
+   * Perform a sequential search in 'array' for text matching the first 'max'
+   * characters of the source string.
+   *
+   * If a match is found, copy the array index of the match into the integer
+   * pointed to by 'dest', advance 'src' to the end of the part of the string
+   * which matched, and return the number of characters consumed.
+   *
+   * If the string doesn't match, throw an error.
+   * @param dest
+   * @param src
+   * @param charIdx
+   * @param array
+   * @param type
+   * @param max
+   * @param node
+   * @return
+   */
+  static int from_char_seq_search(AtomicInteger dest, String src, int charIdx, String[] array, int type, int max,
+                       FormatNode node) {
+    AtomicInteger len = new AtomicInteger(0);
+
+    dest.set(seq_search(src, charIdx, array, type, max, len));
+    if (len.get() <= 0) {
+      String copy;
+      if (charIdx + node.key.len >= src.length()) {
+        copy = src.substring(charIdx, charIdx + node.key.len);
+      } else {
+        copy = src.substring(charIdx);
+      }
+      throw new IllegalArgumentException("Invalid value \"" + copy + "\" for \"" + node.key.name + "\". " +
+          "The given value did not match any of the allowed values for this field.");
+    }
+    return len.get();
+  }
+
+  /**
+   * Sequential search with to upper/lower conversion
+   * @param name
+   * @param charIdx
+   * @param array
+   * @param type
+   * @param max
+   * @param len
+   * @return
+   */
+  static int seq_search(String name, int charIdx, String[] array, int type, int max, AtomicInteger len) {
+    if (name == null || name.length() <= charIdx) {
+      return -1;
+    }
+
+    char[] nameChars = name.toCharArray();
+    char nameChar = nameChars[charIdx];
+
+	  /* set first char */
+    if (type == ONE_UPPER || type == ALL_UPPER) {
+      nameChar = Character.toUpperCase(nameChar);
+    } else if (type == ALL_LOWER) {
+      nameChar = Character.toLowerCase(nameChar);
+    }
+
+    int arrayIndex = 0;
+    for (int last = 0; array[arrayIndex] != null; arrayIndex++) {
+      String arrayStr = array[arrayIndex];
+		  /* comperate first chars */
+      if (nameChar != arrayStr.charAt(0)) {
+        continue;
+      }
+      int arrayStrLen = arrayStr.length();
+      int arrayCharIdx = 1;
+      int nameCharIdx = charIdx + 1;
+
+      for (int idx = 1; ; nameCharIdx++, arrayCharIdx++, idx++) {
+			  // search fragment (max) only
+        if (max != 0 && idx == max) {
+          len.set(idx + 1);   // '\0'
+          return arrayIndex;
+        }
+			  // full size
+        if (arrayCharIdx == arrayStrLen - 1) {
+          len.set(idx + 1);   // '\0'
+          return arrayIndex;
+        }
+        // Not found in array 'a'
+        if (nameCharIdx == nameChars.length - 1) {
+          break;
+        }
+        /*
+         * Convert (but convert new chars only)
+         */
+        nameChar = nameChars[nameCharIdx];
+        if (idx > last) {
+          if (type == ONE_UPPER || type == ALL_LOWER) {
+            nameChar = Character.toLowerCase(nameChar);
+          } else if (type == ALL_UPPER) {
+            nameChar = Character.toUpperCase(nameChar);
+          }
+          last = idx;
+        }
+        if (nameChar != arrayStr.charAt(arrayCharIdx)){
+          break;
+        }
+      }
+    }
+
+    return -1;
+  }
+
+  /**
+   * Read a single integer from the source string, into the int pointed to by
+   * 'dest'. If 'dest' is NULL, the result is discarded.
+   *
+   * In fixed-width mode (the node does not have the FM suffix), consume at most
+   * 'len' characters.  However, any leading whitespace isn't counted in 'len'.
+   *
+   * We use strtol() to recover the integer value from the source string, in
+   * accordance with the given FormatNode.
+   *
+   * If the conversion completes successfully, src will have been advanced to
+   * point at the character immediately following the last character used in the
+   * conversion.
+   *
+   * Return the number of characters consumed.
+   *
+   * Note that from_char_parse_int() provides a more convenient wrapper where
+   * the length of the field is the same as the length of the format keyword (as
+   * with DD and MI).
+   * @param dest
+   * @param src
+   * @param charIdx
+   * @param len
+   * @param nodes
+   * @param nodeIndex
+   * @return
+   */
+  static int from_char_parse_int_len(AtomicInteger dest, String src, int charIdx, int len, FormatNode[] nodes, int nodeIndex) {
+    long result;
+    int	 initCharIdx = charIdx;
+    StringBuilder tempSb = new StringBuilder();
+
+    /*
+     * Skip any whitespace before parsing the integer.
+     */
+    charIdx = strspace_len(src, charIdx);
+
+    int used = src.length() <= charIdx + len ? src.length() - (charIdx + len) : len;
+    if (used <= 0) {
+      used = src.length() - charIdx;
+    }
+    String copy = src.substring(charIdx, charIdx + used);
+
+    if (S_FM(nodes[nodeIndex].suffix) != 0 || is_next_separator(nodes, nodeIndex)) {
+		/*
+		 * This node is in Fill Mode, or the next node is known to be a
+		 * non-digit value, so we just slurp as many characters as we can get.
+		 */
+      result = DateTimeUtil.strtol(src, charIdx, tempSb);
+      charIdx = src.length() - tempSb.length();
+    } else {
+
+      /*
+       * We need to pull exactly the number of characters given in 'len' out
+       * of the string, and convert those.
+       */
+      if (used < len) {
+        throw new IllegalArgumentException("source string too short for \"" + nodes[nodeIndex].key.name + "\" + formatting field");
+      }
+      result = DateTimeUtil.strtol(copy, 0, tempSb);
+      used = copy.length() - tempSb.length();
+
+      if (used > 0 && used < len) {
+        throw new IllegalArgumentException("invalid value \"" + copy + "\" for \"" + nodes[nodeIndex].key.name + "\"." +
+            "Field requires " + len + " characters, but only " + used);
+      }
+      charIdx += used;
+    }
+
+    if (charIdx == initCharIdx) {
+      throw new IllegalArgumentException("invalid value \"" + copy + "\" for \"" + nodes[nodeIndex].key.name + "\"." +
+          "Value must be an integer.");
+    }
+    if (result < Integer.MIN_VALUE || result > Integer.MAX_VALUE) {
+      throw new IllegalArgumentException("value for \"" + nodes[nodeIndex].key.name + "\"" +
+          " in source string is out of range." +
+          "Value must be in the range " + Integer.MIN_VALUE + " to " + Integer.MAX_VALUE + ".");
+    }
+    if (dest != null) {
+      assertOutValue(dest.get(), (int)result, nodes[nodeIndex]);
+      dest.set((int)result);
+    }
+    return charIdx - initCharIdx;
+  }
+
+  /**
+   * Call from_char_parse_int_len(), using the length of the format keyword as
+   * the expected length of the field.
+   *
+   * Don't call this function if the field differs in length from the format
+   * keyword (as with HH24; the keyword length is 4, but the field length is 2).
+   * In such cases, call from_char_parse_int_len() instead to specify the
+   * required length explicitly.
+   * @param dest
+   * @param src
+   * @param charIdx
+   * @param nodes
+   * @param nodeIdx
+   * @return
+   */
+  static int from_char_parse_int(AtomicInteger dest, String src, int charIdx, FormatNode[] nodes, int nodeIdx) {
+    return from_char_parse_int_len(dest, src, charIdx, nodes[nodeIdx].key.len, nodes, nodeIdx);
+  }
+
+  static int strspace_len(String str, int charIdx) {
+    int len = str.length();
+    while (charIdx < len && Character.isSpaceChar(str.charAt(charIdx))) {
+      charIdx++;
+    }
+    return charIdx;
+  }
+
+  static int strdigits_len(String str, int charIdx) {
+    int len = strspace_len(str, charIdx);
+    int index = charIdx + len;
+
+    int strLen = str.length();
+
+    while (index < strLen && Character.isDigit(str.charAt(index)) && len <= DCH_MAX_ITEM_SIZ) {
+      len++;
+      index++;
+    }
+    return len;
+  }
+
+  static void assertOutValue(int dest, int value, FormatNode node) {
+    if (dest != 0 && dest != value) {
+      throw new IllegalArgumentException(
+          "conflicting values for \"" + node.key.name + "\" field in formatting string," +
+          "This value contradicts a previous setting for the same field type(" + dest + "," + value + ")");
+    }
+  }
+
+  /**
+   * Return true if next format picture is not digit value
+   * @param nodes
+   * @param nodeIndex
+   * @return
+   */
+  static boolean is_next_separator(FormatNode[] nodes, int nodeIndex) {
+    int index = nodeIndex;
+    if (nodes[index].type == NODE_TYPE_END) {
+      return false;
+    }
+
+    if (nodes[index].type == NODE_TYPE_ACTION && S_THth(nodes[index].suffix) != 0) {
+      return true;
+    }
+
+    /*
+     * Next node
+     */
+    index++;
+
+	/* end of format string is treated like a non-digit separator */
+    if (nodes[index].type == NODE_TYPE_END) {
+      return true;
+    }
+
+    if (nodes[index].type == NODE_TYPE_ACTION) {
+      return nodes[index].key.is_digit ? false : true;
+    } else if (Character.isDigit(nodes[index].character)) {
+      return false;
+    }
+
+    return true;				/* some non-digit input (separator) */
+  }
+
+  /**
+   * Adjust all dates toward 2020; this is effectively what happens when we
+   * assume '70' is 1970 and '69' is 2069.
+   * @param year
+   * @return
+   */
+  static int adjust_partial_year_to_2020(int year) {
+    if (year < 70) {
+      /* Force 0-69 into the 2000's */
+      return year + 2000;
+    } else if (year < 100) {
+      /* Force 70-99 into the 1900's */
+      return year + 1900;
+    } else if (year < 520) {
+      /* Force 100-519 into the 2000's */
+      return year + 2000;
+    } else if (year < 1000) {
+      /* Force 520-999 into the 1000's */
+      return year + 1000;
+    } else {
+      return year;
+    }
+  }
+
+  /**
+   * Converts TimeMeta to a string using given the format pattern text.
+   * @param tm
+   * @param formatText
+   * @return
+   */
+  public static String to_char(TimeMeta tm, String formatText) {
+    int fmt_len = formatText.length();
+
+    StringBuilder out = new StringBuilder();
+    if (fmt_len > 0) {
+      FormatNode[] formatNodes;
+
+      synchronized(formatNodeCache) {
+        formatNodes = formatNodeCache.get(formatText);
+      }
+
+      if (formatNodes == null) {
+        formatNodes = new FormatNode[fmt_len + 1];
+        for (int i = 0; i < formatNodes.length; i++) {
+          formatNodes[i] = new FormatNode();
+        }
+        parseFormat(formatNodes, formatText, FORMAT_TYPE.DCH_TYPE);
+        formatNodes[fmt_len].type = NODE_TYPE_END;	/* Paranoia? */
+
+        synchronized(formatNodeCache) {
+          formatNodeCache.put(formatText, formatNodes);
+        }
+      }
+      DCH_to_char(formatNodes, false, tm, out);
+      return out.toString();
+    } else {
+      throw new IllegalArgumentException("No format text.");
+    }
+  }
+
+  /**
+   * Process a TmToChar struct as denoted by a list of FormatNodes.
+   * The formatted data is written to the string pointed to by 'out'.
+   * @param nodes
+   * @param isInterval
+   * @param tm
+   * @param out
+   */
+  private static void DCH_to_char(FormatNode[] nodes, boolean isInterval, TimeMeta tm, StringBuilder out) {
+    int i;
+    for (FormatNode node: nodes) {
+      if (node.type == NODE_TYPE_END) {
+        break;
+      }
+      if (node.type != NODE_TYPE_ACTION) {
+        out.append(node.character);
+        continue;
+      }
+
+      switch (node.key.idType) {
+        case DCH_A_M:
+        case DCH_P_M:
+          out.append((tm.hours % HOURS_PER_DAY >= HOURS_PER_DAY / 2) ? P_M_STR : A_M_STR);
+          break;
+        case DCH_AM:
+        case DCH_PM:
+          out.append((tm.hours % HOURS_PER_DAY >= HOURS_PER_DAY / 2) ? PM_STR : AM_STR);
+          break;
+        case DCH_a_m:
+        case DCH_p_m:
+          out.append((tm.hours % HOURS_PER_DAY >= HOURS_PER_DAY / 2) ? p_m_STR : a_m_STR);
+          break;
+        case DCH_am:
+        case DCH_pm:
+          out.append((tm.hours % HOURS_PER_DAY >= HOURS_PER_DAY / 2) ? pm_STR : am_STR);
+          break;
+        case DCH_HH:
+        case DCH_HH12: {
+          /*
+           * display time as shown on a 12-hour clock, even for
+           * intervals
+           */
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%02d");
+          out.append(String.format(formatStr,
+              tm.hours % (HOURS_PER_DAY / 2) == 0 ? HOURS_PER_DAY / 2 : tm.hours % (HOURS_PER_DAY / 2)));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_HH24: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%02d");
+          out.append(String.format(formatStr, tm.hours));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_MI: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%02d");
+          out.append(String.format(formatStr, tm.minutes));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_SS: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%02d");
+          out.append(String.format(formatStr, tm.secs));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_MS:		/* millisecond */
+          out.append(String.format("%03d", (int) (tm.fsecs / 1000.0)));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_US:		/* microsecond */
+          out.append(String.format("%06d", (int) tm.fsecs));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_SSSS:
+          out.append(String.format("%d", tm.hours * DateTimeConstants.SECS_PER_HOUR +
+              tm.minutes * DateTimeConstants.SECS_PER_MINUTE + tm.secs));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_tz:
+          invalidForInterval(isInterval, node);
+          //TODO
+          break;
+        case DCH_A_D:
+        case DCH_B_C:
+          invalidForInterval(isInterval, node);
+          out.append((tm.years <= 0 ? B_C_STR : A_D_STR));
+          break;
+        case DCH_AD:
+        case DCH_BC:
+          invalidForInterval(isInterval, node);
+          out.append((tm.years <= 0 ? BC_STR : AD_STR));
+          break;
+        case DCH_a_d:
+        case DCH_b_c:
+          invalidForInterval(isInterval, node);
+          out.append((tm.years <= 0 ? b_c_STR : a_d_STR));
+          break;
+        case DCH_ad:
+        case DCH_bc:
+          invalidForInterval(isInterval, node);
+          out.append((tm.years <= 0 ? bc_STR : ad_STR));
+          break;
+        case DCH_MONTH:
+          invalidForInterval(isInterval, node);
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          if (S_TM(node.suffix) != 0) {
+            out.append(months_full[tm.monthOfYear - 1].toUpperCase());
+          } else {
+            String formatStr =(S_FM(node.suffix) != 0 ? "%0d": "%-09d");
+            out.append(String.format(formatStr, months_full[tm.monthOfYear - 1].toUpperCase()));
+          }
+          break;
+        case DCH_Month:
+          invalidForInterval(isInterval, node);
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          if (S_TM(node.suffix) != 0) {
+            out.append(months_full[tm.monthOfYear - 1]);
+          } else {
+            String formatStr = (S_FM(node.suffix) != 0 ? "%s": "%-9s");
+            out.append(String.format(formatStr, months_full[tm.monthOfYear - 1]));
+          }
+          break;
+        case DCH_month:
+          invalidForInterval(isInterval, node);
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          if (S_TM(node.suffix) != 0) {
+            out.append(months_full[tm.monthOfYear - 1].toLowerCase());
+          } else {
+            String formatStr = (S_FM(node.suffix) != 0 ? "%s": "%-9s");
+            out.append(String.format(formatStr, months_full[tm.monthOfYear - 1].toLowerCase()));
+          }
+          break;
+        case DCH_MON:
+          invalidForInterval(isInterval, node);
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          if (S_TM(node.suffix) != 0) {
+            out.append(months_short[tm.monthOfYear - 1].toUpperCase());
+          } else {
+            out.append(months_short[tm.monthOfYear - 1].toUpperCase());
+          }
+          break;
+        case DCH_Mon:
+          invalidForInterval(isInterval, node);
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          if (S_TM(node.suffix) != 0) {
+            out.append(months_short[tm.monthOfYear - 1]);
+          } else {
+            out.append(months_short[tm.monthOfYear - 1]);
+          }
+          break;
+        case DCH_mon:
+          invalidForInterval(isInterval, node);
+          if (tm.monthOfYear == 0)
+            break;
+          if (S_TM(node.suffix) != 0) {
+            out.append(months_short[tm.monthOfYear - 1].toLowerCase());
+          } else {
+            out.append(months_short[tm.monthOfYear - 1].toLowerCase());
+          }
+          break;
+        case DCH_MM: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%02d");
+          out.append(String.format(formatStr, tm.monthOfYear));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_DAY: {
+          invalidForInterval(isInterval, node);
+          if (S_TM(node.suffix) != 0) {
+            out.append(days_full[tm.getDayOfWeek()].toUpperCase());
+          } else {
+            String formatStr = (S_FM(node.suffix) != 0 ? "%s" : "%-9s");
+            out.append(String.format(formatStr, days_full[tm.getDayOfWeek()].toUpperCase()));
+          }
+          break;
+        }
+        case DCH_Day:
+          invalidForInterval(isInterval, node);
+          if (S_TM(node.suffix) != 0) {
+            out.append(days_full[tm.getDayOfWeek()]);
+          } else {
+            String formatStr = (S_FM(node.suffix) != 0 ? "%s" : "%-9s");
+            out.append(String.format(formatStr, days_full[tm.getDayOfWeek()]));
+          }
+          break;
+        case DCH_day:
+          invalidForInterval(isInterval, node);
+          if (S_TM(node.suffix) != 0) {
+            out.append(days_full[tm.getDayOfWeek()].toLowerCase());
+          } else {
+            String formatStr = (S_FM(node.suffix) != 0 ? "%s" : "%-9s");
+            out.append(String.format(formatStr, days_full[tm.getDayOfWeek()].toLowerCase()));
+          }
+          break;
+        case DCH_DY:
+          invalidForInterval(isInterval, node);
+          if (S_TM(node.suffix) != 0) {
+            out.append(days_short[tm.getDayOfWeek()]);
+          } else {
+            out.append(days_short[tm.getDayOfWeek()]);
+          }
+          break;
+        case DCH_Dy:
+          invalidForInterval(isInterval, node);
+          if (S_TM(node.suffix) != 0) {
+            out.append(days_short[tm.getDayOfWeek()]);
+          } else {
+            out.append(days_short[tm.getDayOfWeek()]);
+          }
+          break;
+        case DCH_dy:
+          invalidForInterval(isInterval, node);
+          if (S_TM(node.suffix) != 0) {
+            out.append(days_short[tm.getDayOfWeek()]);
+          } else {
+            out.append(days_short[tm.getDayOfWeek()]);
+          }
+          break;
+        case DCH_DDD:
+        case DCH_IDDD: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%0d" : "%03d");
+          out.append(String.format(formatStr,
+              (node.key.idType == DCH_poz.DCH_DDD) ?
+                  tm.getDayOfYear() : DateTimeUtil.date2isoyearday(tm.years, tm.monthOfYear, tm.dayOfMonth)
+          ));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_DD: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%02d");
+          out.append(String.format(formatStr, tm.dayOfMonth));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_D:
+          invalidForInterval(isInterval, node);
+          out.append(String.format("%d", tm.getDayOfWeek() + 1));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_ID:
+          invalidForInterval(isInterval, node);
+          out.append(String.format("%d", (tm.getDayOfWeek() == 0) ? 7 : tm.getDayOfWeek()));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_WW: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%0d" : "%02d");
+          out.append(String.format(formatStr, (tm.getDayOfYear() - 1) / 7 + 1));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_IW: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%0d" : "%02d");
+          out.append(String.format(formatStr,
+              DateTimeUtil.date2isoweek(tm.years, tm.monthOfYear, tm.dayOfMonth)));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_Q:
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          out.append(String.format("%d", (tm.monthOfYear - 1) / 3 + 1));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_CC: {
+          if (isInterval) { /* straight calculation */
+            i = tm.years / 100;
+          } else {
+            if (tm.years > 0) {
+						/* Century 20 == 1901 - 2000 */
+              i = (tm.years - 1) / 100 + 1;
+            } else {
+						/* Century 6BC == 600BC - 501BC */
+              i = tm.years / 100 - 1;
+            }
+          }
+          if (i <= 99 && i >= -99) {
+            String formatStr = (S_FM(node.suffix) != 0 ? "%0d" : "%02d");
+            out.append(String.format(formatStr, i));
+          } else {
+            out.append(String.format("%d", i));
+          }
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_Y_YYY:
+          i = ADJUST_YEAR(tm.years, isInterval) / 1000;
+          out.append(String.format("%d,%03d", i, ADJUST_YEAR(tm.years, isInterval) - (i * 1000)));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_YYYY:
+        case DCH_IYYY: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%d" : "%04d");
+          out.append(String.format(formatStr,
+              (node.key.idType == DCH_poz.DCH_YYYY ? ADJUST_YEAR(tm.years, isInterval) :
+                  ADJUST_YEAR(DateTimeUtil.date2isoyear(tm.years, tm.monthOfYear, tm.dayOfMonth), isInterval))
+          ));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_YYY:
+        case DCH_IYY: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%0d" : "%03d");
+          out.append(String.format(formatStr,
+              (node.key.idType == DCH_poz.DCH_YYY ? ADJUST_YEAR(tm.years, isInterval) :
+                  ADJUST_YEAR(DateTimeUtil.date2isoyear(tm.years, tm.monthOfYear, tm.dayOfMonth), isInterval)) % 1000
+          ));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        }
+        case DCH_YY:
+        case DCH_IY: {
+          String formatStr = (S_FM(node.suffix) != 0 ? "%0d" : "%02d");
+          out.append(String.format(formatStr,
+              (node.key.idType == DCH_poz.DCH_YY ? ADJUST_YEAR(tm.years, isInterval) :
+                  ADJUST_YEAR(DateTimeUtil.date2isoyear(tm.years, tm.monthOfYear, tm.dayOfMonth), isInterval)) % 100
+          ));
+          if (S_THth(node.suffix) != 0)
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          break;
+        }
+        case DCH_Y:
+        case DCH_I:
+          out.append(String.format("%1d",
+              (node.key.idType == DCH_poz.DCH_Y ?
+                  ADJUST_YEAR(tm.years, isInterval) :
+                  ADJUST_YEAR(DateTimeUtil.date2isoyear(tm.years, tm.monthOfYear, tm.dayOfMonth),
+                      isInterval)) % 10));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_RM: {
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          String formatStr = (S_FM(node.suffix) != 0 ? "%s" : "%-4s");
+          out.append(String.format(formatStr,
+              rm_months_upper[MONTHS_PER_YEAR - tm.monthOfYear]));
+          break;
+        }
+        case DCH_rm: {
+          if (tm.monthOfYear == 0) {
+            break;
+          }
+          String formatStr = (S_FM(node.suffix) != 0 ? "%s" : "%-4s");
+          out.append(String.format(formatStr, rm_months_lower[MONTHS_PER_YEAR - tm.monthOfYear]));
+          break;
+        }
+        case DCH_W:
+          out.append(String.format("%d", (tm.dayOfMonth - 1) / 7 + 1));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+        case DCH_J:
+          out.append(String.format("%d", DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth)));
+          if (S_THth(node.suffix) != 0) {
+            str_numth(out, out, S_TH_TYPE(node.suffix));
+          }
+          break;
+      }
+    }
+  }
+
+  /**
+   * Return ST/ND/RD/TH for simple (1..9) numbers
+   * type --> 0 upper, 1 lower
+   * @param num
+   * @param type
+   * @return
+   */
+  static String get_th(StringBuilder num, int type) {
+    int	len = num.length();
+    char last = num.charAt(len - 1);
+
+    if (!Character.isDigit(last)) {
+      throw new IllegalArgumentException("\"" + num.toString() + "\" is not a number");
+    }
+
+    /*
+     * All "teens" (<x>1[0-9]) get 'TH/th', while <x>[02-9][123] still get
+     * 'ST/st', 'ND/nd', 'RD/rd', respectively
+     */
+    char seclast;
+    if ((len > 1) && ((seclast = num.charAt(len - 2)) == '1')) {
+      last = 0;
+    }
+
+    switch (last) {
+      case '1':
+        if (type == TH_UPPER)
+          return numTH[0];
+        return numth[0];
+      case '2':
+        if (type == TH_UPPER)
+          return numTH[1];
+        return numth[1];
+      case '3':
+        if (type == TH_UPPER)
+          return numTH[2];
+        return numth[2];
+      default:
+        if (type == TH_UPPER)
+          return numTH[3];
+        return numth[3];
+    }
+  }
+
+  /**
+   * Convert string-number to ordinal string-number
+   * type --> 0 upper, 1 lower
+   * @param dest
+   * @param num
+   * @param type
+   */
+  static void str_numth(StringBuilder dest, StringBuilder num, int type) {
+    if (!dest.equals(num)) {
+      dest.append(num);
+    }
+    dest.append(get_th(num, type));
+  }
+
+  private static void invalidForInterval(boolean isInterval, FormatNode node) {
+    if (isInterval) {
+      throw new IllegalArgumentException("\"" + node.key.name + "\" not support for interval");
+    }
+  }
+}
diff --git a/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeUtil.java b/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeUtil.java
new file mode 100644
index 0000000..327b423
--- /dev/null
+++ b/tajo-common/src/main/java/org/apache/tajo/util/datetime/DateTimeUtil.java
@@ -0,0 +1,2106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util.datetime;
+
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.datum.Int8Datum;
+import org.apache.tajo.exception.ValueOutOfRangeException;
+import org.apache.tajo.util.datetime.DateTimeConstants.DateStyle;
+import org.apache.tajo.util.datetime.DateTimeConstants.DateToken;
+import org.apache.tajo.util.datetime.DateTimeConstants.TokenField;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * This Class is originated from j2date in datetime.c of PostgreSQL.
+ */
+public class DateTimeUtil {
+
+  private static int MAX_FRACTION_LENGTH = 6;
+
+  /** maximum possible number of fields in a date * string */
+  private static int MAXDATEFIELDS = 25;
+
+  public static boolean isJulianCalendar(int year, int month, int day) {
+    return year <= 1752 && month <= 9 && day < 14;
+  }
+
+  public static int getCenturyOfEra(int year) {
+    if (year > 0) {
+      return (year - 1) / 100 + 1;
+    } else if (year < 0) {
+      //600BC to 501BC -> -6
+      int pYear = -year;
+      return -((pYear - 1) / 100 + 1);
+    } else {
+      return 0;
+    }
+  }
+
+  public static boolean isLeapYear(int year) {
+    return ((year & 3) == 0) && ((year % 100) != 0 || (year % 400) == 0);
+  }
+
+  public static int getDaysInYearMonth(int year, int month) {
+    if (isLeapYear(year)) {
+      return DateTimeConstants.DAY_OF_MONTH[1][month - 1];
+    } else {
+      return DateTimeConstants.DAY_OF_MONTH[0][month - 1];
+    }
+  }
+
+  /**
+   * Julian date support.
+   *
+   * isValidJulianDate checks the minimum date exactly, but is a bit sloppy
+   * about the maximum, since it's far enough out to not be especially
+   * interesting.
+   * @param years
+   * @param months
+   * @param days
+   * @return
+   */
+  public static boolean isValidJulianDate(int years, int months, int days) {
+    return years > DateTimeConstants.JULIAN_MINYEAR || years == DateTimeConstants.JULIAN_MINYEAR &&
+        months > DateTimeConstants.JULIAN_MINMONTH || months == DateTimeConstants.JULIAN_MINMONTH &&
+        days >= DateTimeConstants.JULIAN_MINDAY && years < DateTimeConstants.JULIAN_MAXYEAR;
+  }
+
+  /**
+   * Calendar time to Julian date conversions.
+   * Julian date is commonly used in astronomical applications,
+   *	since it is numerically accurate and computationally simple.
+   * The algorithms here will accurately convert between Julian day
+   *	and calendar date for all non-negative Julian days_full
+   *	(i.e. from Nov 24, -4713 on).
+   *
+   * These routines will be used by other date/time packages
+   * - thomas 97/02/25
+   *
+   * Rewritten to eliminate overflow problems. This now allows the
+   * routines to work correctly for all Julian day counts from
+   * 0 to 2147483647	(Nov 24, -4713 to Jun 3, 5874898) assuming
+   * a 32-bit integer. Longer types should also work to the limits
+   * of their precision.
+   * @param year
+   * @param month
+   * @param day
+   * @return
+   */
+  public static int date2j(int year, int month, int day) {
+    int julian;
+    int century;
+
+    if (month > 2) {
+      month += 1;
+      year += 4800;
+    } else {
+      month += 13;
+      year += 4799;
+    }
+
+    century = year / 100;
+    julian = (year * 365) - 32167;
+    julian += (((year / 4) - century) + (century / 4));
+    julian += ((7834 * month) / 256) + day;
+
+    return julian;
+  }
+
+  /**
+   * Set TimeMeta's date fields.
+   * @param julianDate
+   * @param tm
+   */
+  public static void j2date(int julianDate, TimeMeta tm) {
+    long julian;
+    long quad;
+    long extra;
+    long y;
+
+    julian = julianDate;
+    julian += 32044;
+    quad = julian / 146097;
+    extra = (julian - quad * 146097) * 4 + 3;
+    julian += 60 + quad * 3 + extra / 146097;
+    quad = julian / 1461;
+    julian -= quad * 1461;
+    y = julian * 4 / 1461;
+    julian = ((y != 0) ? ((julian + 305) % 365) : ((julian + 306) % 366))
+        + 123;
+    y += quad * 4;
+
+
+    tm.years = (int)(y - 4800);
+    quad = julian * 2141 / 65536;
+    tm.dayOfMonth = (int)(julian - 7834 * quad / 256);
+    tm.monthOfYear = (int) ((quad + 10) % DateTimeConstants.MONTHS_PER_YEAR + 1);
+  }
+
+  /**
+   * This method is originated from j2date in datetime.c of PostgreSQL.
+   *
+   * julianToDay - convert Julian date to day-of-week (0..6 == Sun..Sat)
+   *
+   * Note: various places use the locution julianToDay(date - 1) to produce a
+   * result according to the convention 0..6 = Mon..Sun.	This is a bit of
+   * a crock, but will work as long as the computation here is just a modulo.
+   * @param julianDate
+   * @return
+   */
+  public static int j2day(int julianDate) {
+    long day;
+
+    day = julianDate;
+
+    day += 1;
+    day %= 7;
+
+    return (int) day;
+  }
+
+  /**
+   * This method is originated from date2isoweek in timestamp.c of PostgreSQL.
+   * Returns ISO week number of year.
+   * @param year
+   * @param mon
+   * @param mday
+   * @return
+   */
+  public static int date2isoweek(int year, int mon, int mday) {
+    double result;
+    int day0;
+    int day4;
+    int dayn;
+
+    /* current day */
+    dayn = date2j(year, mon, mday);
+
+    /* fourth day of current year */
+    day4 = date2j(year, 1, 4);
+
+    /* day0 == offset to first day of week (Monday) */
+    day0 = j2day(day4 - 1);
+
+    /*
+     * We need the first week containing a Thursday, otherwise this day falls
+     * into the previous year for purposes of counting weeks
+     */
+    if (dayn < day4 - day0) {
+      day4 = date2j(year - 1, 1, 4);
+
+      /* day0 == offset to first day of week (Monday) */
+      day0 = j2day(day4 - 1);
+    }
+
+    result = (dayn - (day4 - day0)) / 7 + 1;
+
+      /*
+       * Sometimes the last few days_full in a year will fall into the first week of
+       * the next year, so check for this.
+       */
+    if (result >= 52) {
+      day4 = date2j(year + 1, 1, 4);
+
+      /* day0 == offset to first day of week (Monday) */
+      day0 = j2day(day4 - 1);
+
+      if (dayn >= day4 - day0) {
+        result = (dayn - (day4 - day0)) / 7 + 1;
+      }
+    }
+
+    return (int) result;
+  }
+
+  /**
+   * date2isoyear()
+   *
+   * Returns ISO 8601 year number.
+   * @param year
+   * @param mon
+   * @param mday
+   * @return
+   */
+  public static int date2isoyear(int year, int mon, int mday) {
+    /* current day */
+    int dayn = date2j(year, mon, mday);
+
+	  /* fourth day of current year */
+    int day4 = date2j(year, 1, 4);
+
+	  /* day0 == offset to first day of week (Monday) */
+    int day0 = j2day(day4 - 1);
+
+    /*
+     * We need the first week containing a Thursday, otherwise this day falls
+     * into the previous year for purposes of counting weeks
+     */
+    if (dayn < day4 - day0) {
+      day4 = date2j(year - 1, 1, 4);
+
+		/* day0 == offset to first day of week (Monday) */
+      day0 = j2day(day4 - 1);
+
+      year--;
+    }
+
+    double result = (dayn - (day4 - day0)) / 7 + 1;
+
+    /*
+     * Sometimes the last few days in a year will fall into the first week of
+     * the next year, so check for this.
+     */
+    if (result >= 52) {
+      day4 = date2j(year + 1, 1, 4);
+
+		/* day0 == offset to first day of week (Monday) */
+      day0 = j2day(day4 - 1);
+
+      if (dayn >= day4 - day0) {
+        year++;
+      }
+    }
+
+    return year;
+  }
+
+  /**
+   * Converts julian timestamp to epoch.
+   * @param timestamp
+   * @return
+   */
+  public static int julianTimeToEpoch(long timestamp) {
+    long totalSecs = timestamp / DateTimeConstants.USECS_PER_SEC;
+    return (int)(totalSecs + DateTimeConstants.SECS_DIFFERENCE_BETWEEN_JULIAN_AND_UNIXTIME);
+  }
+
+  /**
+   * Converts julian timestamp to java timestamp.
+   * @param timestamp
+   * @return
+   */
+  public static long julianTimeToJavaTime(long timestamp) {
+    double totalSecs = (double)timestamp / (double)DateTimeConstants.MSECS_PER_SEC;
+    return (long)(Math.round(totalSecs + DateTimeConstants.SECS_DIFFERENCE_BETWEEN_JULIAN_AND_UNIXTIME * 1000.0));
+  }
+
+  /**
+   * Converts java timestamp to julian timestamp.
+   * @param javaTimestamp
+   * @return
+   */
+  public static long javaTimeToJulianTime(long javaTimestamp) {
+    double totalSecs = javaTimestamp / 1000.0;
+    return (long)((totalSecs -
+        DateTimeConstants.SECS_DIFFERENCE_BETWEEN_JULIAN_AND_UNIXTIME) * DateTimeConstants.USECS_PER_SEC);
+  }
+
+  /**
+   * Calculate the time value(hour, minute, sec, fsec)
+   * If tm.TomeZone is set, the result value is adjusted.
+   * @param tm
+   * @return
+   */
+  public static long toTime(TimeMeta tm) {
+    if (tm.timeZone != 0 && tm.timeZone != Integer.MAX_VALUE) {
+      int timeZoneSecs = tm.timeZone;
+      tm.timeZone = Integer.MAX_VALUE;
+      tm.plusMillis(0 - timeZoneSecs * 1000);
+    }
+    return toTime(tm.hours, tm.minutes, tm.secs, tm.fsecs);
+  }
+
+  /**
+   * Calculate the time value(hour, minute, sec, fsec)
+   * @param hour
+   * @param min
+   * @param sec
+   * @param fsec
+   * @return
+   */
+  public static long toTime(int hour, int min, int sec, int fsec) {
+    return (((((hour * DateTimeConstants.MINS_PER_HOUR) + min) *
+        DateTimeConstants.SECS_PER_MINUTE) + sec) *
+        DateTimeConstants.USECS_PER_SEC) + fsec;
+  }
+
+  public static long toJavaTime(int hour, int min, int sec, int fsec) {
+    return toTime(hour, min, sec, fsec)/DateTimeConstants.MSECS_PER_SEC;
+  }
+
+  /**
+   * Calculate julian timestamp.
+   * @param years
+   * @param months
+   * @param days
+   * @param hours
+   * @param minutes
+   * @param seconds
+   * @param fsec
+   * @return
+   */
+  public static long toJulianTimestamp(
+      int years, int months, int days, int hours, int minutes, int seconds, int fsec) {
+    /* Julian day routines are not correct for negative Julian days_full */
+    if (!isValidJulianDate(years, months, days)) {
+      throw new ValueOutOfRangeException("Out of Range Julian days_full");
+    }
+
+    long numJulianDays = date2j(years, months, days) - DateTimeConstants.POSTGRES_EPOCH_JDATE;
+
+    return toJulianTimestamp(numJulianDays, hours, minutes, seconds, fsec);
+  }
+
+  /**
+   * Calculate julian timestamp.
+   * @param numJulianDays
+   * @param hours
+   * @param minutes
+   * @param seconds
+   * @param fsec
+   * @return
+   */
+  private static long toJulianTimestamp(long numJulianDays, int hours, int minutes, int seconds, int fsec) {
+    long time = toTime(hours, minutes, seconds, fsec);
+
+    long timestamp = numJulianDays * DateTimeConstants.USECS_PER_DAY + time;
+      /* check for major overflow */
+    if ((timestamp - time) / DateTimeConstants.USECS_PER_DAY != numJulianDays) {
+      throw new RuntimeException("Out of Range of Time");
+    }
+      /* check for just-barely overflow (okay except time-of-day wraps) */
+      /* caution: we want to allow 1999-12-31 24:00:00 */
+    if ((timestamp < 0 && numJulianDays > 0) || (timestamp > 0 && numJulianDays < -1)) {
+      throw new RuntimeException("Out of Range of Date");
+    }
+
+    return timestamp;
+  }
+
+  /**
+   * Calculate julian timestamp.
+   * If tm.TomeZone is set, the result value is adjusted.
+   * @param tm
+   * @return
+   */
+  public static long toJulianTimestamp(TimeMeta tm) {
+    if (tm.timeZone != 0 && tm.timeZone != Integer.MAX_VALUE) {
+      int timeZoneSecs = tm.timeZone;
+      tm.timeZone = Integer.MAX_VALUE;
+      tm.plusMillis(0 - timeZoneSecs * 1000);
+    }
+    if (tm.dayOfYear > 0) {
+      return toJulianTimestamp(date2j(tm.years, 1, 1) + tm.dayOfYear - 1, tm.hours, tm.minutes, tm.secs, tm.fsecs);
+    } else {
+      return toJulianTimestamp(tm.years, tm.monthOfYear, tm.dayOfMonth, tm.hours, tm.minutes, tm.secs, tm.fsecs);
+    }
+  }
+
+  /**
+   * Set TimeMeta's field value using given julian timestamp.
+   * Note that year is _not_ 1900-based, but is an explicit full value.
+   * Also, month is one-based, _not_ zero-based.
+   * Returns:
+   *	 0 on success
+   *	-1 on out of range
+   *
+   * If attimezone is NULL, the global timezone (including possibly brute forced
+   * timezone) will be used.
+   */
+  public static void toJulianTimeMeta(long julianTimestamp, TimeMeta tm) {
+    long date;
+    long time;
+
+    // TODO - If timezone is set, timestamp value should be adjusted here.
+    time = julianTimestamp;
+
+    // TMODULO
+    date = time / DateTimeConstants.USECS_PER_DAY;
+    if (date != 0) {
+      time -= date * DateTimeConstants.USECS_PER_DAY;
+    }
+    if (time < 0) {
+      time += DateTimeConstants.USECS_PER_DAY;
+      date -= 1;
+    }
+
+    /* add offset to go from J2000 back to standard Julian date */
+    date += DateTimeConstants.POSTGRES_EPOCH_JDATE;
+
+    /* Julian day routine does not work for negative Julian days_full */
+    if (date < 0 || date > Integer.MAX_VALUE) {
+      throw new RuntimeException("Timestamp Out Of Scope");
+    }
+
+    j2date((int) date, tm);
+    date2j(time, tm);
+  }
+
+  /**
+   * This method is originated from dt2time in timestamp.c of PostgreSQL.
+   *
+   * @param julianDate
+   * @return hour, min, sec, fsec
+   */
+  public static void date2j(long julianDate, TimeMeta tm) {
+    long time = julianDate;
+
+    tm.hours = (int) (time / DateTimeConstants.USECS_PER_HOUR);
+    time -= tm.hours * DateTimeConstants.USECS_PER_HOUR;
+    tm.minutes = (int) (time / DateTimeConstants.USECS_PER_MINUTE);
+    time -= tm.minutes * DateTimeConstants.USECS_PER_MINUTE;
+    tm.secs = (int) (time / DateTimeConstants.USECS_PER_SEC);
+    tm.fsecs = (int) (time - (tm.secs * DateTimeConstants.USECS_PER_SEC));
+  }
+
+  /**
+   * Decode date string which includes delimiters.
+   *
+   * This method is originated from DecodeDate() in datetime.c of PostgreSQL.
+   * @param str The date string like '2013-12-25'.
+   * @param fmask
+   * @param tmaskValue
+   * @param is2digits
+   * @param tm
+   */
+  private static void decodeDate(String str, int fmask, AtomicInteger tmaskValue,  AtomicBoolean is2digits, TimeMeta tm) {
+
+    int idx = 0;
+    int nf = 0;
+    TokenField type = null;
+    int val = 0;
+
+    AtomicInteger dmask = new AtomicInteger(0);
+    int tmask = tmaskValue.get();
+    boolean haveTextMonth = false;
+
+    int length = str.length();
+    char[] dateStr = str.toCharArray();
+    String[] fields = new String[MAXDATEFIELDS];
+
+    while(idx < length && nf < MAXDATEFIELDS) {
+
+      /* skip field separators */
+      while (idx < length && !Character.isLetterOrDigit(dateStr[idx])) {
+        idx++;
+      }
+
+      if (idx == length) {
+        throw new IllegalArgumentException("BAD Format: " + str);
+      }
+
+      int fieldStartIdx = idx;
+      int fieldLength = idx;
+      if (Character.isDigit(dateStr[idx])) {
+        while (idx < length && Character.isDigit(dateStr[idx])) {
+          idx++;
+        }
+        fieldLength = idx;
+      } else if (Character.isLetterOrDigit(dateStr[idx])) {
+        while (idx < length && Character.isLetterOrDigit(dateStr[idx])) {
+          idx++;
+        }
+        fieldLength = idx;
+      }
+
+      fields[nf] = str.substring(fieldStartIdx, fieldLength);
+      nf++;
+    }
+
+    /* look first for text fields, since that will be unambiguous month */
+    for (int i = 0; i < nf; i++) {
+      if (Character.isLetter(fields[i].charAt(0))) {
+        DateToken dateToken =  DateTimeConstants.dateTokenMap.get(fields[i].toLowerCase());
+        type = dateToken.getType();
+
+        if (type == TokenField.IGNORE_DTF) {
+          continue;
+        }
+
+        dmask.set(DateTimeConstants.DTK_M(type));
+        switch (type) {
+          case MONTH:
+            tm.monthOfYear = type.getValue();
+            haveTextMonth = true;
+            break;
+
+          default:
+            throw new IllegalArgumentException("BAD Format: " + str);
+        }
+        if ((fmask & dmask.get()) != 0) {
+          throw new IllegalArgumentException("BAD Format: " + str);
+        }
+
+        fmask |= dmask.get();
+        tmask |= dmask.get();
+
+			/* mark this field as being completed */
+        fields[i] = null;
+      }
+    }
+
+    /* now pick up remaining numeric fields */
+    for (int i = 0; i < nf; i++) {
+      if (fields[i] == null) {
+        continue;
+      }
+
+      length = fields[i].length();
+      if (length  <= 0) {
+        throw new IllegalArgumentException("BAD Format: " + str);
+      }
+
+      decodeNumber(length, fields[i], haveTextMonth, fmask, dmask, tm, new AtomicLong(0), is2digits);
+
+      if ( (fmask & dmask.get()) != 0 ) {
+        throw new IllegalArgumentException("BAD Format: " + str);
+      }
+      fmask |= dmask.get();
+      tmask |= dmask.get();
+    }
+
+    tmaskValue.set(tmask);
+
+    if ((fmask & ~(DateTimeConstants.DTK_M(TokenField.DOY) | DateTimeConstants.DTK_M(TokenField.TZ))) != DateTimeConstants.DTK_DATE_M) {
+      throw new IllegalArgumentException("BAD Format: " + str);
+    }
+  }
+
+  /**
+   * Decode time string which includes delimiters.
+   * Return 0 if okay, a DTERR code if not.
+   *
+   * Only check the lower limit on hours, since this same code can be
+   * used to represent time spans.
+   * @param str
+   * @param fmask
+   * @param range
+   * @param tmask
+   * @param tm
+   * @param fsec
+   */
+  private static void decodeTime(String str, int fmask, int range,
+             AtomicInteger tmask, TimeMeta tm, AtomicLong fsec) {
+    StringBuilder cp = new StringBuilder();
+
+    tmask.set(DateTimeConstants.DTK_TIME_M);
+
+    tm.hours = strtoi(str, 0, cp);
+    if (cp.charAt(0) != ':') {
+      throw new IllegalArgumentException("BAD Format: " + str);
+    }
+
+    tm.minutes = strtoi(cp.toString(), 1, cp);
+
+    if (cp.length() == 0) {
+      tm.secs = 0;
+      fsec.set(0);
+		  /* If it's a MINUTE TO SECOND interval, take 2 fields as being mm:ss */
+      if (range == (DateTimeConstants.INTERVAL_MASK(TokenField.MINUTE) | DateTimeConstants.INTERVAL_MASK(TokenField.SECOND))) {
+        tm.secs = tm.minutes;
+        tm.minutes = tm.hours;
+        tm.hours = 0;
+      }
+    } else if (cp.charAt(0) == '.') {
+		  /* always assume mm:ss.sss is MINUTE TO SECOND */
+      ParseFractionalSecond(cp, fsec);
+      tm.secs = tm.minutes;
+      tm.minutes = tm.hours;
+      tm.hours = 0;
+    }
+    else if (cp.charAt(0) == ':') {
+      tm.secs = strtoi(cp.toString(), 1, cp);
+      if (cp.length() == 0){
+        fsec.set(0);
+      } else if (cp.charAt(0) == '.') {
+        ParseFractionalSecond(cp, fsec);
+      } else{
+        throw new IllegalArgumentException("BAD Format: " + str);
+      }
+    } else {
+      throw new IllegalArgumentException("BAD Format: " + str);
+    }
+
+	/* do a sanity check */
+    if (tm.hours < 0 || tm.minutes < 0 || tm.minutes > DateTimeConstants.MINS_PER_HOUR - 1 ||
+        tm.secs < 0 || tm.secs > DateTimeConstants.SECS_PER_MINUTE ||
+            fsec.get() < 0 ||
+            fsec.get() > DateTimeConstants.USECS_PER_SEC) {
+      throw new IllegalArgumentException("BAD Format: FIELD_OVERFLOW: " + str);
+    }
+  }
+
+  /**
+   * Parse datetime string to julian time.
+   * The result is the UTC time basis.
+   * @param str
+   * @return
+   */
+  public static long toJulianTimestamp(String str) {
+    TimeMeta tm = decodeDateTime(str, MAXDATEFIELDS);
+    return toJulianTimestamp(tm);
+  }
+
+  public static TimeMeta decodeDateTime(String str) {
+    return decodeDateTime(str, MAXDATEFIELDS);
+  }
+
+  /**
+   * Break string into tokens based on a date/time context.
+   *
+   * This method is originated form ParseDateTime() in datetime.c of PostgreSQL.
+   *
+   * @param str The input string
+   * @param maxFields
+   */
+  public static TimeMeta decodeDateTime(String str, int maxFields) {
+    int idx = 0;
+    int nf = 0;
+    int length = str.length();
+    char [] timeStr = str.toCharArray();
+    String [] fields = new String[maxFields];
+    TokenField[] fieldTypes = new TokenField[maxFields];
+
+    while (idx < length) {
+
+      /* Ignore spaces between fields */
+      if (Character.isSpaceChar(timeStr[idx])) {
+        idx++;
+        continue;
+      }
+
+      /* Record start of current field */
+      if (nf >= maxFields) {
+        throw new IllegalArgumentException("Too many fields");
+      }
+
+      int startIdx = idx;
+
+      //January 8, 1999
+      /* leading digit? then date or time */
+      if (Character.isDigit(timeStr[idx])) {
+        idx++;
+        while (idx < length && Character.isDigit(timeStr[idx])) {
+          idx++;
+        }
+
+        if (idx < length && timeStr[idx] == ':') {
+          fieldTypes[nf] = TokenField.DTK_TIME;
+
+          while (idx <length && (Character.isDigit(timeStr[idx]) || timeStr[idx] == ':' || timeStr[idx] == '.')) {
+            idx++;
+          }
+        }
+
+        /* date field? allow embedded text month */
+        else if (idx < length && (timeStr[idx] == '-' || timeStr[idx] == '/' || timeStr[idx] == '.')) {
+
+          /* save delimiting character to use later */
+          char delim = timeStr[idx];
+          idx++;
+
+          /* second field is all digits? then no embedded text month */
+          if (Character.isDigit(timeStr[idx])) {
+            fieldTypes[nf] = delim == '.' ? TokenField.DTK_NUMBER : TokenField.DTK_DATE;
+
+            while (idx < length && Character.isDigit(timeStr[idx])) {
+              idx++;
+            }
+
+            /*
+					   * insist that the delimiters match to get a three-field
+					   * date.
+					   */
+            if (idx < length && timeStr[idx] == delim) {
+              fieldTypes[nf] = TokenField.DTK_DATE;
+              idx++;
+              while (idx < length && (Character.isDigit(timeStr[idx]) || timeStr[idx] == delim)) {
+                idx++;
+              }
+            }
+          } else {
+            fieldTypes[nf] = TokenField.DTK_DATE;
+            while (idx < length && Character.isLetterOrDigit(timeStr[idx]) || timeStr[idx] == delim) {
+              idx++;
+            }
+          }
+        } else {
+          /*
+			     * otherwise, number only and will determine year, month, day, or
+			     * concatenated fields later...
+			    */
+          fieldTypes[nf] = TokenField.DTK_NUMBER;
+        }
+      }
+
+      /* Leading decimal point? Then fractional seconds... */
+      else if (timeStr[idx] == '.') {
+        idx++;
+        while (idx < length && Character.isDigit(timeStr[idx])) {
+          idx++;
+          continue;
+        }
+        fieldTypes[nf] = TokenField.DTK_NUMBER;
+      }
+
+      // text? then date string, month, day of week, special, or timezone
+      else if (Character.isLetter(timeStr[idx])) {
+        boolean isDate;
+        idx++;
+        while (idx < length && Character.isLetter(timeStr[idx])) {
+          idx++;
+        }
+
+        // Dates can have embedded '-', '/', or '.' separators.  It could
+        // also be a timezone name containing embedded '/', '+', '-', '_',
+        // or ':' (but '_' or ':' can't be the first punctuation). If the
+        // next character is a digit or '+', we need to check whether what
+        // we have so far is a recognized non-timezone keyword --- if so,
+        // don't believe that this is the start of a timezone.
+
+        isDate = false;
+        if (idx < length && (timeStr[idx] == '-' || timeStr[idx] == '/' || timeStr[idx] == '.')) {
+          isDate = true;
+        } else if (idx < length && (timeStr[idx] == '+' || Character.isDigit(timeStr[idx]))) {
+          // The original ParseDateTime handles this case. But, we currently omit this case.
+          throw new IllegalArgumentException("Cannot parse this datetime field " + str.substring(startIdx, idx));
+        }
+
+        if (isDate) {
+          fieldTypes[nf] = TokenField.DTK_DATE;
+
+          do {
+            idx++;
+          } while (idx <length && (timeStr[idx] == '+' || timeStr[idx] == '-' || timeStr[idx] == '/' ||
+              timeStr[idx] == '_' || timeStr[idx] == '.' || timeStr[idx] == ':' ||
+              Character.isLetterOrDigit(timeStr[idx])));
+        } else {
+          fieldTypes[nf] = TokenField.DTK_STRING;
+        }
+      }
+
+      // sign? then special or numeric timezone
+      else if (timeStr[idx] == '+' || timeStr[idx] == '-') {
+        idx++;
+
+        // soak up leading whitespace
+        while (idx < length && Character.isSpaceChar(timeStr[idx])) {
+          idx++;
+        }
+
+        // numeric timezone?
+        // note that "DTK_TZ" could also be a signed float or yyyy-mm */
+        if (idx < length && Character.isDigit(timeStr[idx])) {
+          fieldTypes[nf] = TokenField.DTK_TZ;
+          idx++;
+
+          while (idx < length && (Character.isDigit(timeStr[idx]) || timeStr[idx] == ':' || timeStr[idx] == '.' ||
+              timeStr[idx] == '-')) {
+            idx++;
+          }
+        }
+        /* special? */
+        else if (idx < length && Character.isLetter(timeStr[idx])) {
+          fieldTypes[nf] = TokenField.DTK_SPECIAL;
+          idx++;
+
+          while (idx < length && Character.isLetter(timeStr[idx])) {
+            idx++;
+          }
+        } else {
+          throw new IllegalArgumentException("BAD Format: " + str.substring(startIdx, idx));
+        }
+      }
+      /* ignore other punctuation but use as delimiter */
+      else if (isPunctuation(timeStr[idx])) {
+        idx++;
+        continue;
+      } else {  // otherwise, something is not right...
+        throw new IllegalArgumentException("BAD datetime format: " + str.substring(startIdx, idx));
+      }
+
+      fields[nf] = str.substring(startIdx, idx);
+      nf++;
+    }
+    return decodeDateTime(fields, fieldTypes, nf);
+  }
+
+  /**
+   * Fetch a fractional-second value with suitable error checking
+   * @param cp
+   * @param fsec
+   */
+  public static void ParseFractionalSecond(StringBuilder cp, AtomicLong fsec) {
+	  /* Caller should always pass the start of the fraction part */
+    double frac = strtod(cp.toString(), 1, cp);
+    fsec.set(Math.round(frac * 1000000));
+  }
+
+  /**
+   * Interpret string as a numeric timezone.
+   *
+   * Return 0 if okay (and set *tzp), a DTERR code if not okay.
+   *
+   * NB: this must *not* ereport on failure; see commands/variable.c.
+   * @param str
+   * @param tz
+   */
+  public static void decodeTimezone(String str, AtomicInteger tz) {
+    int min = 0;
+    int sec = 0;
+    StringBuilder sb = new StringBuilder();
+
+    int strIndex = 0;
+	  /* leading character must be "+" or "-" */
+    if (str.charAt(strIndex) != '+' && str.charAt(strIndex) != '-') {
+      throw new IllegalArgumentException("BAD Format: " + str);
+    }
+    int hr = strtoi(str, 1, sb);
+
+	  /* explicit delimiter? */
+    if (sb.length() > 0 && sb.charAt(0) == ':') {
+      min = strtoi(sb.toString(), 1, sb);
+      if (sb.charAt(0) == ':') {
+        sec = strtoi(sb.toString(), 1, sb);
+      }
+    }
+	  /* otherwise, might have run things together... */
+    else if (sb.length() == 0 && str.length() > 3) {
+      min = hr % 100;
+      hr = hr / 100;
+		/* we could, but don't, support a run-together hhmmss format */
+    } else {
+      min = 0;
+    }
+	  /* Range-check the values; see notes in datatype/timestamp.h */
+    if (hr < 0 || hr > DateTimeConstants.MAX_TZDISP_HOUR) {
+      throw new IllegalArgumentException("BAD Format: TZDISP_OVERFLOW: " + str);
+    }
+    if (min < 0 || min >= DateTimeConstants.MINS_PER_HOUR) {
+      throw new IllegalArgumentException("BAD Format: TZDISP_OVERFLOW: " + str);
+    }
+    if (sec < 0 || sec >= DateTimeConstants.SECS_PER_MINUTE) {
+      throw new IllegalArgumentException("BAD Format: TZDISP_OVERFLOW: " + str);
+    }
+
+    int tzValue = (hr * DateTimeConstants.MINS_PER_HOUR + min) * DateTimeConstants.SECS_PER_MINUTE + sec;
+    if (str.charAt(strIndex) == '-') {
+      tzValue = -tzValue;
+    }
+    tz.set(tzValue);
+  }
+
+  /**
+   * Interpret plain numeric field as a date value in context.
+   * @param flen
+   * @param str
+   * @param haveTextMonth
+   * @param fmask
+   * @param tmaskValue
+   * @param tm
+   * @param fsec
+   * @param is2digits
+   */
+  private static void decodeNumber(int flen, String str, boolean haveTextMonth, int fmask,
+               AtomicInteger tmaskValue, TimeMeta tm, AtomicLong fsec, AtomicBoolean is2digits) {
+    int	val;
+    StringBuilder cp = new StringBuilder();
+
+    int tmask = 0;
+    tmaskValue.set(tmask);
+
+    val = strtoi(str, 0, cp);
+    if (cp.toString().equals(str)) {
+      throw new IllegalArgumentException("BAD Format: " + str);
+    }
+
+    if (cp.length() > 0 && cp.charAt(0) == '.') {
+		/*
+		 * More than two digits before decimal point? Then could be a date or
+		 * a run-together time: 2001.360 20011225 040506.789
+		 */
+      if (cp.length() - str.length() > 2) {
+        decodeNumberField(flen, str,
+            (fmask | DateTimeConstants.DTK_DATE_M),
+            tmaskValue, tm,
+            fsec, is2digits);
+        return;
+      }
+      ParseFractionalSecond(cp, fsec);
+    }
+
+  	// Special case for day of year
+    if (flen == 3 && (fmask & DateTimeConstants.DTK_DATE_M) == DateTimeConstants.DTK_M(TokenField.YEAR) &&
+        val >= 1 && val <= 366) {
+      tmaskValue.set((DateTimeConstants.DTK_M(TokenField.DOY) |
+          DateTimeConstants.DTK_M(TokenField.MONTH) |
+          DateTimeConstants.DTK_M(TokenField.DAY)));
+      tm.dayOfYear = val;
+		  // tm_mon and tm_mday can't actually be set yet ...
+      return;
+    }
+
+	  /* Switch based on what we have so far */
+    int checkValue = fmask & DateTimeConstants.DTK_DATE_M;
+    if (checkValue == 0) {
+			/*
+			 * Nothing so far; make a decision about what we think the input
+			 * is.	There used to be lots of heuristics here, but the
+			 * consensus now is to be paranoid.  It *must* be either
+			 * YYYY-MM-DD (with a more-than-two-digit year field), or the
+			 * field order defined by DateOrder.
+			 */
+      if (flen >= 3 || TajoConf.getDateOrder() == DateTimeConstants.DATEORDER_YMD) {
+        tmaskValue.set(DateTimeConstants.DTK_M(TokenField.YEAR));
+        tm.years = val;
+      } else if (TajoConf.getDateOrder() == DateTimeConstants.DATEORDER_DMY) {
+        tmaskValue.set(DateTimeConstants.DTK_M(TokenField.DAY));
+        tm.dayOfMonth = val;
+      } else {
+        tmaskValue.set(DateTimeConstants.DTK_M(TokenField.MONTH));
+        tm.monthOfYear = val;
+      }
+    } else if (checkValue == (DateTimeConstants.DTK_M(TokenField.YEAR))) {
+			/* Must be at second field of YY-MM-DD */
+      tmaskValue.set(DateTimeConstants.DTK_M(TokenField.MONTH));
+      tm.monthOfYear = val;
+    } else if (checkValue == (DateTimeConstants.DTK_M(TokenField.MONTH))) {
+      if (haveTextMonth) {
+				/*
+				 * We are at the first numeric field of a date that included a
+				 * textual month name.	We want to support the variants
+				 * MON-DD-YYYY, DD-MON-YYYY, and YYYY-MON-DD as unambiguous
+				 * inputs.	We will also accept MON-DD-YY or DD-MON-YY in
+				 * either DMY or MDY modes, as well as YY-MON-DD in YMD mode.
+				 */
+        if (flen >= 3 || TajoConf.getDateOrder() == DateTimeConstants.DATEORDER_YMD) {
+          tmaskValue.set(DateTimeConstants.DTK_M(TokenField.YEAR));
+          tm.years = val;
+        } else {
+          tmaskValue.set(DateTimeConstants.DTK_M(TokenField.DAY));
+          tm.dayOfMonth = val;
+        }
+      } else {
+				/* Must be at second field of MM-DD-YY */
+        tmaskValue.set(DateTimeConstants.DTK_M(TokenField.DAY));
+        tm.dayOfMonth = val;
+      }
+    } else if (checkValue == (DateTimeConstants.DTK_M(TokenField.YEAR) | DateTimeConstants.DTK_M(TokenField.MONTH))) {
+      if (haveTextMonth) {
+				/* Need to accept DD-MON-YYYY even in YMD mode */
+        if (flen >= 3 && is2digits.get()) {
+					/* Guess that first numeric field is day was wrong */
+          tmaskValue.set(DateTimeConstants.DTK_M(TokenField.DAY));		/* YEAR is already set */
+          tm.dayOfMonth = tm.years;
+          tm.years = val;
+          is2digits.set(false);
+        } else {
+          tmaskValue.set(DateTimeConstants.DTK_M(TokenField.DAY));
+          tm.dayOfMonth = val;
+        }
+      } else {
+				/* Must be at third field of YY-MM-DD */
+        tmaskValue.set(DateTimeConstants.DTK_M(TokenField.DAY));
+        tm.dayOfMonth = val;
+      }
+    } else if (checkValue == DateTimeConstants.DTK_M(TokenField.DAY)) {
+			/* Must be at second field of DD-MM-YY */
+      tmaskValue.set(DateTimeConstants.DTK_M(TokenField.MONTH));
+      tm.monthOfYear = val;
+    } else if (checkValue == (DateTimeConstants.DTK_M(TokenField.MONTH) | DateTimeConstants.DTK_M(TokenField.DAY))) {
+			/* Must be at third field of DD-MM-YY or MM-DD-YY */
+      tmaskValue.set(DateTimeConstants.DTK_M(TokenField.YEAR));
+      tm.years = val;
+    } else if (checkValue == (DateTimeConstants.DTK_M(TokenField.YEAR) | DateTimeConstants.DTK_M(TokenField.MONTH) | DateTimeConstants.DTK_M(TokenField.DAY))) {
+			/* we have all the date, so it must be a time field */
+      decodeNumberField(flen, str, fmask,
+          tmaskValue, tm,
+          fsec, is2digits);
+      return;
+
+    } else {
+      throw new IllegalArgumentException("BAD Format: " + str);
+    }
+
+	/*
+	 * When processing a year field, mark it for adjustment if it's only one
+	 * or two digits.
+	 */
+    if (tmaskValue.get() == DateTimeConstants.DTK_M(TokenField.YEAR)) {
+      is2digits.set(flen <= 2);
+    }
+  }
+
+  /**
+   * Interpret numeric string as a concatenated date or time field.
+   *
+   * Use the context of previously decoded fields to help with
+   * the interpretation.
+   * @param len
+   * @param str
+   * @param fmask
+   * @param tmaskValue
+   * @param tm
+   * @param fsec
+   * @param is2digits
+   * @return
+   */
+  static TokenField decodeNumberField(int len, String str, int fmask,
+                    AtomicInteger tmaskValue, TimeMeta tm, AtomicLong fsec, AtomicBoolean is2digits) {
+    /*
+     * Have a decimal point? Then this is a date or something with a seconds
+     * field...
+     */
+    int index = str.indexOf('.');
+
+    if (index >= 0) {
+      String cp = str.substring(index + 1);
+		/*
+		 * Can we use ParseFractionalSecond here?  Not clear whether trailing
+		 * junk should be rejected ...
+		 */
+      double frac = strtod(cp, 0, null);
+      fsec.set(Math.round(frac * 1000000));
+		  /* Now truncate off the fraction for further processing */
+      len = str.length();
+    }
+	  /* No decimal point and no complete date yet? */
+    else if ((fmask & DateTimeConstants.DTK_DATE_M) != DateTimeConstants.DTK_DATE_M) {
+		  /* yyyymmdd? */
+      if (len == 8) {
+        tmaskValue.set(DateTimeConstants.DTK_DATE_M);
+
+        tm.dayOfMonth = Integer.parseInt(str.substring(6));
+        tm.monthOfYear = Integer.parseInt(str.substring(4, 6));
+        tm.years = Integer.parseInt(str.substring(0, 4));
+
+        return TokenField.DTK_DATE;
+      }
+		  /* yymmdd? */
+      else if (len == 6) {
+        tmaskValue.set(DateTimeConstants.DTK_DATE_M);
+        tm.dayOfMonth = Integer.parseInt(str.substring(4));
+        tm.monthOfYear = Integer.parseInt(str.substring(2, 4));
+        tm.years = Integer.parseInt(str.substring(0, 2));
+        is2digits.set(true);
+
+        return TokenField.DTK_DATE;
+      }
+    }
+
+	  /* not all time fields are specified? */
+    if ((fmask & DateTimeConstants.DTK_TIME_M) != DateTimeConstants.DTK_TIME_M) {
+		  /* hhmmss */
+      if (len == 6) {
+        tmaskValue.set(DateTimeConstants.DTK_TIME_M);
+        tm.secs = Integer.parseInt(str.substring(4));
+        tm.minutes = Integer.parseInt(str.substring(2, 4));
+        tm.hours = Integer.parseInt(str.substring(0, 2));
+
+        return TokenField.DTK_TIME;
+      }
+		  /* hhmm? */
+      else if (len == 4) {
+        tmaskValue.set(DateTimeConstants.DTK_TIME_M);
+        tm.secs = 0;
+        tm.minutes = Integer.parseInt(str.substring(2, 4));
+        tm.hours = Integer.parseInt(str.substring(0, 2));
+
+        return TokenField.DTK_TIME;
+      }
+    }
+
+    throw new IllegalArgumentException("BAD Format: " + str);
+  }
+
+  private static TimeMeta decodeDateTime(String[] fields, TokenField[] fieldTypes, int nf) {
+    int	fmask = 0;
+    AtomicInteger tmask = new AtomicInteger(0);
+    int type;
+
+    /* "prefix type" for ISO y2001m02d04 format */
+    TokenField ptype = null;
+
+    boolean		haveTextMonth = false;
+    boolean		isjulian = false;
+    AtomicBoolean is2digits = new AtomicBoolean(false);
+    boolean		bc = false;
+
+    int tzp = Integer.MAX_VALUE;
+    String namedTimeZone = null;
+
+    StringBuilder sb = new StringBuilder();
+    // We'll insist on at least all of the date fields, but initialize the
+    // remaining fields in case they are not set later...
+    TokenField dtype = TokenField.DTK_DATE;
+    TokenField mer = null;
+
+    TimeMeta tm = new TimeMeta();
+    TimeMeta cur_tm = new TimeMeta();
+
+    AtomicLong fsec = new AtomicLong();
+    AtomicInteger tz = new AtomicInteger(Integer.MAX_VALUE);
+
+    // don't know daylight savings time status apriori */
+    tm.isDST = false;
+
+    for (int i = 0; i < nf; i++) {
+      if (fieldTypes[i] == null) {
+        continue;
+      }
+      switch (fieldTypes[i]) {
+        case DTK_DATE:
+          /***
+           * Integral julian day with attached time zone?
+           * All other forms with JD will be separated into
+           * distinct fields, so we handle just this case here.
+           ***/
+          if (ptype == TokenField.DTK_JULIAN) {
+            int			val;
+
+            if (tzp == Integer.MAX_VALUE) {
+              throw new IllegalArgumentException("BAD Format: " + fields[i]);
+            }
+
+            val = strtoi(fields[i], 0, sb);
+
+            date2j(val, tm);
+            isjulian = true;
+
+					  /* Get the time zone from the end of the string */
+            decodeTimezone(sb.toString(), tz);
+
+            tmask.set(DateTimeConstants.DTK_DATE_M | DateTimeConstants.DTK_TIME_M | DateTimeConstants.DTK_M(TokenField.TZ));
+            ptype = null;
+            break;
+          }
+          /***
+           * Already have a date? Then this might be a time zone name
+           * with embedded punctuation (e.g. "America/New_York") or a
+           * run-together time with trailing time zone (e.g. hhmmss-zz).
+           * - thomas 2001-12-25
+           *
+           * We consider it a time zone if we already have month & day.
+           * This is to allow the form "mmm dd hhmmss tz year", which
+           * we've historically accepted.
+           ***/
+          else if (ptype != null ||
+              ((fmask & (DateTimeConstants.DTK_M(TokenField.MONTH) | DateTimeConstants.DTK_M(TokenField.DAY))) ==
+                  (DateTimeConstants.DTK_M(TokenField.MONTH) | DateTimeConstants.DTK_M(TokenField.DAY))))
+          {
+					/* No time zone accepted? Then quit... */
+            if (tzp == Integer.MAX_VALUE) {
+              throw new IllegalArgumentException("BAD Format: " + fields[i]);
+            }
+
+            if (Character.isDigit(fields[i].charAt(0)) || ptype != null) {
+              if (ptype != null) {
+							  /* Sanity check; should not fail this test */
+                if (ptype != TokenField.DTK_TIME) {
+                  throw new IllegalArgumentException("BAD Format: " + fields[i]);
+                }
+                ptype = null;
+              }
+
+						/*
+						 * Starts with a digit but we already have a time
+						 * field? Then we are in trouble with a date and time
+						 * already...
+						 */
+              if ((fmask & DateTimeConstants.DTK_TIME_M) == DateTimeConstants.DTK_TIME_M) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+
+              int index = fields[i].indexOf("-");
+              if (index < 0) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+
+						  /* Get the time zone from the end of the string */
+              decodeTimezone(fields[i].substring(index + 1), tz);
+
+              /*
+               * Then read the rest of the field as a concatenated
+               * time
+               */
+              decodeNumberField(fields[i].length(), fields[i],
+                  fmask,
+                  tmask, tm,
+                  fsec, is2digits);
+
+              /*
+               * modify tmask after returning from
+               * DecodeNumberField()
+               */
+              tmask.set(tmask.get() | DateTimeConstants.DTK_M(TokenField.TZ));
+            }
+            else {
+              namedTimeZone = pg_tzset(fields[i]);
+              if (namedTimeZone == null) {
+							/*
+							 * We should return an error code instead of
+							 * ereport'ing directly, but then there is no way
+							 * to report the bad time zone name.
+							 */
+                throw new IllegalArgumentException("BAD Format: time zone \"%s\" not recognized: " + fields[i]);
+              }
+						  /* we'll apply the zone setting below */
+              tmask.set(DateTimeConstants.DTK_M(TokenField.TZ));
+            }
+          } else {
+            decodeDate(fields[i], fmask, tmask, is2digits, tm);
+          }
+          break;
+
+        case DTK_TIME:
+          decodeTime(fields[i], (fmask | DateTimeConstants.DTK_DATE_M),
+              DateTimeConstants.INTERVAL_FULL_RANGE,
+              tmask, tm, fsec);
+          break;
+
+        case DTK_TZ: {
+          decodeTimezone(fields[i], tz);
+          tmask.set(DateTimeConstants.DTK_M(TokenField.TZ));
+          break;
+        }
+
+        case DTK_NUMBER:
+
+          /*
+           * Was this an "ISO date" with embedded field labels? An
+           * example is "y2001m02d04" - thomas 2001-02-04
+           */
+          if (ptype != null) {
+            int val = strtoi(fields[i], 0, sb);
+
+            /*
+             * only a few kinds are allowed to have an embedded
+             * decimal
+             */
+            if (sb.length() == 0) {
+              continue;
+            }
+            if (sb.charAt(0) == '.') {
+              switch (ptype) {
+                case DTK_JULIAN:
+                case DTK_TIME:
+                case DTK_SECOND:
+                  break;
+                default:
+                  throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+            } else {
+              throw new IllegalArgumentException("BAD Format: " + fields[i]);
+            }
+            switch (ptype) {
+              case DTK_YEAR:
+                tm.years = val;
+                tmask.set(DateTimeConstants.DTK_M(TokenField.YEAR));
+                break;
+
+              case DTK_MONTH:
+
+                /*
+                 * already have a month and hour? then assume
+                 * minutes
+                 */
+                if ((fmask & DateTimeConstants.DTK_M(TokenField.MONTH)) != 0 &&
+                    (fmask & DateTimeConstants.DTK_M(TokenField.HOUR)) != 0) {
+                  tm.minutes = val;
+                  tmask.set(DateTimeConstants.DTK_M(TokenField.MINUTE));
+                }
+                else {
+                  tm.monthOfYear = val;
+                  tmask.set(DateTimeConstants.DTK_M(TokenField.MONTH));
+                }
+                break;
+
+              case DTK_DAY:
+                tm.dayOfMonth = val;
+                tmask.set(DateTimeConstants.DTK_M(TokenField.DAY));
+                break;
+
+              case DTK_HOUR:
+                tm.hours = val;
+                tmask.set(DateTimeConstants.DTK_M(TokenField.HOUR));
+                break;
+
+              case DTK_MINUTE:
+                tm.minutes = val;
+                tmask.set(DateTimeConstants.DTK_M(TokenField.MINUTE));
+                break;
+
+              case DTK_SECOND:
+                tm.secs = val;
+                tmask.set(DateTimeConstants.DTK_M(TokenField.SECOND));
+                if (sb.charAt(0) == '.') {
+                  ParseFractionalSecond(sb, fsec);
+                  tmask.set(DateTimeConstants.DTK_ALL_SECS_M);
+                }
+                break;
+              case DTK_TZ:
+                tmask.set(DateTimeConstants.DTK_M(TokenField.TZ));
+                decodeTimezone(fields[i], tz);
+                break;
+
+              case DTK_JULIAN:
+							  /* previous field was a label for "julian date" */
+                if (val < 0) {
+                  throw new IllegalArgumentException("BAD Format: FIELD_OVERFLOW: " + fields[i]);
+                }
+                tmask.set(DateTimeConstants.DTK_DATE_M);
+                date2j(val, tm);
+                isjulian = true;
+
+							  /* fractional Julian Day? */
+                if (sb.charAt(0) == '.') {
+                  double time = strtod(sb.toString(), 0, sb);
+
+                  time *= DateTimeConstants.USECS_PER_DAY;
+                  date2j((long)time, tm);
+                  tmask.set(tmask.get() | DateTimeConstants.DTK_TIME_M);
+                }
+                break;
+
+              case DTK_TIME:
+							/* previous field was "t" for ISO time */
+                decodeNumberField(fields[i].length(), fields[i],
+                    (fmask | DateTimeConstants.DTK_DATE_M),
+                    tmask, tm,
+                    fsec, is2digits);
+                if (tmask.get() != DateTimeConstants.DTK_TIME_M) {
+                  throw new IllegalArgumentException("BAD Format: FIELD_OVERFLOW: " + fields[i]);
+                }
+                break;
+
+              default:
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+            }
+
+            ptype = null;
+            dtype = TokenField.DTK_DATE;
+          } else {
+            int flen = fields[i].length();
+            int index = fields[i].indexOf(".");
+            String cp = null;
+            if (index > 0) {
+              cp = fields[i].substring(index + 1);
+            }
+
+					  /* Embedded decimal and no date yet? */
+            if (cp != null && ((fmask & DateTimeConstants.DTK_DATE_M) == 0 )) {
+              decodeDate(fields[i], fmask,
+                  tmask, is2digits, tm);
+            }
+					  /* embedded decimal and several digits before? */
+            else if (cp != null && flen - cp.length() > 2) {
+						/*
+						 * Interpret as a concatenated date or time Set the
+						 * type field to allow decoding other fields later.
+						 * Example: 20011223 or 040506
+						 */
+              decodeNumberField(flen, fields[i], fmask,
+                  tmask, tm,
+                  fsec, is2digits);
+            }
+            else if (flen > 4) {
+              decodeNumberField(flen, fields[i], fmask,
+                  tmask, tm,
+                  fsec, is2digits);
+            }
+					  /* otherwise it is a single date/time field... */
+            else {
+              decodeNumber(flen, fields[i],
+                  haveTextMonth, fmask,
+                  tmask, tm,
+                  fsec, is2digits);
+            }
+          }
+          break;
+        case DTK_STRING:
+        case DTK_SPECIAL:
+          DateToken dateToken =  DateTimeConstants.dateTokenMap.get(fields[i].toLowerCase());
+          if (dateToken == null) {
+            throw new IllegalArgumentException("BAD Format: " + fields[i]);
+          }
+          tmask.set(DateTimeConstants.DTK_M(dateToken.getType()));
+          switch (dateToken.getType()) {
+            case RESERV:
+              switch(dateToken.getValueType()) {
+                case DTK_CURRENT:
+                  throw new IllegalArgumentException("BAD Format: date/time value \"current\" is no longer supported" + fields[i]);
+
+                case DTK_NOW:
+                  tmask.set(DateTimeConstants.DTK_DATE_M | DateTimeConstants.DTK_TIME_M | DateTimeConstants.DTK_M(TokenField.TZ));
+                  dtype = TokenField.DTK_DATE;
+                  date2j(javaTimeToJulianTime(System.currentTimeMillis()), tm);
+                  break;
+
+                case DTK_YESTERDAY:
+                  tmask.set(DateTimeConstants.DTK_DATE_M);
+                  dtype = TokenField.DTK_DATE;
+                  date2j(javaTimeToJulianTime(System.currentTimeMillis()), tm);
+                  tm.plusDays(-1);
+                  break;
+
+                case DTK_TODAY:
+                  tmask.set(DateTimeConstants.DTK_DATE_M);
+                  dtype = TokenField.DTK_DATE;
+                  date2j(javaTimeToJulianTime(System.currentTimeMillis()), cur_tm);
+                  tm.years = cur_tm.years;
+                  tm.monthOfYear = cur_tm.monthOfYear;
+                  tm.dayOfMonth = cur_tm.dayOfMonth;
+                  break;
+
+                case DTK_TOMORROW:
+                  tmask.set(DateTimeConstants.DTK_DATE_M);
+                  dtype = TokenField.DTK_DATE;
+                  date2j(javaTimeToJulianTime(System.currentTimeMillis()), tm);
+                  tm.plusDays(1);
+                  break;
+
+                case DTK_ZULU:
+                  tmask.set(DateTimeConstants.DTK_TIME_M | DateTimeConstants.DTK_M(TokenField.TZ));
+                  dtype = TokenField.DTK_DATE;
+                  tm.hours = 0;
+                  tm.minutes = 0;
+                  tm.secs = 0;
+                  break;
+
+                default:
+                  dtype = dateToken.getValueType();
+              }
+              break;
+
+            case MONTH:
+              /*
+               * already have a (numeric) month? then see if we can
+               * substitute...
+               */
+              if ((fmask & DateTimeConstants.DTK_M(TokenField.MONTH)) != 0 && !haveTextMonth &&
+                  (fmask & DateTimeConstants.DTK_M(TokenField.DAY)) == 0 &&
+                  tm.monthOfYear >= 1 && tm.monthOfYear <= 31) {
+                tm.dayOfMonth = tm.monthOfYear;
+                tmask.set(DateTimeConstants.DTK_M(TokenField.DAY));
+              }
+              haveTextMonth = true;
+              tm.monthOfYear = dateToken.getValue();
+              break;
+
+            case DTZMOD:
+
+						/*
+						 * daylight savings time modifier (solves "MET DST"
+						 * syntax)
+						 */
+              tmask.set(tmask.get() | DateTimeConstants.DTK_M(TokenField.DTZ));
+              tm.isDST = true;
+              if (tzp == Integer.MAX_VALUE) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+              tzp += dateToken.getValue() * DateTimeConstants.MINS_PER_HOUR;
+              break;
+
+            case DTZ:
+
+						/*
+						 * set mask for TZ here _or_ check for DTZ later when
+						 * getting default timezone
+						 */
+              tmask.set(tmask.get() | DateTimeConstants.DTK_M(TokenField.TZ));
+              tm.isDST = true;
+              if (tzp == Integer.MAX_VALUE) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+              tzp = dateToken.getValue() * DateTimeConstants.MINS_PER_HOUR;
+              break;
+
+            case TZ:
+              tm.isDST = false;
+              if (tzp == Integer.MAX_VALUE) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+              tzp = dateToken.getValue() * DateTimeConstants.MINS_PER_HOUR;
+              break;
+
+            case IGNORE_DTF:
+              break;
+
+            case AMPM:
+              mer = dateToken.getValueType();
+              break;
+
+            case ADBC:
+              bc = (dateToken.getValueType() == TokenField.BC);
+              break;
+
+            case DOW:
+              tm.dayOfWeek = dateToken.getValue();
+              break;
+
+            case UNITS:
+              tmask.set(0);
+              ptype = dateToken.getValueType();
+              break;
+
+            case ISOTIME:
+
+						/*
+						 * This is a filler field "t" indicating that the next
+						 * field is time. Try to verify that this is sensible.
+						 */
+              tmask.set(0);
+
+						/* No preceding date? Then quit... */
+              if ((fmask & DateTimeConstants.DTK_DATE_M) != DateTimeConstants.DTK_DATE_M) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+
+              /***
+               * We will need one of the following fields:
+               *	DTK_NUMBER should be hhmmss.fff
+               *	DTK_TIME should be hh:mm:ss.fff
+               *	DTK_DATE should be hhmmss-zz
+               ***/
+              if (i >= nf - 1 ||
+                  (fieldTypes[i + 1] != TokenField.DTK_NUMBER &&
+                      fieldTypes[i + 1] != TokenField.DTK_TIME &&
+                      fieldTypes[i + 1] != TokenField.DTK_DATE)) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+
+              ptype = dateToken.getValueType();
+              break;
+
+            case UNKNOWN_FIELD:
+
+						/*
+						 * Before giving up and declaring error, check to see
+						 * if it is an all-alpha timezone name.
+						 */
+              namedTimeZone = pg_tzset(fields[i]);
+              if (namedTimeZone == null) {
+                throw new IllegalArgumentException("BAD Format: " + fields[i]);
+              }
+						/* we'll apply the zone setting below */
+              tmask.set(DateTimeConstants.DTK_M(TokenField.TZ));
+              break;
+
+            default:
+              throw new IllegalArgumentException("BAD Format: " + fields[i]);
+          }
+          break;
+      }
+      if ((tmask.get() & fmask) != 0) {
+        throw new IllegalArgumentException("BAD Format: " + fields[i]);
+      }
+      fmask |= tmask.get();
+    }   /* end loop over fields */
+
+    tm.fsecs = fsec.intValue();
+    tm.timeZone = tz.get();
+    /* do final checking/adjustment of Y/M/D fields */
+    validateDate(fmask, isjulian, is2digits.get(), bc, tm);
+
+	  /* handle AM/PM */
+    if (mer != null && mer != TokenField.HR24 && tm.hours > DateTimeConstants.HOURS_PER_DAY / 2) {
+      throw new IllegalArgumentException("BAD Format: overflow hour: " + tm.hours);
+    }
+    if (mer != null && mer == TokenField.AM && tm.hours == DateTimeConstants.HOURS_PER_DAY / 2) {
+      tm.hours = 0;
+    } else if (mer != null && mer == TokenField.PM && tm.hours != DateTimeConstants.HOURS_PER_DAY / 2) {
+      tm.hours += DateTimeConstants.HOURS_PER_DAY / 2;
+    }
+	  /* do additional checking for full date specs... */
+    if (dtype == TokenField.DTK_DATE) {
+      if ((fmask & DateTimeConstants.DTK_DATE_M) != DateTimeConstants.DTK_DATE_M) {
+        if ((fmask & DateTimeConstants.DTK_TIME_M) == DateTimeConstants.DTK_TIME_M) {
+          return tm;
+        }
+        throw new IllegalArgumentException("BAD Format: " + tm);
+      }
+
+		/*
+		 * If we had a full timezone spec, compute the offset (we could not do
+		 * it before, because we need the date to resolve DST status).
+		 */
+      if (namedTimeZone != null) {
+			/* daylight savings time modifier disallowed with full TZ */
+        if ( (fmask & DateTimeConstants.DTK_M(TokenField.DTZMOD)) != 0 ) {
+          throw new IllegalArgumentException("BAD Format: " + tm);
+        }
+      }
+    }
+
+    return tm;
+  }
+
+  private static String pg_tzset(String str) {
+    //TODO implements logic
+    return null;
+  }
+
+  /**
+   * Check valid year/month/day values, handle BC and DOY cases
+   * Return 0 if okay, a DTERR code if not.
+   * @param fmask
+   * @param isjulian
+   * @param is2digits
+   * @param bc
+   * @param tm
+   * @return
+   */
+  private static int validateDate(int fmask, boolean isjulian, boolean is2digits, boolean bc, TimeMeta tm) {
+    if ( (fmask & DateTimeConstants.DTK_M(TokenField.YEAR)) != 0 ) {
+      if (isjulian) {
+			/* tm_year is correct and should not be touched */
+      } else if (bc) {
+			  /* there is no year zero in AD/BC notation */
+        if (tm.years <= 0) {
+          throw new IllegalArgumentException("BAD Format: year overflow:" + tm.years);
+        }
+			  /* internally, we represent 1 BC as year zero, 2 BC as -1, etc */
+        tm.years = -(tm.years - 1);
+      }
+      else if (is2digits) {
+			  /* process 1 or 2-digit input as 1970-2069 AD, allow '0' and '00' */
+        if (tm.years < 0) { /* just paranoia */
+          throw new IllegalArgumentException("BAD Format: year overflow:" + tm.years);
+        }
+        if (tm.years < 70) {
+          tm.years += 2000;
+        } else if (tm.years < 100) {
+          tm.years += 1900;
+        }
+      }
+      else {
+			  /* there is no year zero in AD/BC notation */
+        if (tm.years <= 0) {
+          throw new IllegalArgumentException("BAD Format: year overflow:" + tm.years);
+        }
+      }
+    }
+
+	  /* now that we have correct year, decode DOY */
+    if ( (fmask & DateTimeConstants.DTK_M(TokenField.DOY)) != 0 ) {
+      j2date(date2j(tm.years, 1, 1) + tm.dayOfYear - 1, tm);
+    }
+
+	  /* check for valid month */
+    if ( (fmask & DateTimeConstants.DTK_M(TokenField.MONTH)) != 0 ) {
+      if (tm.monthOfYear < 1 || tm.monthOfYear > DateTimeConstants.MONTHS_PER_YEAR) {
+        throw new IllegalArgumentException("BAD Format: month overflow:" + tm.monthOfYear);
+      }
+    }
+
+	  /* minimal check for valid day */
+    if ( (fmask & DateTimeConstants.DTK_M(TokenField.DAY)) != 0 ) {
+      if (tm.dayOfMonth < 1 || tm.dayOfMonth > 31) {
+        throw new IllegalArgumentException("BAD Format: day overflow:" + tm.dayOfMonth);
+      }
+    }
+
+    if ((fmask & DateTimeConstants.DTK_DATE_M) == DateTimeConstants.DTK_DATE_M) {
+		/*
+		 * Check for valid day of month, now that we know for sure the month
+		 * and year.  Note we don't use MD_FIELD_OVERFLOW here, since it seems
+		 * unlikely that "Feb 29" is a YMD-order error.
+		 */
+      boolean leapYear = isLeapYear(tm.years);
+      if (tm.dayOfMonth > DateTimeConstants.DAY_OF_MONTH[leapYear ? 1: 0][tm.monthOfYear - 1])
+        throw new IllegalArgumentException("BAD Format: day overflow:" + tm.dayOfMonth);
+    }
+
+    return 0;
+  }
+
+  public static int strtoi(String str, int startIndex, StringBuilder sb) {
+    sb.setLength(0);
+    char[] chars = str.toCharArray();
+
+    int index = startIndex;
+    for (; index < chars.length; index++) {
+      if (!Character.isDigit(chars[index])) {
+        break;
+      }
+    }
+
+    int val = index == startIndex ? 0 : Integer.parseInt(str.substring(startIndex, index));
+    sb.append(chars, index, chars.length - index);
+
+    return val;
+  }
+
+  public static long strtol(String str, int startIndex, StringBuilder sb) {
+    sb.setLength(0);
+    char[] chars = str.toCharArray();
+
+    int index = startIndex;
+    for (; index < chars.length; index++) {
+      if (!Character.isDigit(chars[index])) {
+        break;
+      }
+    }
+
+    long val = index == startIndex ? 0 : Long.parseLong(str.substring(startIndex, index));
+    sb.append(chars, index, chars.length - index);
+
+    return val;
+  }
+
+  public static double strtod(String str, int strIndex, StringBuilder sb) {
+    if (sb != null) {
+      sb.setLength(0);
+    }
+    char[] chars = str.toCharArray();
+
+    int index = strIndex;
+    for (; index < chars.length; index++) {
+      if (!Character.isDigit(chars[index])) {
+        break;
+      }
+    }
+
+    double val = Double.parseDouble(str.substring(0, index));
+    if (sb != null) {
+      sb.append(chars, index, chars.length - index);
+    }
+    return val;
+  }
+
+  /**
+   * Check whether it is a punctuation character or not.
+   * @param c The character to be checked
+   * @return True if it is a punctuation character. Otherwise, false.
+   */
+  public static boolean isPunctuation(char c) {
+    return ((c >= '!' && c <= '/') ||
+        (c >= ':' && c <= '@') ||
+        (c >= '[' && c <= '`') ||
+        (c >= '{' && c <= '~'));
+  }
+
+  public static String toString(TimeMeta tm) {
+    return encodeDateTime(tm, DateStyle.ISO_DATES);
+  }
+
+  /**
+   * Encode date and time interpreted as local time.
+   *
+   * tm and fsec are the value to encode, print_tz determines whether to include
+   * a time zone (the difference between timestamp and timestamptz types), tz is
+   * the numeric time zone offset, tzn is the textual time zone, which if
+   * specified will be used instead of tz by some styles, style is the date
+   * style, str is where to write the output.
+   *
+   * Supported date styles:
+   *	Postgres - day mon hh:mm:ss yyyy tz
+   *	SQL - mm/dd/yyyy hh:mm:ss.ss tz
+   *	ISO - yyyy-mm-dd hh:mm:ss+/-tz
+   *	German - dd.mm.yyyy hh:mm:ss tz
+   *	XSD - yyyy-mm-ddThh:mm:ss.ss+/-tz
+   *
+   * This method is originated from EncodeDateTime of datetime.c of PostgreSQL.
+   * @param tm
+   * @param style
+   * @return
+   */
+  public static String encodeDateTime(TimeMeta tm, DateStyle style) {
+
+    StringBuilder sb = new StringBuilder();
+    switch (style) {
+
+      case ISO_DATES:
+      case XSO_DATES:
+        if (style == DateTimeConstants.DateStyle.ISO_DATES) {
+          sb.append(String.format("%04d-%02d-%02d %02d:%02d:",
+              (tm.years > 0) ? tm.years : -(tm.years - 1),
+              tm.monthOfYear, tm.dayOfMonth, tm.hours, tm.minutes));
+        } else {
+          sb.append(String.format("%04d-%02d-%02dT%02d:%02d:",
+              (tm.years > 0) ? tm.years : -(tm.years - 1),
+              tm.monthOfYear, tm.dayOfMonth, tm.hours, tm.minutes));
+        }
+
+        appendSecondsToEncodeOutput(sb, tm.secs, tm.fsecs, 6, true);
+        if (tm.timeZone != 0 && tm.timeZone != Integer.MAX_VALUE) {
+          sb.append(getTimeZoneDisplayTime(tm.timeZone));
+        }
+        if (tm.years <= 0) {
+          sb.append(" BC");
+        }
+        break;
+
+      case SQL_DATES:
+        // Compatible with Oracle/Ingres date formats
+
+    }
+
+    return sb.toString();
+  }
+
+  public static String encodeDate(TimeMeta tm, DateStyle style) {
+    StringBuilder sb = new StringBuilder();
+    switch (style) {
+      case ISO_DATES:
+      case XSO_DATES:
+      case SQL_DATES:
+        // Compatible with Oracle/Ingres date formats
+      default:
+        sb.append(String.format("%04d-%02d-%02d",
+            (tm.years > 0) ? tm.years : -(tm.years - 1),
+            tm.monthOfYear, tm.dayOfMonth));
+    }
+
+    return sb.toString();
+  }
+
+  public static String encodeTime(TimeMeta tm, DateStyle style) {
+    StringBuilder sb = new StringBuilder();
+    switch (style) {
+
+      case ISO_DATES:
+      case XSO_DATES:
+      case SQL_DATES:
+        // Compatible with Oracle/Ingres date formats
+      default :
+        sb.append(String.format("%02d:%02d:", tm.hours, tm.minutes));
+        appendSecondsToEncodeOutput(sb, tm.secs, tm.fsecs, 6, true);
+        if (tm.timeZone != 0 && tm.timeZone != Integer.MAX_VALUE) {
+          sb.append(getTimeZoneDisplayTime(tm.timeZone));
+        }
+        break;
+    }
+
+    return sb.toString();
+  }
+
+  /**
+   * Append sections and fractional seconds (if any) at *cp.
+   * precision is the max number of fraction digits, fillzeros says to
+   * pad to two integral-seconds digits.
+   * Note that any sign is stripped from the input seconds values.
+   *
+   * This method is originated form AppendSeconds in datetime.c of PostgreSQL.
+   */
+  public static void appendSecondsToEncodeOutput(
+      StringBuilder sb, int sec, int fsec, int precision, boolean fillzeros) {
+    if (fsec == 0) {
+      if (fillzeros)
+        sb.append(String.format("%02d", Math.abs(sec)));
+      else
+        sb.append(String.format("%d", Math.abs(sec)));
+    } else {
+      if (fillzeros) {
+        sb.append(String.format("%02d", Math.abs(sec)));
+      } else {
+        sb.append(String.format("%d", Math.abs(sec)));
+      }
+
+      if (precision > MAX_FRACTION_LENGTH) {
+        precision = MAX_FRACTION_LENGTH;
+      }
+
+      if (precision > 0) {
+        char[] fracChars = String.valueOf(fsec).toCharArray();
+        char[] resultChars = new char[MAX_FRACTION_LENGTH];
+
+        int numFillZero = MAX_FRACTION_LENGTH - fracChars.length;
+        for (int i = 0, fracIdx = 0; i < MAX_FRACTION_LENGTH; i++) {
+          if (i < numFillZero) {
+            resultChars[i] = '0';
+          } else {
+            resultChars[i] = fracChars[fracIdx];
+            fracIdx++;
+          }
+        }
+        sb.append(".").append(resultChars, 0, precision);
+      }
+      trimTrailingZeros(sb);
+    }
+  }
+
+  /**
+   * ... resulting from printing numbers with full precision.
+   *
+   * Before Postgres 8.4, this always left at least 2 fractional digits,
+   * but conversations on the lists suggest this isn't desired
+   * since showing '0.10' is misleading with values of precision(1).
+   *
+   * This method is originated form AppendSeconds in datetime.c of PostgreSQL.
+   * @param sb
+   */
+  public static void trimTrailingZeros(StringBuilder sb) {
+    int len = sb.length();
+    while (len > 1 && sb.charAt(len - 1) == '0' && sb.charAt(len - 2) != '.') {
+      len--;
+      sb.setLength(len);
+    }
+  }
+
+  /**
+   * Return the Julian day which corresponds to the first day (Monday) of the given ISO 8601 year and week.
+   * Julian days_full are used to convert between ISO week dates and Gregorian dates.
+   *
+   * This method is originated form AppendSeconds in timestamp.c of PostgreSQL.
+   * @param year
+   * @param week
+   * @return
+   */
+  public static int isoweek2j(int year, int week) {
+	  /* fourth day of current year */
+    int day4 = date2j(year, 1, 4);
+
+	  /* day0 == offset to first day of week (Monday) */
+    int day0 = j2day(day4 - 1);
+
+    return ((week - 1) * 7) + (day4 - day0);
+  }
+
+  /**
+   * Convert ISO week of year number to date.
+   * The year field must be specified with the ISO year!
+   * karel 2000/08/07
+   *
+   * This method is originated form AppendSeconds in timestamp.c of PostgreSQL.
+   * @param woy
+   * @param tm
+   */
+  public static void isoweek2date(int woy, TimeMeta tm) {
+    j2date(isoweek2j(tm.years, woy), tm);
+  }
+
+  /**
+   * Convert an ISO 8601 week date (ISO year, ISO week) into a Gregorian date.
+   * Gregorian day of week sent so weekday strings can be supplied.
+   * Populates year, mon, and mday with the correct Gregorian values.
+   * year must be passed in as the ISO year.
+   *
+   * This method is originated form AppendSeconds in timestamp.c of PostgreSQL.
+   * @param isoweek
+   * @param wday
+   * @param tm
+   */
+  public static void isoweekdate2date(int isoweek, int wday, TimeMeta tm) {
+    int jday;
+    jday = isoweek2j(tm.years, isoweek);
+	  /* convert Gregorian week start (Sunday=1) to ISO week start (Monday=1) */
+    if (wday > 1) {
+      jday += wday - 2;
+    } else {
+      jday += 6;
+    }
+    j2date(jday, tm);
+  }
+
+  /**
+   * Returns the ISO 8601 day-of-year, given a Gregorian year, month and day.
+   * Possible return values are 1 through 371 (364 in non-leap years).
+   * @param year
+   * @param mon
+   * @param mday
+   * @return
+   */
+  public static int date2isoyearday(int year, int mon, int mday) {
+    return date2j(year, mon, mday) - isoweek2j(date2isoyear(year, mon, mday), 1) + 1;
+  }
+
+  public static void toUserTimezone(TimeMeta tm) {
+    toUserTimezone(tm, TajoConf.getCurrentTimeZone());
+  }
+
+  public static void toUserTimezone(TimeMeta tm, TimeZone timeZone) {
+    tm.plusMillis(timeZone.getRawOffset());
+  }
+
+  public static void toUTCTimezone(TimeMeta tm) {
+    TimeZone timeZone = TajoConf.getCurrentTimeZone();
+    tm.plusMillis(0 - timeZone.getRawOffset());
+  }
+
+  public static String getTimeZoneDisplayTime(TimeZone timeZone) {
+    return getTimeZoneDisplayTime(timeZone.getRawOffset() / 1000);
+  }
+
+  public static String getTimeZoneDisplayTime(int totalSecs) {
+    if (totalSecs == 0) {
+      return "";
+    }
+    int minutes = Math.abs(totalSecs) / DateTimeConstants.SECS_PER_MINUTE;
+    int hours = minutes / DateTimeConstants.MINS_PER_HOUR;
+    minutes = minutes - hours * DateTimeConstants.MINS_PER_HOUR;
+
+    StringBuilder sb = new StringBuilder();
+    String prefix = "";
+
+    sb.append(totalSecs > 0 ? "+" : "-").append(String.format("%02d", hours));
+
+    if (minutes > 0) {
+      sb.append(":").append(String.format("%02d", minutes));
+      prefix = ":";
+    }
+
+    return sb.toString();
+  }
+
+  public static long getDay(DateTime dateTime) {
+    return convertToMicroSeconds(dateTime.withTimeAtStartOfDay());
+  }
+
+  public static long getHour(DateTime dateTime) {
+    return convertToMicroSeconds(dateTime.withTime(dateTime.get(org.joda.time.DateTimeFieldType.hourOfDay()), 0, 0, 0));
+  }
+
+  public static long getMinute(DateTime dateTime) {
+    return convertToMicroSeconds(dateTime.withTime(dateTime.get(org.joda.time.DateTimeFieldType.hourOfDay()),
+        dateTime.get(org.joda.time.DateTimeFieldType.minuteOfHour()), 0, 0));
+  }
+
+  public static long getSecond(DateTime dateTime) {
+    return convertToMicroSeconds(dateTime.withTime(dateTime.get(org.joda.time.DateTimeFieldType.hourOfDay()),
+        dateTime.get(org.joda.time.DateTimeFieldType.minuteOfHour()), dateTime.get(org.joda.time.DateTimeFieldType.secondOfMinute()), 0));
+  }
+
+  public static long getMonth(DateTime dateTime) {
+    return convertToMicroSeconds(dateTime.withTimeAtStartOfDay().withDate(dateTime.getYear(),
+        dateTime.getMonthOfYear(),1));
+  }
+
+  public static long getDayOfWeek(DateTime dateTime,int week) {
+    return convertToMicroSeconds(dateTime.withTimeAtStartOfDay().withDayOfWeek(week));
+  }
+
+  public static long getYear (DateTime dateTime) {
+    return convertToMicroSeconds(dateTime.withTimeAtStartOfDay().withDate(dateTime.getYear(), 1, 1));
+  }
+
+  public static DateTime getUTCDateTime(Int8Datum int8Datum){
+    return new DateTime(int8Datum.asInt8()/1000, DateTimeZone.UTC);
+  }
+
+  public static long convertToMicroSeconds(DateTime dateTime) {
+    return  dateTime.getMillis() * 1000;
+  }
+}
diff --git a/tajo-common/src/main/java/org/apache/tajo/util/datetime/TimeMeta.java b/tajo-common/src/main/java/org/apache/tajo/util/datetime/TimeMeta.java
new file mode 100644
index 0000000..25fe64a
--- /dev/null
+++ b/tajo-common/src/main/java/org/apache/tajo/util/datetime/TimeMeta.java
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util.datetime;
+
+import org.apache.tajo.util.datetime.DateTimeConstants.DateStyle;
+
+public class TimeMeta {
+  public int      fsecs;    // 1/1,000,000 secs
+  public int			secs;
+  public int			minutes;
+  public int			hours;
+  public int			dayOfMonth;
+  public int      dayOfYear;   //used for only DateTimeFormat
+  public int			monthOfYear; // origin 0, not 1
+  public int			years;		   // relative to 1900
+  public boolean	isDST;       // daylight savings time
+  public int      dayOfWeek;
+  public int      timeZone = Integer.MAX_VALUE;   //sec, used for only Text -> Timestamp
+
+  @Override
+  public String toString() {
+    return DateTimeUtil.encodeDateTime(this, DateStyle.ISO_DATES);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (!(o instanceof TimeMeta)) {
+      return false;
+    }
+    TimeMeta other = (TimeMeta)o;
+
+    return fsecs == other.fsecs &&
+        secs == other.secs &&
+        minutes == other.minutes &&
+        hours == other.hours &&
+        dayOfMonth == other.dayOfMonth &&
+        dayOfYear == other.dayOfYear &&
+        monthOfYear == other.monthOfYear &&
+        years == other.years &&
+        isDST == other.isDST
+        ;
+  }
+
+  public void plusMonths(int months) {
+    if (months == 0) {
+      return;
+    }
+    int thisYear = years;
+    int thisMonth = monthOfYear;
+
+    int yearToUse;
+    // Initially, monthToUse is zero-based
+    int monthToUse = thisMonth - 1 + months;
+    if (monthToUse >= 0) {
+      yearToUse = thisYear + (monthToUse / DateTimeConstants.MONTHS_PER_YEAR);
+      monthToUse = (monthToUse % DateTimeConstants.MONTHS_PER_YEAR) + 1;
+    } else {
+      yearToUse = thisYear + (monthToUse / DateTimeConstants.MONTHS_PER_YEAR) - 1;
+      monthToUse = Math.abs(monthToUse);
+      int remMonthToUse = monthToUse % DateTimeConstants.MONTHS_PER_YEAR;
+      // Take care of the boundary condition
+      if (remMonthToUse == 0) {
+        remMonthToUse = DateTimeConstants.MONTHS_PER_YEAR;
+      }
+      monthToUse = DateTimeConstants.MONTHS_PER_YEAR - remMonthToUse + 1;
+      // Take care MONTHS_PER_YEAR the boundary condition
+      if (monthToUse == 1) {
+        yearToUse += 1;
+      }
+    }
+    // End of do not refactor.
+    // ----------------------------------------------------------
+
+    //
+    // Quietly force DOM to nearest sane value.
+    //
+    int dayToUse = dayOfMonth;
+    int maxDay = DateTimeUtil.getDaysInYearMonth(yearToUse, monthToUse);
+    if (dayToUse > maxDay) {
+      dayToUse = maxDay;
+    }
+
+    this.years = yearToUse;
+    this.monthOfYear = monthToUse;
+    this.dayOfMonth = dayToUse;
+  }
+
+  public void plusDays(int days) {
+    long timestamp = DateTimeUtil.toJulianTimestamp(this);
+    timestamp += days * DateTimeConstants.USECS_PER_DAY;
+
+    DateTimeUtil.toJulianTimeMeta(timestamp, this);
+  }
+
+  public void plusMillis(long millis) {
+    plusTime(millis * 1000);
+  }
+
+  public void plusTime(long time) {
+    long timestamp = DateTimeUtil.toJulianTimestamp(this);
+    timestamp += time;
+    DateTimeUtil.toJulianTimeMeta(timestamp, this);
+  }
+
+  public int getCenturyOfEra() {
+    return DateTimeUtil.getCenturyOfEra(years);
+  }
+
+  public int getDayOfYear() {
+    int dayOfYear = 0;
+    for (int i = 0; i < monthOfYear - 1; i++) {
+      dayOfYear += DateTimeUtil.getDaysInYearMonth(years, i + 1);
+    }
+
+    return dayOfYear + dayOfMonth;
+  }
+
+  public int getWeekOfYear() {
+    return DateTimeUtil.date2isoweek(years, monthOfYear, dayOfMonth);
+  }
+
+  public int getWeekyear() {
+    return DateTimeUtil.date2isoyear(years, monthOfYear, dayOfMonth);
+  }
+
+  public int getISODayOfWeek() {
+    int dow = getDayOfWeek();
+    if (dow == 0) {   //Sunday
+      return 7;
+    } else {
+      return dow;
+    }
+  }
+
+  public int getDayOfWeek() {
+    return (DateTimeUtil.date2j(years, monthOfYear, dayOfMonth) + 1) % 7;
+  }
+}
\ No newline at end of file
diff --git a/tajo-common/src/main/proto/tajo_protos.proto b/tajo-common/src/main/proto/tajo_protos.proto
index 0abc266..a7aa4f7 100644
--- a/tajo-common/src/main/proto/tajo_protos.proto
+++ b/tajo-common/src/main/proto/tajo_protos.proto
@@ -45,4 +45,10 @@
   TA_FAILED = 6;
   TA_KILL_WAIT = 7;
   TA_KILLED = 8;
+}
+
+enum FetcherState {
+  FETCH_INIT = 0;
+  FETCH_FETCHING = 1;
+  FETCH_FINISHED = 2;
 }
\ No newline at end of file
diff --git a/tajo-common/src/test/java/org/apache/tajo/datum/TestDateDatum.java b/tajo-common/src/test/java/org/apache/tajo/datum/TestDateDatum.java
index 2123dc5..f2ad261 100644
--- a/tajo-common/src/test/java/org/apache/tajo/datum/TestDateDatum.java
+++ b/tajo-common/src/test/java/org/apache/tajo/datum/TestDateDatum.java
@@ -68,13 +68,6 @@
     assertEquals(d, copy);
 	}
 
-  @Test
-  public final void testAsByteArray() {
-    DateDatum d = DatumFactory.createDate(DATE);
-    DateDatum copy = new DateDatum(d.asByteArray());
-    assertEquals(d.asInt8(), copy.asInt8());
-  }
-
 	@Test
   public final void testSize() {
     Datum d = DatumFactory.createDate(DATE);
diff --git a/tajo-common/src/test/java/org/apache/tajo/datum/TestIntervalDatum.java b/tajo-common/src/test/java/org/apache/tajo/datum/TestIntervalDatum.java
index 3bce64c..511b356 100644
--- a/tajo-common/src/test/java/org/apache/tajo/datum/TestIntervalDatum.java
+++ b/tajo-common/src/test/java/org/apache/tajo/datum/TestIntervalDatum.java
@@ -19,9 +19,9 @@
 package org.apache.tajo.datum;
 
 import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.exception.InvalidOperationException;
-import org.joda.time.DateTime;
-import org.joda.time.LocalTime;
+import org.apache.tajo.util.datetime.DateTimeUtil;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
@@ -64,27 +64,35 @@
     // http://www.postgresql.org/docs/8.2/static/functions-datetime.html
 
     // date '2001-09-28' + integer '7'	==> date '2001-10-05'
-    Datum datum = new DateDatum(2001, 9, 28);
+    Datum datum = DatumFactory.createDate(2001, 9, 28);
     Datum[] datums = new Datum[]{new Int2Datum((short) 7), new Int4Datum(7), new Int8Datum(7),
           new Float4Datum(7.0f), new Float8Datum(7.0f)};
 
     for (int i = 0; i < datums.length; i++) {
       Datum result = datum.plus(datums[i]);
       assertEquals(TajoDataTypes.Type.DATE, result.type());
-      assertEquals(new DateDatum(2001, 10, 5), result);
+      assertEquals("date '2001-09-28' + " + datums[i].asChars() + "(" + i + " th test)", "2001-10-05", result.asChars());
     }
 
+    //TimestampDatum and TimeDatum should be TimeZone when convert to string
     // date '2001-09-28' + interval '1 hour'	==> timestamp '2001-09-28 01:00:00'
-    datum = new DateDatum(2001, 9, 28);
+    datum = DatumFactory.createDate(2001, 9, 28);
     Datum result = datum.plus(new IntervalDatum(60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 28, 1, 0, 0, 0)), result);
+    assertEquals("2001-09-28 01:00:00", ((TimestampDatum)result).asChars(TajoConf.getCurrentTimeZone(), false));
+
+    // interval '1 hour' +  date '2001-09-28'	==> timestamp '2001-09-28 01:00:00'
+    datum = new IntervalDatum(60 * 60 * 1000);
+    result = datum.plus(DatumFactory.createDate(2001, 9, 28));
+    assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
+    assertEquals("2001-09-28 01:00:00", ((TimestampDatum)result).asChars(TajoConf.getCurrentTimeZone(), false));
 
     // date '2001-09-28' + time '03:00' ==> timestamp '2001-09-28 03:00:00'
-    datum = new DateDatum(2001, 9, 28);
-    result = datum.plus(new TimeDatum(new LocalTime(3, 0)));
+    datum = DatumFactory.createDate(2001, 9, 28);
+    TimeDatum time = new TimeDatum(DateTimeUtil.toTime(3, 0, 0, 0));
+    result = datum.plus(time);
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 28, 3, 0, 0, 0)), result);
+    assertEquals("2001-09-28 03:00:00", result.asChars());
 
     // interval '1 day' + interval '1 hour'	interval '1 day 01:00:00'
     datum = new IntervalDatum(IntervalDatum.DAY_MILLIS);
@@ -93,63 +101,65 @@
     assertEquals("1 day 01:00:00", result.asChars());
 
     // timestamp '2001-09-28 01:00' + interval '23 hours'	==> timestamp '2001-09-29 00:00:00'
-    datum = new TimestampDatum(new DateTime(2001, 9, 28, 1, 0, 0, 0));
+    datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(2001, 9, 28, 1, 0, 0, 0));
     result = datum.plus(new IntervalDatum(23 * 60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 29, 0, 0, 0, 0)), result);
+    assertEquals("2001-09-29 00:00:00", result.asChars());
 
     // time '01:00' + interval '3 hours' ==> time '04:00:00'
-    datum = new TimeDatum(new LocalTime(1, 0, 0, 0));
+    datum = new TimeDatum(DateTimeUtil.toTime(1, 0, 0, 0));
     result = datum.plus(new IntervalDatum(3 * 60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIME, result.type());
-    assertEquals(new TimeDatum(new LocalTime(4, 0, 0, 0)), result);
+    assertEquals(new TimeDatum(DateTimeUtil.toTime(4, 0, 0, 0)), result);
 
-//    // - interval '23 hours' ==> interval '-23:00:00'
-//    // TODO Currently Interval's inverseSign() not supported
+    // - interval '23 hours' ==> interval '-23:00:00'
+    // TODO Currently Interval's inverseSign() not supported
 
     // date '2001-10-01' - date '2001-09-28' ==>	integer '3'
-    datum = new DateDatum(2001, 10, 01);
-    result = datum.minus(new DateDatum(2001, 9, 28));
+    datum = DatumFactory.createDate(2001, 10, 1);
+    result = datum.minus(DatumFactory.createDate(2001, 9, 28));
     assertEquals(TajoDataTypes.Type.INT4, result.type());
     assertEquals(new Int4Datum(3), result);
 
     // date '2001-10-01' - integer '7' ==>	date '2001-09-24'
-    datum = new DateDatum(2001, 10, 01);
+    datum = DatumFactory.createDate(2001, 10, 1);
     for (int i = 0; i < datums.length; i++) {
       Datum result2 = datum.minus(datums[i]);
       assertEquals(TajoDataTypes.Type.DATE, result2.type());
-      assertEquals(new DateDatum(2001, 9, 24), result2);
+      assertEquals(DatumFactory.createDate(2001, 9, 24), result2);
     }
 
     // date '2001-09-28' - interval '1 hour' ==> timestamp '2001-09-27 23:00:00'
-    datum = new DateDatum(2001, 9, 28);
+    datum = DatumFactory.createDate(2001, 9, 28);
     result = datum.minus(new IntervalDatum(1 * 60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 27, 23, 0, 0, 0)), result);
+    assertEquals("2001-09-27 23:00:00", ((TimestampDatum)result).asChars(TajoConf.getCurrentTimeZone(), false));
 
     // date '2001-09-28' - interval '1 day 1 hour' ==> timestamp '2001-09-26 23:00:00'
-    datum = new DateDatum(2001, 9, 28);
+    // In this case all datums are UTC
+    datum = DatumFactory.createDate(2001, 9, 28);
     result = datum.minus(new IntervalDatum(IntervalDatum.DAY_MILLIS + 1 * 60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 26, 23, 0, 0, 0)), result);
+    assertEquals("2001-09-26 23:00:00",  ((TimestampDatum)result).asChars(TajoConf.getCurrentTimeZone(), false));
 
     // time '05:00' - time '03:00' ==>	interval '02:00:00'
-    datum = new TimeDatum(new LocalTime(5, 0, 0, 0));
-    result = datum.minus(new TimeDatum(new LocalTime(3, 0, 0, 0)));
+    datum = new TimeDatum(DateTimeUtil.toTime(5, 0, 0, 0));
+    result = datum.minus(new TimeDatum(DateTimeUtil.toTime(3, 0, 0, 0)));
     assertEquals(TajoDataTypes.Type.INTERVAL, result.type());
     assertEquals(new IntervalDatum(2 * 60 * 60 * 1000), result);
 
     // time '05:00' - interval '2 hours' ==>	time '03:00:00'
-    datum = new TimeDatum(new LocalTime(5, 0, 0, 0));
+    datum = new TimeDatum(DateTimeUtil.toTime(5, 0, 0, 0));
     result = datum.minus(new IntervalDatum(2 * 60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIME, result.type());
-    assertEquals(new TimeDatum(3, 0, 0, 0), result);
+    assertEquals(new TimeDatum(DateTimeUtil.toTime(3, 0, 0, 0)), result);
 
     // timestamp '2001-09-28 23:00' - interval '23 hours' ==>	timestamp '2001-09-28 00:00:00'
-    datum = new TimestampDatum(new DateTime(2001, 9, 28, 23, 0, 0, 0));
+    // In this case all datums are UTC
+    datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(2001, 9, 28, 23, 0, 0, 0));
     result = datum.minus(new IntervalDatum(23 * 60 * 60 * 1000));
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 28, 0, 0, 0, 0)), result);
+    assertEquals("2001-09-28 00:00:00", result.asChars());
 
     // interval '1 day' - interval '1 hour'	==> interval '1 day -01:00:00'
     datum = new IntervalDatum(IntervalDatum.DAY_MILLIS);
@@ -158,8 +168,8 @@
     assertEquals(new IntervalDatum(23 * 60 * 60 * 1000), result);
 
     // timestamp '2001-09-29 03:00' - timestamp '2001-09-27 12:00' ==>	interval '1 day 15:00:00'
-    datum = new TimestampDatum(new DateTime(2001, 9, 29, 3, 0, 0, 0));
-    result = datum.minus(new TimestampDatum(new DateTime(2001, 9, 27, 12, 0, 0, 0)));
+    datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(2001, 9, 29, 3, 0, 0, 0));
+    result = datum.minus(new TimestampDatum(DateTimeUtil.toJulianTimestamp(2001, 9, 27, 12, 0, 0, 0)));
     assertEquals(TajoDataTypes.Type.INTERVAL, result.type());
     assertEquals(new IntervalDatum(IntervalDatum.DAY_MILLIS + 15 * 60 * 60 * 1000), result);
 
@@ -191,9 +201,10 @@
     assertEquals(new IntervalDatum(40 * 60 * 1000), result);
 
     // timestamp '2001-08-31 01:00:00' + interval '1 mons' ==> timestamp 2001-09-30 01:00:00
-    datum = new TimestampDatum(new DateTime(2001, 8, 31, 1, 0, 0, 0));
+    // In this case all datums are UTC
+    datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(2001, 8, 31, 1, 0, 0, 0));
     result = datum.plus(new IntervalDatum(1, 0));
     assertEquals(TajoDataTypes.Type.TIMESTAMP, result.type());
-    assertEquals(new TimestampDatum(new DateTime(2001, 9, 30, 1, 0, 0, 0)), result);
+    assertEquals("2001-09-30 01:00:00", result.asChars());
   }
 }
diff --git a/tajo-common/src/test/java/org/apache/tajo/datum/TestTimeDatum.java b/tajo-common/src/test/java/org/apache/tajo/datum/TestTimeDatum.java
index c405d68..ea641ec 100644
--- a/tajo-common/src/test/java/org/apache/tajo/datum/TestTimeDatum.java
+++ b/tajo-common/src/test/java/org/apache/tajo/datum/TestTimeDatum.java
@@ -70,13 +70,6 @@
   }
 
   @Test
-  public final void testAsByteArray() {
-    TimeDatum d = DatumFactory.createTime(TIME);
-    TimeDatum copy = new TimeDatum(d.asByteArray());
-    assertEquals(d.asInt8(), copy.asInt8());
-  }
-
-  @Test
   public final void testSize() {
     Datum d = DatumFactory.createTime(TIME);
     assertEquals(TimeDatum.SIZE, d.asByteArray().length);
diff --git a/tajo-common/src/test/java/org/apache/tajo/datum/TestTimestampDatum.java b/tajo-common/src/test/java/org/apache/tajo/datum/TestTimestampDatum.java
index 6551077..818c296 100644
--- a/tajo-common/src/test/java/org/apache/tajo/datum/TestTimestampDatum.java
+++ b/tajo-common/src/test/java/org/apache/tajo/datum/TestTimestampDatum.java
@@ -19,105 +19,162 @@
 package org.apache.tajo.datum;
 
 import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.exception.InvalidCastException;
 import org.apache.tajo.json.CommonGsonHelper;
+import org.apache.tajo.util.datetime.DateTimeUtil;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.util.Calendar;
+import java.util.TimeZone;
+
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 
 public class TestTimestampDatum {
-
-  private static int timestamp;
+  private static long javatime;
+  private static int unixtime;
+  private static Calendar calendar;
 
   @BeforeClass
   public static void setUp() {
-    timestamp = (int) (System.currentTimeMillis() / 1000);
+    javatime = System.currentTimeMillis();
+    calendar = Calendar.getInstance(TajoConf.getCurrentTimeZone());
+    calendar.setTimeInMillis(javatime);
+    unixtime = (int) (javatime / 1000);
   }
 
 	@Test
 	public final void testType() {
-		Datum d = DatumFactory.createTimeStamp(timestamp);
-        assertEquals(Type.TIMESTAMP, d.type());
+		Datum d = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
+    assertEquals(Type.TIMESTAMP, d.type());
 	}
 
 	@Test(expected = InvalidCastException.class)
 	public final void testAsInt4() {
-    Datum d = DatumFactory.createTimeStamp(timestamp);
+    Datum d = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
     d.asInt4();
 	}
 
   @Test
 	public final void testAsInt8() {
-    Datum d = DatumFactory.createTimeStamp(timestamp);
-    long javaTime = timestamp * 1000l;
-    assertEquals(javaTime, d.asInt8());
+    Datum d = DatumFactory.createTimestmpDatumWithJavaMillis(unixtime * 1000);
+    long javaTime = unixtime * 1000;
+    assertEquals(DateTimeUtil.javaTimeToJulianTime(javaTime), d.asInt8());
 	}
 
   @Test(expected = InvalidCastException.class)
 	public final void testAsFloat4() {
-    Datum d = DatumFactory.createTimeStamp(timestamp);
+    Datum d = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
     d.asFloat4();
 	}
 
   @Test(expected = InvalidCastException.class)
 	public final void testAsFloat8() {
     int instance = 1386577582;
-    Datum d = DatumFactory.createTimeStamp(instance);
+    Datum d = DatumFactory.createTimestmpDatumWithUnixTime(instance);
     d.asFloat8();
 	}
 
 	@Test
 	public final void testAsText() {
-    Datum d = DatumFactory.createTimeStamp("1980-04-01 01:50:01");
-    Datum copy = DatumFactory.createTimeStamp(d.asChars());
+    Datum d = DatumFactory.createTimestamp("1980-04-01 01:50:01");
+    Datum copy = DatumFactory.createTimestamp(d.asChars());
+    assertEquals(d, copy);
+
+    d = DatumFactory.createTimestamp("1980-04-01 01:50:01.10");
+    copy = DatumFactory.createTimestamp(d.asChars());
     assertEquals(d, copy);
 	}
 
-  @Test
-  public final void testAsByteArray() {
-    TimestampDatum d = DatumFactory.createTimeStamp(timestamp);
-    TimestampDatum copy = new TimestampDatum(d.asByteArray());
-    assertEquals(d, copy);
-  }
-
 	@Test
   public final void testSize() {
-    Datum d = DatumFactory.createTimeStamp(timestamp);
+    Datum d = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
     assertEquals(TimestampDatum.SIZE, d.asByteArray().length);
   }
 
   @Test
   public final void testAsTextBytes() {
-    Datum d = DatumFactory.createTimeStamp("1980-04-01 01:50:01");
+    Datum d = DatumFactory.createTimestamp("1980-04-01 01:50:01");
     assertArrayEquals(d.toString().getBytes(), d.asTextBytes());
 
-    d = DatumFactory.createTimeStamp("1980-04-01 01:50:01.578");
+    d = DatumFactory.createTimestamp("1980-04-01 01:50:01.578");
     assertArrayEquals(d.toString().getBytes(), d.asTextBytes());
   }
 
   @Test
   public final void testToJson() {
-    Datum d = DatumFactory.createTimeStamp(timestamp);
+    Datum d = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
     Datum copy = CommonGsonHelper.fromJson(d.toJson(), Datum.class);
     assertEquals(d, copy);
   }
 
   @Test
-  public final void testGetFields() {
-    TimestampDatum d = DatumFactory.createTimeStamp("1980-04-01 01:50:01");
-    assertEquals(1980, d.getYear());
-    assertEquals(4, d.getMonthOfYear());
-    assertEquals(1, d.getDayOfMonth());
-    assertEquals(1, d.getHourOfDay());
-    assertEquals(50, d.getMinuteOfHour());
-    assertEquals(01, d.getSecondOfMinute());
+  public final void testTimeZone() {
+    TimestampDatum datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(2014, 5, 1, 15, 20, 30, 0));
+    assertEquals("2014-05-01 15:20:30", datum.asChars());
+    assertEquals("2014-05-02 00:20:30+09", datum.asChars(TimeZone.getTimeZone("GMT+9"), true));
+  }
+
+  @Test
+  public final void testTimestampConstructor() {
+    TimestampDatum datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(2014, 5, 1, 10, 20, 30, 0));
+    assertEquals(2014, datum.getYear());
+    assertEquals(5, datum.getMonthOfYear());
+    assertEquals(1, datum.getDayOfMonth());
+    assertEquals(10, datum.getHourOfDay());
+    assertEquals(20, datum.getMinuteOfHour());
+    assertEquals(30, datum.getSecondOfMinute());
+
+    TimestampDatum datum2 = DatumFactory.createTimestamp("2014-05-01 10:20:30");
+    assertEquals(datum2, datum);
+
+    datum = DatumFactory.createTimestamp("1980-04-01 01:50:01.123");
+    assertEquals(1980, datum.getYear());
+    assertEquals(4, datum.getMonthOfYear());
+    assertEquals(1, datum.getDayOfMonth());
+    assertEquals(1, datum.getHourOfDay());
+    assertEquals(50, datum.getMinuteOfHour());
+    assertEquals(1, datum.getSecondOfMinute());
+    assertEquals(123, datum.getMillisOfSecond());
+
+    datum = new TimestampDatum(DateTimeUtil.toJulianTimestamp(1014, 5, 1, 10, 20, 30, 0));
+    assertEquals(1014, datum.getYear());
+    assertEquals(5, datum.getMonthOfYear());
+    assertEquals(1, datum.getDayOfMonth());
+    assertEquals(10, datum.getHourOfDay());
+    assertEquals(20, datum.getMinuteOfHour());
+    assertEquals(30, datum.getSecondOfMinute());
+
+    datum2 = DatumFactory.createTimestamp("1014-05-01 10:20:30");
+    assertEquals(datum2, datum);
+
+    for (int i = 0; i < 100; i++) {
+      Calendar cal = Calendar.getInstance();
+      long jTime = System.currentTimeMillis();
+      int uTime = (int)(jTime / 1000);
+      cal.setTimeInMillis(jTime);
+
+      long julianTimestamp = DateTimeUtil.javaTimeToJulianTime(jTime);
+      assertEquals(uTime, DateTimeUtil.julianTimeToEpoch(julianTimestamp));
+      assertEquals(jTime, DateTimeUtil.julianTimeToJavaTime(julianTimestamp));
+
+      TimestampDatum datum3 = DatumFactory.createTimestmpDatumWithJavaMillis(jTime);
+      assertEquals(cal.get(Calendar.YEAR), datum3.getYear());
+      assertEquals(cal.get(Calendar.MONTH) + 1, datum3.getMonthOfYear());
+      assertEquals(cal.get(Calendar.DAY_OF_MONTH), datum3.getDayOfMonth());
+
+      datum3 = DatumFactory.createTimestmpDatumWithUnixTime(uTime);
+      assertEquals(cal.get(Calendar.YEAR), datum3.getYear());
+      assertEquals(cal.get(Calendar.MONTH) + 1, datum3.getMonthOfYear());
+      assertEquals(cal.get(Calendar.DAY_OF_MONTH), datum3.getDayOfMonth());
+    }
   }
 
   @Test
   public final void testNull() {
-   Datum d = DatumFactory.createTimeStamp(timestamp);
+   Datum d = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
    assertEquals(Boolean.FALSE,d.equals(DatumFactory.createNullDatum()));
    assertEquals(DatumFactory.createNullDatum(),d.equalsTo(DatumFactory.createNullDatum()));
    assertEquals(-1,d.compareTo(DatumFactory.createNullDatum()));
diff --git a/tajo-common/src/test/java/org/apache/tajo/util/TestDateTimeFormat.java b/tajo-common/src/test/java/org/apache/tajo/util/TestDateTimeFormat.java
new file mode 100644
index 0000000..0bfe708
--- /dev/null
+++ b/tajo-common/src/test/java/org/apache/tajo/util/TestDateTimeFormat.java
@@ -0,0 +1,158 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util;
+
+import org.apache.tajo.datum.TimestampDatum;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.TimeMeta;
+import org.joda.time.DateTime;
+import org.joda.time.format.DateTimeFormatter;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+public class TestDateTimeFormat {
+  @Test
+  public void testToTimestamp() {
+    evalToTimestampAndAssert("1997-12-30 11:40:50.345", "YYYY-MM-DD HH24:MI:SS.MS", 1997, 12, 30, 11, 40, 50, 345);
+    evalToTimestampAndAssert("1997-12-30 11:40:50.345 PM", "YYYY-MM-DD HH24:MI:SS.MS PM", 1997, 12, 30, 23, 40, 50, 345);
+    evalToTimestampAndAssert("0097/Feb/16 --> 08:14:30", "YYYY/Mon/DD --> HH:MI:SS", 97, 2, 16, 8, 14, 30, 0);
+    evalToTimestampAndAssert("97/2/16 8:14:30", "FMYYYY/FMMM/FMDD FMHH:FMMI:FMSS", 97, 2, 16, 8, 14, 30, 0);
+    evalToTimestampAndAssert("1985 September 12", "YYYY FMMonth DD", 1985, 9, 12, 0, 0, 0, 0);
+    evalToTimestampAndAssert("1,582nd VIII 21", "Y,YYYth FMRM DD", 1582, 8, 21, 0, 0, 0, 0);
+    evalToTimestampAndAssert("05121445482000", "MMDDHH24MISSYYYY", 2000, 5, 12, 14, 45, 48, 0);
+    evalToTimestampAndAssert("2000January09Sunday", "YYYYFMMonthDDFMDay", 2000, 1, 9, 0, 0, 0, 0);
+    evalToTimestampAndAssert("97/Feb/16", "YY/Mon/DD", 1997, 2, 16, 0, 0, 0, 0);
+    evalToTimestampAndAssert("19971116", "YYYYMMDD", 1997, 11, 16, 0, 0, 0, 0);
+    evalToTimestampAndAssert("20000-1116", "YYYY-MMDD", 20000, 11, 16, 0, 0, 0, 0);
+    evalToTimestampAndAssert("9-1116", "Y-MMDD", 2009, 11, 16, 0, 0, 0, 0);
+    evalToTimestampAndAssert("95-1116", "YY-MMDD", 1995, 11, 16, 0, 0, 0, 0);
+    evalToTimestampAndAssert("995-1116", "YYY-MMDD", 1995, 11, 16, 0, 0, 0, 0);
+    evalToTimestampAndAssert("2005426", "YYYYWWD", 2005, 10, 15, 0, 0, 0, 0);
+    evalToTimestampAndAssert("2005300", "YYYYDDD", 2005, 10, 27, 0, 0, 0, 0);
+    evalToTimestampAndAssert("2005527", "IYYYIWID", 2006, 1, 1, 0, 0, 0, 0);
+    evalToTimestampAndAssert("005527", "IYYIWID", 2006, 1, 1, 0, 0, 0, 0);
+    evalToTimestampAndAssert("05527", "IYIWID", 2006, 1, 1, 0, 0, 0, 0);
+    evalToTimestampAndAssert("5527", "IIWID", 2006, 1, 1, 0, 0, 0, 0);
+    evalToTimestampAndAssert("2005364", "IYYYIDDD", 2006, 1, 1, 0, 0, 0, 0);
+    evalToTimestampAndAssert("20050302", "YYYYMMDD", 2005, 3, 2, 0, 0, 0, 0);
+    evalToTimestampAndAssert("2005 03 02", "YYYYMMDD", 2005, 3, 2, 0, 0, 0, 0);
+    evalToTimestampAndAssert(" 2005 03 02", "YYYYMMDD", 2005, 3, 2, 0, 0, 0, 0);
+    evalToTimestampAndAssert("  20050302", "YYYYMMDD", 2005, 3, 2, 0, 0, 0, 0);
+
+    //In the case of using Format Cache
+    evalToTimestampAndAssert("1998-02-28 10:20:30.123 PM", "YYYY-MM-DD HH24:MI:SS.MS PM", 1998, 2, 28, 22, 20, 30, 123);
+
+
+    try {
+      evalToTimestampAndAssert("97/Feb/16", "YYMonDD", 1997, 2, 16, 0, 0, 0, 0);
+      fail("Should be throw exception");
+    } catch (Exception e) {
+      //Invalid value /Fe for Mon. The given value did not match any of the allowed values for this field.
+    }
+
+    try {
+      evalToTimestampAndAssert("2005527", "YYYYIWID", 2005, 5, 27, 0, 0, 0, 0);
+      fail("Should be throw exception");
+    } catch (Exception e) {
+      //Do not mix Gregorian and ISO week date conventions in a formatting template.
+    }
+    try {
+      evalToTimestampAndAssert("19971", "YYYYMMDD", 1997, 1, 1, 0, 0, 0, 0);
+      fail("Should be throw exception");
+    } catch (Exception e) {
+      //If your source string is not fixed-width, try using the "FM" modifier.
+    }
+
+    TimeMeta tm = DateTimeFormat.parseDateTime("10:09:37.5", "HH24:MI:SS.MS");
+    assertEquals(10, tm.hours);
+  }
+
+  @Test
+  public void testToChar() {
+    evalToCharAndAssert("1970-01-17 10:09:37", "YYYY-MM-DD HH24:MI:SS", "1970-01-17 10:09:37");
+    evalToCharAndAssert("1997-12-30 11:40:50.345", "YYYY-MM-DD HH24:MI:SS.MS", "1997-12-30 11:40:50.345");
+    evalToCharAndAssert("1997-12-30 11:40:50.345 PM", "YYYY-MM-DD HH24:MI:SS.MS PM", "1997-12-30 23:40:50.345 PM");
+    evalToCharAndAssert("0097/Feb/16 --> 08:14:30", "YYYY/Mon/DD --> HH:MI:SS", "0097/Feb/16 --> 08:14:30");
+    evalToCharAndAssert("97/2/16 8:14:30", "FMYYYY/FMMM/FMDD FMHH:FMMI:FMSS", "97/2/16 8:14:30");
+    evalToCharAndAssert("1985 September 12", "YYYY FMMonth DD", "1985 September 12");
+    evalToCharAndAssert("1,582nd VIII 21", "Y,YYYth FMRM DD", "1,582nd VIII 21");
+    evalToCharAndAssert("05121445482000", "MMDDHH24MISSYYYY", "05121445482000");
+    evalToCharAndAssert("2000January09Sunday", "YYYYFMMonthDDFMDay", "2000January09Sunday");
+    evalToCharAndAssert("97/Feb/16", "YY/Mon/DD", "97/Feb/16");
+    evalToCharAndAssert("19971116", "YYYYMMDD", "19971116");
+    evalToCharAndAssert("20000-1116", "YYYY-MMDD", "20000-1116");
+    evalToCharAndAssert("9-1116", "Y-MMDD", "9-1116");
+    evalToCharAndAssert("95-1116", "YY-MMDD", "95-1116");
+    evalToCharAndAssert("995-1116", "YYY-MMDD", "995-1116");
+    evalToCharAndAssert("2005426", "YYYYWWD", "2005427");
+    evalToCharAndAssert("2005300", "YYYYDDD", "2005300");
+    evalToCharAndAssert("2005527", "IYYYIWID", "2005527");
+    evalToCharAndAssert("005527", "IYYIWID", "005527");
+    evalToCharAndAssert("05527", "IYIWID", "05527");
+    evalToCharAndAssert("5527", "IIWID", "5527");
+    evalToCharAndAssert("2005364", "IYYYIDDD", "2005364");
+    evalToCharAndAssert("20050302", "YYYYMMDD", "20050302");
+    evalToCharAndAssert("2005 03 02", "YYYYMMDD", "YYYY MM DD", "2005 03 02");
+    evalToCharAndAssert(" 2005 03 02", "YYYYMMDD", " YYYY MM DD", " 2005 03 02");
+    evalToCharAndAssert("  20050302", "YYYYMMDD", "  YYYYMMDD", "  20050302");
+  }
+
+  @Test
+  public void testPerformance() {
+    DateTimeFormatter jodaFormat = org.joda.time.format.DateTimeFormat.forPattern("YYYY-MM-DD HH:mm:ss.SSS");
+    long startTime = System.currentTimeMillis();
+    for (int i = 0; i < 10000000; i++) {
+      DateTime dateTime = jodaFormat.parseDateTime("1997-12-30 11:40:50.345");
+    }
+    long endTime = System.currentTimeMillis();
+    System.out.println("total parse time with JodaTime:" + (endTime - startTime) + " ms");
+
+    startTime = System.currentTimeMillis();
+    for (int i = 0; i < 10000000; i++) {
+      TimestampDatum datum = DateTimeFormat.toTimestamp("1997-12-30 11:40:50.345", "YYYY-MM-DD HH24:MI:SS.MS");
+    }
+    endTime = System.currentTimeMillis();
+    System.out.println("total parse time with TajoDateTimeFormat:" + (endTime - startTime) + " ms");
+  }
+
+  private void evalToTimestampAndAssert(String dateTimeText, String formatText,
+                                        int year, int month, int day, int hour, int minute, int sec, int msec) {
+    TimestampDatum datum = DateTimeFormat.toTimestamp(dateTimeText, formatText);
+    assertEquals(year, datum.getYear());
+    assertEquals(month, datum.getMonthOfYear());
+    assertEquals(day, datum.getDayOfMonth());
+    assertEquals(hour, datum.getHourOfDay());
+    assertEquals(minute, datum.getMinuteOfHour());
+    assertEquals(sec, datum.getSecondOfMinute());
+    assertEquals(msec, datum.getMillisOfSecond());
+  }
+
+  private void evalToCharAndAssert(String dateTimeText, String toTimestampFormatText, String expected) {
+    evalToCharAndAssert(dateTimeText, toTimestampFormatText, toTimestampFormatText, expected);
+  }
+
+  private void evalToCharAndAssert(String dateTimeText,
+                                   String toTimestampFormatText, String toCharFormatText, String expected) {
+    TimestampDatum datum = DateTimeFormat.toTimestamp(dateTimeText, toTimestampFormatText);
+    String toCharResult = DateTimeFormat.to_char(datum.toTimeMeta(), toCharFormatText);
+    assertEquals(expected, toCharResult);
+  }
+}
diff --git a/tajo-common/src/test/java/org/apache/tajo/util/TestDateTimeUtil.java b/tajo-common/src/test/java/org/apache/tajo/util/TestDateTimeUtil.java
new file mode 100644
index 0000000..e953d44
--- /dev/null
+++ b/tajo-common/src/test/java/org/apache/tajo/util/TestDateTimeUtil.java
@@ -0,0 +1,439 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util;
+
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.util.datetime.DateTimeConstants;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+import org.junit.Test;
+
+import java.util.Calendar;
+import java.util.TimeZone;
+
+import static org.junit.Assert.*;
+
+public class TestDateTimeUtil {
+  private static final int TEST_YEAR = 2014;
+  private static final int TEST_MONTH_OF_YEAR = 4;
+  private static final int TEST_DAY_OF_MONTH = 18;
+  private static final int TEST_HOUR_OF_DAY = 0;
+  private static final int TEST_MINUTE_OF_HOUR = 15;
+  private static final int TEST_SECOND_OF_MINUTE = 25;
+  private static final DateTime TEST_DATETIME = new DateTime(TEST_YEAR, TEST_MONTH_OF_YEAR, TEST_DAY_OF_MONTH,
+      TEST_HOUR_OF_DAY, TEST_MINUTE_OF_HOUR, TEST_SECOND_OF_MINUTE, DateTimeZone.UTC);
+  private static final DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
+
+  @Test
+  public void testDecodeDateTime() {
+    // http://www.postgresql.org/docs/9.1/static/datatype-datetime.html
+    TimeMeta tm = DateTimeUtil.decodeDateTime("2014-01-07 14:12:54+09");
+    assertEquals(2014, tm.years);
+    assertEquals(1, tm.monthOfYear);
+    assertEquals(7, tm.dayOfMonth);
+    assertEquals(14, tm.hours);
+    assertEquals(12, tm.minutes);
+    assertEquals(54, tm.secs);
+    assertEquals(0, tm.fsecs);
+
+    tm = DateTimeUtil.decodeDateTime("1999-01-08 04:05:06.789");
+    assertEquals(1999, tm.years);
+    assertEquals(1, tm.monthOfYear);
+    assertEquals(8, tm.dayOfMonth);
+    assertEquals(4, tm.hours);
+    assertEquals(5, tm.minutes);
+    assertEquals(6, tm.secs);
+    assertEquals(7 * 100000 + 8 * 10000 + 9 * 1000, tm.fsecs);
+
+    TimeMeta tm2 = DateTimeUtil.decodeDateTime("January 8, 1999 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    try {
+      tm2 = DateTimeUtil.decodeDateTime("January 8, 99 04:05:06.789");
+      assertEquals(tm, tm2);
+      fail("error in YMD mode");
+    } catch (Exception e) {
+      //throws Exception in YMD mode
+      //BAD Format: day overflow:99
+    }
+
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_MDY);
+    tm2 = DateTimeUtil.decodeDateTime("January 8, 99 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_YMD);
+    tm2 = DateTimeUtil.decodeDateTime("1999/1/8 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    tm2 = DateTimeUtil.decodeDateTime("1999/01/08 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    //January 2, 2003 in MDY mode; February 1, 2003 in DMY mode; February 3, 2001 in YMD mode
+    tm2 = DateTimeUtil.decodeDateTime("01/02/03 04:05:06.789");
+    assertEquals(2001, tm2.years);
+    assertEquals(2, tm2.monthOfYear);
+    assertEquals(3, tm2.dayOfMonth);
+    assertEquals(4, tm2.hours);
+    assertEquals(5, tm2.minutes);
+    assertEquals(6, tm2.secs);
+    assertEquals(7 * 100000 + 8 * 10000 + 9 * 1000, tm2.fsecs);
+
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_MDY);
+    tm2 = DateTimeUtil.decodeDateTime("01/02/03 04:05:06.789");
+    assertEquals(2003, tm2.years);
+    assertEquals(1, tm2.monthOfYear);
+    assertEquals(2, tm2.dayOfMonth);
+    assertEquals(4, tm2.hours);
+    assertEquals(5, tm2.minutes);
+    assertEquals(6, tm2.secs);
+    assertEquals(7 * 100000 + 8 * 10000 + 9 * 1000, tm2.fsecs);
+
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_DMY);
+    tm2 = DateTimeUtil.decodeDateTime("01/02/03 04:05:06.789");
+    assertEquals(2003, tm2.years);
+    assertEquals(2, tm2.monthOfYear);
+    assertEquals(1, tm2.dayOfMonth);
+    assertEquals(4, tm2.hours);
+    assertEquals(5, tm2.minutes);
+    assertEquals(6, tm2.secs);
+    assertEquals(7 * 100000 + 8 * 10000 + 9 * 1000, tm2.fsecs);
+
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_YMD);
+    tm2 = DateTimeUtil.decodeDateTime("1999-Jan-08 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    tm2 = DateTimeUtil.decodeDateTime("Jan-08-1999 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    tm2 = DateTimeUtil.decodeDateTime("08-Jan-1999 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    tm2 = DateTimeUtil.decodeDateTime("99-Jan-08 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    //January 8, except error in YMD mode
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_MDY);
+    tm2 = DateTimeUtil.decodeDateTime("08-Jan-99 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    //January 8, except error in YMD mode
+    tm2 = DateTimeUtil.decodeDateTime("Jan-08-99 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    TajoConf.setDateOrder(DateTimeConstants.DATEORDER_YMD);
+    tm2 = DateTimeUtil.decodeDateTime("19990108 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    tm2 = DateTimeUtil.decodeDateTime("990108 04:05:06.789");
+    assertEquals(tm, tm2);
+
+    //year and day of year
+    tm2 = DateTimeUtil.decodeDateTime("1999.008");
+    assertEquals(1999, tm2.years);
+    assertEquals(1, tm2.monthOfYear);
+    assertEquals(8, tm2.dayOfMonth);
+
+    //BC
+    tm = DateTimeUtil.decodeDateTime("19990108 BC 04:05:06.789");
+    assertEquals(-1998, tm.years);
+    assertEquals(1, tm.monthOfYear);
+    assertEquals(8, tm.dayOfMonth);
+    assertEquals(4, tm.hours);
+    assertEquals(5, tm.minutes);
+    assertEquals(6, tm.secs);
+    assertEquals(7 * 100000 + 8 * 10000 + 9 * 1000, tm.fsecs);
+
+    //PM
+    tm = DateTimeUtil.decodeDateTime("2013-04-25 10:20:30.4 PM");
+    assertEquals(2013, tm.years);
+    assertEquals(4, tm.monthOfYear);
+    assertEquals(25, tm.dayOfMonth);
+    assertEquals(22, tm.hours);
+    assertEquals(20, tm.minutes);
+    assertEquals(30, tm.secs);
+    assertEquals(4 * 100000, tm.fsecs);
+
+    // date only
+    tm = DateTimeUtil.decodeDateTime("1980-04-01");
+    assertEquals(1980, tm.years);
+    assertEquals(4, tm.monthOfYear);
+    assertEquals(1, tm.dayOfMonth);
+  }
+
+  @Test
+  public void testToJulianTimestamp() {
+    long julian = DateTimeUtil.toJulianTimestamp("2013-04-25");
+    assertEquals(julian, DateTimeUtil.toJulianTimestamp("2013-4-25"));
+    assertEquals(julian, DateTimeUtil.toJulianTimestamp("2013.4.25"));
+  }
+
+  @Test
+  public void testTimestampToJavaOrUnix() {
+    Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
+    long javaTime = System.currentTimeMillis();
+    cal.setTimeInMillis(javaTime);
+
+    long julianTimestamp = DateTimeUtil.javaTimeToJulianTime(cal.getTimeInMillis());
+
+    assertEquals(javaTime, DateTimeUtil.julianTimeToJavaTime(julianTimestamp));
+    assertEquals(javaTime/1000, DateTimeUtil.julianTimeToEpoch(julianTimestamp));
+  }
+
+  @Test
+  public void testLeapYear() {
+    assertTrue(DateTimeUtil.isLeapYear(2000));
+    assertTrue(DateTimeUtil.isLeapYear(2004));
+    assertTrue(DateTimeUtil.isLeapYear(1600));
+    assertFalse(DateTimeUtil.isLeapYear(1900));
+    assertFalse(DateTimeUtil.isLeapYear(2005));
+  }
+
+  @Test
+  public void testAddMonthsToTimeMeta() {
+    // Leap year
+    String dateTimeStr = "2000-01-29 23:11:50.123";
+    TimeMeta tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusMonths(1);
+    assertEquals("2000-02-29 23:11:50.123", tm.toString());
+
+    // Non leap year
+    dateTimeStr = "1999-01-29 23:11:50.123";
+    tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusMonths(1);
+    assertEquals("1999-02-28 23:11:50.123", tm.toString());
+
+    // changing year
+    dateTimeStr = "2013-09-30 23:11:50.123";
+    tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusMonths(5);
+    assertEquals("2014-02-28 23:11:50.123", tm.toString());
+
+    // minus value
+    dateTimeStr = "2013-03-30 23:11:50.123";
+    tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusMonths(-5);
+    assertEquals("2012-10-30 23:11:50.123", tm.toString());
+  }
+
+  @Test
+  public void testAddDaysToTimeMeta() {
+    // Leap year
+    String dateTimeStr = "2000-02-29 23:11:50.123";
+    TimeMeta tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusDays(1);
+    assertEquals("2000-03-01 23:11:50.123", tm.toString());
+
+    // Non leap year
+    dateTimeStr = "1999-01-29 23:11:50.123";
+    tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusDays(1);
+    assertEquals("1999-01-30 23:11:50.123", tm.toString());
+
+    // changing year
+    dateTimeStr = "2013-12-25 23:11:50.123";
+    tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusDays(7);
+    assertEquals("2014-01-01 23:11:50.123", tm.toString());
+
+    // minus value
+    dateTimeStr = "2000-03-05 23:11:50.123";
+    tm = DateTimeUtil.decodeDateTime(dateTimeStr);
+    tm.plusDays(-10);
+    assertEquals("2000-02-24 23:11:50.123", tm.toString());
+  }
+
+  @Test
+  public void testEncodeDateTime() throws Exception {
+    //DateTimeUtil.encodeDateTime()
+
+  }
+
+  @Test
+  public void testAppendSeconds() throws Exception {
+    String[] fractions = new String[]{".999999", ".99999", ".9999", ".999", ".99", ".9", ""};
+
+    for (int i = 0; i < fractions.length; i++) {
+      StringBuilder sb = new StringBuilder("13:52:");
+      DateTimeUtil.appendSecondsToEncodeOutput(sb, 23, 999999, 6 - i, false);
+      assertEquals("13:52:23" + fractions[i], sb.toString());
+    }
+
+    fractions = new String[]{".1", ".01", ".001", ".0001", ".00001", ".000001"};
+    for (int i = 0; i < fractions.length; i++) {
+      StringBuilder sb = new StringBuilder("13:52:");
+      DateTimeUtil.appendSecondsToEncodeOutput(sb, 23, (int)Math.pow(10, (5 - i)), 6, false);
+      assertEquals("13:52:23" + fractions[i], sb.toString());
+    }
+  }
+
+  @Test
+  public void testTrimTrailingZeros() throws Exception {
+    StringBuilder sb1 = new StringBuilder("1.1200");
+    DateTimeUtil.trimTrailingZeros(sb1);
+    assertEquals("1.12", sb1.toString());
+
+    StringBuilder sb2 = new StringBuilder("1.12000120");
+    DateTimeUtil.trimTrailingZeros(sb2);
+    assertEquals("1.1200012", sb2.toString());
+
+    StringBuilder sb3 = new StringBuilder(".12000120");
+    DateTimeUtil.trimTrailingZeros(sb3);
+    assertEquals(".1200012", sb3.toString());
+  }
+
+  @Test
+  public void testTimeMeta() {
+    TimeMeta tm = DateTimeUtil.decodeDateTime("2014-12-31");
+    assertEquals(365, tm.getDayOfYear());
+
+    tm = DateTimeUtil.decodeDateTime("2000-03-01");
+    assertEquals(61, tm.getDayOfYear());
+
+    tm = DateTimeUtil.decodeDateTime("2014-01-01");
+    assertEquals(3, tm.getDayOfWeek());
+    assertEquals(1, tm.getWeekOfYear());
+    assertEquals(21, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("2000-03-01");
+    assertEquals(3, tm.getDayOfWeek());
+    assertEquals(9, tm.getWeekOfYear());
+    assertEquals(20, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("1752-09-14");
+    assertEquals(4, tm.getDayOfWeek());
+    assertEquals(37, tm.getWeekOfYear());
+    assertEquals(18, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("1752-09-02");
+    assertEquals(6, tm.getDayOfWeek());
+    assertEquals(35, tm.getWeekOfYear());
+    assertEquals(18, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("1200-04-01");
+    assertEquals(6, tm.getDayOfWeek());
+    assertEquals(13, tm.getWeekOfYear());
+    assertEquals(12, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("400-04-20");
+    assertEquals(4, tm.getDayOfWeek());
+    assertEquals(16, tm.getWeekOfYear());
+    assertEquals(4, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("310-12-31");
+    assertEquals(6, tm.getDayOfWeek());
+    assertEquals(52, tm.getWeekOfYear());
+    assertEquals(4, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("0080-02-29");
+    assertEquals(4, tm.getDayOfWeek());
+    assertEquals(9, tm.getWeekOfYear());
+    assertEquals(1, tm.getCenturyOfEra());
+
+    tm = DateTimeUtil.decodeDateTime("400-03-01 BC");
+    assertEquals(4, tm.getDayOfWeek());
+    assertEquals(9, tm.getWeekOfYear());
+    assertEquals(-4, tm.getCenturyOfEra());
+  }
+
+  @Test
+  public void testStrtoi() {
+    StringBuilder sb = new StringBuilder();
+    int intVal = 12345;
+    String textVal = "test";
+
+    int value = DateTimeUtil.strtoi(intVal + textVal, 0, sb);
+    assertEquals(intVal, value);
+    assertEquals(textVal, sb.toString());
+
+    textVal = "";
+
+    value = DateTimeUtil.strtoi(intVal + textVal, 0, sb);
+    assertEquals(intVal, value);
+    assertEquals(textVal, sb.toString());
+  }
+
+  @Test
+  public void testGetCenturyOfEra() {
+    assertEquals(1, DateTimeUtil.getCenturyOfEra(1));
+    assertEquals(1, DateTimeUtil.getCenturyOfEra(100));
+    assertEquals(2, DateTimeUtil.getCenturyOfEra(101));
+    assertEquals(10, DateTimeUtil.getCenturyOfEra(1000));
+    assertEquals(20, DateTimeUtil.getCenturyOfEra(1998));
+    assertEquals(20, DateTimeUtil.getCenturyOfEra(1999));
+    assertEquals(20, DateTimeUtil.getCenturyOfEra(2000));
+    assertEquals(21, DateTimeUtil.getCenturyOfEra(2001));
+    assertEquals(21, DateTimeUtil.getCenturyOfEra(2100));
+    assertEquals(22, DateTimeUtil.getCenturyOfEra(2101));
+
+    assertEquals(-6, DateTimeUtil.getCenturyOfEra(-600));
+    assertEquals(-6, DateTimeUtil.getCenturyOfEra(-501));
+    assertEquals(-5, DateTimeUtil.getCenturyOfEra(-500));
+    assertEquals(-5, DateTimeUtil.getCenturyOfEra(-455));
+    assertEquals(-1, DateTimeUtil.getCenturyOfEra(-1));
+  }
+
+  @Test
+  public void testGetTimeZoneDisplayTime() {
+    assertEquals("", DateTimeUtil.getTimeZoneDisplayTime(TimeZone.getTimeZone("GMT")));
+    assertEquals("+09", DateTimeUtil.getTimeZoneDisplayTime(TimeZone.getTimeZone("GMT+9")));
+    assertEquals("+09:10", DateTimeUtil.getTimeZoneDisplayTime(TimeZone.getTimeZone("GMT+9:10")));
+    assertEquals("-09", DateTimeUtil.getTimeZoneDisplayTime(TimeZone.getTimeZone("GMT-9")));
+    assertEquals("-09:10", DateTimeUtil.getTimeZoneDisplayTime(TimeZone.getTimeZone("GMT-9:10")));
+  }
+
+  @Test
+  public void testGetYear() {
+    assertEquals(DateTime.parse("2014-01-01 00:00:00", fmt.withZoneUTC()).getMillis() * 1000,
+        DateTimeUtil.getYear(TEST_DATETIME));
+  }
+
+  @Test
+  public void testGetMonth() {
+    assertEquals(DateTime.parse("2014-04-01 00:00:00", fmt.withZoneUTC()).getMillis() * 1000,
+        DateTimeUtil.getMonth(TEST_DATETIME));
+  }
+
+  @Test
+  public void testGetDay() {
+    assertEquals(DateTime.parse("2014-04-18 00:00:00", fmt.withZoneUTC()).getMillis() * 1000,
+        DateTimeUtil.getDay(TEST_DATETIME));
+  }
+
+  @Test
+  public void testGetHour() {
+    assertEquals(DateTime.parse("2014-04-18 00:00:00",fmt.withZoneUTC()).getMillis() * 1000,
+        DateTimeUtil.getHour(TEST_DATETIME));
+  }
+
+  @Test
+  public void testGetMinute() {
+    assertEquals(DateTime.parse("2014-04-18 00:15:00",fmt.withZoneUTC()).getMillis() * 1000,
+        DateTimeUtil.getMinute(TEST_DATETIME));
+  }
+
+  @Test
+  public void testGetSecond() {
+    assertEquals(DateTime.parse("2014-04-18 00:15:25",fmt.withZoneUTC()).getMillis() * 1000,
+        DateTimeUtil.getSecond(TEST_DATETIME));
+  }
+}
diff --git a/tajo-common/src/test/java/org/apache/tajo/util/TestStringUtil.java b/tajo-common/src/test/java/org/apache/tajo/util/TestStringUtil.java
new file mode 100644
index 0000000..5c13f8f
--- /dev/null
+++ b/tajo-common/src/test/java/org/apache/tajo/util/TestStringUtil.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.util;
+
+import org.apache.commons.lang.CharUtils;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+
+public class TestStringUtil {
+
+  @Test
+  public void testUnicodeEscapedDelimiter() {
+    for (int i = 0; i < 128; i++) {
+      char c = (char) i;
+      String delimiter = CharUtils.unicodeEscaped(c);
+      String escapedDelimiter = StringUtils.unicodeEscapedDelimiter(delimiter);
+      assertEquals(delimiter, escapedDelimiter);
+      assertEquals(1, StringEscapeUtils.unescapeJava(escapedDelimiter).length());
+      assertEquals(c, StringEscapeUtils.unescapeJava(escapedDelimiter).charAt(0));
+    }
+  }
+
+  @Test
+  public void testUnescapedDelimiter() {
+    for (int i = 0; i < 128; i++) {
+      char c = (char) i;
+      String delimiter = String.valueOf(c);
+      String escapedDelimiter = StringUtils.unicodeEscapedDelimiter(delimiter);
+      assertEquals(CharUtils.unicodeEscaped(c), escapedDelimiter);
+      assertEquals(1, StringEscapeUtils.unescapeJava(escapedDelimiter).length());
+      assertEquals(c, StringEscapeUtils.unescapeJava(escapedDelimiter).charAt(0));
+    }
+  }
+
+  @Test
+  public void testVariousDelimiter() {
+    /*
+    * Character         ASCII    Unicode
+    *
+    * Horizontal tab    9        <U0009>
+    * Space Bar         32       <U0020>
+    * 1                 49       <U0031>
+    * |                 124      <U007c>
+    *
+    * */
+
+
+    String escapedDelimiter = "\\u0031";
+
+    assertEquals(escapedDelimiter, StringUtils.unicodeEscapedDelimiter("1"));
+    assertEquals(escapedDelimiter, StringUtils.unicodeEscapedDelimiter("\\1"));
+    assertEquals(escapedDelimiter, StringUtils.unicodeEscapedDelimiter("\\u0031"));
+    assertEquals(escapedDelimiter, StringUtils.unicodeEscapedDelimiter((char)49));
+    assertNotEquals(escapedDelimiter, StringUtils.unicodeEscapedDelimiter('\001'));
+
+    String delimiter = "|";
+    assertEquals("\\u007c", StringUtils.unicodeEscapedDelimiter(delimiter));
+    assertEquals(delimiter, StringEscapeUtils.unescapeJava(StringUtils.unicodeEscapedDelimiter(delimiter)));
+
+
+    String commaDelimiter = ",";
+    assertEquals("\\u002c", StringUtils.unicodeEscapedDelimiter(commaDelimiter));
+    assertEquals(commaDelimiter, StringEscapeUtils.unescapeJava(StringUtils.unicodeEscapedDelimiter(commaDelimiter)));
+
+    String tabDelimiter = "\t";
+    assertEquals("\\u0009", StringUtils.unicodeEscapedDelimiter(tabDelimiter));
+    assertEquals(tabDelimiter, StringEscapeUtils.unescapeJava(StringUtils.unicodeEscapedDelimiter(tabDelimiter)));
+
+    String spaceDelimiter = " ";
+    assertEquals("\\u0020", StringUtils.unicodeEscapedDelimiter(spaceDelimiter));
+    assertEquals(spaceDelimiter, StringEscapeUtils.unescapeJava(StringUtils.unicodeEscapedDelimiter(spaceDelimiter)));
+  }
+}
diff --git a/tajo-common/src/test/java/org/apache/tajo/util/TestTimeStampUtil.java b/tajo-common/src/test/java/org/apache/tajo/util/TestTimeStampUtil.java
deleted file mode 100644
index 353063d..0000000
--- a/tajo-common/src/test/java/org/apache/tajo/util/TestTimeStampUtil.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.util;
-
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-import org.joda.time.DateTime;
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestTimeStampUtil {
-  private static final int TEST_YEAR = 2014;
-  private static final int TEST_MONTH_OF_YEAR = 4;
-  private static final int TEST_DAY_OF_MONTH = 18;
-  private static final int TEST_HOUR_OF_DAY = 0;
-  private static final int TEST_MINUTE_OF_HOUR = 15;
-  private static final int TEST_SECOND_OF_MINUTE = 25;
-  private static final DateTime TEST_DATETIME = new DateTime(TEST_YEAR, TEST_MONTH_OF_YEAR, TEST_DAY_OF_MONTH,
-      TEST_HOUR_OF_DAY, TEST_MINUTE_OF_HOUR, TEST_SECOND_OF_MINUTE, DateTimeZone.UTC);
-  private static final DateTimeFormatter fmt = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss");
-
-  @Test
-  public void testGetYear() {
-    assertEquals(DateTime.parse("2014-01-01 00:00:00", fmt.withZoneUTC()).getMillis() * 1000,
-        TimeStampUtil.getYear(TEST_DATETIME));
-  }
-
-  @Test
-  public void testGetMonth() {
-    assertEquals(DateTime.parse("2014-04-01 00:00:00", fmt.withZoneUTC()).getMillis() * 1000,
-        TimeStampUtil.getMonth(TEST_DATETIME));
-  }
-
-  @Test
-  public void testGetDay() {
-    assertEquals(DateTime.parse("2014-04-18 00:00:00", fmt.withZoneUTC()).getMillis() * 1000,
-        TimeStampUtil.getDay(TEST_DATETIME));
-  }
-
-  @Test
-  public void testGetHour() {
-    assertEquals(DateTime.parse("2014-04-18 00:00:00",fmt.withZoneUTC()).getMillis() * 1000,
-        TimeStampUtil.getHour(TEST_DATETIME));
-  }
-
-  @Test
-  public void testGetMinute() {
-    assertEquals(DateTime.parse("2014-04-18 00:15:00",fmt.withZoneUTC()).getMillis() * 1000,
-        TimeStampUtil.getMinute(TEST_DATETIME));
-  }
-
-  @Test
-  public void testGetSecond() {
-    assertEquals(DateTime.parse("2014-04-18 00:15:25",fmt.withZoneUTC()).getMillis() * 1000,
-        TimeStampUtil.getSecond(TEST_DATETIME));
-  }
-}
diff --git a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/HiveQLLexer.g4 b/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/HiveQLLexer.g4
deleted file mode 100644
index f7b76ef..0000000
--- a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/HiveQLLexer.g4
+++ /dev/null
@@ -1,390 +0,0 @@
-/**
-   Licensed to the Apache Software Foundation (ASF) under one or more 
-   contributor license agreements.  See the NOTICE file distributed with 
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with 
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-*/
-lexer grammar HiveQLLexer;
-
-
-// Keywords
-
-KW_TRUE : 'TRUE';
-KW_FALSE : 'FALSE';
-KW_ALL : 'ALL';
-KW_AND : 'AND';
-KW_OR : 'OR';
-KW_NOT : 'NOT' | '!';
-KW_LIKE : 'LIKE';
-
-KW_IF : 'IF';
-KW_EXISTS : 'EXISTS';
-
-KW_ASC : 'ASC';
-KW_DESC : 'DESC';
-KW_ORDER : 'ORDER';
-KW_GROUP : 'GROUP';
-KW_BY : 'BY';
-KW_HAVING : 'HAVING';
-KW_WHERE : 'WHERE';
-KW_FROM : 'FROM';
-KW_AS : 'AS';
-KW_SELECT : 'SELECT';
-KW_DISTINCT : 'DISTINCT';
-KW_INSERT : 'INSERT';
-KW_OVERWRITE : 'OVERWRITE';
-KW_OUTER : 'OUTER';
-KW_UNIQUEJOIN : 'UNIQUEJOIN';
-KW_PRESERVE : 'PRESERVE';
-KW_JOIN : 'JOIN';
-KW_LEFT : 'LEFT';
-KW_RIGHT : 'RIGHT';
-KW_FULL : 'FULL';
-KW_ON : 'ON';
-KW_PARTITION : 'PARTITION';
-KW_PARTITIONS : 'PARTITIONS';
-KW_TABLE: 'TABLE';
-KW_TABLES: 'TABLES';
-KW_COLUMNS: 'COLUMNS';
-KW_INDEX: 'INDEX';
-KW_INDEXES: 'INDEXES';
-KW_REBUILD: 'REBUILD';
-KW_FUNCTIONS: 'FUNCTIONS';
-KW_SHOW: 'SHOW';
-KW_MSCK: 'MSCK';
-KW_REPAIR: 'REPAIR';
-KW_DIRECTORY: 'DIRECTORY';
-KW_LOCAL: 'LOCAL';
-KW_TRANSFORM : 'TRANSFORM';
-KW_USING: 'USING';
-KW_CLUSTER: 'CLUSTER';
-KW_DISTRIBUTE: 'DISTRIBUTE';
-KW_SORT: 'SORT';
-KW_UNION: 'UNION';
-KW_LOAD: 'LOAD';
-KW_EXPORT: 'EXPORT';
-KW_IMPORT: 'IMPORT';
-KW_DATA: 'DATA';
-KW_INPATH: 'INPATH';
-KW_IS: 'IS';
-KW_NULL: 'NULL';
-KW_CREATE: 'CREATE';
-KW_EXTERNAL: 'EXTERNAL';
-KW_ALTER: 'ALTER';
-KW_CHANGE: 'CHANGE';
-KW_COLUMN: 'COLUMN';
-KW_FIRST: 'FIRST';
-KW_AFTER: 'AFTER';
-KW_DESCRIBE: 'DESCRIBE';
-KW_DROP: 'DROP';
-KW_RENAME: 'RENAME';
-KW_IGNORE: 'IGNORE';
-KW_PROTECTION: 'PROTECTION';
-KW_TO: 'TO';
-KW_COMMENT: 'COMMENT';
-KW_BOOLEAN: 'BOOLEAN';
-KW_TINYINT: 'TINYINT';
-KW_SMALLINT: 'SMALLINT';
-KW_INT: 'INT';
-KW_BIGINT: 'BIGINT';
-KW_FLOAT: 'FLOAT';
-KW_DOUBLE: 'DOUBLE';
-KW_DATE: 'DATE';
-KW_DATETIME: 'DATETIME';
-KW_TIMESTAMP: 'TIMESTAMP';
-KW_DECIMAL: 'DECIMAL';
-KW_STRING: 'STRING';
-KW_ARRAY: 'ARRAY';
-KW_STRUCT: 'STRUCT';
-KW_MAP: 'MAP';
-KW_UNIONTYPE: 'UNIONTYPE';
-KW_REDUCE: 'REDUCE';
-KW_PARTITIONED: 'PARTITIONED';
-KW_CLUSTERED: 'CLUSTERED';
-KW_SORTED: 'SORTED';
-KW_INTO: 'INTO';
-KW_BUCKETS: 'BUCKETS';
-KW_ROW: 'ROW';
-KW_ROWS: 'ROWS';
-KW_FORMAT: 'FORMAT';
-KW_DELIMITED: 'DELIMITED';
-KW_FIELDS: 'FIELDS';
-KW_TERMINATED: 'TERMINATED';
-KW_ESCAPED: 'ESCAPED';
-KW_COLLECTION: 'COLLECTION';
-KW_ITEMS: 'ITEMS';
-KW_KEYS: 'KEYS';
-KW_KEY_TYPE: '$KEY$';
-KW_LINES: 'LINES';
-KW_STORED: 'STORED';
-KW_FILEFORMAT: 'FILEFORMAT';
-KW_SEQUENCEFILE: 'SEQUENCEFILE';
-KW_TEXTFILE: 'TEXTFILE';
-KW_RCFILE: 'RCFILE';
-KW_ORCFILE: 'ORC';
-KW_INPUTFORMAT: 'INPUTFORMAT';
-KW_OUTPUTFORMAT: 'OUTPUTFORMAT';
-KW_INPUTDRIVER: 'INPUTDRIVER';
-KW_OUTPUTDRIVER: 'OUTPUTDRIVER';
-KW_OFFLINE: 'OFFLINE';
-KW_ENABLE: 'ENABLE';
-KW_DISABLE: 'DISABLE';
-KW_READONLY: 'READONLY';
-KW_NO_DROP: 'NO_DROP';
-KW_LOCATION: 'LOCATION';
-KW_TABLESAMPLE: 'TABLESAMPLE';
-KW_BUCKET: 'BUCKET';
-KW_OUT: 'OUT';
-KW_OF: 'OF';
-KW_PERCENT: 'PERCENT';
-KW_CAST: 'CAST';
-KW_ADD: 'ADD';
-KW_REPLACE: 'REPLACE';
-KW_RLIKE: 'RLIKE';
-KW_REGEXP: 'REGEXP';
-KW_TEMPORARY: 'TEMPORARY';
-KW_FUNCTION: 'FUNCTION';
-KW_EXPLAIN: 'EXPLAIN';
-KW_EXTENDED: 'EXTENDED';
-KW_FORMATTED: 'FORMATTED';
-KW_PRETTY: 'PRETTY';
-KW_DEPENDENCY: 'DEPENDENCY';
-KW_SERDE: 'SERDE';
-KW_WITH: 'WITH';
-KW_DEFERRED: 'DEFERRED';
-KW_SERDEPROPERTIES: 'SERDEPROPERTIES';
-KW_DBPROPERTIES: 'DBPROPERTIES';
-KW_LIMIT: 'LIMIT';
-KW_SET: 'SET';
-KW_UNSET: 'UNSET';
-KW_TBLPROPERTIES: 'TBLPROPERTIES';
-KW_IDXPROPERTIES: 'IDXPROPERTIES';
-KW_VALUE_TYPE: '$VALUE$';
-KW_ELEM_TYPE: '$ELEM$';
-KW_CASE: 'CASE';
-KW_WHEN: 'WHEN';
-KW_THEN: 'THEN';
-KW_ELSE: 'ELSE';
-KW_END: 'END';
-KW_MAPJOIN: 'MAPJOIN';
-KW_STREAMTABLE: 'STREAMTABLE';
-KW_HOLD_DDLTIME: 'HOLD_DDLTIME';
-KW_CLUSTERSTATUS: 'CLUSTERSTATUS';
-KW_UTC: 'UTC';
-KW_UTCTIMESTAMP: 'UTC_TMESTAMP';
-KW_LONG: 'LONG';
-KW_DELETE: 'DELETE';
-KW_PLUS: 'PLUS';
-KW_MINUS: 'MINUS';
-KW_FETCH: 'FETCH';
-KW_INTERSECT: 'INTERSECT';
-KW_VIEW: 'VIEW';
-KW_IN: 'IN';
-KW_DATABASE: 'DATABASE';
-KW_DATABASES: 'DATABASES';
-KW_MATERIALIZED: 'MATERIALIZED';
-KW_SCHEMA: 'SCHEMA';
-KW_SCHEMAS: 'SCHEMAS';
-KW_GRANT: 'GRANT';
-KW_REVOKE: 'REVOKE';
-KW_SSL: 'SSL';
-KW_UNDO: 'UNDO';
-KW_LOCK: 'LOCK';
-KW_LOCKS: 'LOCKS';
-KW_UNLOCK: 'UNLOCK';
-KW_SHARED: 'SHARED';
-KW_EXCLUSIVE: 'EXCLUSIVE';
-KW_PROCEDURE: 'PROCEDURE';
-KW_UNSIGNED: 'UNSIGNED';
-KW_WHILE: 'WHILE';
-KW_READ: 'READ';
-KW_READS: 'READS';
-KW_PURGE: 'PURGE';
-KW_RANGE: 'RANGE';
-KW_ANALYZE: 'ANALYZE';
-KW_BEFORE: 'BEFORE';
-KW_BETWEEN: 'BETWEEN';
-KW_BOTH: 'BOTH';
-KW_BINARY: 'BINARY';
-KW_CROSS: 'CROSS';
-KW_CONTINUE: 'CONTINUE';
-KW_CURSOR: 'CURSOR';
-KW_TRIGGER: 'TRIGGER';
-KW_RECORDREADER: 'RECORDREADER';
-KW_RECORDWRITER: 'RECORDWRITER';
-KW_SEMI: 'SEMI';
-KW_LATERAL: 'LATERAL';
-KW_TOUCH: 'TOUCH';
-KW_ARCHIVE: 'ARCHIVE';
-KW_UNARCHIVE: 'UNARCHIVE';
-KW_COMPUTE: 'COMPUTE';
-KW_STATISTICS: 'STATISTICS';
-KW_USE: 'USE';
-KW_OPTION: 'OPTION';
-KW_CONCATENATE: 'CONCATENATE';
-KW_SHOW_DATABASE: 'SHOW_DATABASE';
-KW_UPDATE: 'UPDATE';
-KW_RESTRICT: 'RESTRICT';
-KW_CASCADE: 'CASCADE';
-KW_SKEWED: 'SKEWED';
-KW_ROLLUP: 'ROLLUP';
-KW_CUBE: 'CUBE';
-KW_DIRECTORIES: 'DIRECTORIES';
-KW_FOR: 'FOR';
-KW_WINDOW: 'WINDOW';
-KW_UNBOUNDED: 'UNBOUNDED';
-KW_PRECEDING: 'PRECEDING';
-KW_FOLLOWING: 'FOLLOWING';
-KW_CURRENT: 'CURRENT';
-KW_LESS: 'LESS';
-KW_MORE: 'MORE';
-KW_OVER: 'OVER';
-KW_GROUPING: 'GROUPING';
-KW_SETS: 'SETS';
-KW_TRUNCATE: 'TRUNCATE';
-KW_NOSCAN: 'NOSCAN';
-KW_PARTIALSCAN: 'PARTIALSCAN';
-KW_USER: 'USER';
-KW_ROLE: 'ROLE';
-KW_INNER: 'INNER';
-
-// Operators
-// NOTE: if you add a new function/operator, add it to sysFuncNames so that describe function _FUNC_ will work.
-
-DOT : '.'; // generated as a part of Number rule
-COLON : ':' ;
-COMMA : ',' ;
-SEMICOLON : ';' ;
-
-LPAREN : '(' ;
-RPAREN : ')' ;
-LSQUARE : '[' ;
-RSQUARE : ']' ;
-LCURLY : '{';
-RCURLY : '}';
-
-EQUAL : '=' | '==';
-EQUAL_NS : '<=>';
-NOTEQUAL : '<>' | '!=';
-LESSTHANOREQUALTO : '<=';
-LESSTHAN : '<';
-GREATERTHANOREQUALTO : '>=';
-GREATERTHAN : '>';
-
-DIVIDE : '/';
-PLUS : '+';
-MINUS : '-';
-STAR : '*';
-MOD : '%';
-DIV : 'DIV';
-
-AMPERSAND : '&';
-TILDE : '~';
-BITWISEOR : '|';
-BITWISEXOR : '^';
-QUESTION : '?';
-DOLLAR : '$';
-
-// LITERALS
-fragment
-Letter
-    : 'a'..'z' | 'A'..'Z'
-    ;
-
-fragment
-HexDigit
-    : 'a'..'f' | 'A'..'F'
-    ;
-
-fragment
-Digit
-    :
-    '0'..'9'
-    ;
-
-fragment
-Exponent
-    :
-    ('e' | 'E') ( PLUS|MINUS )? (Digit)+
-    ;
-
-fragment
-RegexComponent
-    : 'a'..'z' | 'A'..'Z' | '0'..'9' | '_'
-    | PLUS | STAR | QUESTION | MINUS | DOT
-    | LPAREN | RPAREN | LSQUARE | RSQUARE | LCURLY | RCURLY
-    | BITWISEXOR | BITWISEOR | DOLLAR
-    ;
-
-StringLiteral
-    :
-    ( '\'' ( ~('\''|'\\') | ('\\' .) )* '\''
-    | '\"' ( ~('\"'|'\\') | ('\\' .) )* '\"'
-    )+
-    ;
-
-CharSetLiteral
-    :
-    StringLiteral
-    | '0' 'X' (HexDigit|Digit)+
-    ;
-
-BigintLiteral
-    :
-    (Digit)+ 'L'
-    ;
-
-SmallintLiteral
-    :
-    (Digit)+ 'S'
-    ;
-
-TinyintLiteral
-    :
-    (Digit)+ 'Y'
-    ;
-
-DecimalLiteral
-    :
-    Number 'B' 'D'
-    ;
-
-ByteLengthLiteral
-    :
-    (Digit)+ ('b' | 'B' | 'k' | 'K' | 'm' | 'M' | 'g' | 'G')
-    ;
-
-Number
-    :
-    (Digit)+ ( DOT (Digit)* (Exponent)? | Exponent)?
-    ;
-    
-Identifier
-    : (Letter | Digit) (Letter | Digit | '_')* { setText(getText().toLowerCase()); }
-    ;
-
-CharSetName
-    :
-    '_' (Letter | Digit | '_' | '-' | '.' | ':' )+
-    ;
-
-WS  :
-(' '|'\r'|'\t'|'\n') -> skip
-    ;
-
-COMMENT
-  : '--' (~('\n'|'\r'))*  -> skip
-  ;
-
-
-
diff --git a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/HiveQLParser.g4 b/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/HiveQLParser.g4
deleted file mode 100644
index 6a85695..0000000
--- a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/HiveQLParser.g4
+++ /dev/null
@@ -1,2067 +0,0 @@
-/**
-   Licensed to the Apache Software Foundation (ASF) under one or more 
-   contributor license agreements.  See the NOTICE file distributed with 
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with 
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-*/
-parser grammar HiveQLParser;
-
-options
-{
-tokenVocab=HiveQLLexer;
-language=Java;
-}
-
-
-// Package headers
-@header {
-import java.util.*;
-}
-
-
-@members {
-  Stack msgs = new Stack<String>();
-}
-
-@rulecatch {
-catch (RecognitionException e) {
- reportError(e);
-  throw e;
-}
-}
-
-//----------------------- Rules for parsing selectClause -----------------------------
-// select a,b,c ...
-selectClause
-@init { msgs.push("select clause"); }
-@after { msgs.pop(); }
-    :
-    KW_SELECT hintClause? (((KW_ALL | dist=KW_DISTINCT)? selectList)
-                          | (transform=KW_TRANSFORM selectTrfmClause))
-    |
-    trfmClause
-    ;
-
-selectList
-@init { msgs.push("select list"); }
-@after { msgs.pop(); }
-    :
-    selectItem ( COMMA  selectItem )* 
-    ;
-
-selectTrfmClause
-@init { msgs.push("transform clause"); }
-@after { msgs.pop(); }
-    :
-    LPAREN selectExpressionList RPAREN
-    inSerde=rowFormat inRec=recordWriter
-    KW_USING StringLiteral
-    ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)))?
-    outSerde=rowFormat outRec=recordReader
-    ;
-
-hintClause
-@init { msgs.push("hint clause"); }
-@after { msgs.pop(); }
-    :
-    DIVIDE STAR PLUS hintList STAR DIVIDE 
-    ;
-
-hintList
-@init { msgs.push("hint list"); }
-@after { msgs.pop(); }
-    :
-    hintItem (COMMA hintItem)* 
-    ;
-
-hintItem
-@init { msgs.push("hint item"); }
-@after { msgs.pop(); }
-    :
-    hintName (LPAREN hintArgs RPAREN)? 
-    ;
-
-hintName
-@init { msgs.push("hint name"); }
-@after { msgs.pop(); }
-    :
-    KW_MAPJOIN 
-    | KW_STREAMTABLE 
-    | KW_HOLD_DDLTIME 
-    ;
-
-hintArgs
-@init { msgs.push("hint arguments"); }
-@after { msgs.pop(); }
-    :
-    hintArgName (COMMA hintArgName)* 
-    ;
-
-hintArgName
-@init { msgs.push("hint argument name"); }
-@after { msgs.pop(); }
-    :
-    identifier
-    ;
-
-selectItem
-@init { msgs.push("selection target"); }
-@after { msgs.pop(); }
-    :
-    ( selectExpression (KW_OVER ws=window_specification )?
-      ((KW_AS? identifier) | (KW_AS LPAREN identifier (COMMA identifier)* RPAREN))?
-    ) 
-    ;
-
-trfmClause
-@init { msgs.push("transform clause"); }
-@after { msgs.pop(); }
-    :
-    (   KW_MAP    selectExpressionList
-      | KW_REDUCE selectExpressionList )
-    inSerde=rowFormat inRec=recordWriter
-    KW_USING StringLiteral
-    ( KW_AS ((LPAREN (aliasList | columnNameTypeList) RPAREN) | (aliasList | columnNameTypeList)))?
-    outSerde=rowFormat outRec=recordReader
-    ;
-
-selectExpression
-@init { msgs.push("select expression"); }
-@after { msgs.pop(); }
-    :
-    expression | tableAllColumns
-    ;
-
-selectExpressionList
-@init { msgs.push("select expression list"); }
-@after { msgs.pop(); }
-    :
-    selectExpression (COMMA selectExpression)* 
-    ;
-
-
-//---------------------- Rules for windowing clauses -------------------------------
-window_clause 
-@init { msgs.push("window_clause"); }
-@after { msgs.pop(); } 
-:
-  KW_WINDOW window_defn (COMMA window_defn)* 
-;  
-
-window_defn 
-@init { msgs.push("window_defn"); }
-@after { msgs.pop(); } 
-:
-  Identifier KW_AS window_specification 
-;  
-
-window_specification 
-@init { msgs.push("window_specification"); }
-@after { msgs.pop(); } 
-:
-  (Identifier | ( LPAREN Identifier? partitioningSpec? window_frame? RPAREN)) 
-;
-
-window_frame :
- window_range_expression |
- window_value_expression
-;
-
-window_range_expression 
-@init { msgs.push("window_range_expression"); }
-@after { msgs.pop(); } 
-:
- KW_ROWS sb=window_frame_start_boundary 
- KW_ROWS KW_BETWEEN s=window_frame_boundary KW_AND end=window_frame_boundary 
-;
-
-window_value_expression 
-@init { msgs.push("window_value_expression"); }
-@after { msgs.pop(); } 
-:
- KW_RANGE sb=window_frame_start_boundary 
- KW_RANGE KW_BETWEEN s=window_frame_boundary KW_AND end=window_frame_boundary 
-;
-
-window_frame_start_boundary 
-@init { msgs.push("windowframestartboundary"); }
-@after { msgs.pop(); } 
-:
-  KW_UNBOUNDED KW_PRECEDING  
-  KW_CURRENT KW_ROW  
-  Number KW_PRECEDING 
-;
-
-window_frame_boundary 
-@init { msgs.push("windowframeboundary"); }
-@after { msgs.pop(); } 
-:
-  KW_UNBOUNDED (r=KW_PRECEDING|r=KW_FOLLOWING)  
-  KW_CURRENT KW_ROW  
-  Number (d=KW_PRECEDING | d=KW_FOLLOWING ) 
-;   
-
-
-tableAllColumns
-    : STAR
-    | tableName DOT STAR
-    ;
-
-// (table|column)
-tableOrColumn
-@init { msgs.push("table or column identifier"); }
-@after { msgs.pop(); }
-    :
-    identifier 
-    ;
-
-expressionList
-@init { msgs.push("expression list"); }
-@after { msgs.pop(); }
-    :
-    expression (COMMA expression)* 
-    ;
-
-aliasList
-@init { msgs.push("alias list"); }
-@after { msgs.pop(); }
-    :
-    identifier (COMMA identifier)* 
-    ;
-
-
-//----------------------- Rules for parsing fromClause ------------------------------
-// from [col1, col2, col3] table1, [col4, col5] table2
-fromClause
-@init { msgs.push("from clause"); }
-@after { msgs.pop(); }
-    :
-    KW_FROM joinSource 
-    ;
-
-joinSource
-@init { msgs.push("join source"); }
-@after { msgs.pop(); }
-    : fromSource ( joinToken fromSource (KW_ON expression)? 
-    )*
-    | uniqueJoinToken uniqueJoinSource (COMMA uniqueJoinSource)+
-    ;
-
-uniqueJoinSource
-@init { msgs.push("join source"); }
-@after { msgs.pop(); }
-    : KW_PRESERVE? fromSource uniqueJoinExpr
-    ;
-
-uniqueJoinExpr
-@init { msgs.push("unique join expression list"); }
-@after { msgs.pop(); }
-    : LPAREN e1+=expression (COMMA e1+=expression)* RPAREN
-    ;
-
-uniqueJoinToken
-@init { msgs.push("unique join"); }
-@after { msgs.pop(); }
-    : KW_UNIQUEJOIN 
-;
-
-joinToken
-@init { msgs.push("join type specifier"); }
-@after { msgs.pop(); }
-    :
-      KW_JOIN                    
-    | KW_INNER  KW_JOIN            
-    | KW_CROSS KW_JOIN            
-    | KW_LEFT  KW_OUTER KW_JOIN   
-    | KW_RIGHT KW_OUTER KW_JOIN  
-    | KW_FULL  KW_OUTER KW_JOIN  
-    | KW_LEFT  KW_SEMI  KW_JOIN  
-    ;
-
-lateralView
-@init {msgs.push("lateral view"); }
-@after {msgs.pop(); }
-	:
-	KW_LATERAL KW_VIEW function tableAlias KW_AS identifier (COMMA identifier)* 
-	;
-
-tableAlias
-@init {msgs.push("table alias"); }
-@after {msgs.pop(); }
-    :
-    identifier 
-    ;
-
-fromSource
-@init { msgs.push("from source"); }
-@after { msgs.pop(); }
-    :
-    ((Identifier LPAREN) | tableSource | subQuerySource) (lateralView)*
-    ;
-
-tableBucketSample
-@init { msgs.push("table bucket sample specification"); }
-@after { msgs.pop(); }
-    :
-    KW_TABLESAMPLE LPAREN KW_BUCKET (numerator=Number) KW_OUT KW_OF (denominator=Number) (KW_ON expr+=expression (COMMA expr+=expression)*)? RPAREN 
-    ;
-
-splitSample
-@init { msgs.push("table split sample specification"); }
-@after { msgs.pop(); }
-    :
-    KW_TABLESAMPLE LPAREN  (numerator=Number) (percent=KW_PERCENT|KW_ROWS) RPAREN
-    |
-    KW_TABLESAMPLE LPAREN  (numerator=ByteLengthLiteral) RPAREN
-    ;
-
-tableSample
-@init { msgs.push("table sample specification"); }
-@after { msgs.pop(); }
-    :
-    tableBucketSample |
-    splitSample
-    ;
-
-tableSource
-@init { msgs.push("table source"); }
-@after { msgs.pop(); }
-    : tabname=tableName (ts=tableSample)? (alias=identifier)?
-    ;
-
-tableName
-@init { msgs.push("table name"); }
-@after { msgs.pop(); }
-    :
-    db=identifier DOT tab=identifier
-    |
-    tab=identifier
-    ;
-
-viewName
-@init { msgs.push("view name"); }
-@after { msgs.pop(); }
-    :
-    (db=identifier DOT)? view=identifier
-    ;
-
-subQuerySource
-@init { msgs.push("subquery source"); }
-@after { msgs.pop(); }
-    :
-    LPAREN queryStatementExpression RPAREN identifier 
-    ;
-
-//---------------------- Rules for parsing PTF clauses -----------------------------
-partitioningSpec
-@init { msgs.push("partitioningSpec clause"); }
-@after { msgs.pop(); } 
-   :
-   partitionByClause orderByClause? 
-   orderByClause 
-   distributeByClause sortByClause? 
-   sortByClause 
-   clusterByClause 
-   ;
-
-partitionTableFunctionSource
-@init { msgs.push("partitionTableFunctionSource clause"); }
-@after { msgs.pop(); } 
-   :
-   subQuerySource |
-   tableSource |
-   partitionedTableFunction
-   ;
-
-partitionedTableFunction
-@init { msgs.push("ptf clause"); }
-@after { msgs.pop(); } 
-   :
-   name=Identifier
-   LPAREN KW_ON ptfsrc=partitionTableFunctionSource partitioningSpec?
-     ((Identifier LPAREN expression RPAREN ) )? 
-   RPAREN alias=Identifier? 
-   ;
-
-//----------------------- Rules for parsing whereClause -----------------------------
-// where a=b and ...
-whereClause
-@init { msgs.push("where clause"); }
-@after { msgs.pop(); }
-    :
-    KW_WHERE searchCondition 
-    ;
-
-searchCondition
-@init { msgs.push("search condition"); }
-@after { msgs.pop(); }
-    :
-    expression
-    ;
-
-//-----------------------------------------------------------------------------------
-
-
-// group by a,b
-groupByClause
-@init { msgs.push("group by clause"); }
-@after { msgs.pop(); }
-    :
-    KW_GROUP KW_BY
-    groupByExpression
-    ( COMMA groupByExpression )*
-    ((rollup=KW_WITH KW_ROLLUP) | (cube=KW_WITH KW_CUBE)) ?
-    (sets=KW_GROUPING KW_SETS 
-    LPAREN groupingSetExpression ( COMMA groupingSetExpression)*  RPAREN ) ?
-    ;
-
-groupingSetExpression
-@init {msgs.push("grouping set expression"); }
-@after {msgs.pop(); }
-   :
-   groupByExpression
-   |
-   LPAREN 
-   groupByExpression (COMMA groupByExpression)*
-   RPAREN
-   |
-   LPAREN
-   RPAREN
-   ;
-
-
-groupByExpression
-@init { msgs.push("group by expression"); }
-@after { msgs.pop(); }
-    :
-    expression
-    ;
-
-havingClause
-@init { msgs.push("having clause"); }
-@after { msgs.pop(); }
-    :
-    KW_HAVING havingCondition 
-    ;
-
-havingCondition
-@init { msgs.push("having condition"); }
-@after { msgs.pop(); }
-    :
-    expression
-    ;
-
-// order by a,b
-orderByClause
-@init { msgs.push("order by clause"); }
-@after { msgs.pop(); }
-    :
-    KW_ORDER KW_BY
-    LPAREN columnRefOrder
-    ( COMMA columnRefOrder)* RPAREN 
-    |
-    KW_ORDER KW_BY
-    columnRefOrder
-    ( COMMA columnRefOrder)* 
-    ;
-
-clusterByClause
-@init { msgs.push("cluster by clause"); }
-@after { msgs.pop(); }
-    :
-    KW_CLUSTER KW_BY
-    LPAREN expression (COMMA expression)* RPAREN
-    |
-    KW_CLUSTER KW_BY
-    expression
-    ((COMMA))*
-    ;
-
-partitionByClause
-@init  { msgs.push("partition by clause"); }
-@after { msgs.pop(); }
-    :
-    KW_PARTITION KW_BY
-    LPAREN expression (COMMA expression)* RPAREN
-    |
-    KW_PARTITION KW_BY
-    expression ((COMMA))*
-    ;
-
-distributeByClause
-@init { msgs.push("distribute by clause"); }
-@after { msgs.pop(); }
-    :
-    KW_DISTRIBUTE KW_BY
-    LPAREN expression (COMMA expression)* RPAREN
-    |
-    KW_DISTRIBUTE KW_BY
-    expression ((COMMA))*
-    ;
-
-sortByClause
-@init { msgs.push("sort by clause"); }
-@after { msgs.pop(); }
-    :
-    KW_SORT KW_BY
-    LPAREN columnRefOrder
-    ( COMMA columnRefOrder)* RPAREN 
-    |
-    KW_SORT KW_BY
-    columnRefOrder
-    ( (COMMA))*
-    ;
-
-// fun(par1, par2, par3)
-function
-@init { msgs.push("function specification"); }
-@after { msgs.pop(); }
-    :
-    functionName
-    LPAREN
-      (
-        (star=STAR)
-        | (dist=KW_DISTINCT)? (selectExpression (COMMA selectExpression)*)?
-      )
-    RPAREN 
-    ;
-
-functionName
-@init { msgs.push("function name"); }
-@after { msgs.pop(); }
-    : // Keyword IF is also a function name
-    KW_IF | KW_ARRAY | KW_MAP | KW_STRUCT | KW_UNIONTYPE | identifier
-    ;
-
-castExpression
-@init { msgs.push("cast expression"); }
-@after { msgs.pop(); }
-    :
-    KW_CAST
-    LPAREN
-          expression
-          KW_AS
-          primitiveType
-    RPAREN 
-    ;
-
-caseExpression
-@init { msgs.push("case expression"); }
-@after { msgs.pop(); }
-    :
-    KW_CASE expression
-    (KW_WHEN expression KW_THEN expression)+
-    (KW_ELSE expression)?
-    KW_END 
-    ;
-
-whenExpression
-@init { msgs.push("case expression"); }
-@after { msgs.pop(); }
-    :
-    KW_CASE
-     ( KW_WHEN expression KW_THEN expression)+
-    (KW_ELSE expression)?
-    KW_END 
-    ;
-
-constant
-@init { msgs.push("constant"); }
-@after { msgs.pop(); }
-    :
-    Number
-    | StringLiteral
-    | stringLiteralSequence
-    | BigintLiteral
-    | SmallintLiteral
-    | TinyintLiteral
-    | DecimalLiteral
-    | charSetStringLiteral
-    | booleanValue
-    ;
-
-stringLiteralSequence
-    :
-    StringLiteral StringLiteral+ 
-    ;
-
-charSetStringLiteral
-@init { msgs.push("character string literal"); }
-@after { msgs.pop(); }
-    :
-    csName=CharSetName csLiteral=CharSetLiteral 
-    ;
-
-expression
-@init { msgs.push("expression specification"); }
-@after { msgs.pop(); }
-    :
-    precedenceOrExpression
-    ;
-
-atomExpression
-    :
-    KW_NULL 
-    | constant
-    | function
-    | castExpression
-    | caseExpression
-    | whenExpression
-    | tableOrColumn
-    | LPAREN expression RPAREN
-    ;
-
-
-precedenceFieldExpression
-    :
-    atomExpression ((LSQUARE expression RSQUARE) | (DOT identifier))*
-    ;
-
-precedenceUnaryOperator
-    :
-    PLUS | MINUS | TILDE
-    ;
-
-nullCondition
-    :
-    KW_NULL     
-    | KW_NOT KW_NULL     
-    ;
-
-precedenceUnaryPrefixExpression
-    :
-    (precedenceUnaryOperator)* precedenceFieldExpression
-    ;
-
-precedenceUnarySuffixExpression
-    : precedenceUnaryPrefixExpression (a=KW_IS nullCondition)?
-    ;
-
-
-precedenceBitwiseXorOperator
-    :
-    BITWISEXOR
-    ;
-
-precedenceBitwiseXorExpression
-    :
-    precedenceUnarySuffixExpression (precedenceBitwiseXorOperator precedenceUnarySuffixExpression)*
-    ;
-
-
-precedenceStarOperator
-    :
-    STAR | DIVIDE | MOD | DIV
-    ;
-
-precedenceStarExpression
-    :
-    precedenceBitwiseXorExpression (precedenceStarOperator precedenceBitwiseXorExpression)*
-    ;
-
-
-precedencePlusOperator
-    :
-    PLUS | MINUS
-    ;
-
-precedencePlusExpression
-    :
-    precedenceStarExpression (precedencePlusOperator precedenceStarExpression)*
-    ;
-
-
-precedenceAmpersandOperator
-    :
-    AMPERSAND
-    ;
-
-precedenceAmpersandExpression
-    :
-    precedencePlusExpression (precedenceAmpersandOperator precedencePlusExpression)*
-    ;
-
-
-precedenceBitwiseOrOperator
-    :
-    BITWISEOR
-    ;
-
-precedenceBitwiseOrExpression
-    :
-    precedenceAmpersandExpression (precedenceBitwiseOrOperator precedenceAmpersandExpression)*
-    ;
-
-
-// Equal operators supporting NOT prefix
-precedenceEqualNegatableOperator
-    :
-    KW_LIKE | KW_RLIKE | KW_REGEXP
-    ;
-
-precedenceEqualOperator
-    :
-    precedenceEqualNegatableOperator | EQUAL | EQUAL_NS | NOTEQUAL | LESSTHANOREQUALTO | LESSTHAN | GREATERTHANOREQUALTO | GREATERTHAN
-    ;
-
-precedenceEqualExpression
-    :
-    (left=precedenceBitwiseOrExpression     
-    )
-    (
-       (KW_NOT precedenceEqualNegatableOperator notExpr=precedenceBitwiseOrExpression) 
-    | (precedenceEqualOperator equalExpr=precedenceBitwiseOrExpression)
-    | (KW_NOT KW_IN expressions) 
-    | (KW_IN expressions) 
-    | ( KW_NOT KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND (max=precedenceBitwiseOrExpression) ) 
-    | ( KW_BETWEEN (min=precedenceBitwiseOrExpression) KW_AND (max=precedenceBitwiseOrExpression) )
-    )*
-    ;
-
-expressions
-    :
-    LPAREN expression (COMMA expression)* RPAREN 
-    ;
-
-precedenceNotOperator
-    :
-    KW_NOT
-    ;
-
-precedenceNotExpression
-    :
-    (precedenceNotOperator)* precedenceEqualExpression
-    ;
-
-
-precedenceAndOperator
-    :
-    KW_AND
-    ;
-
-precedenceAndExpression
-    :
-    precedenceNotExpression (precedenceAndOperator precedenceNotExpression)*
-    ;
-
-
-precedenceOrOperator
-    :
-    KW_OR
-    ;
-
-precedenceOrExpression
-    :
-    precedenceAndExpression (precedenceOrOperator precedenceAndExpression)*
-    ;
-
-
-booleanValue
-    :
-    KW_TRUE | KW_FALSE
-    ;
-
-tableOrPartition
-   :
-   tableName partitionSpec? 
-   ;
-
-partitionSpec
-    :
-    KW_PARTITION
-     LPAREN partitionVal (COMMA  partitionVal )* RPAREN 
-    ;
-
-partitionVal
-    :
-    identifier (EQUAL constant)? 
-    ;
-
-dropPartitionSpec
-    :
-    KW_PARTITION
-     LPAREN dropPartitionVal (COMMA  dropPartitionVal )* RPAREN 
-    ;
-
-dropPartitionVal
-    :
-    identifier dropPartitionOperator constant 
-    ;
-
-dropPartitionOperator
-    :
-    EQUAL | NOTEQUAL | LESSTHANOREQUALTO | LESSTHAN | GREATERTHANOREQUALTO | GREATERTHAN
-    ;
-
-sysFuncNames
-    :
-      KW_AND
-    | KW_OR
-    | KW_NOT
-    | KW_LIKE
-    | KW_IF
-    | KW_CASE
-    | KW_WHEN
-    | KW_TINYINT
-    | KW_SMALLINT
-    | KW_INT
-    | KW_BIGINT
-    | KW_FLOAT
-    | KW_DOUBLE
-    | KW_BOOLEAN
-    | KW_STRING
-    | KW_BINARY
-    | KW_ARRAY
-    | KW_MAP
-    | KW_STRUCT
-    | KW_UNIONTYPE
-    | EQUAL
-    | EQUAL_NS
-    | NOTEQUAL
-    | LESSTHANOREQUALTO
-    | LESSTHAN
-    | GREATERTHANOREQUALTO
-    | GREATERTHAN
-    | DIVIDE
-    | PLUS
-    | MINUS
-    | STAR
-    | MOD
-    | DIV
-    | AMPERSAND
-    | TILDE
-    | BITWISEOR
-    | BITWISEXOR
-    | KW_RLIKE
-    | KW_REGEXP
-    | KW_IN
-    | KW_BETWEEN
-    ;
-
-descFuncNames
-    :
-      sysFuncNames
-    | StringLiteral
-    | identifier
-    ;
-
-identifier
-    :
-    Identifier
-    | nonReserved 
-    ;
-    
-nonReserved
-    :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_INNER
-    ;
-
-//-----------------------------------------------------------------------------------
-
-// starting rule
-statement
-	: explainStatement EOF
-	| execStatement EOF
-	;
-
-explainStatement
-@init { msgs.push("explain statement"); }
-@after { msgs.pop(); }
-	: KW_EXPLAIN (explainOptions=KW_EXTENDED|explainOptions=KW_FORMATTED|explainOptions=KW_DEPENDENCY)? execStatement
-	;
-
-execStatement
-@init { msgs.push("statement"); }
-@after { msgs.pop(); }
-    : queryStatementExpression
-    | loadStatement
-    | exportStatement
-    | importStatement
-    | ddlStatement
-    ;
-
-loadStatement
-@init { msgs.push("load statement"); }
-@after { msgs.pop(); }
-    : KW_LOAD KW_DATA (islocal=KW_LOCAL)? KW_INPATH (path=StringLiteral) (isoverwrite=KW_OVERWRITE)? KW_INTO KW_TABLE (tab=tableOrPartition)
-    ;
-
-exportStatement
-@init { msgs.push("export statement"); }
-@after { msgs.pop(); }
-    : KW_EXPORT KW_TABLE (tab=tableOrPartition) KW_TO (path=StringLiteral)
-    ;
-
-importStatement
-@init { msgs.push("import statement"); }
-@after { msgs.pop(); }
-	: KW_IMPORT ((ext=KW_EXTERNAL)? KW_TABLE (tab=tableOrPartition))? KW_FROM (path=StringLiteral) tableLocation?
-    ;
-
-ddlStatement
-@init { msgs.push("ddl statement"); }
-@after { msgs.pop(); }
-    : createDatabaseStatement
-    | switchDatabaseStatement
-    | dropDatabaseStatement
-    | createTableStatement
-    | dropTableStatement
-    | truncateTableStatement
-    | alterStatement
-    | descStatement
-    | showStatement
-    | metastoreCheck
-    | createViewStatement
-    | dropViewStatement
-    | createFunctionStatement
-    | createIndexStatement
-    | dropIndexStatement
-    | dropFunctionStatement
-    | analyzeStatement
-    | lockStatement
-    | unlockStatement
-    | createRoleStatement
-    | dropRoleStatement
-    | grantPrivileges
-    | revokePrivileges
-    | showGrants
-    | showRoleGrants
-    | grantRole
-    | revokeRole
-    ;
-
-ifExists
-@init { msgs.push("if exists clause"); }
-@after { msgs.pop(); }
-    : KW_IF KW_EXISTS
-    ;
-
-restrictOrCascade
-@init { msgs.push("restrict or cascade clause"); }
-@after { msgs.pop(); }
-    : KW_RESTRICT
-    | KW_CASCADE
-    ;
-
-ifNotExists
-@init { msgs.push("if not exists clause"); }
-@after { msgs.pop(); }
-    : KW_IF KW_NOT KW_EXISTS
-    ;
-
-storedAsDirs
-@init { msgs.push("stored as directories"); }
-@after { msgs.pop(); }
-    : KW_STORED KW_AS KW_DIRECTORIES
-    ;
-
-orReplace
-@init { msgs.push("or replace clause"); }
-@after { msgs.pop(); }
-    : KW_OR KW_REPLACE
-    ;
-
-ignoreProtection
-@init { msgs.push("ignore protection clause"); }
-@after { msgs.pop(); }
-        : KW_IGNORE KW_PROTECTION
-        ;
-
-createDatabaseStatement
-@init { msgs.push("create database statement"); }
-@after { msgs.pop(); }
-    : KW_CREATE (KW_DATABASE|KW_SCHEMA)
-        ifNotExists?
-        name=identifier
-        databaseComment?
-        dbLocation?
-        (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
-    ;
-
-dbLocation
-@init { msgs.push("database location specification"); }
-@after { msgs.pop(); }
-    :
-      KW_LOCATION locn=StringLiteral 
-    ;
-
-dbProperties
-@init { msgs.push("dbproperties"); }
-@after { msgs.pop(); }
-    :
-      LPAREN dbPropertiesList RPAREN 
-    ;
-
-dbPropertiesList
-@init { msgs.push("database properties list"); }
-@after { msgs.pop(); }
-    :
-      keyValueProperty (COMMA keyValueProperty)* 
-    ;
-
-
-switchDatabaseStatement
-@init { msgs.push("switch database statement"); }
-@after { msgs.pop(); }
-    : KW_USE identifier
-    ;
-
-dropDatabaseStatement
-@init { msgs.push("drop database statement"); }
-@after { msgs.pop(); }
-    : KW_DROP (KW_DATABASE|KW_SCHEMA) ifExists? identifier restrictOrCascade?
-    ;
-
-databaseComment
-@init { msgs.push("database's comment"); }
-@after { msgs.pop(); }
-    : KW_COMMENT comment=StringLiteral
-    ;
-
-createTableStatement
-@init { msgs.push("create table statement"); }
-@after { msgs.pop(); }
-    : KW_CREATE (ext=KW_EXTERNAL)? KW_TABLE ifNotExists? name=tableName
-      (  like=KW_LIKE likeName=tableName
-         tableLocation?
-         tablePropertiesPrefixed?
-       | (LPAREN columnNameTypeList RPAREN)?
-         tableComment?
-         tablePartition?
-         tableBuckets?
-         tableSkewed?
-         tableRowFormat?
-         tableFileFormat?
-         tableLocation?
-         tablePropertiesPrefixed?
-         (KW_AS selectStatement)?
-      )
-    ;
-
-truncateTableStatement
-@init { msgs.push("truncate table statement"); }
-@after { msgs.pop(); }
-    : KW_TRUNCATE KW_TABLE tablePartitionPrefix 
-;
-
-createIndexStatement
-@init { msgs.push("create index statement");}
-@after {msgs.pop();}
-    : KW_CREATE KW_INDEX indexName=identifier
-      KW_ON KW_TABLE tab=tableName LPAREN indexedCols=columnNameList RPAREN
-      KW_AS typeName=StringLiteral
-      autoRebuild?
-      indexPropertiesPrefixed?
-      indexTblName?
-      tableRowFormat?
-      tableFileFormat?
-      tableLocation?
-      tablePropertiesPrefixed?
-      indexComment?
-    ;
-
-indexComment
-@init { msgs.push("comment on an index");}
-@after {msgs.pop();}
-        :
-                KW_COMMENT comment=StringLiteral  
-        ;
-
-autoRebuild
-@init { msgs.push("auto rebuild index");}
-@after {msgs.pop();}
-    : KW_WITH KW_DEFERRED KW_REBUILD
-    ;
-
-indexTblName
-@init { msgs.push("index table name");}
-@after {msgs.pop();}
-    : KW_IN KW_TABLE indexTbl=tableName
-    ;
-
-indexPropertiesPrefixed
-@init { msgs.push("table properties with prefix"); }
-@after { msgs.pop(); }
-    :
-        KW_IDXPROPERTIES indexProperties
-    ;
-
-indexProperties
-@init { msgs.push("index properties"); }
-@after { msgs.pop(); }
-    :
-      LPAREN indexPropertiesList RPAREN 
-    ;
-
-indexPropertiesList
-@init { msgs.push("index properties list"); }
-@after { msgs.pop(); }
-    :
-      keyValueProperty (COMMA keyValueProperty)* 
-    ;
-
-dropIndexStatement
-@init { msgs.push("drop index statement");}
-@after {msgs.pop();}
-    : KW_DROP KW_INDEX ifExists? indexName=identifier KW_ON tab=tableName
-    ;
-
-dropTableStatement
-@init { msgs.push("drop statement"); }
-@after { msgs.pop(); }
-    : KW_DROP KW_TABLE ifExists? tableName 
-    ;
-
-alterStatement
-@init { msgs.push("alter statement"); }
-@after { msgs.pop(); }
-    : 
-    KW_ALTER
-        (
-            KW_TABLE alterTableStatementSuffix
-        |
-            KW_VIEW alterViewStatementSuffix
-        |
-            KW_INDEX alterIndexStatementSuffix
-        |
-            KW_DATABASE alterDatabaseStatementSuffix
-        )
-    ;
-
-alterTableStatementSuffix
-@init { msgs.push("alter table statement"); }
-@after { msgs.pop(); }
-    : alterStatementSuffixRename
-    | alterStatementSuffixAddCol
-    | alterStatementSuffixRenameCol
-    | alterStatementSuffixDropPartitions
-    | alterStatementSuffixAddPartitions
-    | alterStatementSuffixTouch
-    | alterStatementSuffixArchive
-    | alterStatementSuffixUnArchive
-    | alterStatementSuffixProperties
-    | alterTblPartitionStatement
-    | alterStatementSuffixSkewedby
-    ;
-
-alterViewStatementSuffix
-@init { msgs.push("alter view statement"); }
-@after { msgs.pop(); }
-    : alterViewSuffixProperties
-    | alterStatementSuffixRename
-    | alterStatementSuffixAddPartitions
-    | alterStatementSuffixDropPartitions
-    | name=tableName KW_AS selectStatement
-    ;
-
-alterIndexStatementSuffix
-@init { msgs.push("alter index statement"); }
-@after { msgs.pop(); }
-    : indexName=identifier
-      (KW_ON tableNameId=identifier)
-      partitionSpec?
-    (
-      KW_REBUILD
-    |
-      KW_SET KW_IDXPROPERTIES
-      indexProperties
-    )
-    ;
-
-alterDatabaseStatementSuffix
-@init { msgs.push("alter database statement"); }
-@after { msgs.pop(); }
-    : alterDatabaseSuffixProperties
-    ;
-
-alterDatabaseSuffixProperties
-@init { msgs.push("alter database properties statement"); }
-@after { msgs.pop(); }
-    : name=identifier KW_SET KW_DBPROPERTIES dbProperties
-    ;
-
-alterStatementSuffixRename
-@init { msgs.push("rename statement"); }
-@after { msgs.pop(); }
-    : oldName=identifier KW_RENAME KW_TO newName=identifier
-    ;
-
-alterStatementSuffixAddCol
-@init { msgs.push("add column statement"); }
-@after { msgs.pop(); }
-    : identifier (add=KW_ADD | replace=KW_REPLACE) KW_COLUMNS LPAREN columnNameTypeList RPAREN
-    ;
-
-alterStatementSuffixRenameCol
-@init { msgs.push("rename column name"); }
-@after { msgs.pop(); }
-    : identifier KW_CHANGE KW_COLUMN? oldName=identifier newName=identifier colType (KW_COMMENT comment=StringLiteral)? alterStatementChangeColPosition?
-    ;
-
-alterStatementChangeColPosition
-    : first=KW_FIRST|KW_AFTER afterCol=identifier
-    ;
-
-alterStatementSuffixAddPartitions
-@init { msgs.push("add partition statement"); }
-@after { msgs.pop(); }
-    : identifier KW_ADD ifNotExists? partitionSpec partitionLocation? (partitionSpec partitionLocation?)*
-    ;
-
-alterStatementSuffixTouch
-@init { msgs.push("touch statement"); }
-@after { msgs.pop(); }
-    : identifier KW_TOUCH (partitionSpec)*
-    ;
-
-alterStatementSuffixArchive
-@init { msgs.push("archive statement"); }
-@after { msgs.pop(); }
-    : identifier KW_ARCHIVE (partitionSpec)*
-    ;
-
-alterStatementSuffixUnArchive
-@init { msgs.push("unarchive statement"); }
-@after { msgs.pop(); }
-    : identifier KW_UNARCHIVE (partitionSpec)*
-    ;
-
-partitionLocation
-@init { msgs.push("partition location"); }
-@after { msgs.pop(); }
-    :
-      KW_LOCATION locn=StringLiteral 
-    ;
-
-alterStatementSuffixDropPartitions
-@init { msgs.push("drop partition statement"); }
-@after { msgs.pop(); }
-    : identifier KW_DROP ifExists? dropPartitionSpec (COMMA dropPartitionSpec)* ignoreProtection?
-    ;
-
-alterStatementSuffixProperties
-@init { msgs.push("alter properties statement"); }
-@after { msgs.pop(); }
-    : name=identifier KW_SET KW_TBLPROPERTIES tableProperties
-    | name=identifier KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    ;
-
-alterViewSuffixProperties
-@init { msgs.push("alter view properties statement"); }
-@after { msgs.pop(); }
-    : name=identifier KW_SET KW_TBLPROPERTIES tableProperties
-    | name=identifier KW_UNSET KW_TBLPROPERTIES ifExists? tableProperties
-    ;
-
-alterStatementSuffixSerdeProperties
-@init { msgs.push("alter serdes statement"); }
-@after { msgs.pop(); }
-    : KW_SET KW_SERDE serdeName=StringLiteral (KW_WITH KW_SERDEPROPERTIES tableProperties)?
-    | KW_SET KW_SERDEPROPERTIES tableProperties
-    ;
-
-tablePartitionPrefix
-@init {msgs.push("table partition prefix");}
-@after {msgs.pop();}
-  :name=identifier partitionSpec?
-  ;
-
-alterTblPartitionStatement
-@init {msgs.push("alter table partition statement");}
-@after {msgs.pop();}
-  : tablePartitionPrefix alterTblPartitionStatementSuffix
-  |Identifier KW_PARTITION KW_COLUMN LPAREN columnNameType RPAREN
-  ;
-
-alterTblPartitionStatementSuffix
-@init {msgs.push("alter table partition statement suffix");}
-@after {msgs.pop();}
-  : alterStatementSuffixFileFormat
-  | alterStatementSuffixLocation
-  | alterStatementSuffixProtectMode
-  | alterStatementSuffixMergeFiles
-  | alterStatementSuffixSerdeProperties
-  | alterStatementSuffixRenamePart
-  | alterStatementSuffixBucketNum
-  | alterTblPartitionStatementSuffixSkewedLocation
-  | alterStatementSuffixClusterbySortby
-  ;
-
-alterStatementSuffixFileFormat
-@init {msgs.push("alter fileformat statement"); }
-@after {msgs.pop();}
-	: KW_SET KW_FILEFORMAT fileFormat
-	;
-
-alterStatementSuffixClusterbySortby
-@init {msgs.push("alter partition cluster by sort by statement");}
-@after {msgs.pop();}
-  : KW_NOT KW_CLUSTERED 
-  | KW_NOT KW_SORTED 
-  | tableBuckets 
-  ;
-
-alterTblPartitionStatementSuffixSkewedLocation
-@init {msgs.push("alter partition skewed location");}
-@after {msgs.pop();}
-  : KW_SET KW_SKEWED KW_LOCATION skewedLocations
-  ;
-  
-skewedLocations
-@init { msgs.push("skewed locations"); }
-@after { msgs.pop(); }
-    :
-      LPAREN skewedLocationsList RPAREN 
-    ;
-
-skewedLocationsList
-@init { msgs.push("skewed locations list"); }
-@after { msgs.pop(); }
-    :
-      skewedLocationMap (COMMA skewedLocationMap)* 
-    ;
-
-skewedLocationMap
-@init { msgs.push("specifying skewed location map"); }
-@after { msgs.pop(); }
-    :
-      key=skewedValueLocationElement EQUAL value=StringLiteral 
-    ;
-
-alterStatementSuffixLocation
-@init {msgs.push("alter location");}
-@after {msgs.pop();}
-  : KW_SET KW_LOCATION newLoc=StringLiteral
-  ;
-
-	
-alterStatementSuffixSkewedby
-@init {msgs.push("alter skewed by statement");}
-@after{msgs.pop();}
-	:name=identifier tableSkewed
-	|
-	name=identifier KW_NOT KW_SKEWED
-	|
-	name=identifier KW_NOT storedAsDirs
-	;
-
-alterStatementSuffixProtectMode
-@init { msgs.push("alter partition protect mode statement"); }
-@after { msgs.pop(); }
-    : alterProtectMode
-    ;
-
-alterStatementSuffixRenamePart
-@init { msgs.push("alter table rename partition statement"); }
-@after { msgs.pop(); }
-    : KW_RENAME KW_TO partitionSpec
-    ;
-
-alterStatementSuffixMergeFiles
-@init { msgs.push(""); }
-@after { msgs.pop(); }
-    : KW_CONCATENATE
-    ;
-
-alterProtectMode
-@init { msgs.push("protect mode specification enable"); }
-@after { msgs.pop(); }
-    : KW_ENABLE alterProtectModeMode  
-    | KW_DISABLE alterProtectModeMode  
-    ;
-
-alterProtectModeMode
-@init { msgs.push("protect mode specification enable"); }
-@after { msgs.pop(); }
-    : KW_OFFLINE  
-    | KW_NO_DROP KW_CASCADE? 
-    | KW_READONLY  
-    ;
-
-alterStatementSuffixBucketNum
-@init { msgs.push(""); }
-@after { msgs.pop(); }
-    : KW_INTO num=Number KW_BUCKETS
-    ;
-
-fileFormat
-@init { msgs.push("file format specification"); }
-@after { msgs.pop(); }
-    : KW_SEQUENCEFILE  
-    | KW_TEXTFILE  
-    | KW_RCFILE  
-    | KW_ORCFILE 
-    | KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
-    | genericSpec=identifier 
-    ;
-
-tabTypeExpr
-@init { msgs.push("specifying table types"); }
-@after { msgs.pop(); }
-
-   : 
-   identifier (DOT (KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier))*
-   ;
-
-descTabTypeExpr
-@init { msgs.push("specifying describe table types"); }
-@after { msgs.pop(); }
-
-   : 
-   identifier (DOT (KW_ELEM_TYPE | KW_KEY_TYPE | KW_VALUE_TYPE | identifier))* identifier?
-   ;
-
-partTypeExpr
-@init { msgs.push("specifying table partitions"); }
-@after { msgs.pop(); }
-    :  tabTypeExpr partitionSpec? 
-    ;
-
-descPartTypeExpr
-@init { msgs.push("specifying describe table partitions"); }
-@after { msgs.pop(); }
-    :  descTabTypeExpr partitionSpec? 
-    ;
-
-descStatement
-@init { msgs.push("describe statement"); }
-@after { msgs.pop(); }
-    : (KW_DESCRIBE|KW_DESC) (descOptions=KW_FORMATTED|descOptions=KW_EXTENDED|descOptions=KW_PRETTY)? (parttype=descPartTypeExpr) 
-    | (KW_DESCRIBE|KW_DESC) KW_FUNCTION KW_EXTENDED? (name=descFuncNames) 
-    | (KW_DESCRIBE|KW_DESC) KW_DATABASE KW_EXTENDED? (dbName=identifier) 
-    ;
-
-analyzeStatement
-@init { msgs.push("analyze statement"); }
-@after { msgs.pop(); }
-    : KW_ANALYZE KW_TABLE (parttype=tableOrPartition) KW_COMPUTE KW_STATISTICS ((noscan=KW_NOSCAN) | (partialscan=KW_PARTIALSCAN) | (KW_FOR KW_COLUMNS statsColumnName=columnNameList))? 
-    ;
-
-showStatement
-@init { msgs.push("show statement"); }
-@after { msgs.pop(); }
-    : KW_SHOW (KW_DATABASES|KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? 
-    | KW_SHOW KW_TABLES ((KW_FROM|KW_IN) db_name=identifier)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?  
-    | KW_SHOW KW_COLUMNS (KW_FROM|KW_IN) tabname=tableName ((KW_FROM|KW_IN) db_name=identifier)? 
-    | KW_SHOW KW_FUNCTIONS showStmtIdentifier?  
-    | KW_SHOW KW_PARTITIONS identifier partitionSpec? 
-    | KW_SHOW KW_CREATE KW_TABLE tabName=tableName 
-    | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=identifier)? KW_LIKE showStmtIdentifier partitionSpec?
-    | KW_SHOW KW_TBLPROPERTIES tblName=identifier (LPAREN prptyName=StringLiteral RPAREN)? 
-    | KW_SHOW KW_LOCKS (parttype=partTypeExpr)? (isExtended=KW_EXTENDED)? 
-    | KW_SHOW (showOptions=KW_FORMATTED)? (KW_INDEX|KW_INDEXES) KW_ON showStmtIdentifier ((KW_FROM|KW_IN) db_name=identifier)?
-    ;
-
-lockStatement
-@init { msgs.push("lock statement"); }
-@after { msgs.pop(); }
-    : KW_LOCK KW_TABLE tableName partitionSpec? lockMode 
-    ;
-
-lockMode
-@init { msgs.push("lock mode"); }
-@after { msgs.pop(); }
-    : KW_SHARED | KW_EXCLUSIVE
-    ;
-
-unlockStatement
-@init { msgs.push("unlock statement"); }
-@after { msgs.pop(); }
-    : KW_UNLOCK KW_TABLE tableName partitionSpec?  
-    ;
-
-createRoleStatement
-@init { msgs.push("create role"); }
-@after { msgs.pop(); }
-    : KW_CREATE KW_ROLE roleName=identifier
-    ;
-
-dropRoleStatement
-@init {msgs.push("drop role");}
-@after {msgs.pop();}
-    : KW_DROP KW_ROLE roleName=identifier
-    ;
-
-grantPrivileges
-@init {msgs.push("grant privileges");}
-@after {msgs.pop();}
-    : KW_GRANT privList=privilegeList
-      privilegeObject?
-      KW_TO principalSpecification
-      (KW_WITH withOption)?
-    ;
-
-revokePrivileges
-@init {msgs.push("revoke privileges");}
-@afer {msgs.pop();}
-    : KW_REVOKE privilegeList privilegeObject? KW_FROM principalSpecification
-    ;
-
-grantRole
-@init {msgs.push("grant role");}
-@after {msgs.pop();}
-    : KW_GRANT KW_ROLE identifier (COMMA identifier)* KW_TO principalSpecification
-    ;
-
-revokeRole
-@init {msgs.push("revoke role");}
-@after {msgs.pop();}
-    : KW_REVOKE KW_ROLE identifier (COMMA identifier)* KW_FROM principalSpecification
-    ;
-
-showRoleGrants
-@init {msgs.push("show role grants");}
-@after {msgs.pop();}
-    : KW_SHOW KW_ROLE KW_GRANT principalName
-    ;
-
-showGrants
-@init {msgs.push("show grants");}
-@after {msgs.pop();}
-    : KW_SHOW KW_GRANT principalName privilegeIncludeColObject?
-    ;
-
-privilegeIncludeColObject
-@init {msgs.push("privilege object including columns");}
-@after {msgs.pop();}
-    : KW_ON (table=KW_TABLE|KW_DATABASE) identifier (LPAREN cols=columnNameList RPAREN)? partitionSpec?
-    ;
-
-privilegeObject
-@init {msgs.push("privilege subject");}
-@after {msgs.pop();}
-    : KW_ON (table=KW_TABLE|KW_DATABASE) identifier partitionSpec?
-    ;
-
-privilegeList
-@init {msgs.push("grant privilege list");}
-@after {msgs.pop();}
-    : privlegeDef (COMMA privlegeDef)*
-    ;
-
-privlegeDef
-@init {msgs.push("grant privilege");}
-@after {msgs.pop();}
-    : privilegeType (LPAREN cols=columnNameList RPAREN)?
-    ;
-
-privilegeType
-@init {msgs.push("privilege type");}
-@after {msgs.pop();}
-    : KW_ALL 
-    | KW_ALTER 
-    | KW_UPDATE 
-    | KW_CREATE 
-    | KW_DROP 
-    | KW_INDEX 
-    | KW_LOCK 
-    | KW_SELECT 
-    | KW_SHOW_DATABASE 
-    ;
-
-principalSpecification
-@init { msgs.push("user/group/role name list"); }
-@after { msgs.pop(); }
-    : principalName (COMMA principalName)* 
-    ;
-
-principalName
-@init {msgs.push("user|group|role name");}
-@after {msgs.pop();}
-    : KW_USER identifier 
-    | KW_GROUP identifier 
-    | KW_ROLE identifier 
-    ;
-
-withOption
-@init {msgs.push("grant with option");}
-@after {msgs.pop();}
-    : KW_GRANT KW_OPTION
-    ;
-
-metastoreCheck
-@init { msgs.push("metastore check statement"); }
-@after { msgs.pop(); }
-    : KW_MSCK (repair=KW_REPAIR)? (KW_TABLE table=identifier partitionSpec? (COMMA partitionSpec)*)?
-    ;
-
-createFunctionStatement
-@init { msgs.push("create function statement"); }
-@after { msgs.pop(); }
-    : KW_CREATE KW_TEMPORARY KW_FUNCTION identifier KW_AS StringLiteral
-    ;
-
-dropFunctionStatement
-@init { msgs.push("drop temporary function statement"); }
-@after { msgs.pop(); }
-    : KW_DROP KW_TEMPORARY KW_FUNCTION ifExists? identifier
-    ;
-
-createViewStatement
-@init {
-    msgs.push("create view statement");
-}
-@after { msgs.pop(); }
-    : KW_CREATE (orReplace)? KW_VIEW (ifNotExists)? name=tableName
-        (LPAREN columnNameCommentList RPAREN)? tableComment? viewPartition?
-        tablePropertiesPrefixed?
-        KW_AS
-        selectStatement
-    ;
-
-viewPartition
-@init { msgs.push("view partition specification"); }
-@after { msgs.pop(); }
-    : KW_PARTITIONED KW_ON LPAREN columnNameList RPAREN
-    ;
-
-dropViewStatement
-@init { msgs.push("drop view statement"); }
-@after { msgs.pop(); }
-    : KW_DROP KW_VIEW ifExists? viewName 
-    ;
-
-showStmtIdentifier
-@init { msgs.push("identifier for show statement"); }
-@after { msgs.pop(); }
-    : identifier
-    | StringLiteral
-    ;
-
-tableComment
-@init { msgs.push("table's comment"); }
-@after { msgs.pop(); }
-    :
-      KW_COMMENT comment=StringLiteral  
-    ;
-
-tablePartition
-@init { msgs.push("table partition specification"); }
-@after { msgs.pop(); }
-    : KW_PARTITIONED KW_BY LPAREN columnNameTypeList RPAREN
-    ;
-
-tableBuckets
-@init { msgs.push("table buckets specification"); }
-@after { msgs.pop(); }
-    :
-      KW_CLUSTERED KW_BY LPAREN bucketCols=columnNameList RPAREN (KW_SORTED KW_BY LPAREN sortCols=columnNameOrderList RPAREN)? KW_INTO num=Number KW_BUCKETS
-    ;
-
-tableSkewed
-@init { msgs.push("table skewed specification"); }
-@after { msgs.pop(); }
-    :
-     KW_SKEWED KW_BY LPAREN skewedCols=columnNameList RPAREN KW_ON LPAREN (skewedValues=skewedValueElement) RPAREN (storedAsDirs)?
-    ;
-
-rowFormat
-@init { msgs.push("serde specification"); }
-@after { msgs.pop(); }
-    : rowFormatSerde 
-    | rowFormatDelimited 
-    ;
-
-recordReader
-@init { msgs.push("record reader specification"); }
-@after { msgs.pop(); }
-    : KW_RECORDREADER StringLiteral 
-    ;
-
-recordWriter
-@init { msgs.push("record writer specification"); }
-@after { msgs.pop(); }
-    : KW_RECORDWRITER StringLiteral 
-    ;
-
-rowFormatSerde
-@init { msgs.push("serde format specification"); }
-@after { msgs.pop(); }
-    : KW_ROW KW_FORMAT KW_SERDE name=StringLiteral (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
-    ;
-
-rowFormatDelimited
-@init { msgs.push("serde properties specification"); }
-@after { msgs.pop(); }
-    :
-      KW_ROW KW_FORMAT KW_DELIMITED tableRowFormatFieldIdentifier? tableRowFormatCollItemsIdentifier? tableRowFormatMapKeysIdentifier? tableRowFormatLinesIdentifier?
-    ;
-
-tableRowFormat
-@init { msgs.push("table row format specification"); }
-@after { msgs.pop(); }
-    :
-      rowFormatDelimited
-    | rowFormatSerde
-    ;
-
-tablePropertiesPrefixed
-@init { msgs.push("table properties with prefix"); }
-@after { msgs.pop(); }
-    :
-        KW_TBLPROPERTIES tableProperties
-    ;
-
-tableProperties
-@init { msgs.push("table properties"); }
-@after { msgs.pop(); }
-    :
-      LPAREN tablePropertiesList RPAREN 
-    ;
-
-tablePropertiesList
-@init { msgs.push("table properties list"); }
-@after { msgs.pop(); }
-    :
-      keyValueProperty (COMMA keyValueProperty)* 
-    |
-      keyProperty (COMMA keyProperty)* 
-    ;
-
-keyValueProperty
-@init { msgs.push("specifying key/value property"); }
-@after { msgs.pop(); }
-    :
-      key=StringLiteral EQUAL value=StringLiteral 
-    ;
-
-keyProperty
-@init { msgs.push("specifying key property"); }
-@after { msgs.pop(); }
-    :
-      key=StringLiteral 
-    ;
-
-tableRowFormatFieldIdentifier
-@init { msgs.push("table row format's field separator"); }
-@after { msgs.pop(); }
-    :
-      KW_FIELDS KW_TERMINATED KW_BY fldIdnt=StringLiteral (KW_ESCAPED KW_BY fldEscape=StringLiteral)?
-    ;
-
-tableRowFormatCollItemsIdentifier
-@init { msgs.push("table row format's column separator"); }
-@after { msgs.pop(); }
-    :
-      KW_COLLECTION KW_ITEMS KW_TERMINATED KW_BY collIdnt=StringLiteral
-    ;
-
-tableRowFormatMapKeysIdentifier
-@init { msgs.push("table row format's map key separator"); }
-@after { msgs.pop(); }
-    :
-      KW_MAP KW_KEYS KW_TERMINATED KW_BY mapKeysIdnt=StringLiteral
-    ;
-
-tableRowFormatLinesIdentifier
-@init { msgs.push("table row format's line separator"); }
-@after { msgs.pop(); }
-    :
-      KW_LINES KW_TERMINATED KW_BY linesIdnt=StringLiteral
-    ;
-
-tableFileFormat
-@init { msgs.push("table file format specification"); }
-@after { msgs.pop(); }
-    :
-      KW_STORED KW_AS KW_SEQUENCEFILE  
-      | KW_STORED KW_AS KW_TEXTFILE  
-      | KW_STORED KW_AS KW_RCFILE  
-      | KW_STORED KW_AS KW_ORCFILE 
-      | KW_STORED KW_AS KW_INPUTFORMAT inFmt=StringLiteral KW_OUTPUTFORMAT outFmt=StringLiteral (KW_INPUTDRIVER inDriver=StringLiteral KW_OUTPUTDRIVER outDriver=StringLiteral)?
-      | KW_STORED KW_BY storageHandler=StringLiteral
-         (KW_WITH KW_SERDEPROPERTIES serdeprops=tableProperties)?
-      | KW_STORED KW_AS genericSpec=identifier
-    ;
-
-tableLocation
-@init { msgs.push("table location specification"); }
-@after { msgs.pop(); }
-    :
-      KW_LOCATION locn=StringLiteral 
-    ;
-
-columnNameTypeList
-@init { msgs.push("column name type list"); }
-@after { msgs.pop(); }
-    : columnNameType (COMMA columnNameType)* 
-    ;
-
-columnNameColonTypeList
-@init { msgs.push("column name type list"); }
-@after { msgs.pop(); }
-    : columnNameColonType (COMMA columnNameColonType)* 
-    ;
-
-columnNameList
-@init { msgs.push("column name list"); }
-@after { msgs.pop(); }
-    : columnName (COMMA columnName)* 
-    ;
-
-columnName
-@init { msgs.push("column name"); }
-@after { msgs.pop(); }
-    :
-      identifier
-    ;
-
-columnNameOrderList
-@init { msgs.push("column name order list"); }
-@after { msgs.pop(); }
-    : columnNameOrder (COMMA columnNameOrder)* 
-    ;
-
-skewedValueElement
-@init { msgs.push("skewed value element"); }
-@after { msgs.pop(); }
-    : 
-      skewedColumnValues
-     | skewedColumnValuePairList
-    ;
-
-skewedColumnValuePairList
-@init { msgs.push("column value pair list"); }
-@after { msgs.pop(); }
-    : skewedColumnValuePair (COMMA skewedColumnValuePair)* 
-    ;
-
-skewedColumnValuePair
-@init { msgs.push("column value pair"); }
-@after { msgs.pop(); }
-    : 
-      LPAREN colValues=skewedColumnValues RPAREN 
-    ;
-
-skewedColumnValues
-@init { msgs.push("column values"); }
-@after { msgs.pop(); }
-    : skewedColumnValue (COMMA skewedColumnValue)* 
-    ;
-
-skewedColumnValue
-@init { msgs.push("column value"); }
-@after { msgs.pop(); }
-    :
-      constant
-    ;
-
-skewedValueLocationElement
-@init { msgs.push("skewed value location element"); }
-@after { msgs.pop(); }
-    : 
-      skewedColumnValue
-     | skewedColumnValuePair
-    ;
-    
-columnNameOrder
-@init { msgs.push("column name order"); }
-@after { msgs.pop(); }
-    : identifier (asc=KW_ASC | desc=KW_DESC)?
-    ;
-
-columnNameCommentList
-@init { msgs.push("column name comment list"); }
-@after { msgs.pop(); }
-    : columnNameComment (COMMA columnNameComment)* 
-    ;
-
-columnNameComment
-@init { msgs.push("column name comment"); }
-@after { msgs.pop(); }
-    : colName=identifier (KW_COMMENT comment=StringLiteral)?
-    ;
-
-columnRefOrder
-@init { msgs.push("column order"); }
-@after { msgs.pop(); }
-    : expression (asc=KW_ASC | desc=KW_DESC)?
-    ;
-
-columnNameType
-@init { msgs.push("column specification"); }
-@after { msgs.pop(); }
-    : colName=identifier colType (KW_COMMENT comment=StringLiteral)?
-    ;
-
-columnNameColonType
-@init { msgs.push("column specification"); }
-@after { msgs.pop(); }
-    : colName=identifier COLON colType (KW_COMMENT comment=StringLiteral)?
-    ;
-
-colType
-@init { msgs.push("column type"); }
-@after { msgs.pop(); }
-    : type
-    ;
-
-colTypeList
-@init { msgs.push("column type list"); }
-@after { msgs.pop(); }
-    : colType (COMMA colType)* 
-    ;
-
-type
-    : primitiveType
-    | listType
-    | structType
-    | mapType
-    | unionType;
-
-primitiveType
-@init { msgs.push("primitive type specification"); }
-@after { msgs.pop(); }
-    : KW_TINYINT       
-    | KW_SMALLINT      
-    | KW_INT           
-    | KW_BIGINT        
-    | KW_BOOLEAN       
-    | KW_FLOAT         
-    | KW_DOUBLE        
-    | KW_DATE          
-    | KW_DATETIME      
-    | KW_TIMESTAMP     
-    | KW_STRING        
-    | KW_BINARY        
-    | KW_DECIMAL       
-    ;
-
-listType
-@init { msgs.push("list type"); }
-@after { msgs.pop(); }
-    : KW_ARRAY LESSTHAN type GREATERTHAN   
-    ;
-
-structType
-@init { msgs.push("struct type"); }
-@after { msgs.pop(); }
-    : KW_STRUCT LESSTHAN columnNameColonTypeList GREATERTHAN 
-    ;
-
-mapType
-@init { msgs.push("map type"); }
-@after { msgs.pop(); }
-    : KW_MAP LESSTHAN left=primitiveType COMMA right=type GREATERTHAN
-    ;
-
-unionType
-@init { msgs.push("uniontype type"); }
-@after { msgs.pop(); }
-    : KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN 
-    ;
-
-queryOperator
-@init { msgs.push("query operator"); }
-@after { msgs.pop(); }
-    : KW_UNION KW_ALL 
-    ;
-
-// select statement select ... from ... where ... group by ... order by ...
-queryStatementExpression
-    : 
-    queryStatement (queryOperator queryStatement)*
-    ;
-
-queryStatement
-    :
-    fromClause
-    ( b+=body )+ 
-    | regular_body
-    ;
-
-regular_body
-   :
-   insertClause
-   selectClause
-   fromClause
-   whereClause?
-   groupByClause?
-   havingClause?
-   orderByClause?
-   clusterByClause?
-   distributeByClause?
-   sortByClause?
-   window_clause?
-   limitClause? 
-   |
-   selectStatement
-   ;
-
-selectStatement
-   :
-   selectClause
-   fromClause
-   whereClause?
-   groupByClause?
-   havingClause?
-   orderByClause?
-   clusterByClause?
-   distributeByClause?
-   sortByClause?
-   window_clause?
-   limitClause? 
-   ;
-
-
-body
-   :
-   insertClause
-   selectClause
-   whereClause?
-   groupByClause?
-   havingClause?
-   orderByClause?
-   clusterByClause?
-   distributeByClause?
-   sortByClause?
-   window_clause?
-   limitClause? 
-   |
-   selectClause
-   whereClause?
-   groupByClause?
-   havingClause?
-   orderByClause?
-   clusterByClause?
-   distributeByClause?
-   sortByClause?
-   window_clause?
-   limitClause? 
-   ;
-
-insertClause
-@init { msgs.push("insert clause"); }
-@after { msgs.pop(); }
-   :
-     KW_INSERT KW_OVERWRITE destination ifNotExists? 
-   | KW_INSERT KW_INTO KW_TABLE tableOrPartition
-   ;
-
-destination
-@init { msgs.push("destination specification"); }
-@after { msgs.pop(); }
-   :
-     KW_LOCAL KW_DIRECTORY StringLiteral tableRowFormat? tableFileFormat? 
-   | KW_DIRECTORY StringLiteral 
-   | KW_TABLE tableOrPartition 
-   ;
-
-limitClause
-@init { msgs.push("limit clause"); }
-@after { msgs.pop(); }
-   :
-   KW_LIMIT num=Number 
-   ;
-
-    
\ No newline at end of file
diff --git a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLLexer.g4 b/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLLexer.g4
index ea5a7a3..4bdbc3d 100644
--- a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLLexer.g4
+++ b/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLLexer.g4
@@ -128,6 +128,10 @@
 CAST : C A S T;
 CREATE : C R E A T E;
 CROSS : C R O S S;
+CURRENT_DATE: C U R R E N T UNDERLINE D A T E;
+CURRENT_TIME: C U R R E N T UNDERLINE T I M E;
+CURRENT_TIMESTAMP: C U R R E N T UNDERLINE T I M E S T A M P;
+
 
 DESC : D E S C;
 DISTINCT : D I S T I N C T;
@@ -299,6 +303,7 @@
 TIMEZONE_MINUTE: T I M E Z O N E UNDERLINE M I N U T E;
 TRIM : T R I M;
 TO : T O;
+TRUNCATE : T R U N C A T E;
 
 UNBOUNDED : U N B O U N D E D;
 UNKNOWN : U N K N O W N;
diff --git a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLParser.g4 b/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLParser.g4
index 5060dbc..beba248 100644
--- a/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLParser.g4
+++ b/tajo-core/src/main/antlr4/org/apache/tajo/engine/parser/SQLParser.g4
@@ -64,6 +64,7 @@
   | drop_table_statement
   | alter_tablespace_statement
   | alter_table_statement
+  | truncate_table_statement
   ;
 
 index_statement
@@ -94,6 +95,7 @@
     (param_clause)? (table_partitioning_clauses)? (AS query_expression)?
   | CREATE TABLE (if_not_exists)? table_name (USING file_type=identifier)?
     (param_clause)? (table_partitioning_clauses)? AS query_expression
+  | CREATE TABLE (if_not_exists)? table_name LIKE like_table_name=table_name
   ;
 
 table_elements
@@ -185,6 +187,10 @@
   : identifier
   ;
 
+truncate_table_statement
+  : TRUNCATE (TABLE)? table_name (COMMA table_name)*
+  ;
+
 /*
 ===============================================================================
   11.21 <data types>
@@ -694,6 +700,7 @@
 common_value_expression
   : numeric_value_expression
   | string_value_expression
+  | datetime_value_expression
   | NULL
   ;
 
@@ -755,8 +762,7 @@
   ;
 
 extract_source
-  : column_reference
-  | datetime_literal
+  : datetime_value_expression
   ;
 
 /*
@@ -807,6 +813,53 @@
 
 /*
 ===============================================================================
+  6.30 <datetime_value_expression>
+===============================================================================
+*/
+datetime_value_expression
+  : datetime_term
+  ;
+datetime_term
+  : datetime_factor
+  ;
+
+datetime_factor
+  : datetime_primary
+  ;
+
+datetime_primary
+  : value_expression_primary
+  | datetime_value_function
+  ;
+
+/*
+===============================================================================
+  6.31 <datetime_value_function>
+===============================================================================
+*/
+
+datetime_value_function
+  : current_date_value_function
+  | current_time_value_function
+  | current_timestamp_value_function
+  ;
+
+current_date_value_function
+  : CURRENT_DATE
+  | CURRENT_DATE LEFT_PAREN RIGHT_PAREN
+  ;
+
+current_time_value_function
+  : CURRENT_TIME
+  | CURRENT_TIME LEFT_PAREN RIGHT_PAREN
+  ;
+
+current_timestamp_value_function
+  : CURRENT_TIMESTAMP
+  ;
+
+/*
+===============================================================================
   6.34 <boolean value expression>
 ===============================================================================
 */
@@ -846,7 +899,7 @@
   ;
 
 boolean_predicand
-  : parenthesized_boolean_value_expression 
+  : parenthesized_boolean_value_expression
   | nonparenthesized_value_expression_primary
   ;
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/eval/AlgebraicUtil.java b/tajo-core/src/main/java/org/apache/tajo/engine/eval/AlgebraicUtil.java
index 1cb37db..d993b27 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/eval/AlgebraicUtil.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/eval/AlgebraicUtil.java
@@ -171,8 +171,13 @@
     public EvalNode visitFuncCall(Object context, GeneralFunctionEval evalNode, Stack<EvalNode> stack) {
       boolean constant = true;
 
-      for (EvalNode arg : evalNode.getArgs()) {
-        constant &= (arg.getType() == EvalType.CONST);
+      if ("sleep".equals(evalNode.funcDesc.getSignature())) {
+        constant = false;
+      } else {
+        for (EvalNode arg : evalNode.getArgs()) {
+          arg = visit(context, arg, stack);
+          constant &= (arg.getType() == EvalType.CONST);
+        }
       }
 
       if (constant) {
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/eval/CaseWhenEval.java b/tajo-core/src/main/java/org/apache/tajo/engine/eval/CaseWhenEval.java
index 6b330f5..cf1acdf 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/eval/CaseWhenEval.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/eval/CaseWhenEval.java
@@ -24,6 +24,7 @@
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.common.TajoDataTypes.DataType;
+import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.datum.Datum;
 import org.apache.tajo.datum.NullDatum;
 import org.apache.tajo.engine.json.CoreGsonHelper;
@@ -64,7 +65,18 @@
 
   @Override
   public DataType getValueType() {
-    return whens.get(0).getResult().getValueType();
+    // Find not null type
+    for (IfThenEval eachWhen: whens) {
+      if (eachWhen.getResult().getValueType().getType() != Type.NULL_TYPE) {
+        return eachWhen.getResult().getValueType();
+      }
+    }
+
+    if (elseResult != null) { // without else clause
+      return elseResult.getValueType();
+    }
+
+    return NullDatum.getDataType();
   }
 
   @Override
@@ -228,8 +240,8 @@
     @Override
     public Object clone() throws CloneNotSupportedException {
       IfThenEval ifThenEval = (IfThenEval) super.clone();
-      ifThenEval.condition = condition;
-      ifThenEval.result = result;
+      ifThenEval.condition = (EvalNode)condition.clone();
+      ifThenEval.result = (EvalNode)result.clone();
       return ifThenEval;
     }
   }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/eval/EvalTreeUtil.java b/tajo-core/src/main/java/org/apache/tajo/engine/eval/EvalTreeUtil.java
index 8982bd5..3921a7d 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/eval/EvalTreeUtil.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/eval/EvalTreeUtil.java
@@ -248,14 +248,17 @@
       }
 
       BinaryEval binaryEval = (BinaryEval) expr;
-      boolean isBothTermFields =
-          binaryEval.getLeftExpr().getType() == EvalType.FIELD &&
-          binaryEval.getRightExpr().getType() == EvalType.FIELD;
+      boolean isBothTermFields = isSingleColumn(binaryEval.getLeftExpr()) && isSingleColumn(binaryEval.getRightExpr());
+
       return joinComparator && isBothTermFields;
     } else {
       return false;
     }
   }
+
+  static boolean isSingleColumn(EvalNode evalNode) {
+    return EvalTreeUtil.findUniqueColumns(evalNode).size() == 1;
+  }
   
   public static class ChangeColumnRefVisitor implements EvalNodeVisitor {    
     private final String findColumn;
@@ -364,6 +367,12 @@
     return (Collection<T>) finder.evalNodes;
   }
 
+  public static <T extends EvalNode> Collection<T> findOuterJoinSensitiveEvals(EvalNode evalNode) {
+    OuterJoinSensitiveEvalFinder finder = new OuterJoinSensitiveEvalFinder();
+    finder.visitChild(null, evalNode, new Stack<EvalNode>());
+    return (Collection<T>) finder.evalNodes;
+  }
+
   public static class EvalFinder extends BasicEvalNodeVisitor<Object, Object> {
     private EvalType targetType;
     List<EvalNode> evalNodes = TUtil.newList();
@@ -384,6 +393,28 @@
     }
   }
 
+  public static class OuterJoinSensitiveEvalFinder extends BasicEvalNodeVisitor<Object, Object> {
+    private List<EvalNode> evalNodes = TUtil.newList();
+
+    @Override
+    public Object visitChild(Object context, EvalNode evalNode, Stack<EvalNode> stack) {
+      super.visitChild(context, evalNode, stack);
+
+      if (evalNode.type == EvalType.CASE) {
+        evalNodes.add(evalNode);
+      } else if (evalNode.type == EvalType.FUNCTION) {
+        FunctionEval functionEval = (FunctionEval)evalNode;
+        if ("coalesce".equals(functionEval.getName())) {
+          evalNodes.add(evalNode);
+        }
+      } else if (evalNode.type == EvalType.IS_NULL) {
+        evalNodes.add(evalNode);
+      }
+
+      return evalNode;
+    }
+  }
+
   public static boolean checkIfCanBeConstant(EvalNode evalNode) {
     return findUniqueColumns(evalNode).size() == 0 && findDistinctAggFunction(evalNode).size() == 0;
   }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/eval/FieldEval.java b/tajo-core/src/main/java/org/apache/tajo/engine/eval/FieldEval.java
index ea2b031..20af854 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/eval/FieldEval.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/eval/FieldEval.java
@@ -42,7 +42,12 @@
 	@Override
 	public Datum eval(Schema schema, Tuple tuple) {
 	  if (fieldId == -1) {
-	    fieldId = schema.getColumnId(column.getQualifiedName());
+      // TODO - column namespace should be improved to simplify name handling and resolving.
+      if (column.hasQualifier()) {
+        fieldId = schema.getColumnId(column.getQualifiedName());
+      } else {
+        fieldId = schema.getColumnIdByName(column.getSimpleName());
+      }
       if (fieldId == -1) {
         throw new IllegalStateException("No Such Column Reference: " + column + ", schema: " + schema);
       }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgDouble.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgDouble.java
index df5cc80..493c098 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgDouble.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgDouble.java
@@ -24,6 +24,7 @@
 import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.datum.Datum;
 import org.apache.tajo.datum.DatumFactory;
+import org.apache.tajo.datum.NullDatum;
 import org.apache.tajo.datum.ProtobufDatum;
 import org.apache.tajo.engine.function.AggFunction;
 import org.apache.tajo.engine.function.FunctionContext;
@@ -64,7 +65,11 @@
   @Override
   public void merge(FunctionContext ctx, Tuple part) {
     AvgContext avgCtx = (AvgContext) ctx;
-    ProtobufDatum datum = (ProtobufDatum) part.get(0);
+    Datum d = part.get(0);
+    if (d instanceof NullDatum) {
+      return;
+    }
+    ProtobufDatum datum = (ProtobufDatum) d;
     AvgDoubleProto proto = (AvgDoubleProto) datum.get();
     avgCtx.sum += proto.getSum();
     avgCtx.count += proto.getCount();
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgLong.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgLong.java
index 5bb5ff9..4794f4d 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgLong.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/AvgLong.java
@@ -22,10 +22,7 @@
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.common.TajoDataTypes.DataType;
 import org.apache.tajo.common.TajoDataTypes.Type;
-import org.apache.tajo.datum.Datum;
-import org.apache.tajo.datum.DatumFactory;
-import org.apache.tajo.datum.Float8Datum;
-import org.apache.tajo.datum.ProtobufDatum;
+import org.apache.tajo.datum.*;
 import org.apache.tajo.engine.function.AggFunction;
 import org.apache.tajo.engine.function.FunctionContext;
 import org.apache.tajo.engine.function.annotation.Description;
@@ -63,7 +60,11 @@
   @Override
   public void merge(FunctionContext ctx, Tuple part) {
     AvgContext avgCtx = (AvgContext) ctx;
-    ProtobufDatum datum = (ProtobufDatum) part.get(0);
+    Datum d = part.get(0);
+    if (d instanceof NullDatum) {
+      return;
+    }
+    ProtobufDatum datum = (ProtobufDatum) d;
     AvgLongProto proto = (AvgLongProto) datum.get();
     avgCtx.sum += proto.getSum();
     avgCtx.count += proto.getCount();
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceBoolean.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceBoolean.java
new file mode 100644
index 0000000..8c714c5
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceBoolean.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.function.builtin;
+
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.engine.function.annotation.Description;
+import org.apache.tajo.engine.function.annotation.ParamTypes;
+
+@Description(
+    functionName = "coalesce",
+    description = "Returns the first of its arguments that is not null.",
+    detail = "Like a CASE expression, COALESCE only evaluates the arguments that are needed to determine the result; " +
+        "that is, arguments to the right of the first non-null argument are not evaluated",
+    example = "> SELECT coalesce(null, null, true);\n"
+        + "true",
+    returnType = Type.BOOLEAN,
+    paramTypes = {@ParamTypes(paramTypes = {Type.BOOLEAN, TajoDataTypes.Type.BOOLEAN_ARRAY})}
+)
+public class CoalesceBoolean extends Coalesce {
+  public CoalesceBoolean() {
+    super(new Column[] {
+        new Column("column", TajoDataTypes.Type.BOOLEAN),
+        new Column("params", TajoDataTypes.Type.BOOLEAN_ARRAY),
+    });
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceDate.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceDate.java
new file mode 100644
index 0000000..23f8f0c
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceDate.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.function.builtin;
+
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.engine.function.annotation.Description;
+import org.apache.tajo.engine.function.annotation.ParamTypes;
+
+@Description(
+    functionName = "coalesce",
+    description = "Returns the first of its arguments that is not null.",
+    detail = "Like a CASE expression, COALESCE only evaluates the arguments that are needed to determine the result; " +
+        "that is, arguments to the right of the first non-null argument are not evaluated",
+    example = "> SELECT coalesce(null, null, date '2014-01-01');\n"
+        + "2014-01-01",
+    returnType = Type.DATE,
+    paramTypes = {@ParamTypes(paramTypes = {Type.DATE, Type.DATE_ARRAY})}
+)
+public class CoalesceDate extends Coalesce {
+  public CoalesceDate() {
+    super(new Column[] {
+        new Column("column", TajoDataTypes.Type.DATE),
+        new Column("params", TajoDataTypes.Type.DATE_ARRAY),
+    });
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceTime.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceTime.java
new file mode 100644
index 0000000..01bb6de
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceTime.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.function.builtin;
+
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.engine.function.annotation.Description;
+import org.apache.tajo.engine.function.annotation.ParamTypes;
+
+@Description(
+    functionName = "coalesce",
+    description = "Returns the first of its arguments that is not null.",
+    detail = "Like a CASE expression, COALESCE only evaluates the arguments that are needed to determine the result; " +
+        "that is, arguments to the right of the first non-null argument are not evaluated",
+    example = "> SELECT coalesce(null, null, time '12:10:00');\n"
+        + "12:10:00",
+    returnType = Type.TIME,
+    paramTypes = {@ParamTypes(paramTypes = {Type.TIME, Type.TIME_ARRAY})}
+)
+public class CoalesceTime extends Coalesce {
+  public CoalesceTime() {
+    super(new Column[] {
+        new Column("column", TajoDataTypes.Type.TIME),
+        new Column("params", TajoDataTypes.Type.TIME_ARRAY),
+    });
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceTimestamp.java
new file mode 100644
index 0000000..2609717
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/CoalesceTimestamp.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.function.builtin;
+
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.engine.function.annotation.Description;
+import org.apache.tajo.engine.function.annotation.ParamTypes;
+
+@Description(
+    functionName = "coalesce",
+    description = "Returns the first of its arguments that is not null.",
+    detail = "Like a CASE expression, COALESCE only evaluates the arguments that are needed to determine the result; " +
+        "that is, arguments to the right of the first non-null argument are not evaluated",
+    example = "> SELECT coalesce(null, null, timestamp '2014-01-01');\n"
+        + "2014-01-01 00:00:00",
+    returnType = Type.TIMESTAMP,
+    paramTypes = {@ParamTypes(paramTypes = {Type.TIMESTAMP, Type.TIMESTAMP_ARRAY})}
+)
+public class CoalesceTimestamp extends Coalesce {
+  public CoalesceTimestamp() {
+    super(new Column[] {
+        new Column("column", TajoDataTypes.Type.TIMESTAMP),
+        new Column("params", TajoDataTypes.Type.TIMESTAMP_ARRAY),
+    });
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/SumInt.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/SumInt.java
index fff3a23..ce98128 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/SumInt.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/SumInt.java
@@ -39,7 +39,7 @@
   functionName = "sum",
   description = "the sum of a set of numbers",
   example = "> SELECT sum(expr);",
-  returnType = Type.INT4,
+  returnType = Type.INT8,
   paramTypes = {@ParamTypes(paramTypes = {Type.INT4})}
 )
 public class SumInt extends AggFunction<Datum> {
@@ -63,20 +63,20 @@
 
   @Override
   public Datum getPartialResult(FunctionContext ctx) {
-    return DatumFactory.createInt4(((SumIntContext) ctx).sum);
+    return DatumFactory.createInt8(((SumIntContext) ctx).sum);
   }
 
   @Override
   public DataType getPartialResultType() {
-    return CatalogUtil.newSimpleDataType(Type.INT4);
+    return CatalogUtil.newSimpleDataType(Type.INT8);
   }
 
   @Override
   public Datum terminate(FunctionContext ctx) {
-    return DatumFactory.createInt4(((SumIntContext) ctx).sum);
+    return DatumFactory.createInt8(((SumIntContext) ctx).sum);
   }
 
   private class SumIntContext implements FunctionContext {
-    int sum;
+    long sum;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/CurrentDate.java
similarity index 60%
copy from tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java
copy to tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/CurrentDate.java
index 1cf6870..2b3fcdf 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/CurrentDate.java
@@ -18,38 +18,40 @@
 
 package org.apache.tajo.engine.function.datetime;
 
-import org.apache.tajo.catalog.Column;
 import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.datum.DateDatum;
 import org.apache.tajo.datum.Datum;
 import org.apache.tajo.datum.DatumFactory;
-import org.apache.tajo.datum.NullDatum;
 import org.apache.tajo.engine.function.GeneralFunction;
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
-
-import static org.apache.tajo.common.TajoDataTypes.Type.INT4;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 @Description(
-  functionName = "to_timestamp",
-  description = "Convert UNIX epoch to time stamp",
-  example = "> SELECT to_timestamp(1389071574);\n"
-        + "2014-01-07 14:12:54",
-  returnType = TajoDataTypes.Type.TIMESTAMP,
-  paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.INT4}),
-      @ParamTypes(paramTypes = {TajoDataTypes.Type.INT8})}
+    functionName = "current_date",
+    description = "Get current date. Result is DATE type.",
+    example = "> SELECT current_date();\n2014-04-18",
+    returnType = TajoDataTypes.Type.DATE,
+    paramTypes = {@ParamTypes(paramTypes = {})}
 )
-public class ToTimestamp extends GeneralFunction {
-  public ToTimestamp() {
-    super(new Column[] {new Column("timestamp", INT4)});
+public class CurrentDate extends GeneralFunction {
+  DateDatum datum;
+
+  public CurrentDate() {
+    super(NoArgs);
   }
 
   @Override
   public Datum eval(Tuple params) {
-    Datum value = params.get(0);
-    if (value instanceof NullDatum) {
-      return NullDatum.get();
+    if (datum == null) {
+      long julianTimestamp = DateTimeUtil.javaTimeToJulianTime(System.currentTimeMillis());
+      TimeMeta tm = new TimeMeta();
+      DateTimeUtil.toJulianTimeMeta(julianTimestamp, tm);
+      DateTimeUtil.toUserTimezone(tm);
+      datum = DatumFactory.createDate(tm.years, tm.monthOfYear, tm.dayOfMonth);
     }
-    return DatumFactory.createTimeStamp(value.asInt4());
+    return datum;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/CurrentTime.java
similarity index 62%
copy from tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java
copy to tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/CurrentTime.java
index 1cf6870..1efdfa9 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/CurrentTime.java
@@ -18,38 +18,39 @@
 
 package org.apache.tajo.engine.function.datetime;
 
-import org.apache.tajo.catalog.Column;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.datum.Datum;
 import org.apache.tajo.datum.DatumFactory;
-import org.apache.tajo.datum.NullDatum;
+import org.apache.tajo.datum.TimeDatum;
 import org.apache.tajo.engine.function.GeneralFunction;
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
-
-import static org.apache.tajo.common.TajoDataTypes.Type.INT4;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 @Description(
-  functionName = "to_timestamp",
-  description = "Convert UNIX epoch to time stamp",
-  example = "> SELECT to_timestamp(1389071574);\n"
-        + "2014-01-07 14:12:54",
-  returnType = TajoDataTypes.Type.TIMESTAMP,
-  paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.INT4}),
-      @ParamTypes(paramTypes = {TajoDataTypes.Type.INT8})}
+    functionName = "current_time",
+    description = "Get current time. Result is TIME type.",
+    example = "> SELECT current_time();\n12:30:40",
+    returnType = TajoDataTypes.Type.TIME,
+    paramTypes = {@ParamTypes(paramTypes = {})}
 )
-public class ToTimestamp extends GeneralFunction {
-  public ToTimestamp() {
-    super(new Column[] {new Column("timestamp", INT4)});
+public class CurrentTime extends GeneralFunction {
+  TimeDatum datum;
+
+  public CurrentTime() {
+    super(NoArgs);
   }
 
   @Override
   public Datum eval(Tuple params) {
-    Datum value = params.get(0);
-    if (value instanceof NullDatum) {
-      return NullDatum.get();
+    if (datum == null) {
+      long julianTimestamp = DateTimeUtil.javaTimeToJulianTime(System.currentTimeMillis());
+      TimeMeta tm = new TimeMeta();
+      DateTimeUtil.toJulianTimeMeta(julianTimestamp, tm);
+      datum = DatumFactory.createTime(DateTimeUtil.toTime(tm));
     }
-    return DatumFactory.createTimeStamp(value.asInt4());
+    return datum;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromDate.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromDate.java
index a010a7d..288fbe1 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromDate.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromDate.java
@@ -49,12 +49,12 @@
   @Override
   public Datum eval(Tuple params) {
     Datum target = params.get(0);
-    DateDatum date = null;
 
     if(target instanceof NullDatum || params.get(1) instanceof NullDatum) {
       return NullDatum.get();
     }
 
+    DateDatum date;
     if(params.get(1) instanceof DateDatum) {
       date = (DateDatum)(params.get(1));
     } else {
@@ -124,8 +124,7 @@
   private class DowExtractorFromDate implements DatePartExtractorFromDate {
     @Override
     public Datum extract(DateDatum date) {
-      Integer tdow = date.getDayOfWeek();
-      return DatumFactory.createFloat8((double) ((tdow == 7) ? 0 : tdow));
+      return DatumFactory.createFloat8((double) date.getDayOfWeek());
     }
   }
 
@@ -139,7 +138,7 @@
   private class ISODowExtractorFromDate implements DatePartExtractorFromDate {
     @Override
     public Datum extract(DateDatum date) {
-      return DatumFactory.createFloat8((double) date.getDayOfWeek());
+      return DatumFactory.createFloat8((double) date.getISODayOfWeek());
     }
   }
 
@@ -174,7 +173,7 @@
   private class WeekExtractorFromDate implements DatePartExtractorFromDate {
     @Override
     public Datum extract(DateDatum date) {
-      return DatumFactory.createFloat8((double) date.getWeekOfWeekyear());
+      return DatumFactory.createFloat8((double) date.getWeekOfYear());
     }
   }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTime.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTime.java
index 28e14fb..b3184e3 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTime.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTime.java
@@ -20,13 +20,20 @@
 
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.common.TajoDataTypes;
-import org.apache.tajo.datum.*;
+import org.apache.tajo.datum.Datum;
+import org.apache.tajo.datum.DatumFactory;
+import org.apache.tajo.datum.NullDatum;
+import org.apache.tajo.datum.TimeDatum;
 import org.apache.tajo.engine.function.GeneralFunction;
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.util.datetime.DateTimeConstants;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
-import static org.apache.tajo.common.TajoDataTypes.Type.*;
+import static org.apache.tajo.common.TajoDataTypes.Type.FLOAT8;
+import static org.apache.tajo.common.TajoDataTypes.Type.TEXT;
 
 @Description(
     functionName = "date_part",
@@ -85,55 +92,57 @@
       }
     }
 
-    return extractor.extract(time);
+    TimeMeta tm = time.toTimeMeta();
+    DateTimeUtil.toUserTimezone(tm);
+    return extractor.extract(tm);
   }
 
   private interface DatePartExtractorFromTime {
-    public Datum extract(TimeDatum time);
+    public Datum extract(TimeMeta tm);
   }
 
   private class HourExtractorFromTime implements DatePartExtractorFromTime {
     @Override
-    public Datum extract(TimeDatum time) {
-      return DatumFactory.createFloat8((double) time.getHourOfDay());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.hours);
     }
   }
 
   private class MicrosecondsExtractorFromTime implements DatePartExtractorFromTime {
     @Override
-    public Datum extract(TimeDatum time) {
-      return DatumFactory.createFloat8((double) (time.getSecondOfMinute() * 1000000 + time.getMillisOfSecond() * 1000));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (tm.secs * 1000000 + tm.fsecs));
     }
   }
 
   private class MillisecondsExtractorFromTime implements DatePartExtractorFromTime {
     @Override
-    public Datum extract(TimeDatum time) {
-      return DatumFactory.createFloat8((double) (time.getSecondOfMinute() * 1000 + time.getMillisOfSecond()));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (tm.secs * 1000 + tm.fsecs / 1000.0));
     }
   }
 
   private class MinuteExtractorFromTime implements DatePartExtractorFromTime {
     @Override
-    public Datum extract(TimeDatum time) {
-      return DatumFactory.createFloat8((double) time.getMinuteOfHour());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.minutes);
     }
   }
 
   private class SecondExtractorFromTime implements DatePartExtractorFromTime {
     @Override
-    public Datum extract(TimeDatum time) {
-      if (time.getMillisOfSecond() != 0) {
-        return DatumFactory.createFloat8(time.getSecondOfMinute() + (((double) time.getMillisOfSecond()) / 1000));
+    public Datum extract(TimeMeta tm) {
+      if (tm.fsecs != 0) {
+        return DatumFactory.createFloat8(tm.secs + (((double) tm.fsecs) / (double)DateTimeConstants.USECS_PER_SEC));
       } else {
-        return DatumFactory.createFloat8((double) time.getSecondOfMinute());
+        return DatumFactory.createFloat8((double) tm.secs);
       }
     }
   }
 
   private class NullExtractorFromTime implements DatePartExtractorFromTime {
     @Override
-    public Datum extract(TimeDatum time) {
+    public Datum extract(TimeMeta tm) {
       return NullDatum.get();
     }
   }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTimestamp.java
index 3b46929..98900ef 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTimestamp.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DatePartFromTimestamp.java
@@ -25,6 +25,9 @@
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.util.datetime.DateTimeConstants;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import static org.apache.tajo.common.TajoDataTypes.Type.*;
 
@@ -49,7 +52,7 @@
   @Override
   public Datum eval(Tuple params) {
     Datum target = params.get(0);
-    TimestampDatum timestamp = null;
+    TimestampDatum timestamp;
 
     if(target instanceof NullDatum || params.get(1) instanceof NullDatum) {
       return NullDatum.get();
@@ -111,147 +114,150 @@
       }
     }
 
-    return extractor.extract(timestamp);
+    TimeMeta tm = timestamp.toTimeMeta();
+    DateTimeUtil.toUserTimezone(tm);
+
+    return extractor.extract(tm);
   }
 
   private interface DatePartExtractorFromTimestamp {
-    public Datum extract(TimestampDatum timestamp);
+    public Datum extract(TimeMeta tm);
   }
 
   private class CenturyExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getCenturyOfEra());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.getCenturyOfEra());
     }
   } 
 
   private class DayExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getDayOfMonth());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.dayOfMonth);
     }
   }
 
   private class DecadeExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) (timestamp.getYear() / 10));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (tm.years / 10));
     }
   }
 
   private class DowExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      Integer tdow = timestamp.getDayOfWeek();
+    public Datum extract(TimeMeta tm) {
+      Integer tdow = tm.getDayOfWeek();
       return DatumFactory.createFloat8((double) ((tdow == 7) ? 0 : tdow));
     }
   }
 
   private class DoyExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getDayOfYear());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double)tm.getDayOfYear());
     }
   }
 
   private class EpochExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getUnixTime());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double)DateTimeUtil.julianTimeToEpoch(DateTimeUtil.toJulianTimestamp(tm)));
     }
   }
 
   private class HourExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getHourOfDay());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.hours);
     }
   }
 
   private class ISODowExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getDayOfWeek());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.getISODayOfWeek());
     }
   }
 
   private class ISOYearExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getWeekyear());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.getWeekyear());
     }
   }
 
   private class MicrosecondsExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) (timestamp.getSecondOfMinute() * 1000000 + timestamp.getMillisOfSecond() * 1000));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (tm.secs * 1000000 + tm.fsecs));
     }
   }
 
   private class MillenniumExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) (((timestamp.getYear() - 1) / 1000) + 1));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (((tm.years - 1) / 1000) + 1));
     }
   }
 
   private class MillisecondsExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) (timestamp.getSecondOfMinute() * 1000 + timestamp.getMillisOfSecond()));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (tm.secs * 1000 + tm.fsecs / 1000.0));
     }
   }
 
   private class MinuteExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getMinuteOfHour());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.minutes);
     }
   }
 
   private class MonthExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getMonthOfYear());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.monthOfYear);
     }
   }
 
   private class QuarterExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) (((timestamp.getMonthOfYear() - 1) / 3) + 1));
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) (((tm.monthOfYear - 1) / 3) + 1));
     }
   }
 
   private class SecondExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      if (timestamp.getMillisOfSecond() != 0) {
-        return DatumFactory.createFloat8(timestamp.getSecondOfMinute() + (((double) timestamp.getMillisOfSecond()) / 1000));
+    public Datum extract(TimeMeta tm) {
+      if (tm.fsecs != 0) {
+        return DatumFactory.createFloat8(tm.secs + (((double) tm.fsecs) / (double) DateTimeConstants.USECS_PER_SEC));
       } else {
-        return DatumFactory.createFloat8((double) timestamp.getSecondOfMinute());
+        return DatumFactory.createFloat8((double) tm.secs);
       }
     }
   }
 
   private class WeekExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getWeekOfWeekyear());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.getWeekOfYear());
     }
   }
 
   private class YearExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
-      return DatumFactory.createFloat8((double) timestamp.getYear());
+    public Datum extract(TimeMeta tm) {
+      return DatumFactory.createFloat8((double) tm.years);
     }
   }
 
   private class NullExtractorFromTimestamp implements DatePartExtractorFromTimestamp {
     @Override
-    public Datum extract(TimestampDatum timestamp) {
+    public Datum extract(TimeMeta tm) {
       return NullDatum.get();
     }
   }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DateTimePartFromUnixTimeStamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DateTimePartFromUnixTimeStamp.java
deleted file mode 100644
index 6aaded0..0000000
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DateTimePartFromUnixTimeStamp.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.engine.function.datetime;
-
-import org.apache.tajo.catalog.Column;
-import org.apache.tajo.common.TajoDataTypes;
-import org.apache.tajo.datum.*;
-import org.apache.tajo.engine.function.GeneralFunction;
-import org.apache.tajo.engine.function.annotation.Description;
-import org.apache.tajo.engine.function.annotation.ParamTypes;
-import org.apache.tajo.storage.Tuple;
-import org.apache.tajo.util.TimeStampUtil;
-import org.joda.time.DateTime;
-
-import static org.apache.tajo.common.TajoDataTypes.Type.*;
-
-
-@Description(
-        functionName = "utc_usec_to",
-        description = "Extract field from time",
-        example = "> SELECT utc_usec_to('day', 1274259481071200);\n"
-                + "1274227200000000",
-        returnType = TajoDataTypes.Type.INT8,
-        paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.TEXT, TajoDataTypes.Type.INT8}),
-                @ParamTypes(paramTypes = {TajoDataTypes.Type.TEXT, TajoDataTypes.Type.INT8, TajoDataTypes.Type.INT4})}
-)
-public class DateTimePartFromUnixTimeStamp extends GeneralFunction {
-
-    private DateTimePartExtractorFromUnixTime extractor = null;
-    private WeekPartExtractorFromUnixTime weekExtractor = null;
-
-    public DateTimePartFromUnixTimeStamp() {
-        super(new Column[]{
-                new Column("target", TEXT),
-                new Column("source", INT8),
-                new Column("dayOfWeek", INT4),
-
-        });
-    }
-
-    @Override
-    public Datum eval(Tuple params) {
-
-        Datum target = params.get(0);
-        DateTime dateTime;
-        Int4Datum dayOfWeek = null;
-
-        if (target instanceof NullDatum || params.get(1) instanceof NullDatum) {
-            return NullDatum.get();
-        }
-
-        if (params.get(1) instanceof Int8Datum) {
-            dateTime = TimeStampUtil.getUTCDateTime((Int8Datum) (params.get(1)));
-        } else {
-            return NullDatum.get();
-        }
-
-
-        if ( null == extractor || null == weekExtractor) {
-
-            String extractType = target.asChars().toLowerCase();
-
-            if (extractType.equals("day")) {
-                extractor = new DayExtractorFromTime();
-            } else if (extractType.equals("hour")) {
-                extractor = new HourExtractorFromTime();
-            } else if (extractType.equals("month")) {
-                extractor = new MonthExtractorFromTime();
-            } else if (extractType.equals("year")) {
-                extractor = new YearExtractorFromTime();
-            } else if (extractType.equals("week")) {
-                if (params.get(2) instanceof NullDatum) {
-                    return NullDatum.get();
-                }
-                dayOfWeek = (Int4Datum) params.get(2);
-                weekExtractor = new WeekExtractorFromTime();
-            }
-        }
-
-        return null != weekExtractor ? weekExtractor.extract(dateTime, dayOfWeek.asInt4()) : extractor.extract(dateTime);
-    }
-
-    private interface DateTimePartExtractorFromUnixTime {
-        public Datum extract(DateTime dateTime);
-    }
-
-    private interface WeekPartExtractorFromUnixTime {
-        public Datum extract(DateTime dateTime, int week);
-    }
-
-    private class DayExtractorFromTime implements DateTimePartExtractorFromUnixTime {
-        @Override
-        public Datum extract(DateTime dateTime) {
-            return DatumFactory.createInt8(TimeStampUtil.getDay(dateTime));
-        }
-    }
-
-    private class HourExtractorFromTime implements DateTimePartExtractorFromUnixTime {
-        @Override
-        public Datum extract(DateTime dateTime) {
-            return DatumFactory.createInt8(TimeStampUtil.getHour(dateTime));
-        }
-    }
-
-    private class MonthExtractorFromTime implements DateTimePartExtractorFromUnixTime {
-        @Override
-        public Datum extract(DateTime dateTime) {
-            return DatumFactory.createInt8(TimeStampUtil.getMonth(dateTime));
-        }
-    }
-
-    private class YearExtractorFromTime implements DateTimePartExtractorFromUnixTime {
-        @Override
-        public Datum extract(DateTime dateTime) {
-            return DatumFactory.createInt8(TimeStampUtil.getYear(dateTime));
-        }
-    }
-
-    private class WeekExtractorFromTime implements WeekPartExtractorFromUnixTime {
-        @Override
-        public Datum extract(DateTime dateTime , int week) {
-            return DatumFactory.createInt8(TimeStampUtil.getDayOfWeek(dateTime,week));
-        }
-    }
-}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DateTimePartFromUnixTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DateTimePartFromUnixTimestamp.java
new file mode 100644
index 0000000..8705b06
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/DateTimePartFromUnixTimestamp.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.function.datetime;
+
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.datum.*;
+import org.apache.tajo.engine.function.GeneralFunction;
+import org.apache.tajo.engine.function.annotation.Description;
+import org.apache.tajo.engine.function.annotation.ParamTypes;
+import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.joda.time.DateTime;
+
+import static org.apache.tajo.common.TajoDataTypes.Type.*;
+
+
+@Description(
+    functionName = "utc_usec_to",
+    description = "Extract field from time",
+    example = "> SELECT utc_usec_to('day', 1274259481071200);\n"
+        + "1274227200000000",
+    returnType = TajoDataTypes.Type.INT8,
+    paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.TEXT, TajoDataTypes.Type.INT8}),
+        @ParamTypes(paramTypes = {TajoDataTypes.Type.TEXT, TajoDataTypes.Type.INT8, TajoDataTypes.Type.INT4})}
+)
+public class DateTimePartFromUnixTimestamp extends GeneralFunction {
+
+  private DateTimePartExtractorFromUnixTime extractor = null;
+  private WeekPartExtractorFromUnixTime weekExtractor = null;
+
+  public DateTimePartFromUnixTimestamp() {
+    super(new Column[]{
+        new Column("target", TEXT),
+        new Column("source", INT8),
+        new Column("dayOfWeek", INT4),
+
+    });
+  }
+
+  @Override
+  public Datum eval(Tuple params) {
+
+    Datum target = params.get(0);
+    DateTime dateTime;
+    Int4Datum dayOfWeek = null;
+
+    if (target instanceof NullDatum || params.get(1) instanceof NullDatum) {
+      return NullDatum.get();
+    }
+
+    if (params.get(1) instanceof Int8Datum) {
+      dateTime = DateTimeUtil.getUTCDateTime((Int8Datum) (params.get(1)));
+    } else {
+      return NullDatum.get();
+    }
+
+
+    if ( null == extractor || null == weekExtractor) {
+
+      String extractType = target.asChars().toLowerCase();
+
+      if (extractType.equals("day")) {
+        extractor = new DayExtractorFromTime();
+      } else if (extractType.equals("hour")) {
+        extractor = new HourExtractorFromTime();
+      } else if (extractType.equals("month")) {
+        extractor = new MonthExtractorFromTime();
+      } else if (extractType.equals("year")) {
+        extractor = new YearExtractorFromTime();
+      } else if (extractType.equals("week")) {
+        if (params.get(2) instanceof NullDatum) {
+          return NullDatum.get();
+        }
+        dayOfWeek = (Int4Datum) params.get(2);
+        weekExtractor = new WeekExtractorFromTime();
+      }
+    }
+
+    return null != weekExtractor ? weekExtractor.extract(dateTime, dayOfWeek.asInt4()) : extractor.extract(dateTime);
+  }
+
+  private interface DateTimePartExtractorFromUnixTime {
+    public Datum extract(DateTime dateTime);
+  }
+
+  private interface WeekPartExtractorFromUnixTime {
+    public Datum extract(DateTime dateTime, int week);
+  }
+
+  private class DayExtractorFromTime implements DateTimePartExtractorFromUnixTime {
+    @Override
+    public Datum extract(DateTime dateTime) {
+      return DatumFactory.createInt8(DateTimeUtil.getDay(dateTime));
+    }
+  }
+
+  private class HourExtractorFromTime implements DateTimePartExtractorFromUnixTime {
+    @Override
+    public Datum extract(DateTime dateTime) {
+      return DatumFactory.createInt8(DateTimeUtil.getHour(dateTime));
+    }
+  }
+
+  private class MonthExtractorFromTime implements DateTimePartExtractorFromUnixTime {
+    @Override
+    public Datum extract(DateTime dateTime) {
+      return DatumFactory.createInt8(DateTimeUtil.getMonth(dateTime));
+    }
+  }
+
+  private class YearExtractorFromTime implements DateTimePartExtractorFromUnixTime {
+    @Override
+    public Datum extract(DateTime dateTime) {
+      return DatumFactory.createInt8(DateTimeUtil.getYear(dateTime));
+    }
+  }
+
+  private class WeekExtractorFromTime implements WeekPartExtractorFromUnixTime {
+    @Override
+    public Datum extract(DateTime dateTime , int week) {
+      return DatumFactory.createInt8(DateTimeUtil.getDayOfWeek(dateTime,week));
+    }
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/Today.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/NowTimestamp.java
similarity index 67%
rename from tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/Today.java
rename to tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/NowTimestamp.java
index 157e545..adc093c 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/builtin/Today.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/NowTimestamp.java
@@ -16,31 +16,36 @@
  * limitations under the License.
  */
 
-package org.apache.tajo.engine.function.builtin;
+package org.apache.tajo.engine.function.datetime;
 
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.datum.Datum;
 import org.apache.tajo.datum.DatumFactory;
+import org.apache.tajo.datum.TimestampDatum;
 import org.apache.tajo.engine.function.GeneralFunction;
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
 
 @Description(
-  functionName = "today",
-  description = "get current time millis",
-  example = "> SELECT today();",
-  returnType = TajoDataTypes.Type.INT8,
-  paramTypes = {@ParamTypes(paramTypes = {})}
+    functionName = "now",
+    description = "Get current time. Result is TIMESTAMP type.",
+    example = "> SELECT now();\n2014-04-18 22:54:29.280",
+    returnType = TajoDataTypes.Type.TIMESTAMP,
+    paramTypes = {@ParamTypes(paramTypes = {})}
 )
-public class Today extends GeneralFunction {
+public class NowTimestamp extends GeneralFunction {
+  TimestampDatum datum;
 
-  public Today() {
+  public NowTimestamp() {
     super(NoArgs);
   }
 
   @Override
   public Datum eval(Tuple params) {
-    return DatumFactory.createInt8(System.currentTimeMillis());
+    if (datum == null) {
+      datum = DatumFactory.createTimestmpDatumWithJavaMillis(System.currentTimeMillis());
+    }
+    return datum;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToCharTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToCharTimestamp.java
index 2a74ff5..4ad76c4 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToCharTimestamp.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToCharTimestamp.java
@@ -29,39 +29,33 @@
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
-import static org.apache.tajo.common.TajoDataTypes.Type.INT8;
 import static org.apache.tajo.common.TajoDataTypes.Type.TEXT;
+import static org.apache.tajo.common.TajoDataTypes.Type.TIMESTAMP;
 
 @Description(
   functionName = "to_char",
-  description = "Convert time stamp to string",
-  example = "> SELECT to_char(1389071652, 'yyyy-MM');\n"
+  description = "Convert time stamp to string. Format should be a SQL standard format string.",
+  example = "> SELECT to_char(TIMESTAMP '2014-01-17 10:09:37', 'YYYY-MM');\n"
           + "2014-01",
   returnType = TajoDataTypes.Type.TEXT,
   paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.TIMESTAMP, TajoDataTypes.Type.TEXT})}
 )
 public class ToCharTimestamp extends GeneralFunction {
-  private boolean constantFormat;
-  private DateTimeFormatter formatter;
-
   public ToCharTimestamp() {
     super(new Column[] {
-        new Column("timestamp", INT8),
+        new Column("timestamp", TIMESTAMP),
         new Column("format", TEXT)
     });
   }
 
   @Override
   public void init(FunctionEval.ParamType[] paramTypes) {
-    if (paramTypes[1] == FunctionEval.ParamType.CONSTANT) {
-      constantFormat = true;
-    }
   }
 
-
   @Override
   public Datum eval(Tuple params) {
     if(params.isNull(0) || params.isNull(1)) {
@@ -69,11 +63,11 @@
     }
 
     TimestampDatum valueDatum = (TimestampDatum) params.get(0);
+    TimeMeta tm = valueDatum.toTimeMeta();
+    DateTimeUtil.toUserTimezone(tm);
+
     Datum pattern = params.get(1);
 
-    if (formatter == null || !constantFormat) {
-      formatter = DateTimeFormat.forPattern(pattern.asChars());
-    }
-    return DatumFactory.createText(valueDatum.toChars(formatter));
+    return DatumFactory.createText(DateTimeFormat.to_char(tm, pattern.asChars()));
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToDate.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToDate.java
index ba6a020..09a4395 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToDate.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToDate.java
@@ -27,24 +27,19 @@
 import org.apache.tajo.engine.function.annotation.Description;
 import org.apache.tajo.engine.function.annotation.ParamTypes;
 import org.apache.tajo.storage.Tuple;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 @Description(
     functionName = "to_date",
-    description = "Convert string to date. Format should be a java format string.",
-    example = "> SELECT to_date('2014-01-01', 'yyyy-MM-dd');\n"
+    description = "Convert string to date. Format should be a SQL standard format string.",
+    example = "> SELECT to_date('2014-01-01', 'YYYY-MM-DD');\n"
         + "2014-01-01",
     returnType = TajoDataTypes.Type.DATE,
     paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.TEXT, TajoDataTypes.Type.TEXT})}
 )
 public class ToDate extends GeneralFunction {
-  private static Map<String, DateTimeFormatter> formattercCache =
-      new ConcurrentHashMap<String, DateTimeFormatter>();
-
   public ToDate() {
     super(new Column[]{
         new Column("string", TajoDataTypes.Type.TEXT),
@@ -60,12 +55,8 @@
     String value = params.get(0).asChars();
     String pattern = params.get(1).asChars();
 
-    DateTimeFormatter formatter = formattercCache.get(pattern);
-    if (formatter == null) {
-      formatter = DateTimeFormat.forPattern(pattern);
-      formattercCache.put(pattern, formatter);
-    }
+    TimeMeta tm = DateTimeFormat.parseDateTime(value, pattern);
 
-    return new DateDatum(formatter.parseDateTime(value).toLocalDate());
+    return new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth));
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestampInt.java
similarity index 92%
rename from tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java
rename to tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestampInt.java
index 1cf6870..d14cfd6 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestamp.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestampInt.java
@@ -39,8 +39,8 @@
   paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.INT4}),
       @ParamTypes(paramTypes = {TajoDataTypes.Type.INT8})}
 )
-public class ToTimestamp extends GeneralFunction {
-  public ToTimestamp() {
+public class ToTimestampInt extends GeneralFunction {
+  public ToTimestampInt() {
     super(new Column[] {new Column("timestamp", INT4)});
   }
 
@@ -50,6 +50,6 @@
     if (value instanceof NullDatum) {
       return NullDatum.get();
     }
-    return DatumFactory.createTimeStamp(value.asInt4());
+    return DatumFactory.createTimestmpDatumWithUnixTime(value.asInt4());
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestampText.java b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestampText.java
new file mode 100644
index 0000000..f42a171
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/function/datetime/ToTimestampText.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.function.datetime;
+
+import org.apache.tajo.catalog.Column;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.datum.*;
+import org.apache.tajo.engine.function.GeneralFunction;
+import org.apache.tajo.engine.function.annotation.Description;
+import org.apache.tajo.engine.function.annotation.ParamTypes;
+import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.util.datetime.DateTimeFormat;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
+
+import static org.apache.tajo.common.TajoDataTypes.Type.TEXT;
+
+@Description(
+    functionName = "to_timestamp",
+    description = "Convert string to time stamp",
+    detail = "Patterns for Date/Time Formatting: http://www.postgresql.org/docs/8.4/static/functions-formatting.html",
+    example = "> select to_timestamp('05 Dec 2000 15:12:02.020', 'DD Mon YYYY HH24:MI:SS.MS');\n"
+        + "2000-12-05 15:12:02.02",
+    returnType = TajoDataTypes.Type.TIMESTAMP,
+    paramTypes = {@ParamTypes(paramTypes = {TajoDataTypes.Type.TEXT, TajoDataTypes.Type.TEXT})}
+)
+public class ToTimestampText extends GeneralFunction {
+  public ToTimestampText() {
+    super(new Column[]{new Column("DateTimeText", TEXT), new Column("Pattern", TEXT)});
+  }
+
+  @Override
+  public Datum eval(Tuple params) {
+    if(params.isNull(0) || params.isNull(1)) {
+      return NullDatum.get();
+    }
+
+    TextDatum dateTimeTextDatum = (TextDatum) params.get(0);
+    TextDatum patternDatum = (TextDatum) params.get(1);
+
+    TimeMeta tm = DateTimeFormat.parseDateTime(dateTimeTextDatum.asChars(), patternDatum.asChars());
+    DateTimeUtil.toUTCTimezone(tm);
+
+    return new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm));
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/parser/HiveQLAnalyzer.java b/tajo-core/src/main/java/org/apache/tajo/engine/parser/HiveQLAnalyzer.java
deleted file mode 100644
index 3d7c7af..0000000
--- a/tajo-core/src/main/java/org/apache/tajo/engine/parser/HiveQLAnalyzer.java
+++ /dev/null
@@ -1,1551 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.engine.parser;
-
-import org.antlr.v4.runtime.ANTLRInputStream;
-import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.antlr.v4.runtime.tree.TerminalNodeImpl;
-import org.apache.commons.lang.math.NumberUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.tajo.algebra.*;
-import org.apache.tajo.common.TajoDataTypes;
-import org.apache.tajo.engine.parser.HiveQLParser.TableAllColumnsContext;
-
-import java.math.BigInteger;
-import java.util.*;
-
-public class HiveQLAnalyzer extends HiveQLParserBaseVisitor<Expr> {
-  private static final Log LOG = LogFactory.getLog(HiveQLAnalyzer.class.getName());
-  private HiveQLParser parser;
-
-  public Expr parse(String sql) {
-    HiveQLLexer lexer = new HiveQLLexer(new ANTLRNoCaseStringStream(sql));
-    CommonTokenStream tokens = new CommonTokenStream(lexer);
-    parser = new HiveQLParser(tokens);
-    parser.setBuildParseTree(true);
-
-    HiveQLParser.StatementContext context;
-    try {
-      context = parser.statement();
-    } catch (SQLParseError e) {
-      throw new SQLSyntaxError(e);
-    }
-
-    return visit(context);
-  }
-
-  @Override
-  public Expr visitStatement(HiveQLParser.StatementContext ctx) {
-    return visitExecStatement(ctx.execStatement());
-  }
-
-  @Override
-  public Expr visitQueryStatement(HiveQLParser.QueryStatementContext ctx) {
-    Expr current = null;
-
-    if (ctx.body != null) {
-      current = visitBody(ctx.body(0));
-    }
-
-    if (ctx.regular_body() != null) {
-      current = visitRegular_body(ctx.regular_body());
-    }
-
-    return current;
-  }
-
-  @Override
-  public Expr visitBody(HiveQLParser.BodyContext ctx) {
-
-    Expr current = null;
-    Insert insert = null;
-
-    Projection select = null;
-
-    if (ctx.insertClause() != null) {
-      insert = visitInsertClause(ctx.insertClause());
-    }
-
-    if (ctx.selectClause() != null) {
-      select = (Projection) visitSelectClause(ctx.selectClause());
-      if (ctx.selectClause().KW_DISTINCT() != null) {
-        select.setDistinct();
-      }
-
-    }
-
-    for (int i = 0; i < ctx.getParent().getChildCount(); i++) {
-      if (ctx.getParent().getChild(i) instanceof HiveQLParser.FromClauseContext) {
-        HiveQLParser.FromClauseContext fromClauseContext = (HiveQLParser.FromClauseContext) ctx.getParent().getChild(i);
-        Expr from = visitFromClause(fromClauseContext);
-        current = from;
-      }
-    }
-
-    if (ctx.whereClause() != null) {
-      Selection where = new Selection(visitWhereClause(ctx.whereClause()));
-      where.setChild(current);
-      current = where;
-    }
-
-    if (ctx.groupByClause() != null) {
-      Aggregation aggregation = visitGroupByClause(ctx.groupByClause());
-      aggregation.setChild(current);
-      current = aggregation;
-
-      if (ctx.havingClause() != null) {
-        Expr havingCondition = visitHavingClause(ctx.havingClause());
-        Having having = new Having(havingCondition);
-        having.setChild(current);
-        current = having;
-      }
-    }
-
-    if (ctx.orderByClause() != null) {
-      Sort sort = visitOrderByClause(ctx.orderByClause());
-      sort.setChild(current);
-      current = sort;
-    }
-
-    if (ctx.clusterByClause() != null) {
-      visitClusterByClause(ctx.clusterByClause());
-    }
-
-    if (ctx.distributeByClause() != null) {
-      visitDistributeByClause(ctx.distributeByClause());
-    }
-
-    if (ctx.sortByClause() != null) {
-      Sort sort = visitSortByClause(ctx.sortByClause());
-      sort.setChild(current);
-      current = sort;
-    }
-
-    if (ctx.window_clause() != null) {
-      Expr window = visitWindow_clause(ctx.window_clause());
-    }
-
-    if (ctx.limitClause() != null) {
-      Limit limit = visitLimitClause(ctx.limitClause());
-      limit.setChild(current);
-      current = limit;
-    }
-
-    Projection projection = new Projection();
-    projection.setNamedExprs(select.getNamedExprs());
-
-    if (current != null)
-      projection.setChild(current);
-
-    if (select.isDistinct())
-      projection.setDistinct();
-
-
-    if (insert != null) {
-      insert.setSubQuery(projection);
-      current = insert;
-    } else {
-      current = projection;
-    }
-
-    return current;
-  }
-
-  @Override
-  public Expr visitRegular_body(HiveQLParser.Regular_bodyContext ctx) {
-    Expr current = null;
-    Insert insert = null;
-
-    if (ctx.selectStatement() != null) {
-      current = visitSelectStatement(ctx.selectStatement());
-    } else {
-      Projection select = null;
-
-      if (ctx.insertClause() != null) {
-        insert = visitInsertClause(ctx.insertClause());
-      }
-
-      if (ctx.selectClause() != null) {
-        select = (Projection) visitSelectClause(ctx.selectClause());
-        if (ctx.selectClause().KW_DISTINCT() != null) {
-          select.setDistinct();
-        }
-
-      }
-
-      if (ctx.fromClause() != null) {
-        Expr from = visitFromClause(ctx.fromClause());
-        current = from;
-      }
-
-      if (ctx.whereClause() != null) {
-        Selection where = new Selection(visitWhereClause(ctx.whereClause()));
-        where.setChild(current);
-        current = where;
-      }
-
-      if (ctx.groupByClause() != null) {
-        Aggregation aggregation = visitGroupByClause(ctx.groupByClause());
-        aggregation.setChild(current);
-        current = aggregation;
-
-        if (ctx.havingClause() != null) {
-          Expr havingCondition = visitHavingClause(ctx.havingClause());
-          Having having = new Having(havingCondition);
-          having.setChild(current);
-          current = having;
-        }
-      }
-
-      if (ctx.orderByClause() != null) {
-        Sort sort = visitOrderByClause(ctx.orderByClause());
-        sort.setChild(current);
-        current = sort;
-      }
-
-      if (ctx.clusterByClause() != null) {
-        visitClusterByClause(ctx.clusterByClause());
-      }
-
-      if (ctx.distributeByClause() != null) {
-        visitDistributeByClause(ctx.distributeByClause());
-      }
-
-      if (ctx.sortByClause() != null) {
-        Sort sort = visitSortByClause(ctx.sortByClause());
-        sort.setChild(current);
-        current = sort;
-      }
-
-      if (ctx.window_clause() != null) {
-        Expr window = visitWindow_clause(ctx.window_clause());
-      }
-
-      if (ctx.limitClause() != null) {
-        Limit limit = visitLimitClause(ctx.limitClause());
-        limit.setChild(current);
-        current = limit;
-      }
-
-      Projection projection = new Projection();
-      projection.setNamedExprs(select.getNamedExprs());
-
-      if (current != null)
-        projection.setChild(current);
-
-      if (select.isDistinct())
-        projection.setDistinct();
-
-      if (insert != null) {
-        insert.setSubQuery(projection);
-        current = insert;
-      } else {
-        current = projection;
-      }
-
-
-    }
-    return current;
-  }
-
-  /**
-   * This method implemented for parsing union all clause.
-   *
-   * @param ctx
-   * @return
-   */
-  @Override
-  public Expr visitQueryStatementExpression(HiveQLParser.QueryStatementExpressionContext ctx) {
-    Expr left = null, right = null, current = null;
-    if (ctx.queryStatement() != null) {
-      if (ctx.queryStatement().size() == 1)
-        return visitQueryStatement(ctx.queryStatement(0));
-
-      for (int i = 0; i < ctx.queryStatement().size(); i++) {
-        if (i == 0)
-          current = visitQueryStatement(ctx.queryStatement(i));
-        else
-          left = current;
-
-        if (i > 0) {
-          right = visitQueryStatement(ctx.queryStatement(i));
-          current = new SetOperation(OpType.Union, left, right, false);
-        }
-      }
-    }
-    return current;
-  }
-
-  @Override
-  public Expr visitSelectStatement(HiveQLParser.SelectStatementContext ctx) {
-    Expr current = null;
-
-    Projection select = (Projection) visitSelectClause(ctx.selectClause());
-
-    if (ctx.selectClause().KW_DISTINCT() != null) {
-      select.setDistinct();
-    }
-
-    Expr from = visitFromClause(ctx.fromClause());
-    current = from;
-
-    if (ctx.whereClause() != null) {
-      Selection where = new Selection(visitWhereClause(ctx.whereClause()));
-      where.setChild(current);
-      current = where;
-    }
-
-    if (ctx.groupByClause() != null) {
-      Aggregation aggregation = visitGroupByClause(ctx.groupByClause());
-      aggregation.setChild(current);
-      current = aggregation;
-
-      if (ctx.havingClause() != null) {
-        Expr havingCondition = visitHavingClause(ctx.havingClause());
-        Having having = new Having(havingCondition);
-        having.setChild(current);
-        current = having;
-      }
-    }
-
-    if (ctx.orderByClause() != null) {
-      Sort sort = visitOrderByClause(ctx.orderByClause());
-      sort.setChild(current);
-      current = sort;
-    }
-
-    if (ctx.clusterByClause() != null) {
-      visitClusterByClause(ctx.clusterByClause());
-    }
-
-    if (ctx.distributeByClause() != null) {
-      visitDistributeByClause(ctx.distributeByClause());
-    }
-
-    if (ctx.sortByClause() != null) {
-      Sort sort = visitSortByClause(ctx.sortByClause());
-      sort.setChild(current);
-      current = sort;
-    }
-
-    if (ctx.window_clause() != null) {
-      Expr window = visitWindow_clause(ctx.window_clause());
-    }
-
-    if (ctx.limitClause() != null) {
-      Limit limit = visitLimitClause(ctx.limitClause());
-      limit.setChild(current);
-      current = limit;
-    }
-
-    Projection projection = new Projection();
-    projection.setNamedExprs(select.getNamedExprs());
-
-    if (current != null)
-      projection.setChild(current);
-
-    if (select.isDistinct())
-      projection.setDistinct();
-
-    current = projection;
-
-    return current;
-  }
-
-  @Override
-  public Expr visitFromClause(HiveQLParser.FromClauseContext ctx) {
-    return visitJoinSource(ctx.joinSource());
-  }
-
-  @Override
-  public Expr visitJoinSource(HiveQLParser.JoinSourceContext ctx) {
-    Expr[] relations = null;
-    RelationList relationList = null;
-
-    if (ctx.fromSource() != null) {
-      int fromCount = ctx.fromSource().size();
-      int uniqueJoinCount = ctx.uniqueJoinSource().size();
-
-      relations = new Expr[1];
-
-      Join current = null, parent = null;
-      JoinType type = null;
-      Expr left = null, right = null, condition = null;
-
-
-      if (fromCount == 1) {
-        relations[0] = visitFromSource(ctx.fromSource(0));
-      } else {
-        left = visitFromSource((HiveQLParser.FromSourceContext) ctx.getChild(0));
-
-        for (int i = 1; i < ctx.getChildCount(); i++) {
-          type = null;
-          right = null;
-          condition = null;
-
-          if (ctx.getChild(i) instanceof HiveQLParser.JoinTokenContext) {
-            type = getJoinType((HiveQLParser.JoinTokenContext) ctx.getChild(i));
-            if (i > 1)
-              left = parent;
-
-            if (i + 1 < ctx.getChildCount() && ctx.getChild(i + 1) instanceof HiveQLParser.FromSourceContext) {
-              right = visitFromSource((HiveQLParser.FromSourceContext) ctx.getChild(i + 1));
-            }
-
-            if (i + 3 < ctx.getChildCount() && ctx.getChild(i + 3) instanceof HiveQLParser.ExpressionContext) {
-              condition = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 3));
-            }
-
-            if (type != null) {
-              current = new Join(type);
-              current.setLeft(left);
-              current.setRight(right);
-
-              if (condition != null)
-                current.setQual(condition);
-
-              parent = current;
-            }
-          }
-
-        }
-        relations[0] = current;
-      }
-
-      //TODO: implement unique join.
-      relationList = new RelationList(relations);
-    }
-
-    return relationList;
-  }
-
-  public JoinType getJoinType(HiveQLParser.JoinTokenContext context) {
-    JoinType type = JoinType.INNER;
-
-    if (context.KW_INNER() != null) {
-      type = JoinType.INNER;
-    }
-
-    if (context.KW_LEFT() != null && context.KW_OUTER() != null) {
-      type = JoinType.LEFT_OUTER;
-    }
-
-    if (context.KW_RIGHT() != null && context.KW_OUTER() != null) {
-      type = JoinType.RIGHT_OUTER;
-    }
-
-    if (context.KW_CROSS() != null) {
-      type = JoinType.CROSS;
-    }
-
-    if (context.KW_FULL() != null) {
-      type = JoinType.FULL_OUTER;
-    }
-
-    if (context.KW_SEMI() != null) {
-      type = null;
-    }
-    return type;
-  }
-
-  @Override
-  public Expr visitFromSource(HiveQLParser.FromSourceContext ctx) {
-    Expr current = null;
-
-    if (ctx.Identifier() != null && ctx.LPAREN() != null) {
-      current = new LiteralValue(ctx.Identifier().getText(), LiteralValue.LiteralType.String);
-    }
-
-    if (ctx.tableSource() != null) {
-      current = visitTableSource(ctx.tableSource());
-    }
-
-    if (ctx.subQuerySource() != null) {
-      current = visitSubQuerySource(ctx.subQuerySource());
-
-      String tableAlias = "";
-      for (int i = 0; i < ctx.subQuerySource().getChildCount(); i++) {
-        if (ctx.subQuerySource().getChild(i) instanceof HiveQLParser.IdentifierContext) {
-          tableAlias = (ctx.subQuerySource().getChild(i)).getText();
-        }
-      }
-
-      TablePrimarySubQuery subQuery = new TablePrimarySubQuery(tableAlias, current);
-      current = subQuery;
-    }
-    // TODO: implement lateralView
-
-    return current;
-  }
-
-  @Override
-  public Expr visitSubQuerySource(HiveQLParser.SubQuerySourceContext ctx) {
-    Expr current = visitQueryStatementExpression(ctx.queryStatementExpression());
-    return current;
-  }
-
-  @Override
-  public Expr visitTableSource(HiveQLParser.TableSourceContext ctx) {
-    String tableName = "", alias = "";
-
-    if (ctx.tableName() != null)
-      tableName = ctx.tableName().getText();
-
-    if (ctx.alias != null) {
-      alias = ctx.alias.getText();
-      for (String token : HiveQLParser.tokenNames) {
-        if (token.replaceAll("'", "").equalsIgnoreCase(alias))
-          alias = "";
-      }
-    }
-
-    Relation relation = new Relation(tableName);
-    if (!alias.equals(""))
-      relation.setAlias(alias);
-
-    return relation;
-  }
-
-  @Override
-  public Expr visitSelectList(HiveQLParser.SelectListContext ctx) {
-    Expr current = null;
-    Projection projection = new Projection();
-    NamedExpr[] targets = new NamedExpr[ctx.selectItem().size()];
-    for (int i = 0; i < targets.length; i++) {
-      targets[i] = visitSelectItem(ctx.selectItem(i));
-    }
-
-    projection.setNamedExprs(targets);
-    current = projection;
-    return current;
-  }
-
-  @Override
-  public NamedExpr visitSelectItem(HiveQLParser.SelectItemContext ctx) {
-    NamedExpr target = null;
-
-    if (ctx.selectExpression() != null) {
-      target = new NamedExpr(visitSelectExpression(ctx.selectExpression()));
-    } else if (ctx.window_specification() != null) {
-      // TODO: if there is a window specification clause, we should handle it properly.
-    }
-
-    if (ctx.identifier().size() > 0 && target != null) {
-      target.setAlias(ctx.identifier(0).getText());
-    }
-    return target;
-  }
-
-  @Override
-  public Expr visitSelectExpression(HiveQLParser.SelectExpressionContext ctx) {
-    Expr current = null;
-
-    if (ctx.tableAllColumns() != null) {
-      current = visitTableAllColumns(ctx.tableAllColumns());
-    } else {
-      if (ctx.expression() != null) {
-        current = visitExpression(ctx.expression());
-      }
-    }
-
-    return current;
-  }
-
-  @Override
-  public Expr visitTableAllColumns(TableAllColumnsContext ctx) {
-    QualifiedAsteriskExpr target = new QualifiedAsteriskExpr();
-    if (ctx.tableName() != null) {
-      target.setQualifier(ctx.tableName().getText());
-    }
-
-    return target;
-  }
-
-  @Override
-  public Expr visitExpression(HiveQLParser.ExpressionContext ctx) {
-    Expr current = visitPrecedenceOrExpression(ctx.precedenceOrExpression());
-    return current;
-  }
-
-  @Override
-  public Expr visitPrecedenceOrExpression(HiveQLParser.PrecedenceOrExpressionContext ctx) {
-    Expr current = null, left = null, right = null;
-
-    for (int i = 0; i < ctx.precedenceAndExpression().size(); i++) {
-      if (i == 0) {
-        left = visitPrecedenceAndExpression(ctx.precedenceAndExpression(i));
-        current = left;
-      } else {
-        left = current;
-        right = visitPrecedenceAndExpression(ctx.precedenceAndExpression(i));
-        current = new BinaryOperator(OpType.Or, left, right);
-      }
-
-    }
-    return current;
-  }
-
-  /**
-   * This method parse AND expressions at WHERE clause.
-   * And this convert 'x BETWEEN y AND z' expression into 'x >= y AND x <= z' expression
-   * because Tajo doesn't provide 'BETWEEN' expression.
-   *
-   * @param ctx
-   * @return
-   */
-  @Override
-  public Expr visitPrecedenceAndExpression(HiveQLParser.PrecedenceAndExpressionContext ctx) {
-    Expr current = null, left = null, right = null;
-
-    for (int i = 0; i < ctx.precedenceNotExpression().size(); i++) {
-      Expr min = null, max = null;
-
-      if (ctx.precedenceNotExpression(i).precedenceEqualExpression() != null) {
-        HiveQLParser.PrecedenceEqualExpressionContext expressionContext = ctx.precedenceNotExpression(i)
-            .precedenceEqualExpression();
-        if (expressionContext.KW_BETWEEN() != null) {
-
-          if (expressionContext.min != null) {
-            min = visitPrecedenceBitwiseOrExpression(expressionContext.min);
-          }
-
-          if (expressionContext.max != null) {
-            max = visitPrecedenceBitwiseOrExpression(expressionContext.max);
-          }
-        }
-      }
-
-      if (min != null && max != null) {
-        left = visitPrecedenceNotExpression(ctx.precedenceNotExpression(i));
-        if (left != null) {
-          if (i == 0) {
-            BinaryOperator minOperator = new BinaryOperator(OpType.GreaterThanOrEquals, left, min);
-            BinaryOperator maxOperator = new BinaryOperator(OpType.LessThanOrEquals, left, max);
-            current = new BinaryOperator(OpType.And, minOperator, maxOperator);
-          } else {
-            BinaryOperator minOperator = new BinaryOperator(OpType.GreaterThanOrEquals, left, min);
-            current = new BinaryOperator(OpType.And, current, minOperator);
-
-            BinaryOperator maxOperator = new BinaryOperator(OpType.LessThanOrEquals, left, max);
-            current = new BinaryOperator(OpType.And, current, maxOperator);
-          }
-        }
-      } else {
-        if (i == 0) {
-          left = visitPrecedenceNotExpression(ctx.precedenceNotExpression(i));
-          current = left;
-        } else {
-          left = current;
-          right = visitPrecedenceNotExpression(ctx.precedenceNotExpression(i));
-          current = new BinaryOperator(OpType.And, left, right);
-        }
-      }
-    }
-    return current;
-  }
-
-  @Override
-  public Expr visitPrecedenceNotExpression(HiveQLParser.PrecedenceNotExpressionContext ctx) {
-    HiveQLParser.PrecedenceEqualExpressionContext expressionContext = ctx.precedenceEqualExpression();
-    Expr current = visitPrecedenceEqualExpression(expressionContext);
-    return current;
-  }
-
-  /**
-   * This method parse operators for equals expressions as follows:
-   * =, <>, !=, >=, >, <=, <, IN, NOT IN, LIKE, REGEXP, RLIKE
-   * <p/>
-   * In this case, this make RuntimeException>
-   *
-   * @param ctx
-   * @return
-   */
-  @Override
-  public Expr visitPrecedenceEqualExpression(HiveQLParser.PrecedenceEqualExpressionContext ctx) {
-    Expr current = null, left = null, right = null, min = null, max = null;
-    OpType type = null;
-    boolean isNot = false, isIn = false;
-    for (int i = 0; i < ctx.getChildCount(); i++) {
-      if (ctx.getChild(i) instanceof HiveQLParser.PrecedenceBitwiseOrExpressionContext) {
-        if (i == 0) {
-          left = visitPrecedenceBitwiseOrExpression((HiveQLParser.PrecedenceBitwiseOrExpressionContext) ctx.getChild(i));
-        } else {
-          right = visitPrecedenceBitwiseOrExpression((HiveQLParser.PrecedenceBitwiseOrExpressionContext) ctx.getChild(i));
-        }
-      } else if (ctx.getChild(i) instanceof HiveQLParser.ExpressionsContext) {
-        right = visitExpressions((HiveQLParser.ExpressionsContext) ctx.getChild(i));
-      } else if (ctx.getChild(i) instanceof TerminalNodeImpl) {
-        int symbolType = ((TerminalNodeImpl) ctx.getChild(i)).getSymbol().getType();
-        switch (symbolType) {
-          case HiveQLLexer.KW_NOT:
-            isNot = true;
-            break;
-          case HiveQLLexer.KW_IN:
-            isIn = true;
-            break;
-          default:
-            break;
-        }
-      } else if (ctx.getChild(i) instanceof HiveQLParser.PrecedenceEqualOperatorContext
-          || ctx.getChild(i) instanceof HiveQLParser.PrecedenceEqualNegatableOperatorContext) {
-        String keyword = ctx.getChild(i).getText().toUpperCase();
-
-        if (keyword.equals(">")) {
-          type = OpType.GreaterThan;
-        } else if (keyword.equals("<=>")) {
-          throw new RuntimeException("Unexpected operator : <=>");
-        } else if (keyword.equals("=")) {
-          type = OpType.Equals;
-        } else if (keyword.equals("<=")) {
-          type = OpType.LessThanOrEquals;
-        } else if (keyword.equals("<")) {
-          type = OpType.LessThan;
-        } else if (keyword.equals(">=")) {
-          type = OpType.GreaterThanOrEquals;
-        } else if (keyword.equals("<>")) {
-          type = OpType.NotEquals;
-        } else if (keyword.equals("!=")) {
-          type = OpType.NotEquals;
-        } else if (keyword.equals("REGEXP")) {
-          type = OpType.Regexp;
-        } else if (keyword.equals("RLIKE")) {
-          type = OpType.Regexp;
-        } else if (keyword.equals("LIKE")) {
-          type = OpType.LikePredicate;
-        }
-      }
-    }
-
-    if (type != null && right != null) {
-      if (type.equals(OpType.LikePredicate)) {
-        PatternMatchPredicate like = new PatternMatchPredicate(OpType.LikePredicate,
-            isNot, left, right);
-        current = like;
-      } else if (type.equals(OpType.Regexp)) {
-        PatternMatchPredicate regex = new PatternMatchPredicate(OpType.Regexp, isNot, left, right);
-        current = regex;
-      } else {
-        BinaryOperator binaryOperator = new BinaryOperator(type, left, right);
-        current = binaryOperator;
-      }
-    } else if (isIn) {
-      InPredicate inPredicate = new InPredicate(left, right, isNot);
-      current = inPredicate;
-    } else {
-      current = left;
-    }
-
-    return current;
-  }
-
-  @Override
-  public ValueListExpr visitExpressions(HiveQLParser.ExpressionsContext ctx) {
-    int size = ctx.expression().size();
-    Expr[] exprs = new Expr[size];
-    for (int i = 0; i < size; i++) {
-      exprs[i] = visitExpression(ctx.expression(i));
-    }
-    return new ValueListExpr(exprs);
-  }
-
-  @Override
-  public Expr visitPrecedenceBitwiseOrExpression(HiveQLParser.PrecedenceBitwiseOrExpressionContext ctx) {
-    int expressionCount = ctx.precedenceAmpersandExpression().size();
-
-    Expr current = null, left = null, right = null, parentLeft, parentRight;
-    OpType type = null, parentType = null;
-
-    for (int i = 0; i < expressionCount; i += 2) {
-      int operatorIndex = (i == 0) ? 0 : i - 1;
-
-      if (ctx.precedenceBitwiseOrOperator(operatorIndex) != null) {
-        type = getPrecedenceBitwiseOrOperator(ctx.precedenceBitwiseOrOperator(operatorIndex));
-      }
-
-      if (i == 0) {
-        left = visitPrecedenceAmpersandExpression(ctx.precedenceAmpersandExpression(i));
-        if (ctx.precedenceAmpersandExpression(i + 1) != null)
-          right = visitPrecedenceAmpersandExpression(ctx.precedenceAmpersandExpression(i + 1));
-      } else {
-        parentType = getPrecedenceBitwiseOrOperator((ctx.precedenceBitwiseOrOperator(operatorIndex - 1)));
-        parentLeft = visitPrecedenceAmpersandExpression(ctx.precedenceAmpersandExpression(i - 2));
-        parentRight = visitPrecedenceAmpersandExpression(ctx.precedenceAmpersandExpression(i - 1));
-        left = new BinaryOperator(parentType, parentLeft, parentRight);
-        right = visitPrecedenceAmpersandExpression(ctx.precedenceAmpersandExpression(i));
-      }
-
-      if (right != null) {
-        current = new BinaryOperator(type, left, right);
-      } else {
-        current = left;
-      }
-    }
-    return current;
-  }
-
-  public OpType getPrecedenceBitwiseOrOperator(HiveQLParser.PrecedenceBitwiseOrOperatorContext ctx) {
-    OpType type = null;
-    // TODO: It needs to consider how to support.
-    return type;
-  }
-
-  @Override
-  public Expr visitPrecedenceAmpersandExpression(HiveQLParser.PrecedenceAmpersandExpressionContext ctx) {
-    int expressionCount = ctx.precedencePlusExpression().size();
-
-    Expr current = null, left = null, right = null, parentLeft, parentRight;
-    OpType type = null, parentType = null;
-
-    for (int i = 0; i < expressionCount; i += 2) {
-      int operatorIndex = (i == 0) ? 0 : i - 1;
-
-      if (ctx.precedenceAmpersandOperator(operatorIndex) != null) {
-        type = getPrecedenceAmpersandOperator(ctx.precedenceAmpersandOperator(operatorIndex));
-      }
-
-      if (i == 0) {
-        left = visitPrecedencePlusExpression(ctx.precedencePlusExpression(i));
-        if (ctx.precedencePlusExpression(i + 1) != null)
-          right = visitPrecedencePlusExpression(ctx.precedencePlusExpression(i + 1));
-      } else {
-        parentType = getPrecedenceAmpersandOperator((ctx.precedenceAmpersandOperator(operatorIndex - 1)));
-        parentLeft = visitPrecedencePlusExpression(ctx.precedencePlusExpression(i - 2));
-        parentRight = visitPrecedencePlusExpression(ctx.precedencePlusExpression(i - 1));
-        left = new BinaryOperator(parentType, parentLeft, parentRight);
-        right = visitPrecedencePlusExpression(ctx.precedencePlusExpression(i));
-      }
-
-      if (right != null) {
-        current = new BinaryOperator(type, left, right);
-      } else {
-        current = left;
-      }
-    }
-    return current;
-  }
-
-  public OpType getPrecedenceAmpersandOperator(HiveQLParser.PrecedenceAmpersandOperatorContext ctx) {
-    OpType type = null;
-    // TODO: It needs to consider how to support.
-    return type;
-  }
-
-  @Override
-  public Expr visitPrecedencePlusExpression(HiveQLParser.PrecedencePlusExpressionContext ctx) {
-    int expressionCount = ctx.precedenceStarExpression().size();
-
-    Expr current = null, left = null, right = null, parentLeft, parentRight;
-    OpType type = null, parentType = null;
-
-    for (int i = 0; i < expressionCount; i += 2) {
-      int operatorIndex = (i == 0) ? 0 : i - 1;
-
-      if (ctx.precedencePlusOperator(operatorIndex) != null) {
-        type = getPrecedencePlusOperator(ctx.precedencePlusOperator(operatorIndex));
-      }
-
-      if (i == 0) {
-        left = visitPrecedenceStarExpression(ctx.precedenceStarExpression(i));
-        if (ctx.precedenceStarExpression(i + 1) != null)
-          right = visitPrecedenceStarExpression(ctx.precedenceStarExpression(i + 1));
-      } else {
-        parentType = getPrecedencePlusOperator((ctx.precedencePlusOperator(operatorIndex - 1)));
-        parentLeft = visitPrecedenceStarExpression(ctx.precedenceStarExpression(i - 2));
-        parentRight = visitPrecedenceStarExpression(ctx.precedenceStarExpression(i - 1));
-        left = new BinaryOperator(parentType, parentLeft, parentRight);
-        right = visitPrecedenceStarExpression(ctx.precedenceStarExpression(i));
-      }
-
-      if (right != null) {
-        current = new BinaryOperator(type, left, right);
-      } else {
-        current = left;
-      }
-    }
-    return current;
-  }
-
-  public OpType getPrecedencePlusOperator(HiveQLParser.PrecedencePlusOperatorContext ctx) {
-    OpType type = null;
-
-    if (ctx.MINUS() != null) {
-      type = OpType.Minus;
-    } else if (ctx.PLUS() != null) {
-      type = OpType.Plus;
-    }
-
-    return type;
-  }
-
-  @Override
-  public Expr visitPrecedenceStarExpression(HiveQLParser.PrecedenceStarExpressionContext ctx) {
-    int expressionCount = ctx.precedenceBitwiseXorExpression().size();
-
-    Expr current = null, left = null, right = null, parentLeft, parentRight;
-    OpType type = null, parentType = null;
-
-    for (int i = 0; i < expressionCount; i += 2) {
-      int operatorIndex = (i == 0) ? 0 : i - 1;
-
-      if (ctx.precedenceStarOperator(operatorIndex) != null) {
-        type = getPrecedenceStarOperator(ctx.precedenceStarOperator(operatorIndex));
-      }
-
-      if (i == 0) {
-        left = visitPrecedenceBitwiseXorExpression(ctx.precedenceBitwiseXorExpression(i));
-        if (ctx.precedenceBitwiseXorExpression(i + 1) != null)
-          right = visitPrecedenceBitwiseXorExpression(ctx.precedenceBitwiseXorExpression(i + 1));
-      } else {
-        parentType = getPrecedenceStarOperator((ctx.precedenceStarOperator(operatorIndex - 1)));
-        parentLeft = visitPrecedenceBitwiseXorExpression(ctx.precedenceBitwiseXorExpression(i - 2));
-        parentRight = visitPrecedenceBitwiseXorExpression(ctx.precedenceBitwiseXorExpression(i - 1));
-        left = new BinaryOperator(parentType, parentLeft, parentRight);
-        right = visitPrecedenceBitwiseXorExpression(ctx.precedenceBitwiseXorExpression(i));
-      }
-
-      if (right != null) {
-        current = new BinaryOperator(type, left, right);
-      } else {
-        current = left;
-      }
-    }
-
-    return current;
-  }
-
-  public OpType getPrecedenceStarOperator(HiveQLParser.PrecedenceStarOperatorContext ctx) {
-    OpType type = null;
-
-    if (ctx.DIV() != null) {
-      type = OpType.Divide;
-    } else if (ctx.DIVIDE() != null) {
-      type = OpType.Divide;
-    } else if (ctx.MOD() != null) {
-      type = OpType.Modular;
-    } else if (ctx.STAR() != null) {
-      type = OpType.Multiply;
-    }
-
-    return type;
-  }
-
-  @Override
-  public Expr visitPrecedenceBitwiseXorExpression(HiveQLParser.PrecedenceBitwiseXorExpressionContext ctx) {
-    int expressionCount = ctx.precedenceUnarySuffixExpression().size();
-
-    Expr current = null, left = null, right = null, parentLeft, parentRight;
-    OpType type = null, parentType = null;
-
-    for (int i = 0; i < expressionCount; i += 2) {
-      int operatorIndex = (i == 0) ? 0 : i - 1;
-
-      if (ctx.precedenceBitwiseXorOperator(operatorIndex) != null) {
-        type = getPrecedenceBitwiseXorOperator(ctx.precedenceBitwiseXorOperator(operatorIndex));
-      }
-
-      if (i == 0) {
-        left = visitPrecedenceUnarySuffixExpression(ctx.precedenceUnarySuffixExpression(i));
-        if (ctx.precedenceUnarySuffixExpression(i + 1) != null)
-          right = visitPrecedenceUnarySuffixExpression(ctx.precedenceUnarySuffixExpression(i + 1));
-      } else {
-        parentType = getPrecedenceBitwiseXorOperator((ctx.precedenceBitwiseXorOperator(operatorIndex - 1)));
-        parentLeft = visitPrecedenceUnarySuffixExpression(ctx.precedenceUnarySuffixExpression(i - 2));
-        parentRight = visitPrecedenceUnarySuffixExpression(ctx.precedenceUnarySuffixExpression(i - 1));
-        left = new BinaryOperator(parentType, parentLeft, parentRight);
-        right = visitPrecedenceUnarySuffixExpression(ctx.precedenceUnarySuffixExpression(i));
-      }
-
-      if (right != null) {
-        current = new BinaryOperator(type, left, right);
-      } else {
-        current = left;
-      }
-    }
-
-    return current;
-  }
-
-  public OpType getPrecedenceBitwiseXorOperator(HiveQLParser.PrecedenceBitwiseXorOperatorContext ctx) {
-    OpType type = null;
-    // TODO: It needs to consider how to support.
-
-    return type;
-  }
-
-  @Override
-  public Expr visitPrecedenceUnarySuffixExpression(HiveQLParser.PrecedenceUnarySuffixExpressionContext ctx) {
-    Expr current = visitPrecedenceUnaryPrefixExpression(ctx.precedenceUnaryPrefixExpression());
-
-    if (ctx.nullCondition() != null) {
-      boolean isNot = ctx.nullCondition().KW_NOT() == null ? false : true;
-      IsNullPredicate isNullPredicate = new IsNullPredicate(isNot, (ColumnReferenceExpr) current);
-      current = isNullPredicate;
-    }
-
-    return current;
-  }
-
-  @Override
-  public Expr visitPrecedenceUnaryPrefixExpression(HiveQLParser.PrecedenceUnaryPrefixExpressionContext ctx) {
-    Expr current = visitPrecedenceFieldExpression(ctx.precedenceFieldExpression());
-    return current;
-  }
-
-  @Override
-  public Expr visitNullCondition(HiveQLParser.NullConditionContext ctx) {
-    return new NullLiteral();
-  }
-
-  @Override
-  public Expr visitPrecedenceFieldExpression(HiveQLParser.PrecedenceFieldExpressionContext ctx) {
-    Expr current = visitAtomExpression(ctx.atomExpression());
-
-    if (ctx.DOT().size() > 0) {
-      ColumnReferenceExpr column = new ColumnReferenceExpr(ctx.identifier(0).getText());
-      ColumnReferenceExpr table = (ColumnReferenceExpr) current;
-      column.setQualifier(table.getName());
-      current = column;
-    }
-    return current;
-  }
-
-  @Override
-  public Expr visitAtomExpression(HiveQLParser.AtomExpressionContext ctx) {
-    Expr current = null;
-
-    if (ctx.KW_NULL() != null) {
-      current = new NullLiteral();
-    }
-    if (ctx.constant() != null) {
-      current = visitConstant(ctx.constant());
-    }
-    if (ctx.function() != null) {
-      current = visitFunction(ctx.function());
-    }
-    if (ctx.castExpression() != null) {
-      current = visitCastExpression(ctx.castExpression());
-    }
-    if (ctx.caseExpression() != null) {
-      current = visitCaseExpression(ctx.caseExpression());
-    }
-    if (ctx.whenExpression() != null) {
-      current = visitWhenExpression(ctx.whenExpression());
-    }
-    if (ctx.tableOrColumn() != null) {
-      current = visitTableOrColumn(ctx.tableOrColumn());
-    } else {
-      if (ctx.LPAREN() != null && ctx.RPAREN() != null) {
-        current = visitExpression(ctx.expression());
-      }
-    }
-
-    return current;
-  }
-
-  @Override
-  public Expr visitTableOrColumn(HiveQLParser.TableOrColumnContext ctx) {
-    ColumnReferenceExpr columnReferenceExpr = new ColumnReferenceExpr(ctx.identifier().getText());
-    return columnReferenceExpr;
-  }
-
-  @Override
-  public Expr visitIdentifier(HiveQLParser.IdentifierContext ctx) {
-    Expr current = null;
-
-    if (ctx.nonReserved() != null) {
-      current = new LiteralValue(ctx.nonReserved().getText(), LiteralValue.LiteralType.String);
-    } else {
-      current = new LiteralValue(ctx.Identifier().getText(), LiteralValue.LiteralType.String);
-    }
-
-    return current;
-  }
-
-  @Override
-  public LiteralValue visitConstant(HiveQLParser.ConstantContext ctx) {
-    LiteralValue literalValue = null;
-
-    if (ctx.StringLiteral() != null) {
-      String value = ctx.StringLiteral().getText();
-      String strValue = "";
-      if ((value.startsWith("'") && value.endsWith("'")) || value.startsWith("\"") && value.endsWith("\"")) {
-        strValue = value.substring(1, value.length() - 1);
-      } else {
-        strValue = value;
-      }
-
-      literalValue = new LiteralValue(strValue, LiteralValue.LiteralType.String);
-    } else if (ctx.TinyintLiteral() != null) {
-      literalValue = new LiteralValue(ctx.TinyintLiteral().getSymbol().getText(),
-          LiteralValue.LiteralType.Unsigned_Integer);
-    } else if (ctx.BigintLiteral() != null) {
-      literalValue = new LiteralValue(ctx.BigintLiteral().getSymbol().getText(),
-          LiteralValue.LiteralType.Unsigned_Large_Integer);
-    } else if (ctx.DecimalLiteral() != null) {
-      literalValue = new LiteralValue(ctx.DecimalLiteral().getSymbol().getText(),
-          LiteralValue.LiteralType.Unsigned_Integer);
-    } else if (ctx.Number() != null) {
-      try {
-        float floatValue = NumberUtils.createFloat(ctx.getText());
-        literalValue = new LiteralValue(ctx.Number().getSymbol().getText(), LiteralValue.LiteralType.Unsigned_Float);
-      } catch (NumberFormatException nf) {
-      }
-
-      // TODO: double type
-
-      try {
-        BigInteger bigIntegerVallue = NumberUtils.createBigInteger(ctx.getText());
-        literalValue = new LiteralValue(ctx.Number().getSymbol().getText(),
-            LiteralValue.LiteralType.Unsigned_Large_Integer);
-      } catch (NumberFormatException nf) {
-      }
-
-      try {
-        int intValue = NumberUtils.createInteger(ctx.getText());
-        literalValue = new LiteralValue(ctx.Number().getSymbol().getText(), LiteralValue.LiteralType.Unsigned_Integer);
-      } catch (NumberFormatException nf) {
-      }
-
-    } else if (ctx.SmallintLiteral() != null) {
-      literalValue = new LiteralValue(ctx.SmallintLiteral().getSymbol().getText(),
-          LiteralValue.LiteralType.Unsigned_Integer);
-    } else if (ctx.booleanValue() != null) {
-      // TODO: boolean type
-    }
-
-    return literalValue;
-  }
-
-  @Override
-  public Expr visitFunction(HiveQLParser.FunctionContext ctx) {
-    Expr current = null;
-    String signature = ctx.functionName().getText();
-
-    boolean isDistinct = false;
-    if (ctx.getChild(2) != null) {
-      if (ctx.getChild(2) instanceof TerminalNodeImpl
-          && ctx.getChild(2).getText().equalsIgnoreCase("DISTINCT_GROUP_BY")) {
-        isDistinct = true;
-      }
-    }
-
-    if (signature.equalsIgnoreCase("MIN")
-        || signature.equalsIgnoreCase("MAX")
-        || signature.equalsIgnoreCase("SUM")
-        || signature.equalsIgnoreCase("AVG")
-        || signature.equalsIgnoreCase("COUNT")
-        ) {
-      if (ctx.selectExpression().size() > 1) {
-        throw new RuntimeException("Exactly expected one argument.");
-      }
-
-      if (ctx.selectExpression().size() == 0) {
-        CountRowsFunctionExpr countRowsFunctionExpr = new CountRowsFunctionExpr();
-        current = countRowsFunctionExpr;
-      } else {
-        GeneralSetFunctionExpr setFunctionExpr = new GeneralSetFunctionExpr(signature, isDistinct, new Expr [] {
-            visitSelectExpression(ctx.selectExpression(0))});
-        current = setFunctionExpr;
-      }
-    } else {
-      FunctionExpr functionExpr = new FunctionExpr(signature);
-      Expr[] params = new Expr[ctx.selectExpression().size()];
-      for (int i = 0; i < ctx.selectExpression().size(); i++) {
-        params[i] = visitSelectExpression(ctx.selectExpression(i));
-      }
-      functionExpr.setParams(params);
-      current = functionExpr;
-    }
-
-
-    return current;
-  }
-
-  /**
-   * This method parse CAST expression.
-   * This returns only expression field without casting type
-   * because Tajo doesn't provide CAST expression.
-   *
-   * @param ctx
-   * @return
-   */
-  @Override
-  public Expr visitCastExpression(HiveQLParser.CastExpressionContext ctx) {
-    DataTypeExpr castTarget = getDataTypeExpr(ctx.primitiveType());
-    Expr expr = visitExpression(ctx.expression());
-    Expr current = new CastExpr(expr, castTarget);
-    return current;
-  }
-
-  @Override
-  public Expr visitCaseExpression(HiveQLParser.CaseExpressionContext ctx) {
-    CaseWhenPredicate caseWhen = new CaseWhenPredicate();
-    Expr condition = null, result = null;
-    for (int i = 1; i < ctx.getChildCount(); i++) {
-      if (ctx.getChild(i) instanceof TerminalNodeImpl) {
-        if (((TerminalNodeImpl) ctx.getChild(i)).getSymbol().getType() == HiveQLLexer.KW_WHEN) {
-          condition = null;
-          result = null;
-
-          if (ctx.getChild(i + 1) instanceof HiveQLParser.ExpressionContext) {
-            condition = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 1));
-          }
-
-          if (ctx.getChild(i + 3) instanceof HiveQLParser.ExpressionContext) {
-            result = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 3));
-          }
-
-          if (condition != null && result != null) {
-            caseWhen.addWhen(condition, result);
-          }
-        } else if (((TerminalNodeImpl) ctx.getChild(i)).getSymbol().getType() == HiveQLLexer.KW_ELSE) {
-          result = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 1));
-          caseWhen.setElseResult(result);
-        }
-      }
-    }
-
-    return caseWhen;
-  }
-
-  @Override
-  public Expr visitWhenExpression(HiveQLParser.WhenExpressionContext ctx) {
-    CaseWhenPredicate caseWhen = new CaseWhenPredicate();
-    Expr condition = null, result = null;
-    for (int i = 1; i < ctx.getChildCount(); i++) {
-      if (ctx.getChild(i) instanceof TerminalNodeImpl) {
-        if (((TerminalNodeImpl) ctx.getChild(i)).getSymbol().getType() == HiveQLLexer.KW_WHEN) {
-          condition = null;
-          result = null;
-
-          if (ctx.getChild(i + 1) instanceof HiveQLParser.ExpressionContext) {
-            condition = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 1));
-          }
-
-          if (ctx.getChild(i + 3) instanceof HiveQLParser.ExpressionContext) {
-            result = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 3));
-          }
-
-          if (condition != null && result != null) {
-            caseWhen.addWhen(condition, result);
-          }
-        } else if (((TerminalNodeImpl) ctx.getChild(i)).getSymbol().getType() == HiveQLLexer.KW_ELSE) {
-          result = visitExpression((HiveQLParser.ExpressionContext) ctx.getChild(i + 1));
-          caseWhen.setElseResult(result);
-        }
-      }
-    }
-
-    return caseWhen;
-  }
-
-  @Override
-  public Aggregation visitGroupByClause(HiveQLParser.GroupByClauseContext ctx) {
-    Aggregation clause = new Aggregation();
-
-    if (ctx.groupByExpression().size() > 0) {
-      int elementSize = ctx.groupByExpression().size();
-      ArrayList<Aggregation.GroupElement> groups = new ArrayList<Aggregation.GroupElement>(elementSize + 1);
-      ArrayList<Expr> ordinaryExprs = new ArrayList<Expr>();
-      int groupSize = 1;
-      groups.add(null);
-
-      for (int i = 0; i < ctx.groupByExpression().size(); i++) {
-        Expr expr = visitGroupByExpression(ctx.groupByExpression(i));
-
-        if (expr instanceof FunctionExpr) {
-          FunctionExpr function = (FunctionExpr) expr;
-
-          if (function.getSignature().equalsIgnoreCase("ROLLUP")) {
-            groupSize++;
-            groups.add(new Aggregation.GroupElement(Aggregation.GroupType.Rollup,
-                function.getParams()));
-          } else if (function.getSignature().equalsIgnoreCase("CUBE")) {
-            groupSize++;
-            groups.add(new Aggregation.GroupElement(Aggregation.GroupType.Cube, function.getParams()));
-          } else {
-            Collections.addAll(ordinaryExprs, function);
-          }
-        } else {
-          Collections.addAll(ordinaryExprs, (ColumnReferenceExpr)expr);
-        }
-      }
-
-      if (ordinaryExprs != null) {
-        groups.set(0, new Aggregation.GroupElement(Aggregation.GroupType.OrdinaryGroup, ordinaryExprs.toArray(new Expr[ordinaryExprs.size()])));
-        clause.setGroups(groups.subList(0, groupSize).toArray(new Aggregation.GroupElement[groupSize]));
-      } else if (groupSize > 1) {
-        clause.setGroups(groups.subList(1, groupSize).toArray(new Aggregation.GroupElement[groupSize - 1]));
-      }
-    }
-
-    //TODO: grouping set expression
-    return clause;
-  }
-
-  @Override
-  public Sort visitOrderByClause(HiveQLParser.OrderByClauseContext ctx) {
-    Sort clause = null;
-    Sort.SortSpec[] specs = null;
-
-    if (ctx.columnRefOrder().size() > 0) {
-      specs = new Sort.SortSpec[ctx.columnRefOrder().size()];
-      for (int i = 0; i < ctx.columnRefOrder().size(); i++) {
-        ColumnReferenceExpr column = (ColumnReferenceExpr) visitExpression(ctx.columnRefOrder().get(i).expression());
-        specs[i] = new Sort.SortSpec(column);
-        if (ctx.columnRefOrder(i).KW_DESC() != null) {
-          specs[i].setDescending();
-        }
-      }
-      clause = new Sort(specs);
-    }
-    return clause;
-
-  }
-
-  @Override
-  public Expr visitHavingClause(HiveQLParser.HavingClauseContext ctx) {
-    return visitHavingCondition(ctx.havingCondition());
-  }
-
-  @Override
-  public Expr visitClusterByClause(HiveQLParser.ClusterByClauseContext ctx) {
-    // TODO: It needs to consider how to support.
-    return null;
-  }
-
-  @Override
-  public Expr visitDistributeByClause(HiveQLParser.DistributeByClauseContext ctx) {
-    // TODO: It needs to consider how to support.
-
-    return null;
-  }
-
-  @Override
-  public Sort visitSortByClause(HiveQLParser.SortByClauseContext ctx) {
-    Sort clause = null;
-    Sort.SortSpec[] specs = null;
-
-    if (ctx.columnRefOrder().size() > 0) {
-      specs = new Sort.SortSpec[ctx.columnRefOrder().size()];
-      for (int i = 0; i < ctx.columnRefOrder().size(); i++) {
-        ColumnReferenceExpr column = (ColumnReferenceExpr) visitColumnRefOrder(ctx.columnRefOrder(i));
-        specs[i] = new Sort.SortSpec(column);
-
-        if (ctx.columnRefOrder(i).KW_DESC() != null) {
-          specs[i].setDescending();
-        }
-      }
-      clause = new Sort(specs);
-    }
-
-    return clause;
-  }
-
-  @Override
-  public Limit visitLimitClause(HiveQLParser.LimitClauseContext ctx) {
-    LiteralValue expr = new LiteralValue(ctx.Number().getText(), LiteralValue.LiteralType.Unsigned_Integer);
-    Limit limit = new Limit(expr);
-    return limit;
-  }
-
-  @Override
-  public Expr visitWindow_clause(HiveQLParser.Window_clauseContext ctx) {
-    // TODO: It needs to consider how to support.
-    return null;
-  }
-
-  @Override
-  public Insert visitInsertClause(HiveQLParser.InsertClauseContext ctx) {
-    Insert insert = new Insert();
-    if (ctx.KW_OVERWRITE() != null)
-      insert.setOverwrite();
-
-    if (ctx.tableOrPartition() != null) {
-      HiveQLParser.TableOrPartitionContext partitionContext = ctx.tableOrPartition();
-      if (partitionContext.tableName() != null) {
-        insert.setTableName(ctx.tableOrPartition().tableName().getText());
-      }
-    }
-
-    if (ctx.destination() != null) {
-      HiveQLParser.DestinationContext destination = ctx.destination();
-      if (destination.KW_DIRECTORY() != null) {
-        String location = destination.StringLiteral().getText();
-        location = location.replaceAll("\\'", "");
-        insert.setLocation(location);
-      } else if (destination.KW_TABLE() != null) {
-        if (destination.tableOrPartition() != null) {
-          HiveQLParser.TableOrPartitionContext partitionContext = destination.tableOrPartition();
-          if (partitionContext.tableName() != null) {
-            insert.setTableName(partitionContext.tableName().getText());
-          }
-        }
-
-        if (destination.tableFileFormat() != null) {
-          if (destination.tableFileFormat().KW_RCFILE() != null) {
-            insert.setStorageType("rcfile");
-          } else if (destination.tableFileFormat().KW_TEXTFILE() != null) {
-            insert.setStorageType("csv");
-          }
-
-        }
-      }
-    }
-
-    return insert;
-  }
-
-  @Override
-  public Expr visitCreateTableStatement(HiveQLParser.CreateTableStatementContext ctx) {
-    CreateTable createTable = null;
-    Map<String, String> params = new HashMap<String, String>();
-
-    if (ctx.name != null) {
-      createTable = new CreateTable(ctx.name.getText(), ctx.ifNotExists() != null);
-      if (ctx.KW_EXTERNAL() != null) {
-        createTable.setExternal();
-      }
-
-      if (ctx.tableFileFormat() != null) {
-        if (ctx.tableFileFormat().KW_RCFILE() != null) {
-          createTable.setStorageType("rcfile");
-        } else if (ctx.tableFileFormat().KW_TEXTFILE() != null) {
-          createTable.setStorageType("csv");
-        }
-      }
-
-      if (ctx.tableRowFormat() != null) {
-        if (ctx.tableRowFormat().rowFormatDelimited() != null) {
-          String delimiter = ctx.tableRowFormat().rowFormatDelimited().tableRowFormatFieldIdentifier().getChild(3)
-              .getText().replaceAll("'", "");
-          params.put("csvfile.delimiter", SQLAnalyzer.escapeDelimiter(delimiter));
-        }
-      }
-
-      if (ctx.tableLocation() != null) {
-        String location = ctx.tableLocation().StringLiteral().getText();
-        location = location.replaceAll("'", "");
-        createTable.setLocation(location);
-
-      }
-
-      if (ctx.columnNameTypeList() != null) {
-        List<HiveQLParser.ColumnNameTypeContext> list = ctx.columnNameTypeList().columnNameType();
-
-        ColumnDefinition[] columns = new ColumnDefinition[list.size()];
-
-        for (int i = 0; i < list.size(); i++) {
-          HiveQLParser.ColumnNameTypeContext eachColumn = list.get(i);
-          String type = null;
-          if (eachColumn.colType().type() != null) {
-            if (eachColumn.colType().type().primitiveType() != null) {
-              HiveQLParser.PrimitiveTypeContext primitiveType = eachColumn.colType().type().primitiveType();
-              type = getDataTypeExpr(primitiveType).getTypeName();
-              columns[i] = new ColumnDefinition(eachColumn.colName.Identifier().getText(), type);
-            }
-          }
-        }
-        if (columns != null) {
-          createTable.setTableElements(columns);
-        }
-
-        if (!params.isEmpty()) {
-          createTable.setParams(params);
-        }
-      }
-    }
-
-    return createTable;
-  }
-
-
-  private DataTypeExpr getDataTypeExpr(HiveQLParser.PrimitiveTypeContext primitiveType) {
-    DataTypeExpr typeDefinition = null;
-
-    if (primitiveType.KW_STRING() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.TEXT.name());
-    } else if (primitiveType.KW_TINYINT() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.INT1.name());
-    } else if (primitiveType.KW_SMALLINT() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.INT2.name());
-    } else if (primitiveType.KW_INT() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.INT4.name());
-    } else if (primitiveType.KW_BIGINT() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.INT8.name());
-    } else if (primitiveType.KW_FLOAT() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.FLOAT4.name());
-    } else if (primitiveType.KW_DOUBLE() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.FLOAT8.name());
-    } else if (primitiveType.KW_DECIMAL() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.NUMERIC.name());
-    } else if (primitiveType.KW_BOOLEAN() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.BOOLEAN.name());
-    } else if (primitiveType.KW_DATE() != null) {
-    } else if (primitiveType.KW_DATETIME() != null) {
-      //TODO
-    } else if (primitiveType.KW_TIMESTAMP() != null) {
-      typeDefinition = new DataTypeExpr(TajoDataTypes.Type.TIMESTAMP.name());
-    }
-
-    return typeDefinition;
-  }
-
-
-  @Override
-  public Expr visitDropTableStatement(HiveQLParser.DropTableStatementContext ctx) {
-    DropTable dropTable = new DropTable(ctx.tableName().getText(), false, ctx.ifExists() != null);
-    return dropTable;
-  }
-
-  /**
-   * This class provides and implementation for a case insensitive token checker
-   * for the lexical analysis part of antlr. By converting the token stream into
-   * upper case at the time when lexical rules are checked, this class ensures that the
-   * lexical rules need to just match the token with upper case letters as opposed to
-   * combination of upper case and lower case characteres. This is purely used for matching lexical
-   * rules. The actual token text is stored in the same way as the user input without
-   * actually converting it into an upper case. The token values are generated by the consume()
-   * function of the super class ANTLRStringStream. The LA() function is the lookahead funtion
-   * and is purely used for matching lexical rules. This also means that the grammar will only
-   * accept capitalized tokens in case it is run from other tools like antlrworks which
-   * do not have the ANTLRNoCaseStringStream implementation.
-   */
-  public class ANTLRNoCaseStringStream extends ANTLRInputStream {
-
-    public ANTLRNoCaseStringStream(String input) {
-      super(input);
-    }
-
-    @Override
-    public int LA(int i) {
-
-      int returnChar = super.LA(i);
-      if (returnChar == CharStream.EOF) {
-        return returnChar;
-      } else if (returnChar == 0) {
-        return returnChar;
-      }
-
-      return Character.toUpperCase((char) returnChar);
-    }
-  }
-}
\ No newline at end of file
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/parser/SQLAnalyzer.java b/tajo-core/src/main/java/org/apache/tajo/engine/parser/SQLAnalyzer.java
index eef8a46..59a9b46 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/parser/SQLAnalyzer.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/parser/SQLAnalyzer.java
@@ -24,15 +24,14 @@
 import org.antlr.v4.runtime.CommonTokenStream;
 import org.antlr.v4.runtime.misc.NotNull;
 import org.antlr.v4.runtime.tree.TerminalNode;
-import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.tajo.algebra.*;
 import org.apache.tajo.algebra.Aggregation.GroupType;
 import org.apache.tajo.algebra.LiteralValue.LiteralType;
 import org.apache.tajo.catalog.CatalogUtil;
 import org.apache.tajo.engine.parser.SQLParser.*;
 import org.apache.tajo.storage.StorageConstants;
+import org.apache.tajo.util.StringUtils;
 
-import java.nio.charset.Charset;
 import java.util.*;
 
 import static org.apache.tajo.algebra.Aggregation.GroupElement;
@@ -1092,14 +1091,15 @@
   @Override
   public Expr visitExtract_expression(Extract_expressionContext ctx) {
     Expr extractTarget = new LiteralValue(ctx.extract_field_string.getText(), LiteralType.String);
-    Expr extractSource;
-    if (checkIfExist(ctx.extract_source().column_reference())) {
-      extractSource = visitColumn_reference(ctx.extract_source().column_reference());
-    } else if (checkIfExist(ctx.extract_source().datetime_literal())) {
-      extractSource = visitDatetime_literal(ctx.extract_source().datetime_literal());
-    } else {
-      return null;
-    }
+    Expr extractSource = visitDatetime_value_expression(ctx.extract_source().datetime_value_expression());
+//    if (checkIfExist(ctx.extract_source().column_reference())) {
+//      extractSource = visitColumn_reference(ctx.extract_source().column_reference());
+//    } else if (checkIfExist(ctx.extract_source().datetime_literal())) {
+//      extractSource = visitDatetime_literal(ctx.extract_source().datetime_literal());
+//    } else {
+//      return null;
+//    }
+
 
     String functionName = "date_part";
     Expr[] params = new Expr[]{extractTarget, extractSource};
@@ -1151,8 +1151,12 @@
 
   @Override
   public Expr visitCreate_table_statement(SQLParser.Create_table_statementContext ctx) {
-    String tableName = ctx.table_name().getText();
+    String tableName = ctx.table_name(0).getText();
     CreateTable createTable = new CreateTable(tableName, checkIfExist(ctx.if_not_exists()));
+    if(checkIfExist(ctx.LIKE()))  {
+      createTable.setLikeParentTable(ctx.like_table_name.getText());
+      return createTable;
+    }
 
     if (checkIfExist(ctx.EXTERNAL())) {
       createTable.setExternal();
@@ -1194,6 +1198,18 @@
     return createTable;
   }
 
+  @Override
+  public Expr visitTruncate_table_statement(@NotNull SQLParser.Truncate_table_statementContext ctx) {
+    List<Table_nameContext> tableNameContexts = ctx.table_name();
+    List<String> tableNames = new ArrayList<String>();
+
+    for (Table_nameContext eachTableNameContext: tableNameContexts) {
+      tableNames.add(eachTableNameContext.getChild(0).getText());
+    }
+
+    return new TruncateTable(tableNames);
+  }
+
   private ColumnDefinition[] getDefinitions(SQLParser.Table_elementsContext ctx) {
     int size = ctx.field_element().size();
     ColumnDefinition[] elements = new ColumnDefinition[size];
@@ -1469,7 +1485,7 @@
     Map<String, String> params = new HashMap<String, String>();
     for (Map.Entry<String, String> entry : map.entrySet()) {
       if (entry.getKey().equals(StorageConstants.CSVFILE_DELIMITER)) {
-        params.put(entry.getKey(), escapeDelimiter(entry.getValue()));
+        params.put(entry.getKey(), StringUtils.unicodeEscapedDelimiter(entry.getValue()));
       } else {
         params.put(entry.getKey(), entry.getValue());
       }
@@ -1477,16 +1493,6 @@
     return params;
   }
 
-  public static String escapeDelimiter(String value) {
-    try {
-      String delimiter = StringEscapeUtils.unescapeJava(value);
-      delimiter = new String(new byte[]{Byte.valueOf(delimiter).byteValue()}, Charset.defaultCharset());
-      return StringEscapeUtils.escapeJava(delimiter);
-    } catch (NumberFormatException e) {
-    }
-    return value;
-  }
-
   private static String stripQuote(String str) {
     return str.substring(1, str.length() - 1);
   }
@@ -1557,11 +1563,68 @@
     return new TimestampLiteral(parseDate(datePart), parseTime(timePart));
   }
 
-  @Override public Expr visitInterval_literal(@NotNull SQLParser.Interval_literalContext ctx) {
+  @Override
+  public Expr visitInterval_literal(@NotNull SQLParser.Interval_literalContext ctx) {
     String intervalStr = stripQuote(ctx.interval_string.getText());
     return new IntervalLiteral(intervalStr);
   }
 
+  @Override
+  public Expr visitDatetime_value_expression(@NotNull SQLParser.Datetime_value_expressionContext ctx) {
+    return visitDatetime_term(ctx.datetime_term());
+  }
+
+  @Override
+  public Expr visitDatetime_term(@NotNull SQLParser.Datetime_termContext ctx) {
+    return visitDatetime_factor(ctx.datetime_factor());
+  }
+
+  @Override
+  public Expr visitDatetime_factor(@NotNull SQLParser.Datetime_factorContext ctx) {
+    return visitDatetime_primary(ctx.datetime_primary());
+  }
+
+  @Override
+  public Expr visitDatetime_primary(@NotNull SQLParser.Datetime_primaryContext ctx) {
+    if (checkIfExist(ctx.value_expression_primary())) {
+      return visitValue_expression_primary(ctx.value_expression_primary());
+    } else {
+      return visitDatetime_value_function(ctx.datetime_value_function());
+    }
+  }
+
+  @Override
+  public Expr visitDatetime_value_function(@NotNull SQLParser.Datetime_value_functionContext ctx) {
+    if (checkIfExist(ctx.current_date_value_function())) {
+      return visitCurrent_date_value_function(ctx.current_date_value_function());
+    } else if (checkIfExist(ctx.current_time_value_function())) {
+      return visitCurrent_time_value_function(ctx.current_time_value_function());
+    } else {
+      return visitCurrent_timestamp_value_function(ctx.current_timestamp_value_function());
+    }
+  }
+
+  @Override
+  public Expr visitCurrent_date_value_function(@NotNull SQLParser.Current_date_value_functionContext ctx) {
+    String functionName = "current_date";
+    Expr[] params = new Expr[]{};
+    return new FunctionExpr(functionName, params);
+  }
+
+  @Override
+  public Expr visitCurrent_time_value_function(@NotNull SQLParser.Current_time_value_functionContext ctx) {
+    String functionName = "current_time";
+    Expr[] params = new Expr[]{};
+    return new FunctionExpr(functionName, params);
+  }
+
+  @Override
+  public Expr visitCurrent_timestamp_value_function(@NotNull SQLParser.Current_timestamp_value_functionContext ctx) {
+    String functionName = "now";
+    Expr[] params = new Expr[]{};
+    return new FunctionExpr(functionName, params);
+  }
+
   private DateValue parseDate(String datePart) {
     // e.g., 1980-04-01
     String[] parts = datePart.split("-");
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/AlgebraVisitor.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/AlgebraVisitor.java
index 0ef8a26..5811d36 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/AlgebraVisitor.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/AlgebraVisitor.java
@@ -48,6 +48,7 @@
   RESULT visitDropTable(CONTEXT ctx, Stack<Expr> stack, DropTable expr) throws PlanningException;
   RESULT visitAlterTablespace(CONTEXT ctx, Stack<Expr> stack, AlterTablespace expr) throws PlanningException;
   RESULT visitAlterTable(CONTEXT ctx, Stack<Expr> stack, AlterTable expr) throws PlanningException;
+  RESULT visitTruncateTable(CONTEXT ctx, Stack<Expr> stack, TruncateTable expr) throws PlanningException;
 
     // Insert or Update
   RESULT visitInsert(CONTEXT ctx, Stack<Expr> stack, Insert expr) throws PlanningException;
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/BaseAlgebraVisitor.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/BaseAlgebraVisitor.java
index 841ea22..907042a 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/BaseAlgebraVisitor.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/BaseAlgebraVisitor.java
@@ -118,6 +118,9 @@
     case AlterTable:
       current = visitAlterTable(ctx, stack, (AlterTable) expr);
       break;
+    case TruncateTable:
+      current = visitTruncateTable(ctx, stack, (TruncateTable)expr);
+      break;
 
     case Insert:
       current = visitInsert(ctx, stack, (Insert) expr);
@@ -463,6 +466,9 @@
     return null;
   }
 
+  public RESULT visitTruncateTable(CONTEXT ctx, Stack<Expr> stack, TruncateTable expr) throws PlanningException {
+    return null;
+  }
   ///////////////////////////////////////////////////////////////////////////////////////////////////////////
   // Insert or Update Section
   ///////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/BasicLogicalPlanVisitor.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/BasicLogicalPlanVisitor.java
index 3bffefb..2112615 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/BasicLogicalPlanVisitor.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/BasicLogicalPlanVisitor.java
@@ -125,6 +125,9 @@
       case ALTER_TABLE:
         current = visitAlterTable(context, plan, block, (AlterTableNode) node, stack);
         break;
+      case TRUNCATE_TABLE:
+        current = visitTruncateTable(context, plan, block, (TruncateTableNode) node, stack);
+        break;
       default:
         throw new PlanningException("Unknown logical node type: " + node.getType());
     }
@@ -252,7 +255,7 @@
                                    TableSubQueryNode node, Stack<LogicalNode> stack) throws PlanningException {
     stack.push(node);
     LogicalPlan.QueryBlock childBlock = plan.getBlock(node.getSubQuery());
-    RESULT result = visit(context, plan, childBlock, childBlock.getRoot(), new Stack<LogicalNode>());
+    RESULT result = visit(context, plan, childBlock, childBlock.getRoot(), stack);
     stack.pop();
     return result;
   }
@@ -328,4 +331,10 @@
                                  Stack<LogicalNode> stack) {
         return null;
     }
+
+  @Override
+  public RESULT visitTruncateTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                   TruncateTableNode node, Stack<LogicalNode> stack) throws PlanningException {
+    return null;
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprAnnotator.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprAnnotator.java
index e74fd70..e143823 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprAnnotator.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprAnnotator.java
@@ -34,7 +34,8 @@
 import org.apache.tajo.exception.InternalException;
 import org.apache.tajo.util.Pair;
 import org.apache.tajo.util.TUtil;
-import org.joda.time.DateTime;
+import org.apache.tajo.util.datetime.DateTimeUtil;
+import org.apache.tajo.util.datetime.TimeMeta;
 
 import java.util.Map;
 import java.util.Stack;
@@ -67,7 +68,7 @@
   public EvalNode createEvalNode(LogicalPlan plan, LogicalPlan.QueryBlock block, Expr expr)
       throws PlanningException {
     Context context = new Context(plan, block);
-    return visit(context, new Stack<Expr>(), expr);
+    return AlgebraicUtil.eliminateConstantExprs(visit(context, new Stack<Expr>(), expr));
   }
 
   public static void assertEval(boolean condition, String message) throws PlanningException {
@@ -124,9 +125,17 @@
   static DataType getWidestType(DataType...types) throws PlanningException {
     DataType widest = types[0];
     for (int i = 1; i < types.length; i++) {
-      Type candidate = TUtil.getFromNestedMap(TYPE_CONVERSION_MAP, widest.getType(), types[i].getType());
-      assertEval(candidate != null, "No matched operation for those types: " + TUtil.arrayToString(types));
-      widest = CatalogUtil.newSimpleDataType(candidate);
+
+      if (widest.getType() == Type.NULL_TYPE) { // if null, skip this type
+        widest = types[i];
+        continue;
+      }
+
+      if (types[i].getType() != Type.NULL_TYPE) {
+        Type candidate = TUtil.getFromNestedMap(TYPE_CONVERSION_MAP, widest.getType(), types[i].getType());
+        assertEval(candidate != null, "No matched operation for those types: " + TUtil.arrayToString(types));
+        widest = CatalogUtil.newSimpleDataType(candidate);
+      }
     }
 
     return widest;
@@ -143,7 +152,7 @@
   private static EvalNode convertType(EvalNode evalNode, DataType toType) {
 
     // if original and toType is the same, we don't need type conversion.
-    if (evalNode.getValueType() == toType) {
+    if (evalNode.getValueType().equals(toType)) {
       return evalNode;
     }
     // the conversion to null is not allowed.
@@ -611,7 +620,7 @@
     FunctionDesc countRows = catalog.getFunction("count", CatalogProtos.FunctionType.AGGREGATION,
         new DataType[] {});
     if (countRows == null) {
-      throw new NoSuchFunctionException(countRows.getSignature(), new DataType[]{});
+      throw new NoSuchFunctionException(expr.getSignature(), new DataType[]{});
     }
 
     try {
@@ -704,8 +713,16 @@
   @Override
   public EvalNode visitDateLiteral(Context context, Stack<Expr> stack, DateLiteral expr) throws PlanningException {
     DateValue dateValue = expr.getDate();
-    int [] dates = dateToIntArray(dateValue.getYears(), dateValue.getMonths(), dateValue.getDays());
-    return new ConstEval(new DateDatum(dates[0], dates[1], dates[2]));
+    int[] dates = dateToIntArray(dateValue.getYears(), dateValue.getMonths(), dateValue.getDays());
+
+    TimeMeta tm = new TimeMeta();
+    tm.years = dates[0];
+    tm.monthOfYear = dates[1];
+    tm.dayOfMonth = dates[2];
+
+    DateTimeUtil.j2date(DateTimeUtil.date2j(dates[0], dates[1], dates[2]), tm);
+
+    return new ConstEval(new DateDatum(DateTimeUtil.date2j(tm.years, tm.monthOfYear, tm.dayOfMonth)));
   }
 
   @Override
@@ -721,14 +738,20 @@
         timeValue.getMinutes(),
         timeValue.getSeconds(),
         timeValue.getSecondsFraction());
-    DateTime dateTime;
+
+    long timestamp;
     if (timeValue.hasSecondsFraction()) {
-      dateTime = new DateTime(dates[0], dates[1], dates[2], times[0], times[1], times[2], times[3]);
+      timestamp = DateTimeUtil.toJulianTimestamp(dates[0], dates[1], dates[2], times[0], times[1], times[2],
+          times[3] * 1000);
     } else {
-      dateTime = new DateTime(dates[0], dates[1], dates[2], times[0], times[1], times[2]);
+      timestamp = DateTimeUtil.toJulianTimestamp(dates[0], dates[1], dates[2], times[0], times[1], times[2], 0);
     }
 
-    return new ConstEval(new TimestampDatum(dateTime));
+    TimeMeta tm = new TimeMeta();
+    DateTimeUtil.toJulianTimeMeta(timestamp, tm);
+    DateTimeUtil.toUTCTimezone(tm);
+
+    return new ConstEval(new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm)));
   }
 
   @Override
@@ -744,13 +767,17 @@
         timeValue.getSeconds(),
         timeValue.getSecondsFraction());
 
-    TimeDatum datum;
+    long time;
     if (timeValue.hasSecondsFraction()) {
-      datum = new TimeDatum(times[0], times[1], times[2], times[3]);
+      time = DateTimeUtil.toTime(times[0], times[1], times[2], times[3] * 1000);
     } else {
-      datum = new TimeDatum(times[0], times[1], times[2]);
+      time = DateTimeUtil.toTime(times[0], times[1], times[2], 0);
     }
-    return new ConstEval(datum);
+    TimeDatum timeDatum = new TimeDatum(time);
+    TimeMeta tm = timeDatum.toTimeMeta();
+    DateTimeUtil.toUTCTimezone(tm);
+
+    return new ConstEval(new TimeDatum(DateTimeUtil.toTime(tm)));
   }
 
   public static int [] dateToIntArray(String years, String months, String days)
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprNormalizer.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprNormalizer.java
index b87665a..75b2b95 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprNormalizer.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/ExprNormalizer.java
@@ -20,6 +20,7 @@
 
 import org.apache.tajo.algebra.*;
 import org.apache.tajo.catalog.CatalogUtil;
+import org.apache.tajo.engine.exception.NoSuchColumnException;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -80,15 +81,21 @@
   public static class ExprNormalizedResult {
     private final LogicalPlan plan;
     private final LogicalPlan.QueryBlock block;
+    private final boolean tryBinaryCommonTermsElimination;
 
     Expr baseExpr; // outmost expressions, which can includes one or more references of the results of aggregation
                    // function.
     List<NamedExpr> aggExprs = new ArrayList<NamedExpr>(); // aggregation functions
     List<NamedExpr> scalarExprs = new ArrayList<NamedExpr>(); // scalar expressions which can be referred
 
-    private ExprNormalizedResult(LogicalPlanner.PlanContext context) {
+    private ExprNormalizedResult(LogicalPlanner.PlanContext context, boolean tryBinaryCommonTermsElimination) {
       this.plan = context.plan;
       this.block = context.queryBlock;
+      this.tryBinaryCommonTermsElimination = tryBinaryCommonTermsElimination;
+    }
+
+    public boolean isBinaryCommonTermsElimination() {
+      return tryBinaryCommonTermsElimination;
     }
 
     @Override
@@ -98,7 +105,11 @@
   }
 
   public ExprNormalizedResult normalize(LogicalPlanner.PlanContext context, Expr expr) throws PlanningException {
-    ExprNormalizedResult exprNormalizedResult = new ExprNormalizedResult(context);
+    return normalize(context, expr, false);
+  }
+  public ExprNormalizedResult normalize(LogicalPlanner.PlanContext context, Expr expr, boolean subexprElimination)
+      throws PlanningException {
+    ExprNormalizedResult exprNormalizedResult = new ExprNormalizedResult(context, subexprElimination);
     Stack<Expr> stack = new Stack<Expr>();
     stack.push(expr);
     visit(exprNormalizedResult, new Stack<Expr>(), expr);
@@ -152,9 +163,27 @@
     return expr;
   }
 
+  private boolean isBinaryCommonTermsElimination(ExprNormalizedResult ctx, Expr expr) {
+    return ctx.isBinaryCommonTermsElimination() && expr.getType() != OpType.Column
+        && ctx.block.namedExprsMgr.contains(expr);
+  }
+
   @Override
   public Expr visitBinaryOperator(ExprNormalizedResult ctx, Stack<Expr> stack, BinaryOperator expr) throws PlanningException {
-    super.visitBinaryOperator(ctx, stack, expr);
+    stack.push(expr);
+
+    visit(ctx, new Stack<Expr>(), expr.getLeft());
+    if (isBinaryCommonTermsElimination(ctx, expr.getLeft())) {
+      String refName = ctx.block.namedExprsMgr.addExpr(expr.getLeft());
+      expr.setLeft(new ColumnReferenceExpr(refName));
+    }
+
+    visit(ctx, new Stack<Expr>(), expr.getRight());
+    if (isBinaryCommonTermsElimination(ctx, expr.getRight())) {
+      String refName = ctx.block.namedExprsMgr.addExpr(expr.getRight());
+      expr.setRight(new ColumnReferenceExpr(refName));
+    }
+    stack.pop();
 
     ////////////////////////
     // For Left Term
@@ -249,9 +278,12 @@
       throws PlanningException {
     // if a column reference is not qualified, it finds and sets the qualified column name.
     if (!(expr.hasQualifier() && CatalogUtil.isFQTableName(expr.getQualifier()))) {
-      if (!ctx.block.namedExprsMgr.contains(expr.getCanonicalName())) {
-        String normalized = ctx.plan.getNormalizedColumnName(ctx.block, expr);
-        expr.setName(normalized);
+      if (!ctx.block.namedExprsMgr.contains(expr.getCanonicalName()) && expr.getType() == OpType.Column) {
+        try {
+          String normalized = ctx.plan.getNormalizedColumnName(ctx.block, expr);
+          expr.setName(normalized);
+        } catch (NoSuchColumnException nsc) {
+        }
       }
     }
     return expr;
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalOptimizer.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalOptimizer.java
index 974dc60..3ffeeb0 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalOptimizer.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalOptimizer.java
@@ -20,9 +20,12 @@
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.tajo.algebra.JoinType;
 import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.conf.TajoConf.ConfVars;
 import org.apache.tajo.engine.eval.AlgebraicUtil;
 import org.apache.tajo.engine.eval.EvalNode;
 import org.apache.tajo.engine.planner.graph.DirectedGraphCursor;
@@ -31,10 +34,8 @@
 import org.apache.tajo.engine.planner.logical.join.GreedyHeuristicJoinOrderAlgorithm;
 import org.apache.tajo.engine.planner.logical.join.JoinGraph;
 import org.apache.tajo.engine.planner.logical.join.JoinOrderAlgorithm;
-import org.apache.tajo.engine.planner.rewrite.BasicQueryRewriteEngine;
-import org.apache.tajo.engine.planner.rewrite.FilterPushDownRule;
-import org.apache.tajo.engine.planner.rewrite.PartitionedTableRewriter;
-import org.apache.tajo.engine.planner.rewrite.ProjectionPushDownRule;
+import org.apache.tajo.engine.planner.rewrite.*;
+import org.apache.tajo.master.session.Session;
 
 import java.util.LinkedHashSet;
 import java.util.Set;
@@ -48,29 +49,55 @@
  */
 @InterfaceStability.Evolving
 public class LogicalOptimizer {
+  private static final Log LOG = LogFactory.getLog(LogicalOptimizer.class.getName());
+
   private BasicQueryRewriteEngine rulesBeforeJoinOpt;
   private BasicQueryRewriteEngine rulesAfterToJoinOpt;
   private JoinOrderAlgorithm joinOrderAlgorithm = new GreedyHeuristicJoinOrderAlgorithm();
 
   public LogicalOptimizer(TajoConf systemConf) {
     rulesBeforeJoinOpt = new BasicQueryRewriteEngine();
-    rulesBeforeJoinOpt.addRewriteRule(new FilterPushDownRule());
+    if (systemConf.getBoolVar(ConfVars.PLANNER_USE_FILTER_PUSHDOWN)) {
+      rulesBeforeJoinOpt.addRewriteRule(new FilterPushDownRule());
+    }
 
     rulesAfterToJoinOpt = new BasicQueryRewriteEngine();
     rulesAfterToJoinOpt.addRewriteRule(new ProjectionPushDownRule());
     rulesAfterToJoinOpt.addRewriteRule(new PartitionedTableRewriter(systemConf));
+
+    // Currently, it is only used for some test cases to inject exception manually.
+    String userDefinedRewriterClass = systemConf.get("tajo.plan.rewriter.classes");
+    if (userDefinedRewriterClass != null && !userDefinedRewriterClass.isEmpty()) {
+      for (String eachRewriterClass : userDefinedRewriterClass.split(",")) {
+        try {
+          RewriteRule rule = (RewriteRule) Class.forName(eachRewriterClass).newInstance();
+          rulesAfterToJoinOpt.addRewriteRule(rule);
+        } catch (Exception e) {
+          LOG.error("Can't initiate a Rewriter object: " + eachRewriterClass, e);
+          continue;
+        }
+      }
+    }
   }
 
   public LogicalNode optimize(LogicalPlan plan) throws PlanningException {
+    return optimize(null, plan);
+  }
+
+  public LogicalNode optimize(Session session, LogicalPlan plan) throws PlanningException {
     rulesBeforeJoinOpt.rewrite(plan);
 
     DirectedGraphCursor<String, BlockEdge> blockCursor =
         new DirectedGraphCursor<String, BlockEdge>(plan.getQueryBlockGraph(), plan.getRootBlock().getName());
 
-    while(blockCursor.hasNext()) {
-      optimizeJoinOrder(plan, blockCursor.nextBlock());
+    if (session == null || "true".equals(session.getVariable(ConfVars.OPTIMIZER_JOIN_ENABLE.varname, "true"))) {
+      // default is true
+      while (blockCursor.hasNext()) {
+        optimizeJoinOrder(plan, blockCursor.nextBlock());
+      }
+    } else {
+      LOG.info("Skip Join Optimized.");
     }
-
     rulesAfterToJoinOpt.rewrite(plan);
     return plan.getRootBlock().getRoot();
   }
@@ -88,6 +115,8 @@
       // finding join order and restore remain filter order
       FoundJoinOrder order = joinOrderAlgorithm.findBestOrder(plan, block,
           joinGraphContext.joinGraph, joinGraphContext.relationsForProduct);
+
+      // replace join node with FoundJoinOrder.
       JoinNode newJoinNode = order.getOrderedJoin();
       JoinNode old = PlannerUtil.findTopNode(block.getRoot(), NodeType.JOIN);
 
@@ -100,8 +129,9 @@
       } else {
         newJoinNode.setTargets(targets.toArray(new Target[targets.size()]));
       }
-
       PlannerUtil.replaceNode(plan, block.getRoot(), old, newJoinNode);
+      // End of replacement logic
+
       String optimizedOrder = JoinOrderStringBuilder.buildJoinOrderString(plan, block);
       block.addPlanHistory("Non-optimized join order: " + originalOrder + " (cost: " + nonOptimizedJoinCost + ")");
       block.addPlanHistory("Optimized join order    : " + optimizedOrder + " (cost: " + order.getCost() + ")");
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlan.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlan.java
index 6be0c6a..92df760 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlan.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlan.java
@@ -26,6 +26,7 @@
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.engine.eval.EvalNode;
+import org.apache.tajo.engine.exception.AmbiguousFieldException;
 import org.apache.tajo.engine.exception.NoSuchColumnException;
 import org.apache.tajo.engine.exception.VerifyException;
 import org.apache.tajo.engine.planner.graph.DirectedGraphCursor;
@@ -217,6 +218,10 @@
     }
   }
 
+  public void disconnectBlocks(QueryBlock srcBlock, QueryBlock targetBlock) {
+    queryBlockGraph.removeEdge(srcBlock.getName(), targetBlock.getName());
+  }
+
   public void connectBlocks(QueryBlock srcBlock, QueryBlock targetBlock, BlockType type) {
     queryBlockGraph.addEdge(srcBlock.getName(), targetBlock.getName(), new BlockEdge(srcBlock, targetBlock, type));
   }
@@ -380,6 +385,21 @@
 
   private Column resolveColumnWithoutQualifier(QueryBlock block,
                                                ColumnReferenceExpr columnRef)throws PlanningException {
+
+    List<Column> candidates = TUtil.newList();
+
+    // It tries to find a full qualified column name from all relations in the current block.
+    for (RelationNode rel : block.getRelations()) {
+      Column found = rel.getTableSchema().getColumn(columnRef.getName());
+      if (found != null) {
+        candidates.add(found);
+      }
+    }
+
+    if (!candidates.isEmpty()) {
+      return ensureUniqueColumn(candidates);
+    }
+
     // Trying to find the column within the current block
     if (block.currentNode != null && block.currentNode.getInSchema() != null) {
       Column found = block.currentNode.getInSchema().getColumn(columnRef.getCanonicalName());
@@ -395,7 +415,7 @@
       }
     }
 
-    List<Column> candidates = TUtil.newList();
+
     // Trying to find columns from aliased references.
     if (block.namedExprsMgr.isAliased(columnRef.getCanonicalName())) {
       String originalName = block.namedExprsMgr.getAlias(columnRef.getCanonicalName());
@@ -408,16 +428,10 @@
       return ensureUniqueColumn(candidates);
     }
 
-    // Trying to find columns from other relations in the current block
-    for (RelationNode rel : block.getRelations()) {
-      Column found = rel.getTableSchema().getColumn(columnRef.getName());
-      if (found != null) {
-        candidates.add(found);
-      }
-    }
-
-    if (!candidates.isEmpty()) {
-      return ensureUniqueColumn(candidates);
+    // This is an exception case. It means that there are some bugs in other parts.
+    LogicalNode blockRootNode = block.getRoot();
+    if (blockRootNode != null && blockRootNode.getOutSchema().getColumn(columnRef.getCanonicalName()) != null) {
+      throw new NoSuchColumnException("ERROR: no such a column name "+ columnRef.getCanonicalName());
     }
 
     // Trying to find columns from other relations in other blocks
@@ -446,7 +460,7 @@
       return ensureUniqueColumn(candidates);
     }
 
-    throw new VerifyException("ERROR: no such a column name "+ columnRef.getCanonicalName());
+    throw new NoSuchColumnException("ERROR: no such a column name "+ columnRef.getCanonicalName());
   }
 
   private static Column ensureUniqueColumn(List<Column> candidates)
@@ -464,7 +478,7 @@
         }
         sb.append(column);
       }
-      throw new VerifyException("Ambiguous Column Name: " + sb.toString());
+      throw new AmbiguousFieldException("Ambiguous Column Name: " + sb.toString());
     } else {
       return null;
     }
@@ -479,14 +493,20 @@
     sb.append(queryBlockGraph.toStringGraph(getRootBlock().getName()));
     sb.append("-----------------------------\n");
     sb.append("Optimization Log:\n");
+    if (!planingHistory.isEmpty()) {
+      sb.append("[LogicalPlan]\n");
+      for (String eachHistory: planingHistory) {
+        sb.append("\t> ").append(eachHistory).append("\n");
+      }
+    }
     DirectedGraphCursor<String, BlockEdge> cursor =
         new DirectedGraphCursor<String, BlockEdge>(queryBlockGraph, getRootBlock().getName());
     while(cursor.hasNext()) {
       QueryBlock block = getBlock(cursor.nextBlock());
       if (block.getPlanHistory().size() > 0) {
-        sb.append("\n[").append(block.getName()).append("]\n");
+        sb.append("[").append(block.getName()).append("]\n");
         for (String log : block.getPlanHistory()) {
-          sb.append("> ").append(log).append("\n");
+          sb.append("\t> ").append(log).append("\n");
         }
       }
     }
@@ -575,6 +595,7 @@
     private final Map<String, RelationNode> canonicalNameToRelationMap = TUtil.newHashMap();
     private final Map<String, List<String>> aliasMap = TUtil.newHashMap();
     private final Map<OpType, List<Expr>> operatorToExprMap = TUtil.newHashMap();
+    private final List<RelationNode> relationList = TUtil.newList();
     /**
      * It's a map between nodetype and node. node types can be duplicated. So, latest node type is only kept.
      */
@@ -658,10 +679,11 @@
         TUtil.putToNestedList(aliasMap, relation.getTableName(), relation.getCanonicalName());
       }
       canonicalNameToRelationMap.put(relation.getCanonicalName(), relation);
+      relationList.add(relation);
     }
 
     public Collection<RelationNode> getRelations() {
-      return this.canonicalNameToRelationMap.values();
+      return Collections.unmodifiableList(relationList);
     }
 
     public boolean hasTableExpression() {
@@ -737,6 +759,12 @@
       queryBlockByPID.put(node.getPID(), this);
     }
 
+    public void unregisterNode(LogicalNode node) {
+      nodeMap.remove(node.getPID());
+      nodeTypeToNodeMap.remove(node.getType());
+      queryBlockByPID.remove(node.getPID());
+    }
+
     @SuppressWarnings("unchecked")
     public <T extends LogicalNode> T getNode(NodeType nodeType) {
       return (T) nodeTypeToNodeMap.get(nodeType);
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanPreprocessor.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanPreprocessor.java
index 56863f7..2de96c4 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanPreprocessor.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanPreprocessor.java
@@ -289,11 +289,13 @@
     LogicalPlan.QueryBlock leftBlock = ctx.plan.newQueryBlock();
     PreprocessContext leftContext = new PreprocessContext(ctx, leftBlock);
     LogicalNode leftChild = visit(leftContext, new Stack<Expr>(), expr.getLeft());
+    leftBlock.setRoot(leftChild);
     ctx.currentBlock.registerExprWithNode(expr.getLeft(), leftChild);
 
     LogicalPlan.QueryBlock rightBlock = ctx.plan.newQueryBlock();
     PreprocessContext rightContext = new PreprocessContext(ctx, rightBlock);
     LogicalNode rightChild = visit(rightContext, new Stack<Expr>(), expr.getRight());
+    rightBlock.setRoot(rightChild);
     ctx.currentBlock.registerExprWithNode(expr.getRight(), rightChild);
 
     UnionNode unionNode = new UnionNode(ctx.plan.newPID());
@@ -363,8 +365,10 @@
     PreprocessContext newContext;
     // Note: TableSubQuery always has a table name.
     // SELECT .... FROM (SELECT ...) TB_NAME <-
-    newContext = new PreprocessContext(ctx, ctx.plan.newQueryBlock());
+    QueryBlock queryBlock = ctx.plan.newQueryBlock();
+    newContext = new PreprocessContext(ctx, queryBlock);
     LogicalNode child = super.visitTableSubQuery(newContext, stack, expr);
+    queryBlock.setRoot(child);
 
     // a table subquery should be dealt as a relation.
     TableSubQueryNode node = ctx.plan.createNode(TableSubQueryNode.class);
@@ -427,6 +431,13 @@
     return alterTableNode;
   }
 
+  @Override
+  public LogicalNode visitTruncateTable(PreprocessContext ctx, Stack<Expr> stack, TruncateTable expr)
+      throws PlanningException {
+    TruncateTableNode truncateTableNode = ctx.plan.createNode(TruncateTableNode.class);
+    return truncateTableNode;
+  }
+
   ///////////////////////////////////////////////////////////////////////////////////////////////////////////
   // Insert or Update Section
   ///////////////////////////////////////////////////////////////////////////////////////////////////////////
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanVisitor.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanVisitor.java
index 6850046..963e9f1 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanVisitor.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanVisitor.java
@@ -91,4 +91,7 @@
 
   RESULT visitAlterTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, AlterTableNode node,
                          Stack<LogicalNode> stack) throws PlanningException;
+
+  RESULT visitTruncateTable(CONTEXT context, LogicalPlan plan, LogicalPlan.QueryBlock block, TruncateTableNode node,
+                         Stack<LogicalNode> stack) throws PlanningException;
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanner.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanner.java
index d5d2d47..be7bce6 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanner.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/LogicalPlanner.java
@@ -584,7 +584,7 @@
     stack.pop();
     ////////////////////////////////////////////////////////
 
-    HavingNode having = new HavingNode(context.plan.newPID());
+    HavingNode having = context.queryBlock.getNodeFromExpr(expr);
     having.setChild(child);
     having.setInSchema(child.getOutSchema());
     having.setOutSchema(child.getOutSchema());
@@ -767,7 +767,7 @@
     QueryBlock block = context.queryBlock;
 
     if (join.hasQual()) {
-      ExprNormalizedResult normalizedResult = normalizer.normalize(context, join.getQual());
+      ExprNormalizedResult normalizedResult = normalizer.normalize(context, join.getQual(), true);
       block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
       if (normalizedResult.aggExprs.size() > 0 || normalizedResult.scalarExprs.size() > 0) {
         throw new VerifyException("Filter condition cannot include aggregation function");
@@ -835,8 +835,8 @@
           block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode);
           newlyEvaluatedExprs.add(namedExpr.getAlias());
         }
-      } catch (VerifyException ve) {} catch (PlanningException e) {
-        e.printStackTrace();
+      } catch (VerifyException ve) {
+      } catch (PlanningException e) {
       }
     }
     return newlyEvaluatedExprs;
@@ -1207,18 +1207,15 @@
 
       // See PreLogicalPlanVerifier.visitInsert.
       // It guarantees that the equivalence between the numbers of target and projected columns.
-      ScanNode scanNode = context.plan.createNode(ScanNode.class);
-      scanNode.init(desc);
-      context.queryBlock.addRelation(scanNode);
       String [] targets = expr.getTargetColumns();
       Schema targetColumns = new Schema();
       for (int i = 0; i < targets.length; i++) {
-        Column targetColumn = context.plan.resolveColumn(context.queryBlock, new ColumnReferenceExpr(targets[i]));
+        Column targetColumn = desc.getLogicalSchema().getColumn(targets[i]);
         targetColumns.addColumn(targetColumn);
       }
       insertNode.setTargetSchema(targetColumns);
       insertNode.setOutSchema(targetColumns);
-      buildProjectedInsert(insertNode);
+      buildProjectedInsert(context, insertNode);
 
     } else { // when a user do not specified target columns
 
@@ -1231,7 +1228,7 @@
         targetColumns.addColumn(tableSchema.getColumn(i));
       }
       insertNode.setTargetSchema(targetColumns);
-      buildProjectedInsert(insertNode);
+      buildProjectedInsert(context, insertNode);
     }
 
     if (desc.hasPartition()) {
@@ -1240,11 +1237,16 @@
     return insertNode;
   }
 
-  private void buildProjectedInsert(InsertNode insertNode) {
+  private void buildProjectedInsert(PlanContext context, InsertNode insertNode) {
     Schema tableSchema = insertNode.getTableSchema();
     Schema targetColumns = insertNode.getTargetSchema();
 
     LogicalNode child = insertNode.getChild();
+
+    if (child.getType() == NodeType.UNION) {
+      child = makeProjectionForInsertUnion(context, insertNode);
+    }
+
     if (child instanceof Projectable) {
       Projectable projectionNode = (Projectable) insertNode.getChild();
 
@@ -1270,6 +1272,45 @@
     }
   }
 
+  private ProjectionNode makeProjectionForInsertUnion(PlanContext context, InsertNode insertNode) {
+    LogicalNode child = insertNode.getChild();
+    // add (projection - subquery) to RootBlock and create new QueryBlock for UnionNode
+    TableSubQueryNode subQueryNode = context.plan.createNode(TableSubQueryNode.class);
+    subQueryNode.init(context.queryBlock.getName(), child);
+    subQueryNode.setTargets(PlannerUtil.schemaToTargets(subQueryNode.getOutSchema()));
+
+    ProjectionNode projectionNode = context.plan.createNode(ProjectionNode.class);
+    projectionNode.setChild(subQueryNode);
+    projectionNode.setInSchema(subQueryNode.getInSchema());
+    projectionNode.setTargets(subQueryNode.getTargets());
+
+    context.queryBlock.registerNode(projectionNode);
+    context.queryBlock.registerNode(subQueryNode);
+
+    // add child QueryBlock to the UnionNode's QueryBlock
+    UnionNode unionNode = (UnionNode)child;
+    context.queryBlock.unregisterNode(unionNode);
+
+    QueryBlock unionBlock = context.plan.newQueryBlock();
+    unionBlock.registerNode(unionNode);
+    unionBlock.setRoot(unionNode);
+
+    QueryBlock leftBlock = context.plan.getBlock(unionNode.getLeftChild());
+    QueryBlock rightBlock = context.plan.getBlock(unionNode.getRightChild());
+
+    context.plan.disconnectBlocks(leftBlock, context.queryBlock);
+    context.plan.disconnectBlocks(rightBlock, context.queryBlock);
+
+    context.plan.connectBlocks(unionBlock, context.queryBlock, BlockType.TableSubQuery);
+    context.plan.connectBlocks(leftBlock, unionBlock, BlockType.TableSubQuery);
+    context.plan.connectBlocks(rightBlock, unionBlock, BlockType.TableSubQuery);
+
+    // set InsertNode's child with ProjectionNode which is created.
+    insertNode.setChild(projectionNode);
+
+    return projectionNode;
+  }
+
   /**
    * Build a InsertNode with a location.
    *
@@ -1278,7 +1319,13 @@
   private InsertNode buildInsertIntoLocationPlan(PlanContext context, InsertNode insertNode, Insert expr) {
     // INSERT (OVERWRITE)? INTO LOCATION path (USING file_type (param_clause)?)? query_expression
 
-    Schema childSchema = insertNode.getChild().getOutSchema();
+    LogicalNode child = insertNode.getChild();
+
+    if (child.getType() == NodeType.UNION) {
+      child = makeProjectionForInsertUnion(context, insertNode);
+    }
+
+    Schema childSchema = child.getOutSchema();
     insertNode.setInSchema(childSchema);
     insertNode.setOutSchema(childSchema);
     insertNode.setTableSchema(childSchema);
@@ -1310,11 +1357,38 @@
   @Override
   public LogicalNode visitDropDatabase(PlanContext context, Stack<Expr> stack, DropDatabase expr)
       throws PlanningException {
-    DropDatabaseNode dropDatabaseNode = context.plan.createNode(DropDatabaseNode.class);
+    DropDatabaseNode dropDatabaseNode = context.queryBlock.getNodeFromExpr(expr);
     dropDatabaseNode.init(expr.getDatabaseName(), expr.isIfExists());
     return dropDatabaseNode;
   }
 
+  public LogicalNode handleCreateTableLike(PlanContext context, CreateTable expr, CreateTableNode createTableNode)
+    throws PlanningException {
+    String parentTableName = expr.getLikeParentTableName();
+
+    if (CatalogUtil.isFQTableName(parentTableName) == false) {
+      parentTableName =
+	CatalogUtil.buildFQName(context.session.getCurrentDatabase(),
+				parentTableName);
+    }
+    TableDesc parentTableDesc = catalog.getTableDesc(parentTableName);
+    if(parentTableDesc == null)
+      throw new PlanningException("Table '"+parentTableName+"' does not exist");
+    PartitionMethodDesc partitionDesc = parentTableDesc.getPartitionMethod();
+    createTableNode.setTableSchema(parentTableDesc.getSchema());
+    createTableNode.setPartitionMethod(partitionDesc);
+
+    createTableNode.setStorageType(parentTableDesc.getMeta().getStoreType());
+    createTableNode.setOptions(parentTableDesc.getMeta().getOptions());
+
+    createTableNode.setExternal(parentTableDesc.isExternal());
+    if(parentTableDesc.isExternal()) {
+      createTableNode.setPath(parentTableDesc.getPath());
+    }
+    return createTableNode;
+  }
+
+
   @Override
   public LogicalNode visitCreateTable(PlanContext context, Stack<Expr> stack, CreateTable expr)
       throws PlanningException {
@@ -1329,7 +1403,9 @@
       createTableNode.setTableName(
           CatalogUtil.buildFQName(context.session.getCurrentDatabase(), expr.getTableName()));
     }
-
+    // This is CREATE TABLE <tablename> LIKE <parentTable>
+    if(expr.getLikeParentTableName() != null)
+      return handleCreateTableLike(context, expr, createTableNode);
 
     if (expr.hasStorageType()) { // If storage type (using clause) is specified
       createTableNode.setStorageType(CatalogUtil.getStoreType(expr.getStorageType()));
@@ -1512,6 +1588,14 @@
     return alterTableNode;
   }
 
+  @Override
+  public LogicalNode visitTruncateTable(PlanContext context, Stack<Expr> stack, TruncateTable truncateTable)
+      throws PlanningException {
+    TruncateTableNode truncateTableNode = context.queryBlock.getNodeFromExpr(truncateTable);
+    truncateTableNode.setTableNames(truncateTable.getTableNames());
+    return truncateTableNode;
+  }
+
   /*===============================================================================================
     Util SECTION
   ===============================================================================================*/
@@ -1542,22 +1626,25 @@
     // at the topmost join operator.
     // TODO - It's also valid that case-when is evalauted at the topmost outer operator.
     //        But, how can we know there is no further outer join operator after this node?
-    if (!checkIfCaseWhenWithOuterJoinBeEvaluated(block, evalNode, isTopMostJoin)) {
-      return false;
+    if (containsOuterJoin(block)) {
+      if (!isTopMostJoin) {
+        Collection<EvalNode> found = EvalTreeUtil.findOuterJoinSensitiveEvals(evalNode);
+        if (found.size() > 0) {
+          return false;
+        }
+      }
     }
 
     return true;
   }
 
-  private static boolean checkIfCaseWhenWithOuterJoinBeEvaluated(QueryBlock block, EvalNode evalNode,
-                                                                 boolean isTopMostJoin) {
-    if (block.containsJoinType(JoinType.LEFT_OUTER) || block.containsJoinType(JoinType.RIGHT_OUTER)) {
-      Collection<CaseWhenEval> caseWhenEvals = EvalTreeUtil.findEvalsByType(evalNode, EvalType.CASE);
-      if (caseWhenEvals.size() > 0 && !isTopMostJoin) {
-        return false;
-      }
-    }
-    return true;
+  public static boolean isOuterJoin(JoinType joinType) {
+    return joinType == JoinType.LEFT_OUTER || joinType == JoinType.RIGHT_OUTER || joinType==JoinType.FULL_OUTER;
+  }
+
+  public static boolean containsOuterJoin(QueryBlock block) {
+    return block.containsJoinType(JoinType.LEFT_OUTER) || block.containsJoinType(JoinType.RIGHT_OUTER) ||
+        block.containsJoinType(JoinType.FULL_OUTER);
   }
 
   /**
@@ -1576,8 +1663,8 @@
     }
 
     // Why? - When a {case when} is used with outer join, case when must be evaluated at topmost outer join.
-    if (block.containsJoinType(JoinType.LEFT_OUTER) || block.containsJoinType(JoinType.RIGHT_OUTER)) {
-      Collection<CaseWhenEval> found = EvalTreeUtil.findEvalsByType(evalNode, EvalType.CASE);
+    if (containsOuterJoin(block)) {
+      Collection<EvalNode> found = EvalTreeUtil.findOuterJoinSensitiveEvals(evalNode);
       if (found.size() > 0) {
         return false;
       }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/PhysicalPlannerImpl.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/PhysicalPlannerImpl.java
index e508d2c..f41d61d 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/PhysicalPlannerImpl.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/PhysicalPlannerImpl.java
@@ -844,16 +844,13 @@
 
   public PhysicalExec createScanPlan(TaskAttemptContext ctx, ScanNode scanNode, Stack<LogicalNode> node)
       throws IOException {
-    if (ctx.getTable(scanNode.getCanonicalName()) == null) {
-      return new SeqScanExec(ctx, sm, scanNode, null);
-    }
-    Preconditions.checkNotNull(ctx.getTable(scanNode.getCanonicalName()),
-        "Error: There is no table matched to %s", scanNode.getCanonicalName() + "(" + scanNode.getTableName() + ")");    
-
     // check if an input is sorted in the same order to the subsequence sort operator.
     // TODO - it works only if input files are raw files. We should check the file format.
     // Since the default intermediate file format is raw file, it is not problem right now.
     if (checkIfSortEquivalance(ctx, scanNode, node)) {
+      if (ctx.getTable(scanNode.getCanonicalName()) == null) {
+        return new SeqScanExec(ctx, sm, scanNode, null);
+      }
       FragmentProto [] fragments = ctx.getTables(scanNode.getCanonicalName());
       return new ExternalSortExec(ctx, sm, (SortNode) node.peek(), fragments);
     } else {
@@ -886,6 +883,9 @@
         }
       }
 
+      if (ctx.getTable(scanNode.getCanonicalName()) == null) {
+        return new SeqScanExec(ctx, sm, scanNode, null);
+      }
       FragmentProto [] fragments = ctx.getTables(scanNode.getCanonicalName());
       return new SeqScanExec(ctx, sm, scanNode, fragments);
     }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/PlannerUtil.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/PlannerUtil.java
index a1ff0f0..1d8fd0f 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/PlannerUtil.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/PlannerUtil.java
@@ -53,7 +53,8 @@
             (type == NodeType.CREATE_TABLE && !((CreateTableNode) baseNode).hasSubQuery()) ||
             baseNode.getType() == NodeType.DROP_TABLE ||
             baseNode.getType() == NodeType.ALTER_TABLESPACE ||
-            baseNode.getType() == NodeType.ALTER_TABLE;
+            baseNode.getType() == NodeType.ALTER_TABLE ||
+            baseNode.getType() == NodeType.TRUNCATE_TABLE;
   }
 
   /**
@@ -598,7 +599,7 @@
           for (int j = 0; j < schemas.length; j++) {
             // check whether the column is for either outer or inner
             // 0 is outer, and 1 is inner
-            if (schemas[j].containsByQualifiedName(column.getQualifiedName())) {
+            if (schemas[j].contains(column.getQualifiedName())) {
               pair[j] = column;
             }
           }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/PreLogicalPlanVerifier.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/PreLogicalPlanVerifier.java
index 5eca5fd..f744cf6 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/PreLogicalPlanVerifier.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/PreLogicalPlanVerifier.java
@@ -18,9 +18,11 @@
 
 package org.apache.tajo.engine.planner;
 
+import org.apache.tajo.TajoConstants;
 import org.apache.tajo.algebra.*;
 import org.apache.tajo.catalog.CatalogService;
 import org.apache.tajo.catalog.CatalogUtil;
+import org.apache.tajo.catalog.TableDesc;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.util.TUtil;
@@ -223,7 +225,6 @@
 
   public Expr visitInsert(Context context, Stack<Expr> stack, Insert expr) throws PlanningException {
     Expr child = super.visitInsert(context, stack, expr);
-
     if (!expr.isOverwrite()) {
       context.state.addVerification("INSERT INTO statement is not supported yet.");
     }
@@ -233,9 +234,10 @@
     }
 
     if (child != null && child.getType() == OpType.Projection) {
+      Projection projection = (Projection) child;
+      int projectColumnNum = projection.getNamedExprs().length;
+
       if (expr.hasTargetColumns()) {
-        Projection projection = (Projection) child;
-        int projectColumnNum = projection.getNamedExprs().length;
         int targetColumnNum = expr.getTargetColumns().length;
 
         if (targetColumnNum > projectColumnNum)  {
@@ -243,6 +245,25 @@
         } else if (targetColumnNum < projectColumnNum) {
           context.state.addVerification("INSERT has more expressions than target columns");
         }
+      } else {
+        if (expr.hasTableName()) {
+          String qualifiedName = expr.getTableName();
+          if (TajoConstants.EMPTY_STRING.equals(CatalogUtil.extractQualifier(expr.getTableName()))) {
+            qualifiedName = CatalogUtil.buildFQName(context.session.getCurrentDatabase(),
+                expr.getTableName());
+          }
+
+          TableDesc table = catalog.getTableDesc(qualifiedName);
+          if (table.hasPartition()) {
+            int columnSize = table.getSchema().getColumns().size();
+            columnSize += table.getPartitionMethod().getExpressionSchema().getColumns().size();
+            if (projectColumnNum < columnSize) {
+              context.state.addVerification("INSERT has smaller expressions than target columns");
+            } else if (projectColumnNum > columnSize) {
+              context.state.addVerification("INSERT has more expressions than target columns");
+            }
+          }
+        }
       }
     }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/UniformRangePartition.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/UniformRangePartition.java
index f6922ed..88cb061 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/UniformRangePartition.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/UniformRangePartition.java
@@ -422,10 +422,10 @@
           break;
         case TIMESTAMP:
           if (overflowFlag[i]) {
-            end.put(i, DatumFactory.createTimeStampFromMillis(
+            end.put(i, DatumFactory.createTimestmpDatumWithJavaMillis(
                 range.getStart().get(i).asInt8() + incs[i].longValue()));
           } else {
-            end.put(i, DatumFactory.createTimeStampFromMillis(last.get(i).asInt8() + incs[i].longValue()));
+            end.put(i, DatumFactory.createTimestmpDatumWithJavaMillis(last.get(i).asInt8() + incs[i].longValue()));
           }
           break;
         case INET4:
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/enforce/Enforcer.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/enforce/Enforcer.java
index 742736c..031569e 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/enforce/Enforcer.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/enforce/Enforcer.java
@@ -169,6 +169,24 @@
     TUtil.putToNestedList(properties, builder.getType(), builder.build());
   }
 
+  public void removeBroadcast(String tableName) {
+    List<EnforceProperty> enforces = properties.get(EnforceType.BROADCAST);
+    if (enforces == null) {
+      return;
+    }
+
+    EnforceProperty found = null;
+    for (EnforceProperty eachProperty: enforces) {
+      BroadcastEnforce enforce = eachProperty.getBroadcast();
+      if (enforce != null && tableName.equals(enforce.getTableName())) {
+        found = eachProperty;
+      }
+    }
+    if (found != null) {
+      enforces.remove(found);
+    }
+  }
+
   public void enforceColumnPartitionAlgorithm(int pid, ColumnPartitionAlgorithm algorithm) {
     EnforceProperty.Builder builder = newProperty();
     ColumnPartitionEnforcer.Builder enforce = ColumnPartitionEnforcer.newBuilder();
@@ -293,6 +311,8 @@
       }
       break;
     case SORTED_INPUT:
+      SortedInputEnforce sortedInput = property.getSortedInput();
+      sb.append("sorted input=" + sortedInput.getTableName());
     }
 
     return sb.toString();
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/ExecutionBlock.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/ExecutionBlock.java
index 7df6b43..1d14996 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/ExecutionBlock.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/ExecutionBlock.java
@@ -34,6 +34,9 @@
   private List<ScanNode> scanlist = new ArrayList<ScanNode>();
   private Enforcer enforcer = new Enforcer();
 
+  // Actual ScanNode's ExecutionBlockId -> Delegated ScanNode's ExecutionBlockId.
+  private Map<ExecutionBlockId, ExecutionBlockId> unionScanMap = new HashMap<ExecutionBlockId, ExecutionBlockId>();
+
   private boolean hasJoinPlan;
   private boolean hasUnionPlan;
 
@@ -83,6 +86,13 @@
     }
   }
 
+  public void addUnionScan(ExecutionBlockId realScanEbId, ExecutionBlockId delegatedScanEbId) {
+    unionScanMap.put(realScanEbId, delegatedScanEbId);
+  }
+
+  public Map<ExecutionBlockId, ExecutionBlockId> getUnionScanMap() {
+    return unionScanMap;
+  }
 
   public LogicalNode getPlan() {
     return plan;
@@ -113,6 +123,11 @@
     enforcer.addBroadcast(tableName);
   }
 
+  public void removeBroadcastTable(String tableName) {
+    broadcasted.remove(tableName);
+    enforcer.removeBroadcast(tableName);
+  }
+
   public boolean isBroadcastTable(String tableName) {
     return broadcasted.contains(tableName);
   }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
index 16def83..edd5674 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/GlobalPlanner.java
@@ -25,16 +25,14 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
+import org.apache.tajo.ExecutionBlockId;
 import org.apache.tajo.algebra.JoinType;
 import org.apache.tajo.catalog.*;
 import org.apache.tajo.catalog.partition.PartitionMethodDesc;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.common.TajoDataTypes;
 import org.apache.tajo.conf.TajoConf;
-import org.apache.tajo.engine.eval.AggregationFunctionCallEval;
-import org.apache.tajo.engine.eval.EvalNode;
-import org.apache.tajo.engine.eval.EvalTreeUtil;
-import org.apache.tajo.engine.eval.FieldEval;
+import org.apache.tajo.engine.eval.*;
 import org.apache.tajo.engine.function.AggFunction;
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.global.builder.DistinctGroupbyBuilder;
@@ -201,7 +199,6 @@
     if (node instanceof RelationNode) {
       switch (node.getType()) {
       case SCAN:
-      case PARTITIONS_SCAN:
         ScanNode scanNode = (ScanNode) node;
         if (scanNode.getTableDesc().getStats() == null) {
           // TODO - this case means that data is not located in HDFS. So, we need additional
@@ -210,6 +207,20 @@
         } else {
           return scanNode.getTableDesc().getStats().getNumBytes();
         }
+      case PARTITIONS_SCAN:
+        PartitionedTableScanNode pScanNode = (PartitionedTableScanNode) node;
+        if (pScanNode.getTableDesc().getStats() == null) {
+          // TODO - this case means that data is not located in HDFS. So, we need additional
+          // broadcast method.
+          return Long.MAX_VALUE;
+        } else {
+          // if there is no selected partition
+          if (pScanNode.getInputPaths() == null || pScanNode.getInputPaths().length == 0) {
+            return 0;
+          } else {
+            return pScanNode.getTableDesc().getStats().getNumBytes();
+          }
+        }
       case TABLE_SUBQUERY:
         return computeDescendentVolume(((TableSubQueryNode) node).getSubQuery());
       default:
@@ -229,11 +240,28 @@
     return node.getType() == NodeType.SCAN || node.getType() == NodeType.PARTITIONS_SCAN;
   }
 
+  /**
+   * Get a volume of a table of a partitioned table
+   * @param scanNode ScanNode corresponding to a table
+   * @return table volume (bytes)
+   */
+  private static long getTableVolume(ScanNode scanNode) {
+    long scanBytes = scanNode.getTableDesc().getStats().getNumBytes();
+    if (scanNode.getType() == NodeType.PARTITIONS_SCAN) {
+      PartitionedTableScanNode pScanNode = (PartitionedTableScanNode)scanNode;
+      if (pScanNode.getInputPaths() == null || pScanNode.getInputPaths().length == 0) {
+        scanBytes = 0L;
+      }
+    }
+
+    return scanBytes;
+  }
+
   private ExecutionBlock buildJoinPlan(GlobalPlanContext context, JoinNode joinNode,
                                         ExecutionBlock leftBlock, ExecutionBlock rightBlock)
       throws PlanningException {
     MasterPlan masterPlan = context.plan;
-    ExecutionBlock currentBlock = null;
+    ExecutionBlock currentBlock;
 
     boolean autoBroadcast = conf.getBoolVar(TajoConf.ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO);
 
@@ -244,8 +272,7 @@
       int numLargeTables = 0;
       for(LogicalNode eachNode: joinNode.getBroadcastTargets()) {
         ScanNode scanNode = (ScanNode)eachNode;
-        TableDesc tableDesc = scanNode.getTableDesc();
-        if (tableDesc.getStats().getNumBytes() < broadcastThreshold) {
+        if (getTableVolume(scanNode) < broadcastThreshold) {
           broadtargetTables.add(scanNode);
           LOG.info("The table " + scanNode.getCanonicalName() + " ("
               + scanNode.getTableDesc().getStats().getNumBytes() + ") is marked a broadcasted table");
@@ -285,10 +312,10 @@
       TableDesc rightDesc = rightScan.getTableDesc();
       long broadcastThreshold = conf.getLongVar(TajoConf.ConfVars.DIST_QUERY_BROADCAST_JOIN_THRESHOLD);
 
-      if (leftDesc.getStats().getNumBytes() < broadcastThreshold) {
+      if (getTableVolume(leftScan) < broadcastThreshold) {
         leftBroadcasted = true;
       }
-      if (rightDesc.getStats().getNumBytes() < broadcastThreshold) {
+      if (getTableVolume(rightScan) < broadcastThreshold) {
         rightBroadcasted = true;
       }
 
@@ -313,22 +340,143 @@
     }
 
     // symmetric repartition join
-    currentBlock = masterPlan.newExecutionBlock();
+    boolean leftUnion = leftNode.getType() == NodeType.TABLE_SUBQUERY &&
+        ((TableSubQueryNode)leftNode).getSubQuery().getType() == NodeType.UNION;
+    boolean rightUnion = rightNode.getType() == NodeType.TABLE_SUBQUERY &&
+        ((TableSubQueryNode)rightNode).getSubQuery().getType() == NodeType.UNION;
 
-    DataChannel leftChannel = createDataChannelFromJoin(leftBlock, rightBlock, currentBlock, joinNode, true);
-    DataChannel rightChannel = createDataChannelFromJoin(leftBlock, rightBlock, currentBlock, joinNode, false);
+    if (leftUnion || rightUnion) { // if one of child execution block is union
+      /*
+       Join with tableC and result of union tableA, tableB is expected the following physical plan.
+       But Union execution block is not necessary.
+       |-eb_0001_000006 (Terminal)
+          |-eb_0001_000005 (Join eb_0001_000003, eb_0001_000004)
+             |-eb_0001_000004 (Scan TableC)
+             |-eb_0001_000003 (Union TableA, TableB)
+               |-eb_0001_000002 (Scan TableB)
+               |-eb_0001_000001 (Scan TableA)
 
-    ScanNode leftScan = buildInputExecutor(masterPlan.getLogicalPlan(), leftChannel);
-    ScanNode rightScan = buildInputExecutor(masterPlan.getLogicalPlan(), rightChannel);
+       The above plan can be changed to the following plan.
+       |-eb_0001_000005 (Terminal)
+          |-eb_0001_000003    (Join [eb_0001_000001, eb_0001_000002], eb_0001_000004)
+             |-eb_0001_000004 (Scan TableC)
+             |-eb_0001_000002 (Scan TableB)
+             |-eb_0001_000001 (Scan TableA)
 
-    joinNode.setLeftChild(leftScan);
-    joinNode.setRightChild(rightScan);
-    currentBlock.setPlan(joinNode);
+       eb_0001_000003's left child should be eb_0001_000001 + eb_0001_000001 and right child should be eb_0001_000004.
+       For this eb_0001_000001 is representative of eb_0001_000001, eb_0001_000002.
+       So eb_0001_000003's left child is eb_0001_000001
+       */
+      Column[][] joinColumns = null;
+      if (joinNode.getJoinType() != JoinType.CROSS) {
+        // ShuffleKeys need to not have thea-join condition because Tajo supports only equi-join.
+        joinColumns = PlannerUtil.joinJoinKeyForEachTable(joinNode.getJoinQual(),
+            leftNode.getOutSchema(), rightNode.getOutSchema(), false);
+      }
 
-    masterPlan.addConnect(leftChannel);
-    masterPlan.addConnect(rightChannel);
+      if (leftUnion && !rightUnion) { // if only left is union
+        currentBlock = leftBlock;
+        context.execBlockMap.remove(leftNode.getPID());
+        Column[] shuffleKeys = (joinColumns != null) ? joinColumns[0] : null;
+        Column[] otherSideShuffleKeys = (joinColumns != null) ? joinColumns[1] : null;
+        buildJoinPlanWithUnionChannel(context, joinNode, currentBlock, leftBlock, rightBlock, leftNode,
+            shuffleKeys, otherSideShuffleKeys, true);
+        currentBlock.setPlan(joinNode);
+      } else if (!leftUnion && rightUnion) { // if only right is union
+        currentBlock = rightBlock;
+        context.execBlockMap.remove(rightNode.getPID());
+        Column[] shuffleKeys = (joinColumns != null) ? joinColumns[1] : null;
+        Column[] otherSideShuffleKeys = (joinColumns != null) ? joinColumns[0] : null;
+        buildJoinPlanWithUnionChannel(context, joinNode, currentBlock, rightBlock, leftBlock, rightNode,
+            shuffleKeys, otherSideShuffleKeys, false);
+        currentBlock.setPlan(joinNode);
+      } else { // if both are unions
+        currentBlock = leftBlock;
+        context.execBlockMap.remove(leftNode.getPID());
+        context.execBlockMap.remove(rightNode.getPID());
+        buildJoinPlanWithUnionChannel(context, joinNode, currentBlock, leftBlock, null, leftNode,
+            (joinColumns != null ? joinColumns[0] : null), null, true);
+        buildJoinPlanWithUnionChannel(context, joinNode, currentBlock, rightBlock, null, rightNode,
+            (joinColumns != null ? joinColumns[1] : null), null, false);
+        currentBlock.setPlan(joinNode);
+      }
 
-    return currentBlock;
+      return currentBlock;
+    } else {
+      // !leftUnion && !rightUnion
+      currentBlock = masterPlan.newExecutionBlock();
+      DataChannel leftChannel = createDataChannelFromJoin(leftBlock, rightBlock, currentBlock, joinNode, true);
+      DataChannel rightChannel = createDataChannelFromJoin(leftBlock, rightBlock, currentBlock, joinNode, false);
+
+      ScanNode leftScan = buildInputExecutor(masterPlan.getLogicalPlan(), leftChannel);
+      ScanNode rightScan = buildInputExecutor(masterPlan.getLogicalPlan(), rightChannel);
+
+      joinNode.setLeftChild(leftScan);
+      joinNode.setRightChild(rightScan);
+      currentBlock.setPlan(joinNode);
+
+      masterPlan.addConnect(leftChannel);
+      masterPlan.addConnect(rightChannel);
+
+      return currentBlock;
+    }
+  }
+
+  private void buildJoinPlanWithUnionChannel(GlobalPlanContext context, JoinNode joinNode,
+                                             ExecutionBlock targetBlock,
+                                             ExecutionBlock sourceBlock,
+                                             ExecutionBlock otherSideBlock,
+                                             LogicalNode childNode,
+                                             Column[] shuffleKeys,
+                                             Column[] otherSideShuffleKeys,
+                                             boolean left) {
+    MasterPlan masterPlan = context.getPlan();
+    String subQueryRelationName = ((TableSubQueryNode)childNode).getCanonicalName();
+    ExecutionBlockId dedicatedScanNodeBlock = null;
+    for (DataChannel channel : masterPlan.getIncomingChannels(sourceBlock.getId())) {
+      // If all union and right, add channel to left
+      if (otherSideBlock == null && !left) {
+        DataChannel oldChannel = channel;
+        masterPlan.disconnect(oldChannel.getSrcId(), oldChannel.getTargetId());
+        channel = new DataChannel(oldChannel.getSrcId(), targetBlock.getId());
+      }
+      channel.setSchema(childNode.getOutSchema());
+      channel.setShuffleType(HASH_SHUFFLE);
+      channel.setShuffleOutputNum(32);
+      if (shuffleKeys != null) {
+        channel.setShuffleKeys(shuffleKeys);
+      }
+
+      ScanNode scanNode = buildInputExecutor(masterPlan.getLogicalPlan(), channel);
+      scanNode.getOutSchema().setQualifier(subQueryRelationName);
+      if (dedicatedScanNodeBlock == null) {
+        dedicatedScanNodeBlock = channel.getSrcId();
+        if (left) {
+          joinNode.setLeftChild(scanNode);
+        } else {
+          joinNode.setRightChild(scanNode);
+        }
+      }
+      masterPlan.addConnect(channel);
+      targetBlock.addUnionScan(channel.getSrcId(), dedicatedScanNodeBlock);
+    }
+
+    // create other side channel
+    if (otherSideBlock != null) {
+      DataChannel otherSideChannel = new DataChannel(otherSideBlock, targetBlock, HASH_SHUFFLE, 32);
+      otherSideChannel.setStoreType(storeType);
+      if (otherSideShuffleKeys != null) {
+        otherSideChannel.setShuffleKeys(otherSideShuffleKeys);
+      }
+      masterPlan.addConnect(otherSideChannel);
+
+      ScanNode scan = buildInputExecutor(masterPlan.getLogicalPlan(), otherSideChannel);
+      if (left) {
+        joinNode.setRightChild(scan);
+      } else {
+        joinNode.setLeftChild(scan);
+      }
+    }
   }
 
   private AggregationFunctionCallEval createSumFunction(EvalNode [] args) throws InternalException {
@@ -1195,10 +1343,69 @@
       ExecutionBlock currentBlock = context.execBlockMap.remove(child.getPID());
 
       if (child.getType() == NodeType.UNION) {
+        List<TableSubQueryNode> addedTableSubQueries = new ArrayList<TableSubQueryNode>();
+        TableSubQueryNode leftMostSubQueryNode = null;
         for (ExecutionBlock childBlock : context.plan.getChilds(currentBlock.getId())) {
           TableSubQueryNode copy = PlannerUtil.clone(plan, node);
           copy.setSubQuery(childBlock.getPlan());
           childBlock.setPlan(copy);
+          addedTableSubQueries.add(copy);
+
+          //Find a SubQueryNode which contains all columns in InputSchema matched with Target and OutputSchema's column
+          if (copy.getInSchema().containsAll(copy.getOutSchema().getColumns())) {
+            for (Target eachTarget : copy.getTargets()) {
+              Set<Column> columns = EvalTreeUtil.findUniqueColumns(eachTarget.getEvalTree());
+              if (copy.getInSchema().containsAll(columns)) {
+                leftMostSubQueryNode = copy;
+                break;
+              }
+            }
+          }
+        }
+        if (leftMostSubQueryNode != null) {
+          // replace target column name
+          Target[] targets = leftMostSubQueryNode.getTargets();
+          int[] targetMappings = new int[targets.length];
+          for (int i = 0; i < targets.length; i++) {
+            if (targets[i].getEvalTree().getType() != EvalType.FIELD) {
+              throw new PlanningException("Target of a UnionNode's subquery should be FieldEval.");
+            }
+            int index = leftMostSubQueryNode.getInSchema().getColumnId(targets[i].getNamedColumn().getQualifiedName());
+            if (index < 0) {
+              // If a target has alias, getNamedColumn() only returns alias
+              Set<Column> columns = EvalTreeUtil.findUniqueColumns(targets[i].getEvalTree());
+              Column column = columns.iterator().next();
+              index = leftMostSubQueryNode.getInSchema().getColumnId(column.getQualifiedName());
+            }
+            if (index < 0) {
+              throw new PlanningException("Can't find matched Target in UnionNode's input schema: " + targets[i]
+                  + "->" + leftMostSubQueryNode.getInSchema());
+            }
+            targetMappings[i] = index;
+          }
+
+          for (TableSubQueryNode eachNode: addedTableSubQueries) {
+            if (eachNode.getPID() == leftMostSubQueryNode.getPID()) {
+              continue;
+            }
+            Target[] eachNodeTargets = eachNode.getTargets();
+            if (eachNodeTargets.length != targetMappings.length) {
+              throw new PlanningException("Union query can't have different number of target columns.");
+            }
+            for (int i = 0; i < eachNodeTargets.length; i++) {
+              Column inColumn = eachNode.getInSchema().getColumn(targetMappings[i]);
+              eachNodeTargets[i].setAlias(eachNodeTargets[i].getNamedColumn().getQualifiedName());
+              EvalNode evalNode = eachNodeTargets[i].getEvalTree();
+              if (evalNode.getType() != EvalType.FIELD) {
+                throw new PlanningException("Target of a UnionNode's subquery should be FieldEval.");
+              }
+              FieldEval fieldEval = (FieldEval) evalNode;
+              EvalTreeUtil.changeColumnRef(fieldEval,
+                  fieldEval.getColumnRef().getQualifiedName(), inColumn.getQualifiedName());
+            }
+          }
+        } else {
+          LOG.warn("Can't find left most SubQuery in the UnionNode.");
         }
       } else {
         currentBlock.setPlan(node);
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/MasterPlan.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/MasterPlan.java
index 37b0db1..a8593e5 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/MasterPlan.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/MasterPlan.java
@@ -238,7 +238,11 @@
       if (!isLeaf(block)) {
         sb.append("\n[Incoming]\n");
         for (DataChannel channel : getIncomingChannels(block.getId())) {
-          sb.append(channel).append("\n");
+          sb.append(channel);
+          if (block.getUnionScanMap().containsKey(channel.getSrcId())) {
+            sb.append(", union delegated scan: ").append(block.getUnionScanMap().get(channel.getSrcId()));
+          }
+          sb.append("\n");
         }
       }
 
@@ -250,6 +254,7 @@
         }
       }
 
+
       if (block.getEnforcer().getProperties().size() > 0) {
         sb.append("\n[Enforcers]\n");
         int i = 0;
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/builder/DistinctGroupbyBuilder.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/builder/DistinctGroupbyBuilder.java
index 1ccd9dc..8727b84 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/builder/DistinctGroupbyBuilder.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/global/builder/DistinctGroupbyBuilder.java
@@ -263,7 +263,11 @@
     int[] secondStageColumnIds = new int[secondStageDistinctNode.getOutSchema().size()];
     int columnIdIndex = 0;
     for (Column column: secondStageDistinctNode.getGroupingColumns()) {
-      secondStageColumnIds[originOutputSchema.getColumnId(column.getQualifiedName())] = columnIdIndex;
+      if (column.hasQualifier()) {
+        secondStageColumnIds[originOutputSchema.getColumnId(column.getQualifiedName())] = columnIdIndex;
+      } else {
+        secondStageColumnIds[originOutputSchema.getColumnIdByName(column.getSimpleName())] = columnIdIndex;
+      }
       columnIdIndex++;
     }
 
@@ -312,8 +316,12 @@
           int targetIdx = originGroupColumns.size() + uniqueDistinctColumn.size() + aggFuncIdx;
           Target aggFuncTarget = oldTargets[targetIdx];
           secondGroupbyTargets.add(aggFuncTarget);
-          int outputColumnId = originOutputSchema.getColumnId(aggFuncTarget.getNamedColumn().getQualifiedName());
-          secondStageColumnIds[outputColumnId] = columnIdIndex;
+          Column column = aggFuncTarget.getNamedColumn();
+          if (column.hasQualifier()) {
+            secondStageColumnIds[originOutputSchema.getColumnId(column.getQualifiedName())] = columnIdIndex;
+          } else {
+            secondStageColumnIds[originOutputSchema.getColumnIdByName(column.getSimpleName())] = columnIdIndex;
+          }
           columnIdIndex++;
         }
         secondStageGroupbyNode.setTargets(secondGroupbyTargets.toArray(new Target[]{}));
@@ -336,8 +344,12 @@
           secondStageAggFunction.setArgs(new EvalNode[] {firstEval});
 
           Target secondTarget = secondStageGroupbyNode.getTargets()[secondStageGroupbyNode.getGroupingColumns().length + aggFuncIdx];
-          int outputColumnId = originOutputSchema.getColumnId(secondTarget.getNamedColumn().getQualifiedName());
-          secondStageColumnIds[outputColumnId] = columnIdIndex;
+          Column column = secondTarget.getNamedColumn();
+          if (column.hasQualifier()) {
+            secondStageColumnIds[originOutputSchema.getColumnId(column.getQualifiedName())] = columnIdIndex;
+          } else {
+            secondStageColumnIds[originOutputSchema.getColumnIdByName(column.getSimpleName())] = columnIdIndex;
+          }
           columnIdIndex++;
           aggFuncIdx++;
         }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/GroupbyNode.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/GroupbyNode.java
index 828b06d..75aa52c 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/GroupbyNode.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/GroupbyNode.java
@@ -237,4 +237,18 @@
 
     return planStr;
   }
+
+  /**
+   * It checks if an alias name included in the target of this node is for aggregation function.
+   * If so, it returns TRUE. Otherwise, it returns FALSE.
+   */
+  public boolean isAggregationColumn(String simpleName) {
+    for (int i = groupingColumns.length; i < targets.length; i++) {
+      if (simpleName.equals(targets[i].getNamedColumn().getSimpleName()) ||
+          simpleName.equals(targets[i].getAlias())) {
+        return true;
+      }
+    }
+    return false;
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/NodeType.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/NodeType.java
index cc43912..de79f93 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/NodeType.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/NodeType.java
@@ -53,7 +53,8 @@
   CREATE_TABLE(CreateTableNode.class),
   DROP_TABLE(DropTableNode.class),
   ALTER_TABLESPACE (AlterTablespaceNode.class),
-  ALTER_TABLE (AlterTableNode.class);
+  ALTER_TABLE (AlterTableNode.class),
+  TRUNCATE_TABLE (TruncateTableNode.class);
 
   private final Class<? extends LogicalNode> baseClass;
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/TruncateTableNode.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/TruncateTableNode.java
new file mode 100644
index 0000000..b8d9cad
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/TruncateTableNode.java
@@ -0,0 +1,62 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.planner.logical;
+
+import com.google.gson.annotations.Expose;
+import org.apache.tajo.engine.planner.PlanString;
+import org.apache.tajo.util.TUtil;
+
+import java.util.List;
+
+public class TruncateTableNode extends LogicalNode {
+  @Expose
+  private List<String> tableNames;
+
+  public TruncateTableNode(int pid) {
+    super(pid, NodeType.TRUNCATE_TABLE);
+  }
+
+  public List<String> getTableNames() {
+    return tableNames;
+  }
+
+  public void setTableNames(List<String> tableNames) {
+    this.tableNames = tableNames;
+  }
+
+  @Override
+  public String toString() {
+    return "TruncateTable (table=" + TUtil.collectionToString(tableNames) + ")";
+  }
+
+  @Override
+  public void preOrder(LogicalNodeVisitor visitor) {
+    visitor.visit(this);
+  }
+
+  @Override
+  public void postOrder(LogicalNodeVisitor visitor) {
+    visitor.visit(this);
+  }
+
+  @Override
+  public PlanString getPlanString() {
+    return new PlanString(this);
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
index f2bcd77..f65fee7 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/GreedyHeuristicJoinOrderAlgorithm.java
@@ -26,9 +26,9 @@
 import org.apache.tajo.engine.planner.PlanningException;
 import org.apache.tajo.engine.planner.logical.*;
 import org.apache.tajo.engine.utils.SchemaUtil;
+import org.apache.tajo.util.TUtil;
 
-import java.util.LinkedHashSet;
-import java.util.Set;
+import java.util.*;
 
 /**
  * This is a greedy heuristic algorithm to find a bushy join tree. This algorithm finds
@@ -166,6 +166,45 @@
       throws PlanningException {
     JoinEdge foundJoinEdge = null;
 
+    // If outer is outer join, make edge key using all relation names in outer.
+    SortedSet<String> relationNames =
+        new TreeSet<String>(PlannerUtil.getRelationLineageWithinQueryBlock(plan, outer));
+    String outerEdgeKey = TUtil.collectionToString(relationNames);
+    for (String innerName : PlannerUtil.getRelationLineageWithinQueryBlock(plan, inner)) {
+      if (graph.hasEdge(outerEdgeKey, innerName)) {
+        JoinEdge existJoinEdge = graph.getEdge(outerEdgeKey, innerName);
+        if (foundJoinEdge == null) {
+          foundJoinEdge = new JoinEdge(existJoinEdge.getJoinType(), outer, inner,
+              existJoinEdge.getJoinQual());
+        } else {
+          foundJoinEdge.addJoinQual(AlgebraicUtil.createSingletonExprFromCNF(
+              existJoinEdge.getJoinQual()));
+        }
+      }
+    }
+    if (foundJoinEdge != null) {
+      return foundJoinEdge;
+    }
+
+    relationNames =
+        new TreeSet<String>(PlannerUtil.getRelationLineageWithinQueryBlock(plan, inner));
+    outerEdgeKey = TUtil.collectionToString(relationNames);
+    for (String outerName : PlannerUtil.getRelationLineageWithinQueryBlock(plan, outer)) {
+      if (graph.hasEdge(outerEdgeKey, outerName)) {
+        JoinEdge existJoinEdge = graph.getEdge(outerEdgeKey, outerName);
+        if (foundJoinEdge == null) {
+          foundJoinEdge = new JoinEdge(existJoinEdge.getJoinType(), inner, outer,
+              existJoinEdge.getJoinQual());
+        } else {
+          foundJoinEdge.addJoinQual(AlgebraicUtil.createSingletonExprFromCNF(
+              existJoinEdge.getJoinQual()));
+        }
+      }
+    }
+    if (foundJoinEdge != null) {
+      return foundJoinEdge;
+    }
+
     for (String outerName : PlannerUtil.getRelationLineageWithinQueryBlock(plan, outer)) {
       for (String innerName : PlannerUtil.getRelationLineageWithinQueryBlock(plan, inner)) {
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinEdge.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinEdge.java
index e5c29f0..c9bb571 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinEdge.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinEdge.java
@@ -22,7 +22,6 @@
 import org.apache.tajo.algebra.JoinType;
 import org.apache.tajo.engine.eval.EvalNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
-import org.apache.tajo.engine.planner.logical.RelationNode;
 import org.apache.tajo.util.TUtil;
 
 import java.util.Collections;
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinGraph.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinGraph.java
index d384f51..6390a77 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinGraph.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/logical/join/JoinGraph.java
@@ -19,6 +19,7 @@
 package org.apache.tajo.engine.planner.logical.join;
 
 import com.google.common.collect.Sets;
+import org.apache.tajo.algebra.JoinType;
 import org.apache.tajo.catalog.CatalogUtil;
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.engine.eval.AlgebraicUtil;
@@ -31,9 +32,10 @@
 import org.apache.tajo.engine.planner.PlanningException;
 import org.apache.tajo.engine.planner.graph.SimpleUndirectedGraph;
 import org.apache.tajo.engine.planner.logical.JoinNode;
+import org.apache.tajo.engine.planner.logical.RelationNode;
+import org.apache.tajo.util.TUtil;
 
-import java.util.Collection;
-import java.util.Set;
+import java.util.*;
 
 public class JoinGraph extends SimpleUndirectedGraph<String, JoinEdge> {
 
@@ -58,7 +60,17 @@
         String qualifier = CatalogUtil.extractQualifier(columnName);
         relationNames[0] = qualifier;
       } else {
-        throw new PlanningException("Cannot expect a referenced relation: " + leftExpr);
+        // search for a relation which evaluates a right term included in a join condition
+        for (RelationNode rel : block.getRelations()) {
+          if (rel.getOutSchema().contains(leftExpr)) {
+            String qualifier = rel.getCanonicalName();
+            relationNames[0] = qualifier;
+          }
+        }
+
+        if (relationNames[0] == null) { // if not found
+          throw new PlanningException("Cannot expect a referenced relation: " + leftExpr);
+        }
       }
     }
 
@@ -70,47 +82,70 @@
         String qualifier = CatalogUtil.extractQualifier(columnName);
         relationNames[1] = qualifier;
       } else {
-        throw new PlanningException("Cannot expect a referenced relation: " + rightExpr);
+        // search for a relation which evaluates a right term included in a join condition
+        for (RelationNode rel : block.getRelations()) {
+          if (rel.getOutSchema().contains(rightExpr)) {
+            String qualifier = rel.getCanonicalName();
+            relationNames[1] = qualifier;
+          }
+        }
+
+        if (relationNames[1] == null) { // if not found
+          throw new PlanningException("Cannot expect a referenced relation: " + rightExpr);
+        }
       }
     }
 
     return relationNames;
   }
+
   public Collection<EvalNode> addJoin(LogicalPlan plan, LogicalPlan.QueryBlock block,
                                       JoinNode joinNode) throws PlanningException {
-    Set<EvalNode> cnf = Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual()));
-    Set<EvalNode> nonJoinQuals = Sets.newHashSet();
-    for (EvalNode singleQual : cnf) {
-      if (EvalTreeUtil.isJoinQual(singleQual, true)) {
+    if (joinNode.getJoinType() == JoinType.LEFT_OUTER || joinNode.getJoinType() == JoinType.RIGHT_OUTER) {
+      JoinEdge edge = new JoinEdge(joinNode.getJoinType(),
+            joinNode.getLeftChild(), joinNode.getRightChild(), joinNode.getJoinQual());
 
-        String [] relations = guessRelationsFromJoinQual(block, (BinaryEval) singleQual);
-        String leftExprRelName = relations[0];
-        String rightExprRelName = relations[1];
+      SortedSet<String> leftNodeRelationName =
+          new TreeSet<String>(PlannerUtil.getRelationLineageWithinQueryBlock(plan, joinNode.getLeftChild()));
+      SortedSet<String> rightNodeRelationName =
+          new TreeSet<String>(PlannerUtil.getRelationLineageWithinQueryBlock(plan, joinNode.getRightChild()));
 
-        Collection<String> leftLineage = PlannerUtil.getRelationLineageWithinQueryBlock(plan, joinNode.getLeftChild());
+      addEdge(TUtil.collectionToString(leftNodeRelationName), TUtil.collectionToString(rightNodeRelationName), edge);
 
-        boolean isLeftExprForLeftTable = leftLineage.contains(leftExprRelName);
-        JoinEdge edge;
-        edge = getEdge(leftExprRelName, rightExprRelName);
+      Set<EvalNode> allInOneCnf = new HashSet<EvalNode>();
+      allInOneCnf.add(joinNode.getJoinQual());
 
-        if (edge != null) {
-          edge.addJoinQual(singleQual);
-        } else {
-          if (isLeftExprForLeftTable) {
-            edge = new JoinEdge(joinNode.getJoinType(),
-                block.getRelation(leftExprRelName), block.getRelation(rightExprRelName), singleQual);
-            addEdge(leftExprRelName, rightExprRelName, edge);
+      return allInOneCnf;
+    } else {
+      Set<EvalNode> cnf = Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual()));
+
+      for (EvalNode singleQual : cnf) {
+        if (EvalTreeUtil.isJoinQual(singleQual, true)) {
+          String[] relations = guessRelationsFromJoinQual(block, (BinaryEval) singleQual);
+          String leftExprRelName = relations[0];
+          String rightExprRelName = relations[1];
+
+          Collection<String> leftLineage = PlannerUtil.getRelationLineageWithinQueryBlock(plan, joinNode.getLeftChild());
+
+          boolean isLeftExprForLeftTable = leftLineage.contains(leftExprRelName);
+
+          JoinEdge edge = getEdge(leftExprRelName, rightExprRelName);
+          if (edge != null) {
+            edge.addJoinQual(singleQual);
           } else {
-            edge = new JoinEdge(joinNode.getJoinType(),
-                block.getRelation(rightExprRelName), block.getRelation(leftExprRelName), singleQual);
-            addEdge(rightExprRelName, leftExprRelName, edge);
+            if (isLeftExprForLeftTable) {
+              edge = new JoinEdge(joinNode.getJoinType(),
+                  block.getRelation(leftExprRelName), block.getRelation(rightExprRelName), singleQual);
+              addEdge(leftExprRelName, rightExprRelName, edge);
+            } else {
+              edge = new JoinEdge(joinNode.getJoinType(),
+                  block.getRelation(rightExprRelName), block.getRelation(leftExprRelName), singleQual);
+              addEdge(rightExprRelName, leftExprRelName, edge);
+            }
           }
         }
-      } else {
-        nonJoinQuals.add(singleQual);
       }
+      return cnf;
     }
-    cnf.retainAll(nonJoinQuals);
-    return cnf;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/AggregationExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/AggregationExec.java
index 208973e..2a671e6 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/AggregationExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/AggregationExec.java
@@ -49,7 +49,11 @@
     Column col;
     for (int idx = 0; idx < plan.getGroupingColumns().length; idx++) {
       col = keyColumns[idx];
-      groupingKeyIds[idx] = inSchema.getColumnId(col.getQualifiedName());
+      if (col.hasQualifier()) {
+        groupingKeyIds[idx] = inSchema.getColumnId(col.getQualifiedName());
+      } else {
+        groupingKeyIds[idx] = inSchema.getColumnIdByName(col.getSimpleName());
+      }
     }
 
     if (plan.hasAggFunctions()) {
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbyHashAggregationExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbyHashAggregationExec.java
index 6458f47..1a4b706 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbyHashAggregationExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbyHashAggregationExec.java
@@ -21,6 +21,7 @@
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.statistics.TableStats;
+import org.apache.tajo.datum.DatumFactory;
 import org.apache.tajo.datum.NullDatum;
 import org.apache.tajo.engine.eval.AggregationFunctionCallEval;
 import org.apache.tajo.engine.function.FunctionContext;
@@ -40,7 +41,6 @@
 
   private HashAggregator[] hashAggregators;
   private PhysicalExec child;
-  private int distinctGroupingKeyNum;
   private int distinctGroupingKeyIds[];
   private boolean first = true;
   private int groupbyNodeNum;
@@ -58,14 +58,22 @@
     this.child = subOp;
     this.child.init();
 
-    distinctGroupingKeyNum = plan.getGroupingColumns().length;
-    distinctGroupingKeyIds = new int[distinctGroupingKeyNum];
-
-    Column[] keyColumns = plan.getGroupingColumns();
-    Column col;
-    for (int idx = 0; idx < plan.getGroupingColumns().length; idx++) {
-      col = keyColumns[idx];
-      distinctGroupingKeyIds[idx] = inSchema.getColumnId(col.getQualifiedName());
+    List<Integer> distinctGroupingKeyIdList = new ArrayList<Integer>();
+    for (Column col: plan.getGroupingColumns()) {
+      int keyIndex;
+      if (col.hasQualifier()) {
+        keyIndex = inSchema.getColumnId(col.getQualifiedName());
+      } else {
+        keyIndex = inSchema.getColumnIdByName(col.getSimpleName());
+      }
+      if (!distinctGroupingKeyIdList.contains(keyIndex)) {
+        distinctGroupingKeyIdList.add(keyIndex);
+      }
+    }
+    int idx = 0;
+    distinctGroupingKeyIds = new int[distinctGroupingKeyIdList.size()];
+    for (Integer intVal: distinctGroupingKeyIdList) {
+      distinctGroupingKeyIds[idx++] = intVal.intValue();
     }
 
     List<GroupbyNode> groupbyNodes = plan.getGroupByNodes();
@@ -149,9 +157,48 @@
     if (nullCount == hashAggregators.length) {
       finished = true;
       progress = 1.0f;
-      return null;
+
+      // If DistinctGroupbyHashAggregationExec didn't has any rows,
+      // it should return NullDatum.
+      if (totalNumRows == 0 && groupbyNodeNum == 0) {
+        Tuple tuple = new VTuple(hashAggregators.length);
+        for (int i = 0; i < tuple.size(); i++) {
+          tuple.put(i, DatumFactory.createNullDatum());
+        }
+        return tuple;
+      } else {
+        return null;
+      }
     }
 
+
+    /*
+    Tuple materialization example
+    =============================
+
+    Output Tuple Index: 0(l_orderkey), 1(l_partkey), 2(default.lineitem.l_suppkey), 5(default.lineitem.
+    l_partkey), 8(sum)
+
+              select
+                  lineitem.l_orderkey as l_orderkey,
+                  lineitem.l_partkey as l_partkey,
+                  count(distinct lineitem.l_partkey) as cnt1,
+                  count(distinct lineitem.l_suppkey) as cnt2,
+                  sum(lineitem.l_quantity) as sum1
+              from
+                  lineitem
+              group by
+                  lineitem.l_orderkey, lineitem.l_partkey
+
+    The above case will result in the following materialization
+    ------------------------------------------------------------
+
+    l_orderkey  l_partkey  default.lineitem.l_suppkey  l_orderkey  l_partkey  default.lineitem.l_partkey  l_orderkey  l_partkey  sum
+        1            1              7311                   1            1                1                    1           1      53.0
+        1            1              7706
+
+    */
+
     currentAggregatedTuples = new ArrayList<Tuple>();
     int listIndex = 0;
     while (true) {
@@ -296,9 +343,14 @@
       List<Integer> groupingKeyIdList = new ArrayList<Integer>(distinctGroupingKeyIdSet);
       Column[] keyColumns = groupbyNode.getGroupingColumns();
       Column col;
-      for (int idx = 0; idx < groupbyNode.getGroupingColumns().length; idx++) {
+      for (int idx = 0; idx < keyColumns.length; idx++) {
         col = keyColumns[idx];
-        int keyIndex = inSchema.getColumnId(col.getQualifiedName());
+        int keyIndex;
+        if (col.hasQualifier()) {
+          keyIndex = inSchema.getColumnId(col.getQualifiedName());
+        } else {
+          keyIndex = inSchema.getColumnIdByName(col.getSimpleName());
+        }
         if (!distinctGroupingKeyIdSet.contains(keyIndex)) {
           groupingKeyIdList.add(keyIndex);
         }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbySortAggregationExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbySortAggregationExec.java
index c8457ac..b786672 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbySortAggregationExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/DistinctGroupbySortAggregationExec.java
@@ -18,8 +18,10 @@
 
 package org.apache.tajo.engine.planner.physical;
 
-import org.apache.tajo.catalog.Column;
 import org.apache.tajo.catalog.statistics.TableStats;
+import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.datum.DatumFactory;
+import org.apache.tajo.datum.NullDatum;
 import org.apache.tajo.engine.planner.logical.DistinctGroupbyNode;
 import org.apache.tajo.engine.planner.logical.GroupbyNode;
 import org.apache.tajo.storage.Tuple;
@@ -34,8 +36,6 @@
 
   private boolean finished = false;
 
-  private int distinctGroupingKeyNum;
-
   private Tuple[] currentTuples;
   private int outColumnNum;
   private int groupbyNodeNum;
@@ -49,9 +49,6 @@
     this.aggregateExecs = aggregateExecs;
     this.groupbyNodeNum = plan.getGroupByNodes().size();
 
-    final Column[] keyColumns = plan.getGroupingColumns();
-    distinctGroupingKeyNum = keyColumns.length;
-
     currentTuples = new Tuple[groupbyNodeNum];
     outColumnNum = outSchema.size();
 
@@ -85,11 +82,14 @@
     }
 
     boolean allNull = true;
+
     for (int i = 0; i < groupbyNodeNum; i++) {
       if (first && i > 0) {
         // All SortAggregateExec uses same SeqScanExec object.
         // After running sort, rescan() should be called.
-        aggregateExecs[i].rescan();
+        if (currentTuples[i-1] != null) {
+          aggregateExecs[i].rescan();
+        }
       }
       currentTuples[i] = aggregateExecs[i].next();
 
@@ -97,6 +97,13 @@
         allNull = false;
       }
     }
+
+    // If DistinctGroupbySortAggregationExec received NullDatum and didn't has any grouping keys,
+    // it should return primitive values for NullDatum.
+    if (allNull && aggregateExecs[0].groupingKeyNum == 0 && first)   {
+      return getEmptyTuple();
+    }
+
     first = false;
 
     if (allNull) {
@@ -116,10 +123,36 @@
         mergeTupleIndex++;
       }
     }
-
     return mergedTuple;
   }
 
+  private Tuple getEmptyTuple() {
+    Tuple tuple = new VTuple(outColumnNum);
+    NullDatum nullDatum = DatumFactory.createNullDatum();
+
+    for (int i = 0; i < outColumnNum; i++) {
+      TajoDataTypes.Type type = outSchema.getColumn(i).getDataType().getType();
+      if (type == TajoDataTypes.Type.INT8) {
+        tuple.put(i, DatumFactory.createInt8(nullDatum.asInt8()));
+      } else if (type == TajoDataTypes.Type.INT4) {
+        tuple.put(i, DatumFactory.createInt4(nullDatum.asInt4()));
+      } else if (type == TajoDataTypes.Type.INT2) {
+        tuple.put(i, DatumFactory.createInt2(nullDatum.asInt2()));
+      } else if (type == TajoDataTypes.Type.FLOAT4) {
+        tuple.put(i, DatumFactory.createFloat4(nullDatum.asFloat4()));
+      } else if (type == TajoDataTypes.Type.FLOAT8) {
+        tuple.put(i, DatumFactory.createFloat8(nullDatum.asFloat8()));
+      } else {
+        tuple.put(i, DatumFactory.createNullDatum());
+      }
+    }
+
+    finished = true;
+    first = false;
+
+    return tuple;
+  }
+
   @Override
   public void close() throws IOException {
     plan = null;
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashAggregateExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashAggregateExec.java
index c87e01a..3323d1f 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashAggregateExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashAggregateExec.java
@@ -69,6 +69,16 @@
         hashTable.put(keyTuple, contexts);
       }
     }
+
+    // If HashAggregateExec received NullDatum and didn't has any grouping keys,
+    // it should return primitive values for NullLDatum.
+    if (groupingKeyNum == 0 && aggFunctionsNum > 0 && hashTable.entrySet().size() == 0) {
+      FunctionContext[] contexts = new FunctionContext[aggFunctionsNum];
+      for(int i = 0; i < aggFunctionsNum; i++) {
+        contexts[i] = aggFunctions[i].newContext();
+      }
+      hashTable.put(null, contexts);
+    }
   }
 
   @Override
@@ -101,7 +111,7 @@
   }
 
   @Override
-  public void rescan() throws IOException {    
+  public void rescan() throws IOException {
     iterator = hashTable.entrySet().iterator();
   }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashJoinExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashJoinExec.java
index dea0340..a5e9df0 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashJoinExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashJoinExec.java
@@ -19,6 +19,7 @@
 package org.apache.tajo.engine.planner.physical;
 
 import org.apache.tajo.catalog.Column;
+import org.apache.tajo.catalog.statistics.TableStats;
 import org.apache.tajo.engine.eval.EvalNode;
 import org.apache.tajo.engine.planner.PlannerUtil;
 import org.apache.tajo.engine.planner.Projector;
@@ -199,4 +200,29 @@
     return this.plan;
   }
 
+  @Override
+  public TableStats getInputStats() {
+    if (leftChild == null) {
+      return inputStats;
+    }
+    TableStats leftInputStats = leftChild.getInputStats();
+    inputStats.setNumBytes(0);
+    inputStats.setReadBytes(0);
+    inputStats.setNumRows(0);
+
+    if (leftInputStats != null) {
+      inputStats.setNumBytes(leftInputStats.getNumBytes());
+      inputStats.setReadBytes(leftInputStats.getReadBytes());
+      inputStats.setNumRows(leftInputStats.getNumRows());
+    }
+
+    TableStats rightInputStats = rightChild.getInputStats();
+    if (rightInputStats != null) {
+      inputStats.setNumBytes(inputStats.getNumBytes() + rightInputStats.getNumBytes());
+      inputStats.setReadBytes(inputStats.getReadBytes() + rightInputStats.getReadBytes());
+      inputStats.setNumRows(inputStats.getNumRows() + rightInputStats.getNumRows());
+    }
+
+    return inputStats;
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashLeftOuterJoinExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashLeftOuterJoinExec.java
index 849dc38..622900f 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashLeftOuterJoinExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/HashLeftOuterJoinExec.java
@@ -141,18 +141,20 @@
 
       // getting a next right tuple on in-memory hash table.
       rightTuple = iterator.next();
-      frameTuple.set(leftTuple, rightTuple); // evaluate a join condition on both tuples
-      if (joinQual.eval(inSchema, frameTuple).isTrue()) { // if both tuples are joinable
-        projector.eval(frameTuple, outTuple);
-        found = true;
-      }
-
       if (!iterator.hasNext()) { // no more right tuples for this hash key
         shouldGetLeftTuple = true;
       }
 
-      if (found) {
-        break;
+      frameTuple.set(leftTuple, rightTuple); // evaluate a join condition on both tuples
+      if (joinQual.eval(inSchema, frameTuple).isTrue()) { // if both tuples are joinable
+        projector.eval(frameTuple, outTuple);
+        return outTuple;
+      } else {
+        // null padding
+        Tuple nullPaddedTuple = TupleUtil.createNullPaddedTuple(rightNumCols);
+        frameTuple.set(leftTuple, nullPaddedTuple);
+        projector.eval(frameTuple, outTuple);
+        return outTuple;
       }
     }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/JoinTupleComparator.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/JoinTupleComparator.java
index 0d4c47b..a59f8d9 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/JoinTupleComparator.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/JoinTupleComparator.java
@@ -61,8 +61,17 @@
   @Override
   public int compare(Tuple outerTuple, Tuple innerTuple) {
     for (int i = 0; i < numSortKey; i++) {
-      outer = outerTuple.get(outerSortKeyIds[i]);
-      inner = innerTuple.get(innerSortKeyIds[i]);
+      if (outerTuple == null) {
+        outer = NullDatum.get();
+      } else {
+        outer = outerTuple.get(outerSortKeyIds[i]);
+      }
+
+      if (innerTuple == null) {
+        inner = NullDatum.get();
+      } else {
+        inner = innerTuple.get(innerSortKeyIds[i]);
+      }
 
       if (outer instanceof NullDatum || inner instanceof NullDatum) {
         if (!outer.equals(inner)) {
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/PartitionMergeScanExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/PartitionMergeScanExec.java
index 7f86ba2..9fa5b76 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/PartitionMergeScanExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/PartitionMergeScanExec.java
@@ -103,6 +103,7 @@
 
   @Override
   public void close() throws IOException {
+    inputStats.reset();
     for (SeqScanExec scanner : scanners) {
       scanner.close();
       TableStats scannerTableStsts = scanner.getInputStats();
@@ -138,6 +139,15 @@
 
   @Override
   public TableStats getInputStats() {
+    if (iterator != null) {
+      inputStats.reset();
+      for (SeqScanExec scanner : scanners) {
+        TableStats scannerTableStats = scanner.getInputStats();
+        if (scannerTableStats != null) {
+          inputStats.merge(scannerTableStats);
+        }
+      }
+    }
     return inputStats;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/RightOuterMergeJoinExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/RightOuterMergeJoinExec.java
index c70174a..365fc22 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/RightOuterMergeJoinExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/RightOuterMergeJoinExec.java
@@ -25,6 +25,7 @@
 import org.apache.tajo.engine.planner.PlannerUtil;
 import org.apache.tajo.engine.planner.Projector;
 import org.apache.tajo.engine.planner.logical.JoinNode;
+import org.apache.tajo.engine.utils.TupleUtil;
 import org.apache.tajo.storage.FrameTuple;
 import org.apache.tajo.storage.Tuple;
 import org.apache.tajo.storage.TupleComparator;
@@ -298,10 +299,16 @@
           posRightTupleSlots = posRightTupleSlots + 1;
 
           frameTuple.set(nextLeft, aTuple);
-          joinQual.eval(inSchema, frameTuple);
-          projector.eval(frameTuple, outTuple);
-          return outTuple;
-
+          if (joinQual.eval(inSchema, frameTuple).asBool()) {
+            projector.eval(frameTuple, outTuple);
+            return outTuple;
+          } else {
+            // padding null
+            Tuple nullPaddedTuple = TupleUtil.createNullPaddedTuple(leftNumCols);
+            frameTuple.set(nullPaddedTuple, aTuple);
+            projector.eval(frameTuple, outTuple);
+            return outTuple;
+          }
         } else {
           // right (inner) slots reached end and should be rewind if there are still tuples in the outer slots
           if(posLeftTupleSlots <= (leftTupleSlots.size() - 1)) {
@@ -313,9 +320,17 @@
             posLeftTupleSlots = posLeftTupleSlots + 1;
 
             frameTuple.set(nextLeft, aTuple);
-            joinQual.eval(inSchema, frameTuple);
-            projector.eval(frameTuple, outTuple);
-            return outTuple;
+
+            if (joinQual.eval(inSchema, frameTuple).asBool()) {
+              projector.eval(frameTuple, outTuple);
+              return outTuple;
+            } else {
+              // padding null
+              Tuple nullPaddedTuple = TupleUtil.createNullPaddedTuple(leftNumCols);
+              frameTuple.set(nullPaddedTuple, aTuple);
+              projector.eval(frameTuple, outTuple);
+              return outTuple;
+            }
           }
         }
       } // the second if end false
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SeqScanExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SeqScanExec.java
index 6dbcc3f..507cb6c 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SeqScanExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SeqScanExec.java
@@ -23,6 +23,7 @@
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.partition.PartitionMethodDesc;
 import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.catalog.proto.CatalogProtos.FragmentProto;
 import org.apache.tajo.catalog.statistics.TableStats;
 import org.apache.tajo.datum.Datum;
 import org.apache.tajo.engine.eval.ConstEval;
@@ -32,15 +33,20 @@
 import org.apache.tajo.engine.planner.Projector;
 import org.apache.tajo.engine.planner.Target;
 import org.apache.tajo.engine.planner.logical.ScanNode;
-import org.apache.tajo.engine.utils.*;
+import org.apache.tajo.engine.utils.SchemaUtil;
+import org.apache.tajo.engine.utils.TupleCache;
+import org.apache.tajo.engine.utils.TupleCacheKey;
+import org.apache.tajo.engine.utils.TupleUtil;
 import org.apache.tajo.storage.*;
-import org.apache.tajo.storage.Scanner;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.storage.fragment.FragmentConvertor;
 import org.apache.tajo.worker.TaskAttemptContext;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
 
 
 public class SeqScanExec extends PhysicalExec {
@@ -69,8 +75,17 @@
     this.fragments = fragments;
 
     if (plan.isBroadcastTable()) {
+      String pathNameKey = "";
+      if (fragments != null) {
+        for (FragmentProto f : fragments) {
+          FileFragment fileFragement = FragmentConvertor.convert(
+              context.getConf(), plan.getTableDesc().getMeta().getStoreType(), f);
+          pathNameKey += fileFragement.getPath();
+        }
+      }
+
       cacheKey = new TupleCacheKey(
-          context.getTaskId().getQueryUnitId().getExecutionBlockId().toString(), plan.getTableName());
+          context.getTaskId().getQueryUnitId().getExecutionBlockId().toString(), plan.getTableName(), pathNameKey);
     }
   }
 
@@ -182,7 +197,6 @@
 
   private void initScanner(Schema projected) throws IOException {
     this.projector = new Projector(inSchema, outSchema, plan.getTargets());
-
     if (fragments != null) {
       if (fragments.length > 1) {
         this.scanner = new MergeScanner(context.getConf(), plan.getPhysicalSchema(), plan.getTableDesc().getMeta(),
@@ -219,8 +233,10 @@
       }
     }
 
-    scanner.close();
-    scanner = null;
+    if (scanner != null) {
+      scanner.close();
+      scanner = null;
+    }
 
     TupleCache.getInstance().addBroadcastCache(cacheKey, broadcastTupleCacheList);
   }
@@ -308,9 +324,9 @@
   @Override
   public String toString() {
     if (scanner != null) {
-      return "SeqScanExec:" + plan.getTableName() + "," + scanner.getClass().getName();
+      return "SeqScanExec:" + plan + "," + scanner.getClass().getName();
     } else {
-      return "SeqScanExec:" + plan.getTableName();
+      return "SeqScanExec:" + plan;
     }
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SortAggregateExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SortAggregateExec.java
index 4c4227f..c4d43a3 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SortAggregateExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/SortAggregateExec.java
@@ -18,6 +18,7 @@
 
 package org.apache.tajo.engine.planner.physical;
 
+import org.apache.tajo.datum.NullDatum;
 import org.apache.tajo.engine.function.FunctionContext;
 import org.apache.tajo.engine.planner.logical.GroupbyNode;
 import org.apache.tajo.storage.Tuple;
@@ -57,7 +58,6 @@
     Tuple outputTuple = null;
 
     while(!context.isStopped() && (tuple = child.next()) != null) {
-
       // get a key tuple
       currentKey = new VTuple(groupingKeyIds.length);
       for(int i = 0; i < groupingKeyIds.length; i++) {
@@ -69,7 +69,12 @@
         if (lastKey == null) {
           for(int i = 0; i < aggFunctionsNum; i++) {
             contexts[i] = aggFunctions[i].newContext();
-            aggFunctions[i].merge(contexts[i], inSchema, tuple);
+
+            // Merge when aggregator doesn't receive NullDatum
+            if (!(groupingKeyNum == 0 && aggFunctionsNum == tuple.size()
+                && tuple.get(i) == NullDatum.get())) {
+              aggFunctions[i].merge(contexts[i], inSchema, tuple);
+            }
           }
           lastKey = currentKey;
         } else {
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/StoreTableExec.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/StoreTableExec.java
index b0c3c31..b1d0400 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/StoreTableExec.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/physical/StoreTableExec.java
@@ -20,9 +20,11 @@
 
 import org.apache.tajo.catalog.CatalogUtil;
 import org.apache.tajo.catalog.TableMeta;
+import org.apache.tajo.conf.TajoConf.ConfVars;
 import org.apache.tajo.engine.planner.logical.InsertNode;
 import org.apache.tajo.engine.planner.logical.PersistentStoreNode;
 import org.apache.tajo.storage.Appender;
+import org.apache.tajo.storage.StorageConstants;
 import org.apache.tajo.storage.StorageManagerFactory;
 import org.apache.tajo.storage.Tuple;
 import org.apache.tajo.worker.TaskAttemptContext;
@@ -57,6 +59,8 @@
       appender = StorageManagerFactory.getStorageManager(context.getConf()).getAppender(meta,
           createTableNode.getTableSchema(), context.getOutputPath());
     } else {
+      String nullChar = context.getQueryContext().get(ConfVars.CSVFILE_NULL.varname, ConfVars.CSVFILE_NULL.defaultVal);
+      meta.putOption(StorageConstants.CSVFILE_NULL, nullChar);
       appender = StorageManagerFactory.getStorageManager(context.getConf()).getAppender(meta, outSchema,
           context.getOutputPath());
     }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/FilterPushDownRule.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/FilterPushDownRule.java
index 32d4f34..4215423 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/FilterPushDownRule.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/FilterPushDownRule.java
@@ -20,19 +20,58 @@
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.tajo.algebra.JoinType;
+import org.apache.tajo.catalog.CatalogUtil;
 import org.apache.tajo.catalog.Column;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.catalog.TableDesc;
 import org.apache.tajo.engine.eval.*;
 import org.apache.tajo.engine.exception.InvalidQueryException;
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.logical.*;
+import org.apache.tajo.engine.planner.rewrite.FilterPushDownRule.FilterPushDownContext;
 import org.apache.tajo.util.TUtil;
 
 import java.util.*;
 
-public class FilterPushDownRule extends BasicLogicalPlanVisitor<Set<EvalNode>, LogicalNode> implements RewriteRule {
+/**
+ * This rule tries to push down all filter conditions into logical nodes as lower as possible.
+ * It is likely to significantly reduces the intermediate data.
+ */
+public class FilterPushDownRule extends BasicLogicalPlanVisitor<FilterPushDownContext, LogicalNode>
+    implements RewriteRule {
+  private final static Log LOG = LogFactory.getLog(FilterPushDownRule.class);
   private static final String NAME = "FilterPushDown";
 
+  static class FilterPushDownContext {
+    Set<EvalNode> pushingDownFilters = new HashSet<EvalNode>();
+
+    public void clear() {
+      pushingDownFilters.clear();
+    }
+    public void setFiltersTobePushed(Collection<EvalNode> workingEvals) {
+      this.pushingDownFilters.clear();
+      this.pushingDownFilters.addAll(workingEvals);
+    }
+    public void addFiltersTobePushed(Collection<EvalNode> workingEvals) {
+      this.pushingDownFilters.addAll(workingEvals);
+    }
+
+    public void setToOrigin(Map<EvalNode, EvalNode> evalMap) {
+      //evalMap: copy -> origin
+      List<EvalNode> origins = new ArrayList<EvalNode>();
+      for (EvalNode eval : pushingDownFilters) {
+        EvalNode origin = evalMap.get(eval);
+        if (origin != null) {
+          origins.add(origin);
+        }
+      }
+      setFiltersTobePushed(origins);
+    }
+  }
+
   @Override
   public String getName() {
     return NAME;
@@ -50,23 +89,41 @@
 
   @Override
   public LogicalPlan rewrite(LogicalPlan plan) throws PlanningException {
+    /*
+    FilterPushDown rule: processing when visits each node
+      - If a target which is corresponding on a filter EvalNode's column is not FieldEval, do not PushDown.
+      - Replace filter EvalNode's column with child node's output column.
+        If there is no child node's output column, do not PushDown.
+      - When visit ScanNode, add filter eval to ScanNode's qual
+      - When visit GroupByNode, Find aggregation column in a filter EvalNode and
+        . If a parent is HavingNode, add filter eval to parent HavingNode.
+        . It not, create new HavingNode and set parent's child.
+     */
+    FilterPushDownContext context = new FilterPushDownContext();
     for (LogicalPlan.QueryBlock block : plan.getQueryBlocks()) {
-      this.visit(new HashSet<EvalNode>(), plan, block, block.getRoot(), new Stack<LogicalNode>());
+      context.clear();
+      this.visit(context, plan, block, block.getRoot(), new Stack<LogicalNode>());
     }
 
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("=============================================");
+      LOG.debug("FilterPushDown Optimized Query: \n" + plan.toString());
+      LOG.debug("=============================================");
+    }
     return plan;
   }
 
   @Override
-  public LogicalNode visitFilter(Set<EvalNode> cnf, LogicalPlan plan, LogicalPlan.QueryBlock block,
+  public LogicalNode visitFilter(FilterPushDownContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
                                  SelectionNode selNode, Stack<LogicalNode> stack) throws PlanningException {
-    cnf.addAll(Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(selNode.getQual())));
+    context.pushingDownFilters.addAll(Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(selNode.getQual())));
 
     stack.push(selNode);
-    visit(cnf, plan, block, selNode.getChild(), stack);
+    visit(context, plan, block, selNode.getChild(), stack);
     stack.pop();
 
-    if(cnf.size() == 0) { // remove the selection operator if there is no search condition after selection push.
+    if(context.pushingDownFilters.size() == 0) {
+      // remove the selection operator if there is no search condition after selection push.
       LogicalNode node = stack.peek();
       if (node instanceof UnaryNode) {
         UnaryNode unary = (UnaryNode) node;
@@ -78,37 +135,31 @@
 
       // check if it can be evaluated here
       Set<EvalNode> matched = TUtil.newHashSet();
-      for (EvalNode eachEval : cnf) {
+      for (EvalNode eachEval : context.pushingDownFilters) {
         if (LogicalPlanner.checkIfBeEvaluatedAtThis(eachEval, selNode)) {
           matched.add(eachEval);
         }
       }
 
-      // if there are search conditions which can be evaluated here, push down them and remove them from cnf.
+      // if there are search conditions which can be evaluated here,
+      // push down them and remove them from context.pushingDownFilters.
       if (matched.size() > 0) {
         selNode.setQual(AlgebraicUtil.createSingletonExprFromCNF(matched.toArray(new EvalNode[matched.size()])));
-        cnf.removeAll(matched);
+        context.pushingDownFilters.removeAll(matched);
       }
     }
 
     return selNode;
   }
 
-  private boolean isOuterJoin(JoinType joinType) {
-    return joinType == JoinType.LEFT_OUTER || joinType == JoinType.RIGHT_OUTER || joinType==JoinType.FULL_OUTER;
-  }
-
   @Override
-  public LogicalNode visitJoin(Set<EvalNode> cnf, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode joinNode,
+  public LogicalNode visitJoin(FilterPushDownContext context, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode joinNode,
                                Stack<LogicalNode> stack) throws PlanningException {
-    LogicalNode left = joinNode.getRightChild();
-    LogicalNode right = joinNode.getLeftChild();
-
     // here we should stop selection pushdown on the null supplying side(s) of an outer join
     // get the two operands of the join operation as well as the join type
     JoinType joinType = joinNode.getJoinType();
     EvalNode joinQual = joinNode.getJoinQual();
-    if (joinQual != null && isOuterJoin(joinType)) {
+    if (joinQual != null && LogicalPlanner.isOuterJoin(joinType)) {
       BinaryEval binaryEval = (BinaryEval) joinQual;
       // if both are fields
       if (binaryEval.getLeftExpr().getType() == EvalType.FIELD &&
@@ -153,66 +204,116 @@
             throw new InvalidQueryException("Incorrect Logical Query Plan with regard to outer join");
           }
         }
-
-        // retain in this outer join node's JoinQual those selection predicates
-        // related to the outer join's null supplier(s)
-        List<EvalNode> matched2 = Lists.newArrayList();
-        for (EvalNode eval : cnf) {
-
-          Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(eval);
-          Set<String> tableNames = Sets.newHashSet();
-          // getting distinct table references
-          for (Column col : columnRefs) {
-            if (!tableNames.contains(col.getQualifier())) {
-              tableNames.add(col.getQualifier());
-            }
-          }
-
-          //if the predicate involves any of the null suppliers
-          boolean shouldKeep=false;
-          Iterator<String> it2 = nullSuppliers.iterator();
-          while(it2.hasNext()){
-            if(tableNames.contains(it2.next()) == true) {
-              shouldKeep = true;
-            }
-          }
-
-          if(shouldKeep == true) {
-            matched2.add(eval);
-          }
-
-        }
-
-        //merge the retained predicates and establish them in the current outer join node. Then remove them from the cnf
-        EvalNode qual2 = null;
-        if (matched2.size() > 1) {
-          // merged into one eval tree
-          qual2 = AlgebraicUtil.createSingletonExprFromCNF(
-              matched2.toArray(new EvalNode[matched2.size()]));
-        } else if (matched2.size() == 1) {
-          // if the number of matched expr is one
-          qual2 = matched2.get(0);
-        }
-
-        if (qual2 != null) {
-          EvalNode conjQual2 = AlgebraicUtil.createSingletonExprFromCNF(joinNode.getJoinQual(), qual2);
-          joinNode.setJoinQual(conjQual2);
-          cnf.removeAll(matched2);
-        } // for the remaining cnf, push it as usual
       }
     }
 
+    // get evals from ON clause
+    List<EvalNode> onConditions = new ArrayList<EvalNode>();
     if (joinNode.hasJoinQual()) {
-      cnf.addAll(Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual())));
+      onConditions.addAll(Sets.newHashSet(AlgebraicUtil.toConjunctiveNormalFormArray(joinNode.getJoinQual())));
     }
 
-    visit(cnf, plan, block, left, stack);
-    visit(cnf, plan, block, right, stack);
+    boolean isTopMostJoin = stack.peek().getType() != NodeType.JOIN;
 
+    List<EvalNode> outerJoinPredicationEvals = new ArrayList<EvalNode>();
+    List<EvalNode> outerJoinFilterEvalsExcludePredication = new ArrayList<EvalNode>();
+    if (LogicalPlanner.isOuterJoin(joinNode.getJoinType())) {
+      // TAJO-853
+      // In the case of top most JOIN, all filters except JOIN condition aren't pushed down.
+      // That filters are processed by SELECTION NODE.
+      Set<String> nullSupplyingTableNameSet;
+      if (joinNode.getJoinType() == JoinType.RIGHT_OUTER) {
+        nullSupplyingTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getLeftChild()));
+      } else {
+        nullSupplyingTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getRightChild()));
+      }
+
+      Set<String> preservedTableNameSet;
+      if (joinNode.getJoinType() == JoinType.RIGHT_OUTER) {
+        preservedTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getRightChild()));
+      } else {
+        preservedTableNameSet = TUtil.newHashSet(PlannerUtil.getRelationLineage(joinNode.getLeftChild()));
+      }
+
+      List<EvalNode> removedFromFilter = new ArrayList<EvalNode>();
+      for (EvalNode eachEval: context.pushingDownFilters) {
+        if (EvalTreeUtil.isJoinQual(eachEval, true)) {
+          outerJoinPredicationEvals.add(eachEval);
+          removedFromFilter.add(eachEval);
+        } else {
+          Set<Column> columns = EvalTreeUtil.findUniqueColumns(eachEval);
+          boolean canPushDown = true;
+          for (Column eachColumn: columns) {
+            if (nullSupplyingTableNameSet.contains(eachColumn.getQualifier())) {
+              canPushDown = false;
+              break;
+            }
+          }
+          if (!canPushDown) {
+            outerJoinFilterEvalsExcludePredication.add(eachEval);
+            removedFromFilter.add(eachEval);
+          }
+        }
+      }
+
+      context.pushingDownFilters.removeAll(removedFromFilter);
+
+      for (EvalNode eachOnEval: onConditions) {
+        if (EvalTreeUtil.isJoinQual(eachOnEval, true)) {
+          // If join condition, processing in the JoinNode.
+          outerJoinPredicationEvals.add(eachOnEval);
+        } else {
+          // If Eval has a column which belong to Preserved Row table, not using to push down but using JoinCondition
+          Set<Column> columns = EvalTreeUtil.findUniqueColumns(eachOnEval);
+          boolean canPushDown = true;
+          for (Column eachColumn: columns) {
+            if (preservedTableNameSet.contains(eachColumn.getQualifier())) {
+              canPushDown = false;
+              break;
+            }
+          }
+          if (canPushDown) {
+            context.pushingDownFilters.add(eachOnEval);
+          } else {
+            outerJoinPredicationEvals.add(eachOnEval);
+          }
+        }
+      }
+    } else {
+      context.pushingDownFilters.addAll(onConditions);
+    }
+
+    LogicalNode left = joinNode.getLeftChild();
+    LogicalNode right = joinNode.getRightChild();
+
+    List<EvalNode> notMatched = new ArrayList<EvalNode>();
+    // Join's input schema = right child output columns + left child output columns
+    Map<EvalNode, EvalNode> transformedMap = findCanPushdownAndTransform(context, joinNode, left, notMatched, null, true,
+        0);
+    context.setFiltersTobePushed(transformedMap.keySet());
+    visit(context, plan, block, left, stack);
+
+    context.setToOrigin(transformedMap);
+    context.addFiltersTobePushed(notMatched);
+
+    notMatched.clear();
+    transformedMap = findCanPushdownAndTransform(context, joinNode, right, notMatched, null, true, left.getOutSchema().size());
+    context.setFiltersTobePushed(new HashSet<EvalNode>(transformedMap.keySet()));
+
+    visit(context, plan, block, right, stack);
+
+    context.setToOrigin(transformedMap);
+    context.addFiltersTobePushed(notMatched);
+
+    notMatched.clear();
     List<EvalNode> matched = Lists.newArrayList();
-    for (EvalNode eval : cnf) {
-      if (LogicalPlanner.checkIfBeEvaluatedAtJoin(block, eval, joinNode, stack.peek().getType() != NodeType.JOIN)) {
-        matched.add(eval);
+    if(LogicalPlanner.isOuterJoin(joinNode.getJoinType())) {
+      matched.addAll(outerJoinPredicationEvals);
+    } else {
+      for (EvalNode eval : context.pushingDownFilters) {
+        if (LogicalPlanner.checkIfBeEvaluatedAtJoin(block, eval, joinNode, isTopMostJoin)) {
+          matched.add(eval);
+        }
       }
     }
 
@@ -232,50 +333,86 @@
       if (joinNode.getJoinType() == JoinType.CROSS) {
         joinNode.setJoinType(JoinType.INNER);
       }
-      cnf.removeAll(matched);
+      context.pushingDownFilters.removeAll(matched);
     }
 
+    context.pushingDownFilters.addAll(outerJoinFilterEvalsExcludePredication);
     return joinNode;
   }
 
-  @Override
-  public LogicalNode visitTableSubQuery(Set<EvalNode> cnf, LogicalPlan plan, LogicalPlan.QueryBlock block,
-                                        TableSubQueryNode node, Stack<LogicalNode> stack) throws PlanningException {
-    List<EvalNode> matched = Lists.newArrayList();
-    for (EvalNode eval : cnf) {
-      if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, eval, node)) {
-        matched.add(eval);
-      }
+  private Map<EvalNode, EvalNode> transformEvalsWidthByPassNode(
+      Collection<EvalNode> originEvals, LogicalPlan plan,
+      LogicalPlan.QueryBlock block,
+      LogicalNode node, LogicalNode childNode) throws PlanningException {
+    // transformed -> pushingDownFilters
+    Map<EvalNode, EvalNode> transformedMap = new HashMap<EvalNode, EvalNode>();
+
+    if (originEvals.isEmpty()) {
+      return transformedMap;
     }
 
-    Map<String, String> columnMap = new HashMap<String, String>();
-    for (int i = 0; i < node.getInSchema().size(); i++) {
-      LogicalNode childNode = node.getSubQuery();
-      if (childNode.getOutSchema().getColumn(i).hasQualifier()) {
-      columnMap.put(node.getInSchema().getColumn(i).getQualifiedName(),
-          childNode.getOutSchema().getColumn(i).getQualifiedName());
-      } else {
-        NamedExprsManager namedExprsMgr = plan.getBlock(node.getSubQuery()).getNamedExprsManager();
-        String originalName = namedExprsMgr.getOriginalName(childNode.getOutSchema().getColumn(i)
-            .getQualifiedName());
-
-        // We need to consider aliased columns of sub-query.
-        // Because we can't get original column name for a special occasion.
-        // For example, if we use an aliased name inside a sub-query and then we use it to where
-        // condition outside the sub-query, we can't find its original name.
-        if (originalName != null) {
-          columnMap.put(node.getInSchema().getColumn(i).getQualifiedName(), originalName);
-        } else {
-          columnMap.put(node.getInSchema().getColumn(i).getQualifiedName(),
-            node.getInSchema().getColumn(i).getQualifiedName());
+    if (node.getType() == NodeType.UNION) {
+      // If node is union, All eval's columns are simple name and matched with child's output schema.
+      Schema childOutSchema = childNode.getOutSchema();
+      for (EvalNode eval : originEvals) {
+        EvalNode copy;
+        try {
+          copy = (EvalNode) eval.clone();
+        } catch (CloneNotSupportedException e) {
+          throw new PlanningException(e);
         }
+
+        Set<Column> columns = EvalTreeUtil.findUniqueColumns(copy);
+        for (Column c : columns) {
+          Column column = childOutSchema.getColumn(c.getSimpleName());
+          if (column == null) {
+            throw new PlanningException(
+                "Invalid Filter PushDown on SubQuery: No such a corresponding column '"
+                    + c.getQualifiedName() + " for FilterPushDown(" + eval + "), " +
+                    "(PID=" + node.getPID() + ", Child=" + childNode.getPID() + ")");
+          }
+          EvalTreeUtil.changeColumnRef(copy, c.getSimpleName(), column.getQualifiedName());
+        }
+
+        transformedMap.put(copy, eval);
       }
+      return transformedMap;
     }
 
-    Set<EvalNode> transformed = new HashSet<EvalNode>();
+    if (childNode.getType() == NodeType.UNION) {
+      // If child is union, remove qualifier from eval's column
+      for (EvalNode eval : originEvals) {
+        EvalNode copy;
+        try {
+          copy = (EvalNode) eval.clone();
+        } catch (CloneNotSupportedException e) {
+          throw new PlanningException(e);
+        }
+
+        Set<Column> columns = EvalTreeUtil.findUniqueColumns(copy);
+        for (Column c : columns) {
+          if (c.hasQualifier()) {
+            EvalTreeUtil.changeColumnRef(copy, c.getQualifiedName(), c.getSimpleName());
+          }
+        }
+
+        transformedMap.put(copy, eval);
+      }
+
+      return transformedMap;
+    }
+
+    // node in column -> child out column
+    Map<String, String> columnMap = new HashMap<String, String>();
+
+    for (int i = 0; i < node.getInSchema().size(); i++) {
+      String inColumnName = node.getInSchema().getColumn(i).getQualifiedName();
+      Column childOutColumn = childNode.getOutSchema().getColumn(i);
+      columnMap.put(inColumnName, childOutColumn.getQualifiedName());
+    }
 
     // Rename from upper block's one to lower block's one
-    for (EvalNode matchedEval : matched) {
+    for (EvalNode matchedEval : originEvals) {
       EvalNode copy;
       try {
         copy = (EvalNode) matchedEval.clone();
@@ -284,31 +421,426 @@
       }
 
       Set<Column> columns = EvalTreeUtil.findUniqueColumns(copy);
+      boolean allMatched = true;
       for (Column c : columns) {
         if (columnMap.containsKey(c.getQualifiedName())) {
           EvalTreeUtil.changeColumnRef(copy, c.getQualifiedName(), columnMap.get(c.getQualifiedName()));
         } else {
-          throw new PlanningException(
-              "Invalid Filter PushDown on SubQuery: No such a corresponding column '"
-                  + c.getQualifiedName());
+          if (childNode.getType() == NodeType.GROUP_BY) {
+            if (((GroupbyNode) childNode).isAggregationColumn(c.getSimpleName())) {
+              allMatched = false;
+              break;
+            }
+          } else {
+            throw new PlanningException(
+                "Invalid Filter PushDown on SubQuery: No such a corresponding column '"
+                    + c.getQualifiedName() + " for FilterPushDown(" + matchedEval + "), " +
+                    "(PID=" + node.getPID() + ", Child=" + childNode.getPID() + ")"
+            );
+          }
         }
       }
-
-      transformed.add(copy);
+      if (allMatched) {
+        transformedMap.put(copy, matchedEval);
+      }
     }
 
-    visit(transformed, plan, plan.getBlock(node.getSubQuery()));
+    return transformedMap;
+  }
 
-    cnf.removeAll(matched);
+  @Override
+  public LogicalNode visitTableSubQuery(FilterPushDownContext context, LogicalPlan plan, LogicalPlan.QueryBlock block,
+                                        TableSubQueryNode node, Stack<LogicalNode> stack) throws PlanningException {
+    List<EvalNode> matched = Lists.newArrayList();
+    for (EvalNode eval : context.pushingDownFilters) {
+      if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, eval, node)) {
+        matched.add(eval);
+      }
+    }
+
+    // transformed -> pushingDownFilters
+    Map<EvalNode, EvalNode> transformedMap =
+        transformEvalsWidthByPassNode(matched, plan, block, node, node.getSubQuery());
+
+    context.setFiltersTobePushed(new HashSet<EvalNode>(transformedMap.keySet()));
+    visit(context, plan, plan.getBlock(node.getSubQuery()));
+
+    context.setToOrigin(transformedMap);
 
     return node;
   }
 
   @Override
-  public LogicalNode visitScan(Set<EvalNode> cnf, LogicalPlan plan, LogicalPlan.QueryBlock block, ScanNode scanNode,
+  public LogicalNode visitUnion(FilterPushDownContext context, LogicalPlan plan,
+                                LogicalPlan.QueryBlock block, UnionNode unionNode,
+                                Stack<LogicalNode> stack) throws PlanningException {
+    LogicalNode leftNode = unionNode.getLeftChild();
+
+    List<EvalNode> origins = new ArrayList<EvalNode>(context.pushingDownFilters);
+
+    // transformed -> pushingDownFilters
+    Map<EvalNode, EvalNode> transformedMap = transformEvalsWidthByPassNode(origins, plan, block, unionNode, leftNode);
+    context.setFiltersTobePushed(new HashSet<EvalNode>(transformedMap.keySet()));
+    visit(context, plan, plan.getBlock(leftNode));
+
+    if (!context.pushingDownFilters.isEmpty()) {
+      errorFilterPushDown(plan, leftNode, context);
+    }
+
+    LogicalNode rightNode = unionNode.getRightChild();
+    transformedMap = transformEvalsWidthByPassNode(origins, plan, block, unionNode, rightNode);
+    context.setFiltersTobePushed(new HashSet<EvalNode>(transformedMap.keySet()));
+    visit(context, plan, plan.getBlock(rightNode), rightNode, stack);
+
+    if (!context.pushingDownFilters.isEmpty()) {
+      errorFilterPushDown(plan, rightNode, context);
+    }
+
+    // notify all filter matched to upper
+    context.pushingDownFilters.clear();
+    return unionNode;
+  }
+
+  @Override
+  public LogicalNode visitProjection(FilterPushDownContext context,
+                                     LogicalPlan plan,
+                                     LogicalPlan.QueryBlock block,
+                                     ProjectionNode projectionNode,
+                                     Stack<LogicalNode> stack) throws PlanningException {
+    LogicalNode childNode = projectionNode.getChild();
+
+    List<EvalNode> notMatched = new ArrayList<EvalNode>();
+
+    //copy -> origin
+    Map<EvalNode, EvalNode> matched = findCanPushdownAndTransform(
+        context, projectionNode, childNode, notMatched, null, false, 0);
+
+    context.setFiltersTobePushed(matched.keySet());
+
+    stack.push(projectionNode);
+    LogicalNode current = visit(context, plan, plan.getBlock(childNode), childNode, stack);
+    stack.pop();
+
+    // find not matched after visiting child
+    for (EvalNode eval: context.pushingDownFilters) {
+      notMatched.add(matched.get(eval));
+    }
+
+    EvalNode qual = null;
+    if (notMatched.size() > 1) {
+      // merged into one eval tree
+      qual = AlgebraicUtil.createSingletonExprFromCNF(notMatched.toArray(new EvalNode[notMatched.size()]));
+    } else if (notMatched.size() == 1) {
+      // if the number of matched expr is one
+      qual = notMatched.get(0);
+    }
+
+    // If there is not matched node add SelectionNode and clear context.pushingDownFilters
+    if (qual != null) {
+      SelectionNode selectionNode = plan.createNode(SelectionNode.class);
+      selectionNode.setInSchema(current.getOutSchema());
+      selectionNode.setOutSchema(current.getOutSchema());
+      selectionNode.setQual(qual);
+      block.registerNode(selectionNode);
+
+      projectionNode.setChild(selectionNode);
+      selectionNode.setChild(current);
+    }
+
+    //notify all eval matched to upper
+    context.pushingDownFilters.clear();
+
+    return current;
+  }
+
+  private Map<EvalNode, EvalNode> findCanPushdownAndTransform(
+      FilterPushDownContext context, Projectable node,
+      LogicalNode childNode, List<EvalNode> notMatched,
+      Set<String> partitionColumns,
+      boolean ignoreJoin, int columnOffset) throws PlanningException {
+    // canonical name -> target
+    Map<String, Target> nodeTargetMap = new HashMap<String, Target>();
+    for (Target target : node.getTargets()) {
+      nodeTargetMap.put(target.getCanonicalName(), target);
+    }
+
+    // copy -> origin
+    Map<EvalNode, EvalNode> matched = new HashMap<EvalNode, EvalNode>();
+
+    for (EvalNode eval : context.pushingDownFilters) {
+      if (ignoreJoin && EvalTreeUtil.isJoinQual(eval, true)) {
+        notMatched.add(eval);
+        continue;
+      }
+      // If all column is field eval, can push down.
+      Set<Column> evalColumns = EvalTreeUtil.findUniqueColumns(eval);
+      boolean columnMatched = true;
+      for (Column c : evalColumns) {
+        Target target = nodeTargetMap.get(c.getQualifiedName());
+        if (target == null) {
+          columnMatched = false;
+          break;
+        }
+        if (target.getEvalTree().getType() != EvalType.FIELD) {
+          columnMatched = false;
+          break;
+        }
+      }
+
+      if (columnMatched) {
+        // transform eval column to child's output column
+        EvalNode copyEvalNode = transformEval(node, childNode, eval, nodeTargetMap, partitionColumns, columnOffset);
+        if (copyEvalNode != null) {
+          matched.put(copyEvalNode, eval);
+        } else {
+          notMatched.add(eval);
+        }
+      } else {
+        notMatched.add(eval);
+      }
+    }
+
+    return matched;
+  }
+
+  private EvalNode transformEval(Projectable node, LogicalNode childNode, EvalNode origin,
+                                 Map<String, Target> targetMap, Set<String> partitionColumns,
+                                 int columnOffset) throws PlanningException {
+    Schema outputSchema = childNode != null ? childNode.getOutSchema() : node.getInSchema();
+    EvalNode copy;
+    try {
+      copy = (EvalNode) origin.clone();
+    } catch (CloneNotSupportedException e) {
+      throw new PlanningException(e);
+    }
+    Set<Column> columns = EvalTreeUtil.findUniqueColumns(copy);
+    for (Column c: columns) {
+      Target target = targetMap.get(c.getQualifiedName());
+      if (target == null) {
+        throw new PlanningException(
+            "Invalid Filter PushDown: No such a corresponding target '"
+                + c.getQualifiedName() + " for FilterPushDown(" + origin + "), " +
+                "(PID=" + node.getPID() + ")"
+        );
+      }
+      EvalNode targetEvalNode = target.getEvalTree();
+      if (targetEvalNode.getType() != EvalType.FIELD) {
+        throw new PlanningException(
+            "Invalid Filter PushDown: '" + c.getQualifiedName() + "' target is not FieldEval " +
+                "(PID=" + node.getPID() + ")"
+        );
+      }
+
+      FieldEval fieldEval = (FieldEval)targetEvalNode;
+      Column targetInputColumn = fieldEval.getColumnRef();
+
+      int index;
+      if (targetInputColumn.hasQualifier()) {
+        index = node.getInSchema().getColumnId(targetInputColumn.getQualifiedName());
+      } else {
+        index = node.getInSchema().getColumnIdByName(targetInputColumn.getQualifiedName());
+      }
+      if (columnOffset > 0) {
+        index = index - columnOffset;
+      }
+      if (index < 0 || index >= outputSchema.size()) {
+        if (partitionColumns != null && !partitionColumns.isEmpty() && node instanceof ScanNode) {
+          ScanNode scanNode = (ScanNode)node;
+          boolean isPartitionColumn = false;
+          if (CatalogUtil.isFQColumnName(partitionColumns.iterator().next())) {
+            isPartitionColumn = partitionColumns.contains(
+                CatalogUtil.buildFQName(scanNode.getTableName(), c.getSimpleName()));
+          } else {
+            isPartitionColumn = partitionColumns.contains(c.getSimpleName());
+          }
+          if (isPartitionColumn) {
+            EvalTreeUtil.changeColumnRef(copy, c.getQualifiedName(),
+                scanNode.getCanonicalName() + "." + c.getSimpleName());
+          } else {
+            return null;
+          }
+        } else {
+          return null;
+        }
+      } else {
+        Column outputColumn = outputSchema.getColumn(index);
+        EvalTreeUtil.changeColumnRef(copy, c.getQualifiedName(), outputColumn.getQualifiedName());
+      }
+    }
+
+    return copy;
+  }
+
+  /**
+   * Find aggregation columns in filter eval and add having clause or add HavingNode.
+   * @param context
+   * @param plan
+   * @param block
+   * @param parentNode  If null, having is parent
+   * @param havingNode      If null, projection is parent
+   * @param groupByNode
+   * @return matched origin eval
+   * @throws PlanningException
+   */
+  private List<EvalNode> addHavingNode(FilterPushDownContext context, LogicalPlan plan,
+                                       LogicalPlan.QueryBlock block,
+                                       UnaryNode parentNode,
+                                       HavingNode havingNode,
+                                       GroupbyNode groupByNode) throws PlanningException {
+    // find aggregation column
+    Set<Column> groupingColumns = new HashSet<Column>(Arrays.asList(groupByNode.getGroupingColumns()));
+    Set<String> aggrFunctionOutColumns = new HashSet<String>();
+    for (Column column : groupByNode.getOutSchema().getColumns()) {
+      if (!groupingColumns.contains(column)) {
+        aggrFunctionOutColumns.add(column.getQualifiedName());
+      }
+    }
+
+    List<EvalNode> aggrEvalOrigins = new ArrayList<EvalNode>();
+    List<EvalNode> aggrEvals = new ArrayList<EvalNode>();
+
+    for (EvalNode eval : context.pushingDownFilters) {
+      EvalNode copy = null;
+      try {
+        copy = (EvalNode)eval.clone();
+      } catch (CloneNotSupportedException e) {
+      }
+      boolean isEvalAggrFunction = false;
+      for (Column evalColumn : EvalTreeUtil.findUniqueColumns(copy)) {
+        if (aggrFunctionOutColumns.contains(evalColumn.getSimpleName())) {
+          EvalTreeUtil.changeColumnRef(copy, evalColumn.getQualifiedName(), evalColumn.getSimpleName());
+          isEvalAggrFunction = true;
+          break;
+        }
+      }
+      if (isEvalAggrFunction) {
+        aggrEvals.add(copy);
+        aggrEvalOrigins.add(eval);
+      }
+    }
+
+    if (aggrEvals.isEmpty()) {
+      return aggrEvalOrigins;
+    }
+
+    // transform
+
+    HavingNode workingHavingNode;
+    if (havingNode != null) {
+      workingHavingNode = havingNode;
+      aggrEvals.add(havingNode.getQual());
+    } else {
+      workingHavingNode = plan.createNode(HavingNode.class);
+      block.registerNode(workingHavingNode);
+      parentNode.setChild(workingHavingNode);
+      workingHavingNode.setChild(groupByNode);
+    }
+
+    EvalNode qual = null;
+    if (aggrEvals.size() > 1) {
+      // merged into one eval tree
+      qual = AlgebraicUtil.createSingletonExprFromCNF(aggrEvals.toArray(new EvalNode[aggrEvals.size()]));
+    } else if (aggrEvals.size() == 1) {
+      // if the number of matched expr is one
+      qual = aggrEvals.get(0);
+    }
+
+    // If there is not matched node add SelectionNode and clear context.pushingDownFilters
+    if (qual != null) {
+      workingHavingNode.setQual(qual);
+    }
+
+    return aggrEvalOrigins;
+  }
+
+  @Override
+  public LogicalNode visitGroupBy(FilterPushDownContext context, LogicalPlan plan,
+                                  LogicalPlan.QueryBlock block, GroupbyNode groupbyNode,
+                                  Stack<LogicalNode> stack) throws PlanningException {
+    LogicalNode parentNode = stack.peek();
+    List<EvalNode> aggrEvals;
+    if (parentNode.getType() == NodeType.HAVING) {
+      aggrEvals = addHavingNode(context, plan, block, null, (HavingNode)parentNode, groupbyNode);
+    } else {
+      aggrEvals = addHavingNode(context, plan, block, (UnaryNode)parentNode, null, groupbyNode);
+    }
+
+    if (aggrEvals != null) {
+      // remove aggregation eval from conext
+      context.pushingDownFilters.removeAll(aggrEvals);
+    }
+
+    List<EvalNode> notMatched = new ArrayList<EvalNode>();
+    // transform
+    Map<EvalNode, EvalNode> tranformed =
+        findCanPushdownAndTransform(context, groupbyNode,groupbyNode.getChild(), notMatched, null, false, 0);
+
+    context.setFiltersTobePushed(tranformed.keySet());
+    LogicalNode current = super.visitGroupBy(context, plan, block, groupbyNode, stack);
+
+    context.setToOrigin(tranformed);
+    context.addFiltersTobePushed(notMatched);
+
+    return current;
+  }
+
+  @Override
+  public LogicalNode visitScan(FilterPushDownContext context, LogicalPlan plan,
+                               LogicalPlan.QueryBlock block, ScanNode scanNode,
                                Stack<LogicalNode> stack) throws PlanningException {
     List<EvalNode> matched = Lists.newArrayList();
-    for (EvalNode eval : cnf) {
+
+    // find partition column and check matching
+    Set<String> partitionColumns = new HashSet<String>();
+    TableDesc table = scanNode.getTableDesc();
+    boolean hasQualifiedName = false;
+    if (table.hasPartition()) {
+      for (Column c: table.getPartitionMethod().getExpressionSchema().getColumns()) {
+        partitionColumns.add(c.getQualifiedName());
+        hasQualifiedName = c.hasQualifier();
+      }
+    }
+    Set<EvalNode> partitionEvals = new HashSet<EvalNode>();
+    for (EvalNode eval : context.pushingDownFilters) {
+      if (table.hasPartition()) {
+        Set<Column> columns = EvalTreeUtil.findUniqueColumns(eval);
+        if (columns.size() != 1) {
+          continue;
+        }
+        Column column = columns.iterator().next();
+
+        // If catalog runs with HCatalog, partition column is a qualified name
+        // Else partition column is a simple name
+        boolean isPartitionColumn = false;
+        if (hasQualifiedName) {
+          isPartitionColumn = partitionColumns.contains(CatalogUtil.buildFQName(table.getName(), column.getSimpleName()));
+        } else {
+          isPartitionColumn = partitionColumns.contains(column.getSimpleName());
+        }
+        if (isPartitionColumn) {
+          EvalNode copy;
+          try {
+            copy = (EvalNode) eval.clone();
+          } catch (CloneNotSupportedException e) {
+            throw new PlanningException(e);
+          }
+          EvalTreeUtil.changeColumnRef(copy, column.getQualifiedName(),
+              scanNode.getCanonicalName() + "." + column.getSimpleName());
+          matched.add(copy);
+          partitionEvals.add(eval);
+        }
+      }
+    }
+
+    context.pushingDownFilters.removeAll(partitionEvals);
+
+    List<EvalNode> notMatched = new ArrayList<EvalNode>();
+
+    // transform
+    Map<EvalNode, EvalNode> transformed =
+        findCanPushdownAndTransform(context, scanNode, null, notMatched, partitionColumns, true, 0);
+
+    for (EvalNode eval : transformed.keySet()) {
       if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, eval, scanNode)) {
         matched.add(eval);
       }
@@ -321,15 +853,33 @@
           matched.toArray(new EvalNode[matched.size()]));
     } else if (matched.size() == 1) {
       // if the number of matched expr is one
-      qual = matched.get(0);
+      qual = matched.iterator().next();
     }
 
     if (qual != null) { // if a matched qual exists
       scanNode.setQual(qual);
     }
 
-    cnf.removeAll(matched);
+    for (EvalNode matchedEval: matched) {
+      transformed.remove(matchedEval);
+    }
+
+    context.setToOrigin(transformed);
+    context.addFiltersTobePushed(notMatched);
 
     return scanNode;
   }
+
+  private void errorFilterPushDown(LogicalPlan plan, LogicalNode node,
+                                   FilterPushDownContext context) throws PlanningException {
+    String notMatchedNodeStr = "";
+    String prefix = "";
+    for (EvalNode notMatchedNode: context.pushingDownFilters) {
+      notMatchedNodeStr += prefix + notMatchedNode;
+      prefix = ", ";
+    }
+    throw new PlanningException("FilterPushDown failed cause some filters not matched: " + notMatchedNodeStr + "\n" +
+        "Error node: " + node.getPlanString() + "\n" +
+        plan.toString());
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/PartitionedTableRewriter.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/PartitionedTableRewriter.java
index e637341..666c5fc 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/PartitionedTableRewriter.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/PartitionedTableRewriter.java
@@ -76,21 +76,8 @@
 
   @Override
   public LogicalPlan rewrite(LogicalPlan plan) throws PlanningException {
-    boolean containsPartitionedTables;
-    for (LogicalPlan.QueryBlock block : plan.getQueryBlocks()) {
-      containsPartitionedTables = false;
-      for (RelationNode relation : block.getRelations()) {
-        if (relation.getType() == NodeType.SCAN) {
-          TableDesc table = ((ScanNode)relation).getTableDesc();
-          if (table.hasPartition()) {
-            containsPartitionedTables = true;
-          }
-        }
-      }
-      if (containsPartitionedTables) {
-        rewriter.visit(block, plan, block, block.getRoot(), new Stack<LogicalNode>());
-      }
-    }
+    LogicalPlan.QueryBlock rootBlock = plan.getRootBlock();
+    rewriter.visit(rootBlock, plan, rootBlock, rootBlock.getRoot(), new Stack<LogicalNode>());
     return plan;
   }
 
@@ -360,7 +347,7 @@
         updateTableStat(rewrittenScanNode);
 
         // if it is topmost node, set it as the rootnode of this block.
-        if (stack.empty()) {
+        if (stack.empty() || block.getRoot().equals(scanNode)) {
           block.setRoot(rewrittenScanNode);
         } else {
           PlannerUtil.replaceNode(plan, stack.peek(), scanNode, rewrittenScanNode);
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/ProjectionPushDownRule.java b/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/ProjectionPushDownRule.java
index 8e91dca..4e4b5c3 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/ProjectionPushDownRule.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/planner/rewrite/ProjectionPushDownRule.java
@@ -27,6 +27,7 @@
 import org.apache.tajo.catalog.SortSpec;
 import org.apache.tajo.engine.eval.*;
 import org.apache.tajo.engine.planner.*;
+import org.apache.tajo.engine.planner.LogicalPlan.QueryBlock;
 import org.apache.tajo.engine.planner.logical.*;
 import org.apache.tajo.engine.utils.SchemaUtil;
 import org.apache.tajo.util.TUtil;
@@ -54,11 +55,15 @@
   public boolean isEligible(LogicalPlan plan) {
     LogicalNode toBeOptimized = plan.getRootBlock().getRoot();
 
-    if (PlannerUtil.checkIfDDLPlan(toBeOptimized) || !plan.getRootBlock().hasTableExpression()) {
+    if (PlannerUtil.checkIfDDLPlan(toBeOptimized)) {
       return false;
     }
-
-    return true;
+    for (QueryBlock eachBlock: plan.getQueryBlocks()) {
+      if (eachBlock.hasTableExpression()) {
+        return true;
+      }
+    }
+    return false;
   }
 
   @Override
@@ -135,8 +140,10 @@
     private BiMap<Integer, EvalNode> idToEvalBiMap;
     /** Map: Id -> Names */
     private LinkedHashMap<Integer, List<String>> idToNamesMap;
-    /** Map: Name -> Boolean */
-    private LinkedHashMap<String, Boolean> evaluationStateMap;
+    /** Map: Id -> Boolean */
+    private LinkedHashMap<Integer, Boolean> evaluationStateMap;
+    /** Map: alias name -> Id */
+    private LinkedHashMap<String, Integer> aliasMap;
 
     private LogicalPlan plan;
 
@@ -146,6 +153,7 @@
       idToEvalBiMap = HashBiMap.create();
       idToNamesMap = Maps.newLinkedHashMap();
       evaluationStateMap = Maps.newLinkedHashMap();
+      aliasMap = Maps.newLinkedHashMap();
     }
 
     private int getNextSeqId() {
@@ -153,6 +161,28 @@
     }
 
     /**
+     * If some expression is duplicated, we call an alias indicating the duplicated expression 'native alias'.
+     * This method checks whether a reference is native alias or not.
+     *
+     * @param name The reference name
+     * @return True if the reference is native alias. Otherwise, it will return False.
+     */
+    public boolean isNativeAlias(String name) {
+      return aliasMap.containsKey(name);
+    }
+
+    /**
+     * This method retrieves the name indicating actual expression that an given alias indicate.
+     *
+     * @param name an alias name
+     * @return Real reference name
+     */
+    public String getRealReferenceName(String name) {
+      int refId = aliasMap.get(name);
+      return getPrimaryName(refId);
+    }
+
+    /**
      * Add an expression with a specified name, which is usually an alias.
      * Later, you can refer this expression by the specified name.
      */
@@ -163,12 +193,19 @@
       if (nameToIdBiMap.containsKey(specifiedName)) {
         int refId = nameToIdBiMap.get(specifiedName);
         EvalNode found = idToEvalBiMap.get(refId);
-        if (found != null && !evalNode.equals(found)) {
-          if (found.getType() != EvalType.FIELD && evalNode.getType() != EvalType.FIELD) {
-            throw new PlanningException("Duplicate alias: " + evalNode);
-          }
-          if (found.getType() == EvalType.FIELD) {
-            idToEvalBiMap.forcePut(refId, evalNode);
+        if (found != null) {
+          if (evalNode.equals(found)) { // if input expression already exists
+            return specifiedName;
+          } else {
+            // The case where if existing reference name and a given reference name are the same to each other and
+            // existing EvalNode and a given EvalNode is the different
+            if (found.getType() != EvalType.FIELD && evalNode.getType() != EvalType.FIELD) {
+              throw new PlanningException("Duplicate alias: " + evalNode);
+            }
+
+            if (found.getType() == EvalType.FIELD) {
+              idToEvalBiMap.forcePut(refId, evalNode);
+            }
           }
         }
       }
@@ -176,18 +213,19 @@
       int refId;
       if (idToEvalBiMap.inverse().containsKey(evalNode)) {
         refId = idToEvalBiMap.inverse().get(evalNode);
+        aliasMap.put(specifiedName, refId);
+
       } else {
         refId = getNextSeqId();
         idToEvalBiMap.put(refId, evalNode);
+        TUtil.putToNestedList(idToNamesMap, refId, specifiedName);
+        for (Column column : EvalTreeUtil.findUniqueColumns(evalNode)) {
+          add(new FieldEval(column));
+        }
+        evaluationStateMap.put(refId, false);
       }
 
       nameToIdBiMap.put(specifiedName, refId);
-      TUtil.putToNestedList(idToNamesMap, refId, specifiedName);
-      evaluationStateMap.put(specifiedName, false);
-
-      for (Column column : EvalTreeUtil.findUniqueColumns(evalNode)) {
-        add(new FieldEval(column));
-      }
 
       return specifiedName;
     }
@@ -287,7 +325,8 @@
       if (!nameToIdBiMap.containsKey(name)) {
         throw new RuntimeException("No Such target name: " + name);
       }
-      return evaluationStateMap.get(name);
+      int refId = nameToIdBiMap.get(name);
+      return evaluationStateMap.get(refId);
     }
 
     public void markAsEvaluated(Target target) {
@@ -296,7 +335,7 @@
       if (!idToNamesMap.containsKey(refId)) {
         throw new RuntimeException("No such eval: " + evalNode);
       }
-      evaluationStateMap.put(target.getCanonicalName(), true);
+      evaluationStateMap.put(refId, true);
     }
 
     public Iterator<Target> getFilteredTargets(Set<String> required) {
@@ -305,6 +344,7 @@
 
     class FilteredTargetIterator implements Iterator<Target> {
       List<Target> filtered = TUtil.newList();
+      Iterator<Target> iterator;
 
       public FilteredTargetIterator(Set<String> required) {
         for (String name : nameToIdBiMap.keySet()) {
@@ -312,16 +352,17 @@
             filtered.add(getTarget(name));
           }
         }
+        iterator = filtered.iterator();
       }
 
       @Override
       public boolean hasNext() {
-        return false;
+        return iterator.hasNext();
       }
 
       @Override
       public Target next() {
-        return null;
+        return iterator.next();
       }
 
       @Override
@@ -412,8 +453,15 @@
     for (String referenceName : referenceNames) {
       Target target = context.targetListMgr.getTarget(referenceName);
 
-      if (context.targetListMgr.isEvaluated(referenceName)) {
-        finalTargets.add(new Target(new FieldEval(target.getNamedColumn())));
+      if (target.getEvalTree().getType() == EvalType.CONST) {
+        finalTargets.add(target);
+      } else if (context.targetListMgr.isEvaluated(referenceName)) {
+        if (context.targetListMgr.isNativeAlias(referenceName)) {
+          String realRefName = context.targetListMgr.getRealReferenceName(referenceName);
+          finalTargets.add(new Target(new FieldEval(realRefName, target.getDataType()), referenceName));
+        } else {
+          finalTargets.add(new Target(new FieldEval(target.getNamedColumn())));
+        }
       } else if (LogicalPlanner.checkIfBeEvaluatedAtThis(target.getEvalTree(), node)) {
         finalTargets.add(target);
         context.targetListMgr.markAsEvaluated(target);
@@ -687,12 +735,35 @@
     return node;
   }
 
+  private static void pushDownIfComplexTermInJoinCondition(Context ctx, EvalNode cnf, EvalNode term)
+      throws PlanningException {
+
+    // If one of both terms in a binary operator is a complex expression, the binary operator will require
+    // multiple phases. In this case, join cannot evaluate a binary operator.
+    // So, we should prevent dividing the binary operator into more subexpressions.
+    if (term.getType() != EvalType.FIELD && !(term instanceof BinaryEval)) {
+      String refName = ctx.addExpr(term);
+      EvalTreeUtil.replace(cnf, term, new FieldEval(refName, term.getValueType()));
+    }
+  }
+
   public LogicalNode visitJoin(Context context, LogicalPlan plan, LogicalPlan.QueryBlock block, JoinNode node,
                           Stack<LogicalNode> stack) throws PlanningException {
     Context newContext = new Context(context);
 
     String joinQualReference = null;
     if (node.hasJoinQual()) {
+      for (EvalNode eachQual : AlgebraicUtil.toConjunctiveNormalFormArray(node.getJoinQual())) {
+        if (eachQual instanceof BinaryEval) {
+          BinaryEval binaryQual = (BinaryEval) eachQual;
+
+          for (int i = 0; i < 2; i++) {
+            EvalNode term = binaryQual.getExpr(i);
+            pushDownIfComplexTermInJoinCondition(newContext, eachQual, term);
+          }
+        }
+      }
+
       joinQualReference = newContext.addExpr(node.getJoinQual());
       newContext.addNecessaryReferences(node.getJoinQual());
     }
@@ -874,7 +945,7 @@
       newContext.addExpr(target);
     }
 
-    for (Iterator<Target> it = getFilteredTarget(targets, context.requiredSet); it.hasNext();) {
+    for (Iterator<Target> it = context.targetListMgr.getFilteredTargets(newContext.requiredSet); it.hasNext();) {
       Target target = it.next();
 
       if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, target.getEvalTree(), node)) {
@@ -908,7 +979,7 @@
       newContext.addExpr(target);
     }
 
-    for (Iterator<Target> it = getFilteredTarget(targets, context.requiredSet); it.hasNext();) {
+    for (Iterator<Target> it = context.targetListMgr.getFilteredTargets(newContext.requiredSet); it.hasNext();) {
       Target target = it.next();
 
       if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, target.getEvalTree(), node)) {
@@ -931,8 +1002,6 @@
     node.setSubQuery(child);
     stack.pop();
 
-    Context newContext = new Context(upperContext);
-
     Target [] targets;
     if (node.hasTargets()) {
       targets = node.getTargets();
@@ -941,17 +1010,17 @@
     }
 
     LinkedHashSet<Target> projectedTargets = Sets.newLinkedHashSet();
-    for (Iterator<Target> it = getFilteredTarget(targets, newContext.requiredSet); it.hasNext();) {
+    for (Iterator<Target> it = getFilteredTarget(targets, upperContext.requiredSet); it.hasNext();) {
       Target target = it.next();
-      childContext.addExpr(target);
+      upperContext.addExpr(target);
     }
 
-    for (Iterator<Target> it = getFilteredTarget(targets, upperContext.requiredSet); it.hasNext();) {
+    for (Iterator<Target> it = upperContext.targetListMgr.getFilteredTargets(upperContext.requiredSet); it.hasNext();) {
       Target target = it.next();
 
       if (LogicalPlanner.checkIfBeEvaluatedAtRelation(block, target.getEvalTree(), node)) {
         projectedTargets.add(target);
-        childContext.targetListMgr.markAsEvaluated(target);
+        upperContext.targetListMgr.markAsEvaluated(target);
       }
     }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheKey.java b/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheKey.java
index ad9204f..6f39d32 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheKey.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheKey.java
@@ -21,18 +21,12 @@
 public class TupleCacheKey {
   String ebId;
   String tableName;
+  String pathName;
 
-  public TupleCacheKey(String ebId, String tableName) {
+  public TupleCacheKey(String ebId, String tableName, String pathName) {
     this.ebId = ebId;
     this.tableName = tableName;
-  }
-
-  public String getEbId() {
-    return ebId;
-  }
-
-  public void setEbId(String ebId) {
-    this.ebId = ebId;
+    this.pathName = pathName;
   }
 
   public String getTableName() {
@@ -55,6 +49,6 @@
 
   @Override
   public String toString() {
-    return ebId + "," + tableName;
+    return ebId + "," + tableName + "," + pathName;
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheScanner.java b/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheScanner.java
index 3b91f94..743d70c 100644
--- a/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheScanner.java
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/utils/TupleCacheScanner.java
@@ -51,7 +51,12 @@
   public Tuple next() throws IOException {
     if (it.hasNext()) {
       count++;
-      return it.next();
+      Tuple tuple = it.next();
+      try {
+        return (Tuple)tuple.clone();
+      } catch (CloneNotSupportedException e) {
+        throw new IOException(e.getMessage(), e);
+      }
     } else {
       return null;
     }
diff --git a/tajo-core/src/main/java/org/apache/tajo/engine/utils/test/ErrorInjectionRewriter.java b/tajo-core/src/main/java/org/apache/tajo/engine/utils/test/ErrorInjectionRewriter.java
new file mode 100644
index 0000000..333df11
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/engine/utils/test/ErrorInjectionRewriter.java
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.utils.test;
+
+import org.apache.tajo.engine.planner.LogicalPlan;
+import org.apache.tajo.engine.planner.PlanningException;
+import org.apache.tajo.engine.planner.rewrite.RewriteRule;
+
+public class ErrorInjectionRewriter implements RewriteRule {
+  @Override
+  public String getName() {
+    return "ErrorInjectionRewriter";
+  }
+
+  @Override
+  public boolean isEligible(LogicalPlan plan) {
+    return true;
+  }
+
+  @Override
+  public LogicalPlan rewrite(LogicalPlan plan) throws PlanningException {
+    throw new NullPointerException();
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java b/tajo-core/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
index 5bfac8b..21df4e9 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/DefaultTaskScheduler.java
@@ -30,7 +30,6 @@
 import org.apache.tajo.QueryUnitAttemptId;
 import org.apache.tajo.engine.planner.global.ExecutionBlock;
 import org.apache.tajo.engine.planner.global.MasterPlan;
-import org.apache.tajo.engine.planner.logical.ScanNode;
 import org.apache.tajo.engine.query.QueryUnitRequest;
 import org.apache.tajo.engine.query.QueryUnitRequestImpl;
 import org.apache.tajo.ipc.TajoWorkerProtocol;
@@ -190,7 +189,6 @@
           scheduledObjectNum++;
           if (castEvent.hasRightFragments()) {
             task.addFragments(castEvent.getRightFragments());
-            //scheduledObjectNum += castEvent.getRightFragments().size();
           }
           subQuery.getEventHandler().handle(new TaskEvent(task.getId(), TaskEventType.T_SCHEDULE));
         } else {
@@ -821,7 +819,7 @@
               host, container.getTaskPort()));
           assignedRequest.add(attemptId);
 
-          scheduledObjectNum -= task.getAllFragments().size();
+          scheduledObjectNum--;
           taskRequest.getCallback().run(taskAssign.getProto());
         } else {
           throw new RuntimeException("Illegal State!!!!!!!!!!!!!!!!!!!!!");
@@ -873,11 +871,11 @@
           if (checkIfInterQuery(subQuery.getMasterPlan(), subQuery.getBlock())) {
             taskAssign.setInterQuery();
           }
-          for (ScanNode scan : task.getScanNodes()) {
-            Collection<FetchImpl> fetches = task.getFetch(scan);
+          for(Map.Entry<String, Set<FetchImpl>> entry: task.getFetchMap().entrySet()) {
+            Collection<FetchImpl> fetches = entry.getValue();
             if (fetches != null) {
               for (FetchImpl fetch : fetches) {
-                taskAssign.addFetch(scan.getTableName(), fetch);
+                taskAssign.addFetch(entry.getKey(), fetch);
               }
             }
           }
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/GlobalEngine.java b/tajo-core/src/main/java/org/apache/tajo/master/GlobalEngine.java
index 3b81ce2..8954df1 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/GlobalEngine.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/GlobalEngine.java
@@ -45,7 +45,6 @@
 import org.apache.tajo.engine.eval.EvalNode;
 import org.apache.tajo.engine.exception.IllegalQueryStatusException;
 import org.apache.tajo.engine.exception.VerifyException;
-import org.apache.tajo.engine.parser.HiveQLAnalyzer;
 import org.apache.tajo.engine.parser.SQLAnalyzer;
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.logical.*;
@@ -80,7 +79,6 @@
   private final AbstractStorageManager sm;
 
   private SQLAnalyzer analyzer;
-  private HiveQLAnalyzer converter;
   private CatalogService catalog;
   private PreLogicalPlanVerifier preVerifier;
   private LogicalPlanner planner;
@@ -98,7 +96,6 @@
   public void start() {
     try  {
       analyzer = new SQLAnalyzer();
-      converter = new HiveQLAnalyzer();
       preVerifier = new PreLogicalPlanVerifier(context.getCatalog());
       planner = new LogicalPlanner(context.getCatalog());
       optimizer = new LogicalOptimizer(context.getConf());
@@ -120,6 +117,7 @@
   public SubmitQueryResponse executeQuery(Session session, String query, boolean isJson) {
     LOG.info("Query: " + query);
     QueryContext queryContext = new QueryContext();
+    queryContext.putAll(session.getAllVariables());
     Expr planningContext;
 
     try {
@@ -171,17 +169,8 @@
 
   public Expr buildExpressionFromSql(QueryContext queryContext, String sql)
       throws InterruptedException, IOException, IllegalQueryStatusException {
-    final boolean hiveQueryMode = context.getConf().getBoolVar(TajoConf.ConfVars.HIVE_QUERY_MODE);
-    LOG.info("hive.query.mode:" + hiveQueryMode);
-
-    if (hiveQueryMode) {
-      context.getSystemMetrics().counter("Query", "numHiveMode").inc();
-      queryContext.setHiveQueryMode();
-    }
-
     context.getSystemMetrics().counter("Query", "totalQuery").inc();
-
-    return hiveQueryMode ? converter.parse(sql) : analyzer.parse(sql);
+    return analyzer.parse(sql);
   }
 
   private SubmitQueryResponse executeQueryInternal(QueryContext queryContext,
@@ -257,7 +246,7 @@
       boolean isInsert = rootNode.getChild() != null && rootNode.getChild().getType() == NodeType.INSERT;
       if (isInsert) {
         InsertNode insertNode = rootNode.getChild();
-        insertNonFromQuery(insertNode, responseBuilder);
+        insertNonFromQuery(queryContext, insertNode, responseBuilder);
       } else {
         Schema schema = PlannerUtil.targetToSchema(targets);
         RowStoreUtil.RowStoreEncoder encoder = RowStoreUtil.createEncoder(schema);
@@ -300,7 +289,7 @@
     return response;
   }
 
-  private void insertNonFromQuery(InsertNode insertNode, SubmitQueryResponse.Builder responseBuilder)
+  private void insertNonFromQuery(QueryContext queryContext, InsertNode insertNode, SubmitQueryResponse.Builder responseBuilder)
       throws Exception {
     String nodeUniqName = insertNode.getTableName() == null ? insertNode.getPath().getName() : insertNode.getTableName();
     String queryId = nodeUniqName + "_" + System.currentTimeMillis();
@@ -321,7 +310,7 @@
     }
 
     TaskAttemptContext taskAttemptContext =
-        new TaskAttemptContext(context.getConf(), null, (CatalogProtos.FragmentProto[]) null, stagingDir);
+        new TaskAttemptContext(context.getConf(), queryContext, null, (CatalogProtos.FragmentProto[]) null, stagingDir);
     taskAttemptContext.setOutputPath(new Path(stagingResultDir, "part-01-000000"));
 
     EvalExprExec evalExprExec = new EvalExprExec(taskAttemptContext, (EvalExprNode) insertNode.getChild());
@@ -455,6 +444,10 @@
         AlterTableNode alterTable = (AlterTableNode) root;
         alterTable(session,alterTable);
         return true;
+      case TRUNCATE_TABLE:
+        TruncateTableNode truncateTable = (TruncateTableNode) root;
+        truncateTable(session, truncateTable);
+        return true;
       default:
         throw new InternalError("updateQuery cannot handle such query: \n" + root.toJson());
     }
@@ -478,7 +471,7 @@
       LOG.debug("Non Optimized Query: \n" + plan.toString());
       LOG.debug("=============================================");
     }
-    optimizer.optimize(plan);
+    optimizer.optimize(session, plan);
     LOG.info("=============================================");
     LOG.info("Optimized Query: \n" + plan.toString());
     LOG.info("=============================================");
@@ -591,6 +584,57 @@
     }
   }
 
+  /**
+   * Truncate table a given table
+   */
+  public void truncateTable(final Session session, final TruncateTableNode truncateTableNode) throws IOException {
+    List<String> tableNames = truncateTableNode.getTableNames();
+    final CatalogService catalog = context.getCatalog();
+
+    String databaseName;
+    String simpleTableName;
+
+    List<TableDesc> tableDescList = new ArrayList<TableDesc>();
+    for (String eachTableName: tableNames) {
+      if (CatalogUtil.isFQTableName(eachTableName)) {
+        String[] split = CatalogUtil.splitFQTableName(eachTableName);
+        databaseName = split[0];
+        simpleTableName = split[1];
+      } else {
+        databaseName = session.getCurrentDatabase();
+        simpleTableName = eachTableName;
+      }
+      final String qualifiedName = CatalogUtil.buildFQName(databaseName, simpleTableName);
+
+      if (!catalog.existsTable(databaseName, simpleTableName)) {
+        throw new NoSuchTableException(qualifiedName);
+      }
+
+      Path warehousePath = new Path(TajoConf.getWarehouseDir(context.getConf()), databaseName);
+      TableDesc tableDesc = catalog.getTableDesc(databaseName, simpleTableName);
+      Path tablePath = tableDesc.getPath();
+      if (tablePath.getParent() == null ||
+          !tablePath.getParent().toUri().getPath().equals(warehousePath.toUri().getPath())) {
+        throw new IOException("Can't truncate external table:" + eachTableName + ", data dir=" + tablePath +
+            ", warehouse dir=" + warehousePath);
+      }
+      tableDescList.add(tableDesc);
+    }
+
+    for (TableDesc eachTable: tableDescList) {
+      Path path = eachTable.getPath();
+      LOG.info("Truncate table: " + eachTable.getName() + ", delete all data files in " + path);
+      FileSystem fs = path.getFileSystem(context.getConf());
+
+      FileStatus[] files = fs.listStatus(path);
+      if (files != null) {
+        for (FileStatus eachFile: files) {
+          fs.delete(eachFile.getPath(), true);
+        }
+      }
+    }
+  }
+
   private boolean existColumnName(String tableName, String columnName) {
     final TableDesc tableDesc = catalog.getTableDesc(tableName);
     return tableDesc.getSchema().containsByName(columnName) ? true : false;
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/TajoMaster.java b/tajo-core/src/main/java/org/apache/tajo/master/TajoMaster.java
index dfae300..0962ca5 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/TajoMaster.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/TajoMaster.java
@@ -537,8 +537,9 @@
 
   public static List<File> getMountPath() throws Exception {
     BufferedReader mountOutput = null;
+    Process mountProcess = null;
     try {
-      Process mountProcess = Runtime.getRuntime ().exec("mount");
+      mountProcess = Runtime.getRuntime ().exec("mount");
       mountOutput = new BufferedReader(new InputStreamReader(mountProcess.getInputStream()));
       List<File> mountPaths = new ArrayList<File>();
       while (true) {
@@ -560,6 +561,11 @@
       if(mountOutput != null) {
         mountOutput.close();
       }
+      if (mountProcess != null) {
+        org.apache.commons.io.IOUtils.closeQuietly(mountProcess.getInputStream());
+        org.apache.commons.io.IOUtils.closeQuietly(mountProcess.getOutputStream());
+        org.apache.commons.io.IOUtils.closeQuietly(mountProcess.getErrorStream());
+      }
     }
   }
   public static void main(String[] args) throws Exception {
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Query.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Query.java
index 2848095..0ce6d7e 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Query.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Query.java
@@ -37,6 +37,7 @@
 import org.apache.tajo.catalog.TableMeta;
 import org.apache.tajo.catalog.statistics.TableStats;
 import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.conf.TajoConf.ConfVars;
 import org.apache.tajo.engine.planner.global.DataChannel;
 import org.apache.tajo.engine.planner.global.ExecutionBlock;
 import org.apache.tajo.engine.planner.global.ExecutionBlockCursor;
@@ -47,6 +48,7 @@
 import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.event.*;
 import org.apache.tajo.storage.AbstractStorageManager;
+import org.apache.tajo.storage.StorageConstants;
 import org.apache.tajo.util.TUtil;
 
 import java.io.IOException;
@@ -479,6 +481,10 @@
                           Path finalOutputDir) throws Exception {
         SubQuery lastStage = query.getSubQuery(finalExecBlockId);
         TableMeta meta = lastStage.getTableMeta();
+
+        String nullChar = queryContext.get(ConfVars.CSVFILE_NULL.varname, ConfVars.CSVFILE_NULL.defaultVal);
+        meta.putOption(StorageConstants.CSVFILE_NULL, nullChar);
+
         TableStats stats = lastStage.getResultStats();
 
         TableDesc resultTableDesc =
@@ -686,7 +692,11 @@
       try {
         getStateMachine().doTransition(event.getType(), event);
       } catch (InvalidStateTransitonException e) {
-        LOG.error("Can't handle this event at current state", e);
+        LOG.error("Can't handle this event at current state"
+            + ", type:" + event
+            + ", oldState:" + oldState.name()
+            + ", nextState:" + getState().name()
+            , e);
         eventHandler.handle(new QueryEvent(this.id, QueryEventType.INTERNAL_ERROR));
       }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryInProgress.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryInProgress.java
index e561a4c..261200e 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryInProgress.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryInProgress.java
@@ -21,7 +21,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.service.CompositeService;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.event.EventHandler;
@@ -43,6 +42,7 @@
 import org.apache.tajo.rpc.NullCallback;
 import org.apache.tajo.rpc.RpcConnectionPool;
 import org.apache.tajo.rpc.protocolrecords.PrimitiveProtos;
+import org.apache.tajo.util.NetUtils;
 
 import java.net.InetSocketAddress;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -205,8 +205,7 @@
   }
 
   private void connectQueryMaster() throws Exception {
-    InetSocketAddress addr = NetUtils.createSocketAddrForHost(
-        queryInfo.getQueryMasterHost(), queryInfo.getQueryMasterPort());
+    InetSocketAddress addr = NetUtils.createSocketAddr(queryInfo.getQueryMasterHost(), queryInfo.getQueryMasterPort());
     LOG.info("Connect to QueryMaster:" + addr);
     queryMasterRpc =
         RpcConnectionPool.getPool((TajoConf) getConfig()).getConnection(addr, QueryMasterProtocol.class, true);
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryJobManager.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryJobManager.java
index 66db9d6..acaefc9 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryJobManager.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryJobManager.java
@@ -98,15 +98,21 @@
   }
 
   public Collection<QueryInProgress> getSubmittedQueries() {
-    return Collections.unmodifiableCollection(submittedQueries.values());
+    synchronized (submittedQueries){
+      return Collections.unmodifiableCollection(submittedQueries.values());
+    }
   }
 
   public Collection<QueryInProgress> getRunningQueries() {
-    return Collections.unmodifiableCollection(runningQueries.values());
+    synchronized (runningQueries){
+      return Collections.unmodifiableCollection(runningQueries.values());
+    }
   }
 
   public Collection<QueryInProgress> getFinishedQueries() {
-    return Collections.unmodifiableCollection(finishedQueries.values());
+    synchronized (finishedQueries){
+      return Collections.unmodifiableCollection(finishedQueries.values());
+    }
   }
 
   public QueryInfo createNewQueryJob(Session session, QueryContext queryContext, String sql,
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMaster.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMaster.java
index a8c6014..f173c24 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMaster.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMaster.java
@@ -42,7 +42,6 @@
 import org.apache.tajo.rpc.protocolrecords.PrimitiveProtos;
 import org.apache.tajo.storage.AbstractStorageManager;
 import org.apache.tajo.storage.StorageManagerFactory;
-import org.apache.tajo.util.CommonTestingUtil;
 import org.apache.tajo.util.NetUtils;
 import org.apache.tajo.worker.TajoWorker;
 
@@ -337,10 +336,10 @@
 
         try {
           queryMasterTask.stop();
-          if (!systemConf.get(CommonTestingUtil.TAJO_TEST, "FALSE").equalsIgnoreCase("TRUE")
-              && !workerContext.isYarnContainerMode()) {
+          //if (!systemConf.get(CommonTestingUtil.TAJO_TEST, "FALSE").equalsIgnoreCase("TRUE")
+         //     && !workerContext.isYarnContainerMode()) {
             cleanup(queryId);       // TODO We will support yarn mode
-          }
+          //}
         } catch (Exception e) {
           LOG.error(e.getMessage(), e);
         }
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterManagerService.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterManagerService.java
index 589a656..826052d 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterManagerService.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterManagerService.java
@@ -217,7 +217,12 @@
                         RpcCallback<PrimitiveProtos.BoolProto> done) {
     QueryId queryId = new QueryId(request);
     QueryMasterTask queryMasterTask = queryMaster.getQueryMasterTask(queryId);
-    queryMasterTask.getQuery().handle(new QueryEvent(queryId, QueryEventType.KILL));
+    if (queryMasterTask != null) {
+      Query query = queryMasterTask.getQuery();
+      if (query != null) {
+        query.handle(new QueryEvent(queryId, QueryEventType.KILL));
+      }
+    }
   }
 
   @Override
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterTask.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterTask.java
index f812715..0061717 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterTask.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryMasterTask.java
@@ -210,7 +210,9 @@
     super.stop();
 
     //TODO change report to tajo master
-    queryMetrics.report(new MetricsConsoleReporter());
+    if (queryMetrics != null) {
+      queryMetrics.report(new MetricsConsoleReporter());
+    }
 
     LOG.info("Stopped QueryMasterTask:" + queryId);
   }
@@ -327,7 +329,7 @@
       LogicalOptimizer optimizer = new LogicalOptimizer(systemConf);
       Expr expr = JsonHelper.fromJson(jsonExpr, Expr.class);
       LogicalPlan plan = planner.createPlan(session, expr);
-      optimizer.optimize(plan);
+      optimizer.optimize(session, plan);
 
       GlobalEngine.DistributedQueryHookManager hookManager = new GlobalEngine.DistributedQueryHookManager();
       hookManager.addHook(new GlobalEngine.InsertHook());
@@ -392,8 +394,12 @@
       }
     } catch (IOException ioe) {
       if (stagingDir != null && defaultFS.exists(stagingDir)) {
-        defaultFS.delete(stagingDir, true);
-        LOG.info("The staging directory '" + stagingDir + "' is deleted");
+        try {
+          defaultFS.delete(stagingDir, true);
+          LOG.info("The staging directory '" + stagingDir + "' is deleted");
+        } catch (Exception e) {
+          LOG.warn(e.getMessage());
+        }
       }
 
       throw ioe;
@@ -490,6 +496,10 @@
     }
   }
 
+  public Throwable getInitError() {
+    return initError;
+  }
+
   public String getErrorMessage() {
     if (isInitError()) {
       return StringUtils.stringifyException(initError);
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnit.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnit.java
index 27625b4..6cada07 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnit.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnit.java
@@ -28,6 +28,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.state.*;
+import org.apache.tajo.ExecutionBlockId;
 import org.apache.tajo.QueryIdFactory;
 import org.apache.tajo.QueryUnitAttemptId;
 import org.apache.tajo.QueryUnitId;
@@ -155,6 +156,11 @@
           .addTransition(TaskState.FAILED, TaskState.FAILED,
               EnumSet.of(TaskEventType.T_KILL, TaskEventType.T_ATTEMPT_KILLED, TaskEventType.T_ATTEMPT_SUCCEEDED))
 
+          // Transitions from KILLED state
+          .addTransition(TaskState.KILLED, TaskState.KILLED,
+              TaskEventType.T_ATTEMPT_KILLED,
+              new KillTaskTransition())
+
           .installTopology();
 
   private final StateMachine<TaskState, TaskEventType, TaskEvent> stateMachine;
@@ -589,7 +595,11 @@
       try {
         stateMachine.doTransition(event.getType(), event);
       } catch (InvalidStateTransitonException e) {
-        LOG.error("Can't handle this event at current state", e);
+        LOG.error("Can't handle this event at current state"
+            + ", eventType:" + event.getType().name()
+            + ", oldState:" + oldState.name()
+            + ", nextState:" + getState().name()
+            , e);
         eventHandler.handle(new QueryEvent(TajoIdUtils.parseQueryId(getId().toString()),
             QueryEventType.INTERNAL_ERROR));
       }
@@ -652,6 +662,7 @@
   }
 
   public static class IntermediateEntry {
+    ExecutionBlockId ebId;
     int taskId;
     int attemptId;
     int partId;
@@ -664,6 +675,14 @@
       this.host = host;
     }
 
+    public ExecutionBlockId getEbId() {
+      return ebId;
+    }
+
+    public void setEbId(ExecutionBlockId ebId) {
+      this.ebId = ebId;
+    }
+
     public int getTaskId() {
       return this.taskId;
     }
@@ -682,7 +701,7 @@
 
     @Override
     public int hashCode() {
-      return Objects.hashCode(taskId, partId, attemptId, host);
+      return Objects.hashCode(ebId, taskId, partId, attemptId, host);
     }
   }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnitAttempt.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnitAttempt.java
index c3aae67..361f88f 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnitAttempt.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/QueryUnitAttempt.java
@@ -161,9 +161,12 @@
           EnumSet.of(
               TaskAttemptEventType.TA_UPDATE))
       .addTransition(TaskAttemptState.TA_KILLED, TaskAttemptState.TA_KILLED,
-          TaskAttemptEventType.TA_LOCAL_KILLED,
+          EnumSet.of(
+              TaskAttemptEventType.TA_LOCAL_KILLED,
+              TaskAttemptEventType.TA_KILL,
+              TaskAttemptEventType.TA_ASSIGNED,
+              TaskAttemptEventType.TA_DONE),
           new TaskKilledCompleteTransition())
-
       .installTopology();
 
   private final StateMachine<TaskAttemptState, TaskAttemptEventType, TaskAttemptEvent>
@@ -427,7 +430,11 @@
       try {
         stateMachine.doTransition(event.getType(), event);
       } catch (InvalidStateTransitonException e) {
-        LOG.error("Can't handle this event at current state of " + event.getTaskAttemptId() + ")", e);
+        LOG.error("Can't handle this event at current state of " + event.getTaskAttemptId() + ")"
+            + ", eventType:" + event.getType().name()
+            + ", oldState:" + oldState.name()
+            + ", nextState:" + getState().name()
+            , e);
         eventHandler.handle(
             new SubQueryDiagnosticsUpdateEvent(event.getTaskAttemptId().getQueryUnitId().getExecutionBlockId(),
                 "Can't handle this event at current state of " + event.getTaskAttemptId() + ")"));
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
index 3a2e79f..0046dbe 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/Repartitioner.java
@@ -87,13 +87,16 @@
     for (int i = 0; i < scans.length; i++) {
       TableDesc tableDesc = masterContext.getTableDescMap().get(scans[i].getCanonicalName());
       if (tableDesc == null) { // if it is a real table stored on storage
-        // TODO - to be fixed (wrong directory)
-        ExecutionBlock [] childBlocks = new ExecutionBlock[2];
-        childBlocks[0] = masterPlan.getChild(execBlock.getId(), 0);
-        childBlocks[1] = masterPlan.getChild(execBlock.getId(), 1);
-
         tablePath = storageManager.getTablePath(scans[i].getTableName());
-        stats[i] = masterContext.getSubQuery(childBlocks[i].getId()).getResultStats().getNumBytes();
+        if (execBlock.getUnionScanMap() != null && !execBlock.getUnionScanMap().isEmpty()) {
+          for (Map.Entry<ExecutionBlockId, ExecutionBlockId> unionScanEntry: execBlock.getUnionScanMap().entrySet()) {
+            ExecutionBlockId originScanEbId = unionScanEntry.getKey();
+            stats[i] += masterContext.getSubQuery(originScanEbId).getResultStats().getNumBytes();
+          }
+        } else {
+          ExecutionBlockId scanEBId = TajoIdUtils.createExecutionBlockId(scans[i].getTableName());
+          stats[i] = masterContext.getSubQuery(scanEBId).getResultStats().getNumBytes();
+        }
         fragments[i] = new FileFragment(scans[i].getCanonicalName(), tablePath, 0, 0, new String[]{UNKNOWN_HOST});
       } else {
         tablePath = tableDesc.getPath();
@@ -115,11 +118,10 @@
       }
     }
 
-    // If one of inner join tables has no input data,
-    // it should return zero rows.
+    // If one of inner join tables has no input data, it should return zero rows.
     JoinNode joinNode = PlannerUtil.findMostBottomNode(execBlock.getPlan(), NodeType.JOIN);
     if (joinNode != null) {
-      if ( (joinNode.getJoinType().equals(JoinType.INNER))) {
+      if ( (joinNode.getJoinType() == JoinType.INNER)) {
         for (int i = 0; i < stats.length; i++) {
           if (stats[i] == 0) {
             return;
@@ -128,20 +130,75 @@
       }
     }
 
+    // If node is outer join and a preserved relation is empty, it should return zero rows.
+    joinNode = PlannerUtil.findTopNode(execBlock.getPlan(), NodeType.JOIN);
+    if (joinNode != null) {
+      // If all stats are zero, return
+      boolean isEmptyAllJoinTables = true;
+      for (int i = 0; i < stats.length; i++) {
+        if (stats[i] > 0) {
+          isEmptyAllJoinTables = false;
+          break;
+        }
+      }
+      if (isEmptyAllJoinTables) {
+        LOG.info("All input join tables are empty.");
+        return;
+      }
+
+      // find left top scan node
+      ScanNode leftScanNode = PlannerUtil.findTopNode(joinNode.getLeftChild(), NodeType.SCAN);
+      ScanNode rightScanNode = PlannerUtil.findTopNode(joinNode.getRightChild(), NodeType.SCAN);
+
+      long leftStats = -1;
+      long rightStats = -1;
+      if (stats.length == 2) {
+        for (int i = 0; i < stats.length; i++) {
+          if (scans[i].equals(leftScanNode)) {
+            leftStats = stats[i];
+          } else if (scans[i].equals(rightScanNode)) {
+            rightStats = stats[i];
+          }
+        }
+        if (joinNode.getJoinType() == JoinType.LEFT_OUTER) {
+          if (leftStats == 0) {
+            return;
+          }
+        }
+        if (joinNode.getJoinType() == JoinType.RIGHT_OUTER) {
+          if (rightStats == 0) {
+            return;
+          }
+        }
+      }
+    }
+
     // Assigning either fragments or fetch urls to query units
     boolean isAllBroadcastTable = true;
     int baseScanIdx = -1;
+    long maxStats = Long.MIN_VALUE;
+    int maxStatsScanIdx = -1;
     for (int i = 0; i < scans.length; i++) {
       if (!execBlock.isBroadcastTable(scans[i].getCanonicalName())) {
         isAllBroadcastTable = false;
         baseScanIdx = i;
       }
+      // finding largest table.
+      if (stats[i] > maxStats) {
+        maxStats = stats[i];
+        maxStatsScanIdx = i;
+      }
     }
 
+
     if (isAllBroadcastTable) {
-      LOG.info("[Distributed Join Strategy] : Immediate " +  fragments.length + " Way Join on Single Machine");
-      SubQuery.scheduleFragment(subQuery, fragments[0], Arrays.asList(Arrays.copyOfRange(fragments, 1, fragments.length)));
-      schedulerContext.setEstimatedTaskNum(1);
+      // set largest table to normal mode
+      baseScanIdx = maxStatsScanIdx;
+      scans[baseScanIdx].setBroadcastTable(false);
+      execBlock.removeBroadcastTable(scans[baseScanIdx].getCanonicalName());
+      LOG.info(String.format("[Distributed Join Strategy] : Broadcast Join with all tables, base_table=%s, base_volume=%d",
+          scans[baseScanIdx].getCanonicalName(), stats[baseScanIdx]));
+      scheduleLeafTasksWithBroadcastTable(schedulerContext, subQuery, baseScanIdx, fragments);
     } else if (!execBlock.getBroadcastTables().isEmpty()) {
       LOG.info(String.format("[Distributed Join Strategy] : Broadcast Join, base_table=%s, base_volume=%d",
           scans[baseScanIdx].getCanonicalName(), stats[baseScanIdx]));
@@ -149,28 +206,38 @@
     } else {
       LOG.info("[Distributed Join Strategy] : Symmetric Repartition Join");
       // The hash map is modeling as follows:
-      // <Part Id, <Table Name, Intermediate Data>>
-      Map<Integer, Map<String, List<IntermediateEntry>>> hashEntries = new HashMap<Integer, Map<String, List<IntermediateEntry>>>();
+      // <Part Id, <EbId, Intermediate Data>>
+      Map<Integer, Map<ExecutionBlockId, List<IntermediateEntry>>> hashEntries =
+          new HashMap<Integer, Map<ExecutionBlockId, List<IntermediateEntry>>>();
 
       // Grouping IntermediateData by a partition key and a table name
-      for (ScanNode scan : scans) {
-        SubQuery childSubQuery = masterContext.getSubQuery(TajoIdUtils.createExecutionBlockId(scan.getCanonicalName()));
-        for (QueryUnit task : childSubQuery.getQueryUnits()) {
+      List<ExecutionBlock> childBlocks = masterPlan.getChilds(subQuery.getId());
+
+      // In the case of join with union, there is one ScanNode for union.
+      Map<ExecutionBlockId, ExecutionBlockId> unionScanMap = execBlock.getUnionScanMap();
+      for (ExecutionBlock childBlock : childBlocks) {
+        ExecutionBlockId scanEbId = unionScanMap.get(childBlock.getId());
+        if (scanEbId == null) {
+          scanEbId = childBlock.getId();
+        }
+        SubQuery childExecSM = subQuery.getContext().getSubQuery(childBlock.getId());
+        for (QueryUnit task : childExecSM.getQueryUnits()) {
           if (task.getIntermediateData() != null && !task.getIntermediateData().isEmpty()) {
             for (IntermediateEntry intermEntry : task.getIntermediateData()) {
+              intermEntry.setEbId(childBlock.getId());
               if (hashEntries.containsKey(intermEntry.getPartId())) {
-                Map<String, List<IntermediateEntry>> tbNameToInterm =
+                Map<ExecutionBlockId, List<IntermediateEntry>> tbNameToInterm =
                     hashEntries.get(intermEntry.getPartId());
 
-                if (tbNameToInterm.containsKey(scan.getCanonicalName())) {
-                  tbNameToInterm.get(scan.getCanonicalName()).add(intermEntry);
+                if (tbNameToInterm.containsKey(scanEbId)) {
+                  tbNameToInterm.get(scanEbId).add(intermEntry);
                 } else {
-                  tbNameToInterm.put(scan.getCanonicalName(), TUtil.newList(intermEntry));
+                  tbNameToInterm.put(scanEbId, TUtil.newList(intermEntry));
                 }
               } else {
-                Map<String, List<IntermediateEntry>> tbNameToInterm =
-                    new HashMap<String, List<IntermediateEntry>>();
-                tbNameToInterm.put(scan.getCanonicalName(), TUtil.newList(intermEntry));
+                Map<ExecutionBlockId, List<IntermediateEntry>> tbNameToInterm =
+                    new HashMap<ExecutionBlockId, List<IntermediateEntry>>();
+                tbNameToInterm.put(scanEbId, TUtil.newList(intermEntry));
                 hashEntries.put(intermEntry.getPartId(), tbNameToInterm);
               }
             }
@@ -178,15 +245,15 @@
             //if no intermidatedata(empty table), make empty entry
             int emptyPartitionId = 0;
             if (hashEntries.containsKey(emptyPartitionId)) {
-              Map<String, List<IntermediateEntry>> tbNameToInterm = hashEntries.get(emptyPartitionId);
-              if (tbNameToInterm.containsKey(scan.getCanonicalName()))
-                tbNameToInterm.get(scan.getCanonicalName())
-                    .addAll(new ArrayList<IntermediateEntry>());
+              Map<ExecutionBlockId, List<IntermediateEntry>> tbNameToInterm = hashEntries.get(emptyPartitionId);
+              if (tbNameToInterm.containsKey(scanEbId))
+                tbNameToInterm.get(scanEbId).addAll(new ArrayList<IntermediateEntry>());
               else
-                tbNameToInterm.put(scan.getCanonicalName(), new ArrayList<IntermediateEntry>());
+                tbNameToInterm.put(scanEbId, new ArrayList<IntermediateEntry>());
             } else {
-              Map<String, List<IntermediateEntry>> tbNameToInterm = new HashMap<String, List<IntermediateEntry>>();
-              tbNameToInterm.put(scan.getCanonicalName(), new ArrayList<IntermediateEntry>());
+              Map<ExecutionBlockId, List<IntermediateEntry>> tbNameToInterm =
+                  new HashMap<ExecutionBlockId, List<IntermediateEntry>>();
+              tbNameToInterm.put(scanEbId, new ArrayList<IntermediateEntry>());
               hashEntries.put(emptyPartitionId, tbNameToInterm);
             }
           }
@@ -222,7 +289,7 @@
       SubQuery.scheduleFragment(subQuery, fragments[0], Arrays.asList(new FileFragment[]{fragments[1]}));
 
       // Assign partitions to tasks in a round robin manner.
-      for (Entry<Integer, Map<String, List<IntermediateEntry>>> entry
+      for (Entry<Integer, Map<ExecutionBlockId, List<IntermediateEntry>>> entry
           : hashEntries.entrySet()) {
         addJoinShuffle(subQuery, entry.getKey(), entry.getValue());
       }
@@ -250,7 +317,6 @@
                                                           int baseScanId, FileFragment[] fragments) throws IOException {
     ExecutionBlock execBlock = subQuery.getBlock();
     ScanNode[] scans = execBlock.getScanNodes();
-    //Preconditions.checkArgument(scans.length == 2, "Must be Join Query");
 
     for (int i = 0; i < scans.length; i++) {
       if (i != baseScanId) {
@@ -258,41 +324,71 @@
       }
     }
 
-    TableMeta meta;
-    ScanNode scan = scans[baseScanId];
-    TableDesc desc = subQuery.getContext().getTableDescMap().get(scan.getCanonicalName());
-    meta = desc.getMeta();
-
-    Collection<FileFragment> baseFragments;
-    if (scan.getType() == NodeType.PARTITIONS_SCAN) {
-      baseFragments = getFragmentsFromPartitionedTable(subQuery.getStorageManager(), scan, desc);
-    } else {
-      baseFragments = subQuery.getStorageManager().getSplits(scan.getCanonicalName(), meta, desc.getSchema(),
-          desc.getPath());
-    }
-
+    // Large table(baseScan)
+    //  -> add all fragment to baseFragments
+    //  -> each fragment is assigned to a Task by DefaultTaskScheduler.handle()
+    // Broadcast table
+    //  all fragments or paths assigned every Large table's scan task.
+    //  -> PARTITIONS_SCAN
+    //     . add all partition paths to node's inputPaths variable
+    //  -> SCAN
+    //     . add all fragments to broadcastFragments
+    Collection<FileFragment> baseFragments = null;
     List<FileFragment> broadcastFragments = new ArrayList<FileFragment>();
-    for (int i = 0; i < fragments.length; i++) {
-      if (i != baseScanId) {
-        broadcastFragments.add(fragments[i]);
+    for (int i = 0; i < scans.length; i++) {
+      ScanNode scan = scans[i];
+      TableDesc desc = subQuery.getContext().getTableDescMap().get(scan.getCanonicalName());
+      TableMeta meta = desc.getMeta();
+
+      Collection<FileFragment> scanFragments;
+      Path[] partitionScanPaths = null;
+      if (scan.getType() == NodeType.PARTITIONS_SCAN) {
+        PartitionedTableScanNode partitionScan = (PartitionedTableScanNode)scan;
+        partitionScanPaths = partitionScan.getInputPaths();
+        // set null to inputPaths in getFragmentsFromPartitionedTable()
+        scanFragments = getFragmentsFromPartitionedTable(subQuery.getStorageManager(), scan, desc);
+      } else {
+        scanFragments = subQuery.getStorageManager().getSplits(scan.getCanonicalName(), meta, desc.getSchema(),
+            desc.getPath());
+      }
+
+      if (scanFragments != null) {
+        if (i == baseScanId) {
+          baseFragments = scanFragments;
+        } else {
+          if (scan.getType() == NodeType.PARTITIONS_SCAN) {
+            PartitionedTableScanNode partitionScan = (PartitionedTableScanNode)scan;
+            // PhisicalPlanner make PartitionMergeScanExec when table is boradcast table and inputpaths is not empty
+            partitionScan.setInputPaths(partitionScanPaths);
+          } else {
+            broadcastFragments.addAll(scanFragments);
+          }
+        }
       }
     }
+
+    if (baseFragments == null) {
+      throw new IOException("No fragments for " + scans[baseScanId].getTableName());
+    }
+
     SubQuery.scheduleFragments(subQuery, baseFragments, broadcastFragments);
     schedulerContext.setEstimatedTaskNum(baseFragments.size());
   }
 
   private static void addJoinShuffle(SubQuery subQuery, int partitionId,
-                                     Map<String, List<IntermediateEntry>> grouppedPartitions) {
+                                     Map<ExecutionBlockId, List<IntermediateEntry>> grouppedPartitions) {
     Map<String, List<FetchImpl>> fetches = new HashMap<String, List<FetchImpl>>();
     for (ExecutionBlock execBlock : subQuery.getMasterPlan().getChilds(subQuery.getId())) {
-      Collection<FetchImpl> requests;
-      if (grouppedPartitions.containsKey(execBlock.getId().toString())) {
-          requests = mergeShuffleRequest(execBlock.getId(), partitionId, HASH_SHUFFLE,
-              grouppedPartitions.get(execBlock.getId().toString()));
-      } else {
-        return;
+      if (grouppedPartitions.containsKey(execBlock.getId())) {
+        Collection<FetchImpl> requests = mergeShuffleRequest(partitionId, HASH_SHUFFLE,
+            grouppedPartitions.get(execBlock.getId()));
+        fetches.put(execBlock.getId().toString(), Lists.newArrayList(requests));
       }
-      fetches.put(execBlock.getId().toString(), Lists.newArrayList(requests));
+    }
+
+    if (fetches.isEmpty()) {
+      LOG.info(subQuery.getId() + "'s " + partitionId + " partition has empty result.");
+      return;
     }
     SubQuery.scheduleFetches(subQuery, fetches);
   }
@@ -303,20 +399,23 @@
    *
    * @return key: pullserver's address, value: a list of requests
    */
-  private static Collection<FetchImpl> mergeShuffleRequest(ExecutionBlockId ebid, int partitionId,
+  private static Collection<FetchImpl> mergeShuffleRequest(int partitionId,
                                                           TajoWorkerProtocol.ShuffleType type,
                                                           List<IntermediateEntry> partitions) {
-    Map<QueryUnit.PullHost, FetchImpl> mergedPartitions = new HashMap<QueryUnit.PullHost, FetchImpl>();
+    // ebId + pullhost -> FetchImmpl
+    Map<String, FetchImpl> mergedPartitions = new HashMap<String, FetchImpl>();
 
     for (IntermediateEntry partition : partitions) {
-      QueryUnit.PullHost host = partition.getPullHost();
-      if (mergedPartitions.containsKey(host)) {
-        FetchImpl fetch = mergedPartitions.get(partition.getPullHost());
+      String mergedKey = partition.getEbId().toString() + "," + partition.getPullHost();
+
+      if (mergedPartitions.containsKey(mergedKey)) {
+        FetchImpl fetch = mergedPartitions.get(mergedKey);
         fetch.addPart(partition.getTaskId(), partition.getAttemptId());
       } else {
-        FetchImpl fetch = new FetchImpl(host, type, ebid, partitionId);
+        // In some cases like union each IntermediateEntry has different EBID.
+        FetchImpl fetch = new FetchImpl(partition.getPullHost(), type, partition.getEbId(), partitionId);
         fetch.addPart(partition.getTaskId(), partition.getAttemptId());
-        mergedPartitions.put(partition.getPullHost(), fetch);
+        mergedPartitions.put(mergedKey, fetch);
       }
     }
     return mergedPartitions.values();
@@ -454,12 +553,6 @@
                                                  SubQuery subQuery, DataChannel channel,
                                                  int maxNum) {
     ExecutionBlock execBlock = subQuery.getBlock();
-    TableStats totalStat = computeChildBlocksStats(subQuery.getContext(), masterPlan, subQuery.getId());
-
-    if (totalStat.getNumRows() == 0) {
-      return;
-    }
-
     ScanNode scan = execBlock.getScanNodes()[0];
     Path tablePath;
     tablePath = subQuery.getContext().getStorageManager().getTablePath(scan.getTableName());
@@ -500,9 +593,15 @@
     // get a proper number of tasks
     int determinedTaskNum = Math.min(maxNum, finalFetches.size());
     LOG.info(subQuery.getId() + ", ScheduleHashShuffledFetches - Max num=" + maxNum + ", finalFetchURI=" + finalFetches.size());
+
     if (groupby != null && groupby.getGroupingColumns().length == 0) {
       determinedTaskNum = 1;
       LOG.info(subQuery.getId() + ", No Grouping Column - determinedTaskNum is set to 1");
+    } else {
+      TableStats totalStat = computeChildBlocksStats(subQuery.getContext(), masterPlan, subQuery.getId());
+      if (totalStat.getNumRows() == 0) {
+        determinedTaskNum = 1;
+      }
     }
 
     // set the proper number of tasks to the estimated task num
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
index 08517ef..d4c94e8 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/querymaster/SubQuery.java
@@ -35,7 +35,10 @@
 import org.apache.tajo.ExecutionBlockId;
 import org.apache.tajo.QueryIdFactory;
 import org.apache.tajo.QueryUnitId;
-import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.CatalogUtil;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.catalog.TableDesc;
+import org.apache.tajo.catalog.TableMeta;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.catalog.statistics.ColumnStats;
 import org.apache.tajo.catalog.statistics.StatisticsUtil;
@@ -236,7 +239,8 @@
                   SubQueryEventType.SQ_START,
                   SubQueryEventType.SQ_KILL,
                   SubQueryEventType.SQ_FAILED,
-                  SubQueryEventType.SQ_INTERNAL_ERROR))
+                  SubQueryEventType.SQ_INTERNAL_ERROR,
+                  SubQueryEventType.SQ_SUBQUERY_COMPLETED))
 
           .installTopology();
 
@@ -594,7 +598,11 @@
       try {
         getStateMachine().doTransition(event.getType(), event);
       } catch (InvalidStateTransitonException e) {
-        LOG.error("Can't handle this event at current state", e);
+        LOG.error("Can't handle this event at current state"
+            + ", eventType:" + event.getType().name()
+            + ", oldState:" + oldState.name()
+            + ", nextState:" + getState().name()
+            , e);
         eventHandler.handle(new SubQueryEvent(getId(),
             SubQueryEventType.SQ_INTERNAL_ERROR));
       }
@@ -741,6 +749,10 @@
 
         // determine the number of task
         taskNum = Math.min(taskNum, slots);
+        if (conf.getIntVar(ConfVars.TESTCASE_MIN_TASK_NUM) > 0) {
+          taskNum = conf.getIntVar(ConfVars.TESTCASE_MIN_TASK_NUM);
+          LOG.warn("!!!!! TESTCASE MODE !!!!!");
+        }
         LOG.info(subQuery.getId() + ", The determined number of join partitions is " + taskNum);
 
         // The shuffle output numbers of join may be inconsistent by execution block order.
@@ -899,6 +911,7 @@
       // Otherwise, it creates at least one fragments for a table, which may
       // span a number of blocks or possibly consists of a number of files.
       if (scan.getType() == NodeType.PARTITIONS_SCAN) {
+        // After calling this method, partition paths are removed from the physical plan.
         fragments = Repartitioner.getFragmentsFromPartitionedTable(subQuery.getStorageManager(), scan, table);
       } else {
         Path inputPath = table.getPath();
@@ -1121,7 +1134,7 @@
           return SubQueryState.SUCCEEDED;
         }
       } catch (Throwable t) {
-        LOG.error(t);
+        LOG.error(t.getMessage(), t);
         subQuery.abort(SubQueryState.ERROR);
         return SubQueryState.ERROR;
       }
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoRMContext.java b/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoRMContext.java
index a995058..2229f04 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoRMContext.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoRMContext.java
@@ -48,6 +48,9 @@
   private final Set<String> liveQueryMasterWorkerResources =
       Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
 
+  private final Set<QueryId> stoppedQueryIds =
+      Collections.newSetFromMap(new ConcurrentHashMap<QueryId, Boolean>());
+
   public TajoRMContext(Dispatcher dispatcher) {
     this.rmDispatcher = dispatcher;
   }
@@ -81,4 +84,8 @@
   public Set<String> getQueryMasterWorker() {
     return liveQueryMasterWorkerResources;
   }
+
+  public Set<QueryId> getStoppedQueryIds() {
+    return stoppedQueryIds;
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoWorkerResourceManager.java b/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoWorkerResourceManager.java
index 15ac6b6..3915225 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoWorkerResourceManager.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/rm/TajoWorkerResourceManager.java
@@ -292,59 +292,66 @@
                 ", liveWorkers=" + rmContext.getWorkers().size());
           }
 
-          List<AllocatedWorkerResource> allocatedWorkerResources = chooseWorkers(resourceRequest);
+          // TajoWorkerResourceManager can't return allocated disk slots occasionally.
+          // Because the rest resource request can remains after QueryMaster stops.
+          // Thus we need to find whether QueryId stopped or not.
+          if (!rmContext.getStoppedQueryIds().contains(resourceRequest.queryId)) {
+            List<AllocatedWorkerResource> allocatedWorkerResources = chooseWorkers(resourceRequest);
 
-          if(allocatedWorkerResources.size() > 0) {
-            List<WorkerAllocatedResource> allocatedResources =
-                new ArrayList<WorkerAllocatedResource>();
+            if(allocatedWorkerResources.size() > 0) {
+              List<WorkerAllocatedResource> allocatedResources =
+                  new ArrayList<WorkerAllocatedResource>();
 
-            for(AllocatedWorkerResource allocatedResource: allocatedWorkerResources) {
-              NodeId nodeId = NodeId.newInstance(allocatedResource.worker.getHostName(),
-                  allocatedResource.worker.getPeerRpcPort());
+              for(AllocatedWorkerResource allocatedResource: allocatedWorkerResources) {
+                NodeId nodeId = NodeId.newInstance(allocatedResource.worker.getHostName(),
+                    allocatedResource.worker.getPeerRpcPort());
 
-              TajoWorkerContainerId containerId = new TajoWorkerContainerId();
+                TajoWorkerContainerId containerId = new TajoWorkerContainerId();
 
-              containerId.setApplicationAttemptId(
-                  ApplicationIdUtils.createApplicationAttemptId(resourceRequest.queryId));
-              containerId.setId(containerIdSeq.incrementAndGet());
+                containerId.setApplicationAttemptId(
+                    ApplicationIdUtils.createApplicationAttemptId(resourceRequest.queryId));
+                containerId.setId(containerIdSeq.incrementAndGet());
 
-              ContainerIdProto containerIdProto = containerId.getProto();
-              allocatedResources.add(WorkerAllocatedResource.newBuilder()
-                  .setContainerId(containerIdProto)
-                  .setNodeId(nodeId.toString())
-                  .setWorkerHost(allocatedResource.worker.getHostName())
-                  .setQueryMasterPort(allocatedResource.worker.getQueryMasterPort())
-                  .setClientPort(allocatedResource.worker.getClientPort())
-                  .setPeerRpcPort(allocatedResource.worker.getPeerRpcPort())
-                  .setWorkerPullServerPort(allocatedResource.worker.getPullServerPort())
-                  .setAllocatedMemoryMB(allocatedResource.allocatedMemoryMB)
-                  .setAllocatedDiskSlots(allocatedResource.allocatedDiskSlots)
-                  .build());
+                ContainerIdProto containerIdProto = containerId.getProto();
+                allocatedResources.add(WorkerAllocatedResource.newBuilder()
+                    .setContainerId(containerIdProto)
+                    .setNodeId(nodeId.toString())
+                    .setWorkerHost(allocatedResource.worker.getHostName())
+                    .setQueryMasterPort(allocatedResource.worker.getQueryMasterPort())
+                    .setClientPort(allocatedResource.worker.getClientPort())
+                    .setPeerRpcPort(allocatedResource.worker.getPeerRpcPort())
+                    .setWorkerPullServerPort(allocatedResource.worker.getPullServerPort())
+                    .setAllocatedMemoryMB(allocatedResource.allocatedMemoryMB)
+                    .setAllocatedDiskSlots(allocatedResource.allocatedDiskSlots)
+                    .build());
 
 
-              allocatedResourceMap.putIfAbsent(containerIdProto, allocatedResource);
-            }
-
-            resourceRequest.callBack.run(WorkerResourceAllocationResponse.newBuilder()
-                .setQueryId(resourceRequest.request.getQueryId())
-                .addAllWorkerAllocatedResource(allocatedResources)
-                .build()
-            );
-
-          } else {
-            if(LOG.isDebugEnabled()) {
-              LOG.debug("=========================================");
-              LOG.debug("Available Workers");
-              for(String liveWorker: rmContext.getWorkers().keySet()) {
-                LOG.debug(rmContext.getWorkers().get(liveWorker).toString());
+                allocatedResourceMap.putIfAbsent(containerIdProto, allocatedResource);
               }
-              LOG.debug("=========================================");
+
+              resourceRequest.callBack.run(WorkerResourceAllocationResponse.newBuilder()
+                  .setQueryId(resourceRequest.request.getQueryId())
+                  .addAllWorkerAllocatedResource(allocatedResources)
+                  .build()
+              );
+
+            } else {
+              if(LOG.isDebugEnabled()) {
+                LOG.debug("=========================================");
+                LOG.debug("Available Workers");
+                for(String liveWorker: rmContext.getWorkers().keySet()) {
+                  LOG.debug(rmContext.getWorkers().get(liveWorker).toString());
+                }
+                LOG.debug("=========================================");
+              }
+              requestQueue.put(resourceRequest);
+              Thread.sleep(100);
             }
-            requestQueue.put(resourceRequest);
-            Thread.sleep(100);
           }
         } catch(InterruptedException ie) {
           LOG.error(ie);
+        } catch (Throwable t) {
+          LOG.error(t);
         }
       }
     }
@@ -524,14 +531,18 @@
 
   @Override
   public void stopQueryMaster(QueryId queryId) {
-    WorkerResource resource = null;
     if(!rmContext.getQueryMasterContainer().containsKey(queryId)) {
       LOG.warn("No QueryMaster resource info for " + queryId);
       return;
     } else {
       ContainerIdProto containerId = rmContext.getQueryMasterContainer().remove(queryId);
       releaseWorkerResource(containerId);
-      LOG.info(String.format("Released QueryMaster (%s) resource:" + resource, queryId.toString()));
+      rmContext.getStoppedQueryIds().add(queryId);
+      LOG.info(String.format("Released QueryMaster (%s) resource." , queryId.toString()));
     }
   }
+
+  public TajoRMContext getRMContext() {
+    return rmContext;
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/rm/Worker.java b/tajo-core/src/main/java/org/apache/tajo/master/rm/Worker.java
index 0d6b5ee..de6ee9e 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/rm/Worker.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/rm/Worker.java
@@ -281,7 +281,11 @@
       try {
         stateMachine.doTransition(event.getType(), event);
       } catch (InvalidStateTransitonException e) {
-        LOG.error("Can't handle this event at current state", e);
+        LOG.error("Can't handle this event at current state"
+            + ", eventType:" + event.getType().name()
+            + ", oldState:" + oldState.name()
+            + ", nextState:" + getState().name()
+            , e);
         LOG.error("Invalid event " + event.getType() + " on Worker  " + getWorkerId());
       }
       if (oldState != getState()) {
diff --git a/tajo-core/src/main/java/org/apache/tajo/master/session/Session.java b/tajo-core/src/main/java/org/apache/tajo/master/session/Session.java
index c60f50f..a67b6c8 100644
--- a/tajo-core/src/main/java/org/apache/tajo/master/session/Session.java
+++ b/tajo-core/src/main/java/org/apache/tajo/master/session/Session.java
@@ -85,6 +85,16 @@
     }
   }
 
+  public String getVariable(String name, String defaultValue) {
+    synchronized (sessionVariables) {
+      if (sessionVariables.containsKey(name)) {
+        return sessionVariables.get(name);
+      } else {
+        return defaultValue;
+      }
+    }
+  }
+
   public void removeVariable(String name) {
     synchronized (sessionVariables) {
       sessionVariables.remove(name);
diff --git a/tajo-core/src/main/java/org/apache/tajo/util/JSPUtil.java b/tajo-core/src/main/java/org/apache/tajo/util/JSPUtil.java
index 58a3550..8aebab0 100644
--- a/tajo-core/src/main/java/org/apache/tajo/util/JSPUtil.java
+++ b/tajo-core/src/main/java/org/apache/tajo/util/JSPUtil.java
@@ -20,11 +20,14 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.tajo.catalog.FunctionDesc;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.catalog.statistics.TableStats;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.master.querymaster.QueryInProgress;
 import org.apache.tajo.master.querymaster.QueryMasterTask;
 import org.apache.tajo.master.querymaster.QueryUnit;
 import org.apache.tajo.master.querymaster.SubQuery;
+import org.apache.tajo.worker.TaskRunnerHistory;
 import org.apache.tajo.worker.TaskRunner;
 
 import java.text.DecimalFormat;
@@ -52,6 +55,19 @@
     });
   }
 
+  public static void sortTaskRunnerHistory(List<TaskRunnerHistory> histories) {
+    Collections.sort(histories, new Comparator<TaskRunnerHistory>() {
+      @Override
+      public int compare(TaskRunnerHistory h1, TaskRunnerHistory h2) {
+        int value = h1.getExecutionBlockId().compareTo(h2.getExecutionBlockId());
+        if(value == 0){
+          return h1.getContainerId().compareTo(h2.getContainerId());
+        }
+        return value;
+      }
+    });
+  }
+
   public static String getElapsedTime(long startTime, long finishTime) {
     if(startTime == 0) {
       return "-";
@@ -206,4 +222,28 @@
   public static String percentFormat(float value) {
     return PERCENT_FORMAT.format(value * 100.0f);
   }
+
+  public static String tableStatToString(TableStats tableStats) {
+    if(tableStats != null){
+      return tableStatToString(tableStats.getProto());
+    }
+    else {
+      return "No input statistics";
+    }
+  }
+
+  public static String tableStatToString(CatalogProtos.TableStatsProto tableStats) {
+    if (tableStats == null) {
+      return "No input statistics";
+    }
+
+    String result = "";
+    result += "TotalBytes: " + FileUtil.humanReadableByteCount(tableStats.getNumBytes(), false) + " ("
+        + tableStats.getNumBytes() + " B)";
+    result += ", ReadBytes: " + FileUtil.humanReadableByteCount(tableStats.getReadBytes(), false) + " ("
+        + tableStats.getReadBytes() + " B)";
+    result += ", ReadRows: " + (tableStats.getNumRows() == 0 ? "-" : tableStats.getNumRows());
+
+    return result;
+  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/webapp/QueryExecutorServlet.java b/tajo-core/src/main/java/org/apache/tajo/webapp/QueryExecutorServlet.java
index faeadaf..3cb7d25 100644
--- a/tajo-core/src/main/java/org/apache/tajo/webapp/QueryExecutorServlet.java
+++ b/tajo-core/src/main/java/org/apache/tajo/webapp/QueryExecutorServlet.java
@@ -13,6 +13,7 @@
 import org.apache.tajo.ipc.ClientProtos;
 import org.apache.tajo.jdbc.TajoResultSet;
 import org.apache.tajo.util.JSPUtil;
+import org.apache.tajo.util.TajoIdUtils;
 import org.codehaus.jackson.map.DeserializationConfig;
 import org.codehaus.jackson.map.ObjectMapper;
 
@@ -170,7 +171,24 @@
           }
           queryRunners.clear();
         }
+      } else if("killQuery".equals(action)) {
+        String queryId = request.getParameter("queryId");
+        if(queryId == null || queryId.trim().isEmpty()) {
+          errorResponse(response, "No queryId parameter");
+          return;
+        }
+        QueryStatus status = tajoClient.killQuery(TajoIdUtils.parseQueryId(queryId));
+
+        if (status.getState() == TajoProtos.QueryState.QUERY_KILLED) {
+          returnValue.put("successMessage", queryId + " is killed successfully.");
+        } else if (status.getState() == TajoProtos.QueryState.QUERY_KILL_WAIT) {
+          returnValue.put("successMessage", queryId + " will be finished after a while.");
+        } else {
+          errorResponse(response, "ERROR:" + status.getErrorMessage());
+          return;
+        }
       }
+
       returnValue.put("success", "true");
       writeHttpResponse(response, returnValue);
     } catch (Exception e) {
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/Fetcher.java b/tajo-core/src/main/java/org/apache/tajo/worker/Fetcher.java
index a4836e4..37c653c 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/Fetcher.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/Fetcher.java
@@ -21,6 +21,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.tajo.TajoProtos;
 import org.jboss.netty.bootstrap.ClientBootstrap;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.*;
@@ -42,6 +43,7 @@
  * a specific file. It aims at asynchronous and efficient data transmit.
  */
 public class Fetcher {
+
   private final static Log LOG = LogFactory.getLog(Fetcher.class);
 
   private final URI uri;
@@ -54,12 +56,14 @@
   private long finishTime;
   private long fileLen;
   private int messageReceiveCount;
+  private TajoProtos.FetcherState state;
 
   private ClientBootstrap bootstrap;
 
   public Fetcher(URI uri, File file, ClientSocketChannelFactory factory) {
     this.uri = uri;
     this.file = file;
+    this.state = TajoProtos.FetcherState.FETCH_INIT;
 
     String scheme = uri.getScheme() == null ? "http" : uri.getScheme();
     this.host = uri.getHost() == null ? "localhost" : uri.getHost();
@@ -93,24 +97,17 @@
     return fileLen;
   }
 
+  public TajoProtos.FetcherState getState() {
+    return state;
+  }
+
   public int getMessageReceiveCount() {
     return messageReceiveCount;
   }
 
-  public String getStatus() {
-    if(startTime == 0) {
-      return "READY";
-    }
-
-    if(startTime > 0 && finishTime == 0) {
-      return "FETCHING";
-    } else {
-      return "FINISH";
-    }
-  }
-
   public File get() throws IOException {
     startTime = System.currentTimeMillis();
+    this.state = TajoProtos.FetcherState.FETCH_FETCHING;
 
     ChannelFuture future = bootstrap.connect(new InetSocketAddress(host, port));
 
@@ -142,6 +139,7 @@
     // Close the channel to exit.
     future.getChannel().close();
     finishTime = System.currentTimeMillis();
+    this.state = TajoProtos.FetcherState.FETCH_FINISHED;
     return file;
   }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorker.java b/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorker.java
index 3768edf..ed78e49 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorker.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorker.java
@@ -540,8 +540,9 @@
 
   public static List<File> getMountPath() throws IOException {
     BufferedReader mountOutput = null;
+    Process mountProcess = null;
     try {
-      Process mountProcess = Runtime.getRuntime ().exec("mount");
+      mountProcess = Runtime.getRuntime ().exec("mount");
       mountOutput = new BufferedReader(new InputStreamReader(mountProcess.getInputStream()));
       List<File> mountPaths = new ArrayList<File>();
       while (true) {
@@ -563,6 +564,11 @@
       if(mountOutput != null) {
         mountOutput.close();
       }
+      if (mountProcess != null) {
+        org.apache.commons.io.IOUtils.closeQuietly(mountProcess.getInputStream());
+        org.apache.commons.io.IOUtils.closeQuietly(mountProcess.getOutputStream());
+        org.apache.commons.io.IOUtils.closeQuietly(mountProcess.getErrorStream());
+      }
     }
   }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerClientService.java b/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerClientService.java
index 2b947fe..abd4e98 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerClientService.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerClientService.java
@@ -26,6 +26,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.tajo.QueryId;
 import org.apache.tajo.QueryIdFactory;
 import org.apache.tajo.TajoIdProtos;
@@ -210,6 +211,14 @@
           builder.setErrorMessage(firstError.getErrorMessage());
           builder.setErrorTrace(firstError.getErrorTrace());
         }
+
+        if (queryMasterTask.isInitError()) {
+          Throwable initError = queryMasterTask.getInitError();
+          builder.setErrorMessage(
+              initError.getMessage() == null ? initError.getClass().getName() : initError.getMessage());
+          builder.setErrorTrace(StringUtils.stringifyException(initError));
+          builder.setState(queryMasterTask.getState());
+        }
       }
       return builder.build();
     }
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerManagerService.java b/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerManagerService.java
index 392a7cf..13ef15d 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerManagerService.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TajoWorkerManagerService.java
@@ -139,7 +139,9 @@
   @Override
   public void killTaskAttempt(RpcController controller, TajoIdProtos.QueryUnitAttemptIdProto request,
                               RpcCallback<PrimitiveProtos.BoolProto> done) {
-    workerContext.getTaskRunnerManager().findTaskByQueryUnitAttemptId(new QueryUnitAttemptId(request)).kill();
+    Task task = workerContext.getTaskRunnerManager().getTaskByQueryUnitAttemptId(new QueryUnitAttemptId(request));
+    if(task != null) task.kill();
+
     done.run(TajoWorker.TRUE_PROTO);
   }
 
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/Task.java b/tajo-core/src/main/java/org/apache/tajo/worker/Task.java
index 5c252fd..c6e2b73 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/Task.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/Task.java
@@ -27,9 +27,9 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.tajo.QueryUnitAttemptId;
 import org.apache.tajo.TajoConstants;
+import org.apache.tajo.TajoProtos;
 import org.apache.tajo.TajoProtos.TaskAttemptState;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.TableDesc;
@@ -39,10 +39,7 @@
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.engine.json.CoreGsonHelper;
 import org.apache.tajo.engine.planner.PlannerUtil;
-import org.apache.tajo.engine.planner.logical.LogicalNode;
-import org.apache.tajo.engine.planner.logical.NodeType;
-import org.apache.tajo.engine.planner.logical.ScanNode;
-import org.apache.tajo.engine.planner.logical.SortNode;
+import org.apache.tajo.engine.planner.logical.*;
 import org.apache.tajo.engine.planner.physical.PhysicalExec;
 import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.engine.query.QueryUnitRequest;
@@ -91,11 +88,6 @@
   private final Reporter reporter;
   private Path inputTableBaseDir;
 
-  private static int completedTasksNum = 0;
-  private static int succeededTasksNum = 0;
-  private static int killedTasksNum = 0;
-  private static int failedTasksNum = 0;
-
   private long startTime;
   private long finishTime;
 
@@ -145,7 +137,7 @@
     this.taskDir = StorageUtil.concatPath(taskRunnerContext.getBaseDir(),
         taskId.getQueryUnitId().getId() + "_" + taskId.getId());
 
-    this.context = new TaskAttemptContext(systemConf, taskId,
+    this.context = new TaskAttemptContext(systemConf, queryContext, taskId,
         request.getFragments().toArray(new FragmentProto[request.getFragments().size()]), taskDir);
     this.context.setDataChannel(request.getDataChannel());
     this.context.setEnforcer(request.getEnforcer());
@@ -156,9 +148,19 @@
 
     plan = CoreGsonHelper.fromJson(request.getSerializedData(), LogicalNode.class);
     LogicalNode [] scanNode = PlannerUtil.findAllNodes(plan, NodeType.SCAN);
-    for (LogicalNode node : scanNode) {
-      ScanNode scan = (ScanNode)node;
-      descs.put(scan.getCanonicalName(), scan.getTableDesc());
+    if (scanNode != null) {
+      for (LogicalNode node : scanNode) {
+        ScanNode scan = (ScanNode) node;
+        descs.put(scan.getCanonicalName(), scan.getTableDesc());
+      }
+    }
+
+    LogicalNode [] partitionScanNode = PlannerUtil.findAllNodes(plan, NodeType.PARTITIONS_SCAN);
+    if (partitionScanNode != null) {
+      for (LogicalNode node : partitionScanNode) {
+        PartitionedTableScanNode scan = (PartitionedTableScanNode) node;
+        descs.put(scan.getCanonicalName(), scan.getTableDesc());
+      }
     }
 
     interQuery = request.getProto().getInterQuery();
@@ -259,6 +261,10 @@
     return fetcherRunners.size() > 0;
   }
 
+  public List<Fetcher> getFetchers() {
+    return new ArrayList<Fetcher>(fetcherRunners);
+  }
+
   public void fetch() {
     for (Fetcher f : fetcherRunners) {
       taskRunnerContext.getFetchLauncher().submit(new FetchRunner(context, f));
@@ -375,17 +381,15 @@
         context.setProgress(FETCHER_PROGRESS);
       }
 
-      if (context.getFragmentSize() > 0) {
-        this.executor = taskRunnerContext.getTQueryEngine().
-            createPlan(context, plan);
-        this.executor.init();
+      this.executor = taskRunnerContext.getTQueryEngine().
+          createPlan(context, plan);
+      this.executor.init();
 
-        while(!killed && executor.next() != null) {
-        }
-        this.executor.close();
-        reloadInputStats();
-        this.executor = null;
+      while(!killed && executor.next() != null) {
       }
+      this.executor.close();
+      reloadInputStats();
+      this.executor = null;
     } catch (Exception e) {
       error = e ;
       LOG.error(e.getMessage(), e);
@@ -393,7 +397,7 @@
     } finally {
       context.setProgress(1.0f);
       stopped = true;
-      completedTasksNum++;
+      taskRunnerContext.completedTasksNum.incrementAndGet();
 
       if (killed || aborted) {
         context.setExecutorProgress(0.0f);
@@ -401,19 +405,23 @@
         if(killed) {
           context.setState(TaskAttemptState.TA_KILLED);
           masterProxy.statusUpdate(null, getReport(), NullCallback.get());
-          killedTasksNum++;
+          taskRunnerContext.killedTasksNum.incrementAndGet();
         } else {
           context.setState(TaskAttemptState.TA_FAILED);
           TaskFatalErrorReport.Builder errorBuilder =
               TaskFatalErrorReport.newBuilder()
                   .setId(getId().getProto());
           if (error != null) {
-            errorBuilder.setErrorMessage(error.getMessage());
+            if (error.getMessage() == null) {
+              errorBuilder.setErrorMessage(error.getClass().getCanonicalName());
+            } else {
+              errorBuilder.setErrorMessage(error.getMessage());
+            }
             errorBuilder.setErrorTrace(ExceptionUtils.getStackTrace(error));
           }
 
           masterProxy.fatalError(null, errorBuilder.build(), NullCallback.get());
-          failedTasksNum++;
+          taskRunnerContext.failedTasksNum.incrementAndGet();
         }
 
         // stopping the status report
@@ -437,69 +445,76 @@
 
         TaskCompletionReport report = getTaskCompletionReport();
         masterProxy.done(null, report, NullCallback.get());
-        succeededTasksNum++;
+        taskRunnerContext.succeededTasksNum.incrementAndGet();
       }
 
       finishTime = System.currentTimeMillis();
-
+      LOG.info("Worker's task counter - total:" + taskRunnerContext.completedTasksNum.intValue() +
+          ", succeeded: " + taskRunnerContext.succeededTasksNum.intValue()
+          + ", killed: " + taskRunnerContext.killedTasksNum.incrementAndGet()
+          + ", failed: " + taskRunnerContext.failedTasksNum.intValue());
       cleanupTask();
-      LOG.info("Worker's task counter - total:" + completedTasksNum + ", succeeded: " + succeededTasksNum
-          + ", killed: " + killedTasksNum + ", failed: " + failedTasksNum);
     }
   }
 
   public void cleanupTask() {
-    taskRunnerContext.addTaskHistory(getId(), getTaskHistory());
+    taskRunnerContext.addTaskHistory(getId(), createTaskHistory());
     taskRunnerContext.getTasks().remove(getId());
     taskRunnerContext = null;
 
     fetcherRunners.clear();
-    executor = null;
+    fetcherRunners = null;
+    try {
+      if(executor != null) {
+        executor.close();
+        executor = null;
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+    }
     plan = null;
     context = null;
     releaseChannelFactory();
   }
 
-  public TaskHistory getTaskHistory() {
-    TaskHistory taskHistory = new TaskHistory();
-    taskHistory.setStartTime(startTime);
-    taskHistory.setFinishTime(finishTime);
-    if (context.getOutputPath() != null) {
-      taskHistory.setOutputPath(context.getOutputPath().toString());
-    }
-
-    if (context.getWorkDir() != null) {
-      taskHistory.setWorkingPath(context.getWorkDir().toString());
-    }
-
+  public TaskHistory createTaskHistory() {
+    TaskHistory taskHistory = null;
     try {
-      taskHistory.setStatus(getStatus().toString());
-      taskHistory.setProgress(context.getProgress());
+      taskHistory = new TaskHistory(getTaskId(), getStatus(), context.getProgress(),
+          startTime, finishTime, reloadInputStats());
 
-      taskHistory.setInputStats(new TableStats(reloadInputStats()));
+      if (context.getOutputPath() != null) {
+        taskHistory.setOutputPath(context.getOutputPath().toString());
+      }
+
+      if (context.getWorkDir() != null) {
+        taskHistory.setWorkingPath(context.getWorkDir().toString());
+      }
+
       if (context.getResultStats() != null) {
-        taskHistory.setOutputStats((TableStats)context.getResultStats().clone());
+        taskHistory.setOutputStats(context.getResultStats().getProto());
       }
 
       if (hasFetchPhase()) {
-        Map<URI, TaskHistory.FetcherHistory> fetcherHistories = new HashMap<URI, TaskHistory.FetcherHistory>();
+        taskHistory.setTotalFetchCount(fetcherRunners.size());
+        int i = 0;
+        FetcherHistoryProto.Builder builder = FetcherHistoryProto.newBuilder();
+        for (Fetcher fetcher : fetcherRunners) {
+          // TODO store the fetcher histories
+          if (systemConf.getBoolVar(TajoConf.ConfVars.TAJO_DEBUG)) {
+            builder.setStartTime(fetcher.getStartTime());
+            builder.setFinishTime(fetcher.getFinishTime());
+            builder.setFileLength(fetcher.getFileLen());
+            builder.setMessageReceivedCount(fetcher.getMessageReceiveCount());
+            builder.setState(fetcher.getState());
 
-        for(Fetcher eachFetcher: fetcherRunners) {
-          TaskHistory.FetcherHistory fetcherHistory = new TaskHistory.FetcherHistory();
-          fetcherHistory.setStartTime(eachFetcher.getStartTime());
-          fetcherHistory.setFinishTime(eachFetcher.getFinishTime());
-          fetcherHistory.setStatus(eachFetcher.getStatus());
-          fetcherHistory.setUri(eachFetcher.getURI().toString());
-          fetcherHistory.setFileLen(eachFetcher.getFileLen());
-          fetcherHistory.setMessageReceiveCount(eachFetcher.getMessageReceiveCount());
-
-          fetcherHistories.put(eachFetcher.getURI(), fetcherHistory);
+            taskHistory.addFetcherHistory(builder.build());
+          }
+          if (fetcher.getState() == TajoProtos.FetcherState.FETCH_FINISHED) i++;
         }
-
-        taskHistory.setFetchers(fetcherHistories);
+        taskHistory.setFinishedFetchCount(i);
       }
     } catch (Exception e) {
-      taskHistory.setStatus(StringUtils.stringifyException(e));
       e.printStackTrace();
     }
 
@@ -559,7 +574,7 @@
       int retryWaitTime = 1000;
 
       try { // for releasing fetch latch
-        while(retryNum < maxRetryNum) {
+        while(!killed && retryNum < maxRetryNum) {
           if (retryNum > 0) {
             try {
               Thread.sleep(retryWaitTime);
@@ -661,6 +676,7 @@
     private Thread pingThread;
     private AtomicBoolean stop = new AtomicBoolean(false);
     private static final int PROGRESS_INTERVAL = 3000;
+    private static final int MAX_RETRIES = 3;
     private QueryUnitAttemptId taskId;
 
     public Reporter(QueryUnitAttemptId taskId, QueryMasterProtocolService.Interface masterStub) {
@@ -671,7 +687,6 @@
     Runnable createReporterThread() {
 
       return new Runnable() {
-        final int MAX_RETRIES = 3;
         int remainingRetries = MAX_RETRIES;
         @Override
         public void run() {
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TaskAttemptContext.java b/tajo-core/src/main/java/org/apache/tajo/worker/TaskAttemptContext.java
index f42df1d..b1246ec 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TaskAttemptContext.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TaskAttemptContext.java
@@ -30,6 +30,7 @@
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.global.DataChannel;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.fragment.Fragment;
 import org.apache.tajo.storage.fragment.FragmentConvertor;
 
@@ -68,11 +69,13 @@
   private Path outputPath;
   private DataChannel dataChannel;
   private Enforcer enforcer;
+  private QueryContext queryContext;
 
-  public TaskAttemptContext(TajoConf conf, final QueryUnitAttemptId queryId,
+  public TaskAttemptContext(TajoConf conf, QueryContext queryContext, final QueryUnitAttemptId queryId,
                             final FragmentProto[] fragments,
                             final Path workDir) {
     this.conf = conf;
+    this.queryContext = queryContext;
     this.queryId = queryId;
 
     if (fragments != null) {
@@ -94,9 +97,9 @@
   }
 
   @VisibleForTesting
-  public TaskAttemptContext(TajoConf conf, final QueryUnitAttemptId queryId,
+  public TaskAttemptContext(TajoConf conf, QueryContext queryContext, final QueryUnitAttemptId queryId,
                             final Fragment [] fragments,  final Path workDir) {
-    this(conf, queryId, FragmentConvertor.toFragmentProtoArray(fragments), workDir);
+    this(conf, queryContext, queryId, FragmentConvertor.toFragmentProtoArray(fragments), workDir);
   }
 
   public TajoConf getConf() {
@@ -269,4 +272,8 @@
       return false;
     }
   }
+
+  public QueryContext getQueryContext() {
+    return queryContext;
+  }
 }
\ No newline at end of file
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TaskHistory.java b/tajo-core/src/main/java/org/apache/tajo/worker/TaskHistory.java
index 0973aa7..dab6ba3 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TaskHistory.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TaskHistory.java
@@ -18,107 +18,159 @@
 
 package org.apache.tajo.worker;
 
-import org.apache.tajo.catalog.statistics.TableStats;
-import org.apache.tajo.util.FileUtil;
+import com.google.common.base.Objects;
+import com.google.common.collect.Lists;
+import org.apache.tajo.QueryUnitAttemptId;
+import org.apache.tajo.catalog.proto.CatalogProtos;
+import org.apache.tajo.common.ProtoObject;
 
-import java.net.URI;
-import java.util.Collection;
-import java.util.Map;
+import java.util.Collections;
+import java.util.List;
 
-public class TaskHistory {
+import static org.apache.tajo.TajoProtos.TaskAttemptState;
+import static org.apache.tajo.ipc.TajoWorkerProtocol.FetcherHistoryProto;
+import static org.apache.tajo.ipc.TajoWorkerProtocol.TaskHistoryProto;
+
+/**
+ * The history class for Task processing.
+ */
+public class TaskHistory implements ProtoObject<TaskHistoryProto> {
+
+  private QueryUnitAttemptId queryUnitAttemptId;
+  private TaskAttemptState state;
+  private float progress;
   private long startTime;
   private long finishTime;
-
-  private String status;
+  private CatalogProtos.TableStatsProto inputStats;
+  private CatalogProtos.TableStatsProto outputStats;
   private String outputPath;
   private String workingPath;
-  private float progress;
 
-  private TableStats inputStats;
-  private TableStats outputStats;
+  private int finishedFetchCount;
+  private int totalFetchCount;
+  private List<FetcherHistoryProto> fetcherHistories;
 
-  Map<URI, FetcherHistory> fetchers;
+  public TaskHistory(QueryUnitAttemptId queryUnitAttemptId, TaskAttemptState state, float progress,
+                     long startTime, long finishTime, CatalogProtos.TableStatsProto inputStats) {
+    init();
+    this.queryUnitAttemptId = queryUnitAttemptId;
+    this.state = state;
+    this.progress = progress;
+    this.startTime = startTime;
+    this.finishTime = finishTime;
+    this.inputStats = inputStats;
+  }
 
-  public static class FetcherHistory {
-    private long startTime;
-    private long finishTime;
+  public TaskHistory(TaskHistoryProto proto) {
+    this.queryUnitAttemptId = new QueryUnitAttemptId(proto.getQueryUnitAttemptId());
+    this.state = proto.getState();
+    this.progress = proto.getProgress();
+    this.startTime = proto.getStartTime();
+    this.finishTime = proto.getFinishTime();
+    this.inputStats = proto.getInputStats();
 
-    private String status;
-    private String uri;
-    private long fileLen;
-    private int messageReceiveCount;
-
-    public long getStartTime() {
-      return startTime;
+    if (proto.hasOutputStats()) {
+      this.outputStats = proto.getOutputStats();
     }
 
-    public void setStartTime(long startTime) {
-      this.startTime = startTime;
+    if (proto.hasOutputPath()) {
+      this.outputPath = proto.getOutputPath();
     }
 
-    public long getFinishTime() {
-      return finishTime;
+    if (proto.hasWorkingPath()) {
+      this.workingPath = proto.getWorkingPath();
     }
 
-    public void setFinishTime(long finishTime) {
-      this.finishTime = finishTime;
+    if (proto.hasFinishedFetchCount()) {
+      this.finishedFetchCount = proto.getFinishedFetchCount();
     }
 
-    public String getStatus() {
-      return status;
+    if (proto.hasTotalFetchCount()) {
+      this.totalFetchCount = proto.getTotalFetchCount();
     }
 
-    public void setStatus(String status) {
-      this.status = status;
+    this.fetcherHistories = proto.getFetcherHistoriesList();
+  }
+
+  private void init() {
+    this.fetcherHistories = Lists.newArrayList();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hashCode(queryUnitAttemptId, state);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o instanceof TaskHistory) {
+      TaskHistory other = (TaskHistory) o;
+      return getProto().equals(other.getProto());
+    }
+    return false;
+  }
+
+  @Override
+  public TaskHistoryProto getProto() {
+    TaskHistoryProto.Builder builder = TaskHistoryProto.newBuilder();
+    builder.setQueryUnitAttemptId(queryUnitAttemptId.getProto());
+    builder.setState(state);
+    builder.setProgress(progress);
+    builder.setStartTime(startTime);
+    builder.setFinishTime(finishTime);
+    builder.setInputStats(inputStats);
+
+    if (outputStats != null) {
+      builder.setOutputStats(outputStats);
     }
 
-    public String getUri() {
-      return uri;
+    if (workingPath != null) {
+      builder.setWorkingPath(workingPath);
     }
 
-    public void setUri(String uri) {
-      this.uri = uri;
+    if (totalFetchCount > 0) {
+      builder.setTotalFetchCount(totalFetchCount);
+      builder.setFinishedFetchCount(finishedFetchCount);
     }
 
-    public long getFileLen() {
-      return fileLen;
-    }
-
-    public void setFileLen(long fileLen) {
-      this.fileLen = fileLen;
-    }
-
-    public int getMessageReceiveCount() {
-      return messageReceiveCount;
-    }
-
-    public void setMessageReceiveCount(int messageReceiveCount) {
-      this.messageReceiveCount = messageReceiveCount;
-    }
+    builder.addAllFetcherHistories(fetcherHistories);
+    return builder.build();
   }
 
   public long getStartTime() {
     return startTime;
   }
 
-  public void setStartTime(long startTime) {
-    this.startTime = startTime;
-  }
-
   public long getFinishTime() {
     return finishTime;
   }
 
-  public void setFinishTime(long finishTime) {
-    this.finishTime = finishTime;
+  public List<FetcherHistoryProto> getFetcherHistories() {
+    return Collections.unmodifiableList(fetcherHistories);
   }
 
-  public String getStatus() {
-    return status;
+  public boolean hasFetcherHistories(){
+    return totalFetchCount > 0;
   }
 
-  public void setStatus(String status) {
-    this.status = status;
+  public void addFetcherHistory(FetcherHistoryProto fetcherHistory) {
+    fetcherHistories.add(fetcherHistory);
+  }
+
+  public QueryUnitAttemptId getQueryUnitAttemptId() {
+    return queryUnitAttemptId;
+  }
+
+  public TaskAttemptState getState() {
+    return state;
+  }
+
+  public float getProgress() {
+    return progress;
+  }
+
+  public CatalogProtos.TableStatsProto getInputStats() {
+    return inputStats;
   }
 
   public String getOutputPath() {
@@ -137,62 +189,27 @@
     this.workingPath = workingPath;
   }
 
-  public Collection<FetcherHistory> getFetchers() {
-    return fetchers.values();
+  public Integer getFinishedFetchCount() {
+    return finishedFetchCount;
   }
 
-  public void setFetchers(Map<URI, FetcherHistory> fetchers) {
-    this.fetchers = fetchers;
+  public void setFinishedFetchCount(int finishedFetchCount) {
+    this.finishedFetchCount = finishedFetchCount;
   }
 
-  public float getProgress() {
-    return progress;
+  public Integer getTotalFetchCount() {
+    return totalFetchCount;
   }
 
-  public void setProgress(float progress) {
-    this.progress = progress;
+  public void setTotalFetchCount(int totalFetchCount) {
+    this.totalFetchCount = totalFetchCount;
   }
 
-  public boolean hasFetcher() {
-    return fetchers != null && !fetchers.isEmpty();
-  }
-
-  public TableStats getInputStats() {
-    return inputStats;
-  }
-
-  public void setInputStats(TableStats inputStats) {
-    this.inputStats = inputStats;
-  }
-
-  public TableStats getOutputStats() {
+  public CatalogProtos.TableStatsProto getOutputStats() {
     return outputStats;
   }
 
-  public void setOutputStats(TableStats outputStats) {
+  public void setOutputStats(CatalogProtos.TableStatsProto outputStats) {
     this.outputStats = outputStats;
   }
-
-  public static String toInputStatsString(TableStats tableStats) {
-    if (tableStats == null) {
-      return "No input statistics";
-    }
-
-    String result = "";
-    result += "TotalBytes: " + FileUtil.humanReadableByteCount(tableStats.getNumBytes(), false) + " ("
-        + tableStats.getNumBytes() + " B)";
-    result += ", ReadBytes: " + FileUtil.humanReadableByteCount(tableStats.getReadBytes(), false) + " ("
-        + tableStats.getReadBytes() + " B)";
-    result += ", ReadRows: " + (tableStats.getNumRows() == 0 ? "-" : tableStats.getNumRows());
-
-    return result;
-  }
-
-  public static String toOutputStatsString(TableStats tableStats) {
-    if (tableStats == null) {
-      return "No output statistics";
-    }
-
-    return tableStats.toJson();
-  }
 }
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunner.java b/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunner.java
index 9e904cd..3fcee06 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunner.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunner.java
@@ -37,7 +37,6 @@
 import org.apache.tajo.TajoProtos.TaskAttemptState;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.conf.TajoConf.ConfVars;
-import org.apache.tajo.engine.planner.physical.SeqScanExec;
 import org.apache.tajo.engine.query.QueryUnitRequestImpl;
 import org.apache.tajo.engine.utils.TupleCache;
 import org.apache.tajo.ipc.QueryMasterProtocol;
@@ -51,6 +50,7 @@
 import java.net.InetSocketAddress;
 import java.util.Map;
 import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import static org.apache.tajo.ipc.TajoWorkerProtocol.*;
 
@@ -81,13 +81,10 @@
   private TajoQueryEngine queryEngine;
 
   // for Fetcher
-  private final ExecutorService fetchLauncher;
+  private ExecutorService fetchLauncher;
   // It keeps all of the query unit attempts while a TaskRunner is running.
   private final Map<QueryUnitAttemptId, Task> tasks = new ConcurrentHashMap<QueryUnitAttemptId, Task>();
 
-  private final Map<QueryUnitAttemptId, TaskHistory> taskHistories =
-      new ConcurrentHashMap<QueryUnitAttemptId, TaskHistory>();
-
   private LocalDirAllocator lDirAllocator;
 
   // A thread to receive each assigned query unit and execute the query unit
@@ -110,6 +107,8 @@
 
   private InetSocketAddress qmMasterAddr;
 
+  private TaskRunnerHistory history;
+
   public TaskRunner(TaskRunnerManager taskRunnerManager, TajoConf conf, String[] args) {
     super(TaskRunner.class.getName());
 
@@ -130,6 +129,7 @@
       NodeId nodeId = ConverterUtils.toNodeId(args[2]);
       this.containerId = ConverterUtils.toContainerId(args[3]);
 
+
       // QueryMaster's address
       String host = args[4];
       int port = Integer.parseInt(args[5]);
@@ -157,12 +157,18 @@
       this.taskOwner = taskOwner;
 
       this.taskRunnerContext = new TaskRunnerContext();
+      this.history = new TaskRunnerHistory(containerId, executionBlockId);
+      this.history.setState(getServiceState());
     } catch (Exception e) {
       LOG.error(e.getMessage(), e);
     }
   }
 
   public String getId() {
+    return getId(executionBlockId, containerId);
+  }
+
+  public static String getId(ExecutionBlockId executionBlockId, ContainerId containerId) {
     return executionBlockId + "," + containerId;
   }
 
@@ -193,11 +199,14 @@
     }
 
     super.init(conf);
+    this.history.setState(getServiceState());
   }
 
   @Override
   public void start() {
     super.start();
+    history.setStartTime(getStartTime());
+    this.history.setState(getServiceState());
     run();
   }
 
@@ -206,7 +215,8 @@
     if(isStopped()) {
       return;
     }
-    finishTime = System.currentTimeMillis();
+    this.finishTime = System.currentTimeMillis();
+    this.history.setFinishTime(finishTime);
     // If this flag become true, taskLauncher will be terminated.
     this.stopped = true;
 
@@ -215,11 +225,13 @@
       if (task.getStatus() == TaskAttemptState.TA_PENDING ||
           task.getStatus() == TaskAttemptState.TA_RUNNING) {
         task.setState(TaskAttemptState.TA_FAILED);
+        task.abort();
       }
     }
 
     tasks.clear();
     fetchLauncher.shutdown();
+    fetchLauncher = null;
     this.queryEngine = null;
 
     TupleCache.getInstance().removeBroadcastCache(executionBlockId);
@@ -228,6 +240,8 @@
     synchronized (this) {
       notifyAll();
     }
+    super.stop();
+    this.history.setState(getServiceState());
   }
 
   public long getFinishTime() {
@@ -235,6 +249,11 @@
   }
 
   public class TaskRunnerContext {
+    public AtomicInteger completedTasksNum = new AtomicInteger();
+    public AtomicInteger succeededTasksNum = new AtomicInteger();
+    public AtomicInteger killedTasksNum = new AtomicInteger();
+    public AtomicInteger failedTasksNum = new AtomicInteger();
+
     public TajoConf getConf() {
       return systemConf;
     }
@@ -280,15 +299,11 @@
     }
 
     public void addTaskHistory(QueryUnitAttemptId quAttemptId, TaskHistory taskHistory) {
-      taskHistories.put(quAttemptId, taskHistory);
+      history.addTaskHistory(quAttemptId, taskHistory);
     }
 
-    public TaskHistory getTaskHistory(QueryUnitAttemptId quAttemptId) {
-      return taskHistories.get(quAttemptId);
-    }
-
-    public Map<QueryUnitAttemptId, TaskHistory> getTaskHistories() {
-      return taskHistories;
+    public TaskRunnerHistory getExcutionBlockHistory(){
+      return history;
     }
   }
 
@@ -310,7 +325,6 @@
 
   public void run() {
     LOG.info("TaskRunner startup");
-
     try {
 
       taskLauncher = new Thread(new Runnable() {
@@ -364,6 +378,7 @@
                   if(taskRunnerManager != null) {
                     //notify to TaskRunnerManager
                     taskRunnerManager.stopTask(getId());
+                    taskRunnerManager= null;
                   }
                 } else {
                   taskRunnerManager.getWorkerContext().getWorkerSystemMetrics().counter("query", "task").inc();
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerHistory.java b/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerHistory.java
new file mode 100644
index 0000000..df60855
--- /dev/null
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerHistory.java
@@ -0,0 +1,150 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.worker;
+
+import com.google.common.base.Objects;
+import com.google.common.collect.Maps;
+import org.apache.hadoop.service.Service;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.tajo.ExecutionBlockId;
+import org.apache.tajo.QueryUnitAttemptId;
+import org.apache.tajo.common.ProtoObject;
+
+import java.util.Collections;
+import java.util.Map;
+
+import static org.apache.tajo.ipc.TajoWorkerProtocol.TaskHistoryProto;
+import static org.apache.tajo.ipc.TajoWorkerProtocol.TaskRunnerHistoryProto;
+
+/**
+ * The history class for TaskRunner processing.
+ */
+public class TaskRunnerHistory implements ProtoObject<TaskRunnerHistoryProto> {
+
+  private Service.STATE state;
+  private ContainerId containerId;
+  private long startTime;
+  private long finishTime;
+  private ExecutionBlockId executionBlockId;
+  private Map<QueryUnitAttemptId, TaskHistory> taskHistoryMap = null;
+
+  public TaskRunnerHistory(ContainerId containerId, ExecutionBlockId executionBlockId) {
+    init();
+    this.containerId = containerId;
+    this.executionBlockId = executionBlockId;
+  }
+
+  public TaskRunnerHistory(TaskRunnerHistoryProto proto) {
+    this.state = Service.STATE.valueOf(proto.getState());
+    this.containerId = ConverterUtils.toContainerId(proto.getContainerId());
+    this.startTime = proto.getStartTime();
+    this.finishTime = proto.getFinishTime();
+    this.executionBlockId = new ExecutionBlockId(proto.getExecutionBlockId());
+    this.taskHistoryMap = Maps.newHashMap();
+    for (TaskHistoryProto taskHistoryProto : proto.getTaskHistoriesList()) {
+      TaskHistory taskHistory = new TaskHistory(taskHistoryProto);
+      taskHistoryMap.put(taskHistory.getQueryUnitAttemptId(), taskHistory);
+    }
+  }
+
+  private void init() {
+    this.taskHistoryMap = Maps.newHashMap();
+  }
+
+  public int size() {
+    return this.taskHistoryMap.size();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hashCode(containerId, executionBlockId, taskHistoryMap);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o instanceof TaskRunnerHistory) {
+      TaskRunnerHistory other = (TaskRunnerHistory) o;
+      return getProto().equals(other.getProto());
+    }
+    return false;
+  }
+
+  @Override
+  public TaskRunnerHistoryProto getProto() {
+    TaskRunnerHistoryProto.Builder builder = TaskRunnerHistoryProto.newBuilder();
+    builder.setContainerId(containerId.toString());
+    builder.setState(state.toString());
+    builder.setExecutionBlockId(executionBlockId.getProto());
+    builder.setStartTime(startTime);
+    builder.setFinishTime(finishTime);
+    for (TaskHistory taskHistory : taskHistoryMap.values()){
+      builder.addTaskHistories(taskHistory.getProto());
+    }
+    return builder.build();
+  }
+
+  public long getStartTime() {
+    return startTime;
+  }
+
+  public void setStartTime(long startTime) {
+    this.startTime = startTime;
+  }
+
+  public long getFinishTime() {
+    return finishTime;
+  }
+
+  public void setFinishTime(long finishTime) {
+    this.finishTime = finishTime;
+  }
+
+  public ExecutionBlockId getExecutionBlockId() {
+    return executionBlockId;
+  }
+
+  public Service.STATE getState() {
+    return state;
+  }
+
+  public void setState(Service.STATE state) {
+    this.state = state;
+  }
+
+  public ContainerId getContainerId() {
+    return containerId;
+  }
+
+  public void setContainerId(ContainerId containerId) {
+    this.containerId = containerId;
+  }
+
+  public TaskHistory getTaskHistory(QueryUnitAttemptId queryUnitAttemptId) {
+    return taskHistoryMap.get(queryUnitAttemptId);
+  }
+
+  public Map<QueryUnitAttemptId, TaskHistory> getTaskHistoryMap() {
+    return Collections.unmodifiableMap(taskHistoryMap);
+  }
+
+  public void addTaskHistory(QueryUnitAttemptId queryUnitAttemptId, TaskHistory taskHistory) {
+    taskHistoryMap.put(queryUnitAttemptId, taskHistory);
+  }
+}
diff --git a/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerManager.java b/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerManager.java
index da434e4..a8e8730 100644
--- a/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerManager.java
+++ b/tajo-core/src/main/java/org/apache/tajo/worker/TaskRunnerManager.java
@@ -18,11 +18,11 @@
 
 package org.apache.tajo.worker;
 
+import com.google.common.collect.Maps;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.service.CompositeService;
-import org.apache.tajo.ExecutionBlockId;
 import org.apache.tajo.QueryUnitAttemptId;
 import org.apache.tajo.conf.TajoConf;
 
@@ -33,7 +33,7 @@
   private static final Log LOG = LogFactory.getLog(TaskRunnerManager.class);
 
   private final Map<String, TaskRunner> taskRunnerMap = new HashMap<String, TaskRunner>();
-  private final Map<String, TaskRunner> finishedTaskRunnerMap = new HashMap<String, TaskRunner>();
+  private final Map<String, TaskRunnerHistory> taskRunnerHistoryMap = Maps.newConcurrentMap();
   private TajoWorker.WorkerContext workerContext;
   private TajoConf tajoConf;
   private AtomicBoolean stop = new AtomicBoolean(false);
@@ -64,10 +64,10 @@
 
   @Override
   public void stop() {
-    if(stop.get()) {
+    if(stop.getAndSet(true)) {
       return;
     }
-    stop.set(true);
+
     synchronized(taskRunnerMap) {
       for(TaskRunner eachTaskRunner: taskRunnerMap.values()) {
         if(!eachTaskRunner.isStopped()) {
@@ -88,10 +88,7 @@
   public void stopTask(String id) {
     LOG.info("Stop Task:" + id);
     synchronized(taskRunnerMap) {
-      TaskRunner taskRunner = taskRunnerMap.remove(id);
-      if(taskRunner != null) {
-        finishedTaskRunnerMap.put(id, taskRunner);
-      }
+      taskRunnerMap.remove(id);
     }
     if(workerContext.isYarnContainerMode()) {
       stop();
@@ -104,68 +101,39 @@
     }
   }
 
-  public Collection<TaskRunner> getFinishedTaskRunners() {
-    synchronized(finishedTaskRunnerMap) {
-      return Collections.unmodifiableCollection(finishedTaskRunnerMap.values());
+  public Collection<TaskRunnerHistory> getExecutionBlockHistories() {
+    synchronized(taskRunnerHistoryMap) {
+      return Collections.unmodifiableCollection(taskRunnerHistoryMap.values());
     }
   }
 
-  public TaskRunner findTaskRunner(String taskRunnerId) {
+  public TaskRunnerHistory getExcutionBlockHistoryByTaskRunnerId(String taskRunnerId) {
+    synchronized(taskRunnerHistoryMap) {
+      return taskRunnerHistoryMap.get(taskRunnerId);
+    }
+  }
+
+  public TaskRunner getTaskRunner(String taskRunnerId) {
     synchronized(taskRunnerMap) {
-      if(taskRunnerMap.containsKey(taskRunnerId)) {
-        return taskRunnerMap.get(taskRunnerId);
-      }
-    }
-    synchronized(finishedTaskRunnerMap) {
-      return finishedTaskRunnerMap.get(taskRunnerId);
+      return taskRunnerMap.get(taskRunnerId);
     }
   }
 
-  public Task findTaskByQueryUnitAttemptId(QueryUnitAttemptId quAttemptId) {
-    ExecutionBlockId ebid = quAttemptId.getQueryUnitId().getExecutionBlockId();
+  public Task getTaskByQueryUnitAttemptId(QueryUnitAttemptId quAttemptId) {
     synchronized(taskRunnerMap) {
       for (TaskRunner eachTaskRunner: taskRunnerMap.values()) {
-        if (eachTaskRunner.getExecutionBlockId().equals(ebid)) {
-          Task task = eachTaskRunner.getContext().getTask(quAttemptId);
-          if (task != null) {
-            return task;
-          }
-        }
-      }
-    }
-    synchronized(finishedTaskRunnerMap) {
-      for (TaskRunner eachTaskRunner: finishedTaskRunnerMap.values()) {
-        if (eachTaskRunner.getExecutionBlockId().equals(ebid)) {
-          Task task = eachTaskRunner.getContext().getTask(quAttemptId);
-          if (task != null) {
-            return task;
-          }
-        }
+        Task task = eachTaskRunner.getContext().getTask(quAttemptId);
+        if (task != null) return task;
       }
     }
     return null;
   }
 
-  public TaskHistory findTaskHistoryByQueryUnitAttemptId(QueryUnitAttemptId quAttemptId) {
-    ExecutionBlockId ebid = quAttemptId.getQueryUnitId().getExecutionBlockId();
-    synchronized(taskRunnerMap) {
-      for (TaskRunner eachTaskRunner: taskRunnerMap.values()) {
-        if (eachTaskRunner.getExecutionBlockId().equals(ebid)) {
-          TaskHistory taskHistory = eachTaskRunner.getContext().getTaskHistory(quAttemptId);
-          if (taskHistory != null) {
-            return taskHistory;
-          }
-        }
-      }
-    }
-    synchronized(finishedTaskRunnerMap) {
-      for (TaskRunner eachTaskRunner: finishedTaskRunnerMap.values()) {
-        if (eachTaskRunner.getExecutionBlockId().equals(ebid)) {
-          TaskHistory taskHistory = eachTaskRunner.getContext().getTaskHistory(quAttemptId);
-          if (taskHistory != null) {
-            return taskHistory;
-          }
-        }
+  public TaskHistory getTaskHistoryByQueryUnitAttemptId(QueryUnitAttemptId quAttemptId) {
+    synchronized (taskRunnerHistoryMap) {
+      for (TaskRunnerHistory history : taskRunnerHistoryMap.values()) {
+        TaskHistory taskHistory = history.getTaskHistory(quAttemptId);
+        if (taskHistory != null) return taskHistory;
       }
     }
 
@@ -189,6 +157,11 @@
           synchronized(taskRunnerMap) {
             taskRunnerMap.put(taskRunner.getId(), taskRunner);
           }
+
+          synchronized (taskRunnerHistoryMap){
+            taskRunnerHistoryMap.put(taskRunner.getId(), taskRunner.getContext().getExcutionBlockHistory());
+          }
+
           taskRunner.init(systemConf);
           taskRunner.start();
         } catch (Exception e) {
@@ -202,6 +175,7 @@
   }
 
   class FinishedTaskCleanThread extends Thread {
+    //TODO if history size is large, the historyMap should remove immediately
     public void run() {
       int expireIntervalTime = tajoConf.getIntVar(TajoConf.ConfVars.WORKER_HISTORY_EXPIRE_PERIOD);
       LOG.info("FinishedQueryMasterTaskCleanThread started: expire interval minutes = " + expireIntervalTime);
@@ -221,16 +195,16 @@
     }
 
     private void cleanExpiredFinishedQueryMasterTask(long expireTime) {
-      synchronized(finishedTaskRunnerMap) {
+      synchronized(taskRunnerHistoryMap) {
         List<String> expiredIds = new ArrayList<String>();
-        for(Map.Entry<String, TaskRunner> entry: finishedTaskRunnerMap.entrySet()) {
+        for(Map.Entry<String, TaskRunnerHistory> entry: taskRunnerHistoryMap.entrySet()) {
           if(entry.getValue().getStartTime() > expireTime) {
             expiredIds.add(entry.getKey());
           }
         }
 
         for(String eachId: expiredIds) {
-          finishedTaskRunnerMap.remove(eachId);
+          taskRunnerHistoryMap.remove(eachId);
         }
       }
     }
diff --git a/tajo-core/src/main/proto/TajoWorkerProtocol.proto b/tajo-core/src/main/proto/TajoWorkerProtocol.proto
index dbff67f..3bf6e13 100644
--- a/tajo-core/src/main/proto/TajoWorkerProtocol.proto
+++ b/tajo-core/src/main/proto/TajoWorkerProtocol.proto
@@ -285,4 +285,36 @@
 
 message EnforcerProto {
   repeated EnforceProperty properties = 1;
+}
+
+message FetcherHistoryProto {
+  required int64 startTime = 1;
+  optional int64 finishTime = 2;
+  required FetcherState state = 3;
+  required int64 fileLength = 4;
+  required int32 messageReceivedCount = 5;
+}
+
+message TaskHistoryProto {
+  required QueryUnitAttemptIdProto queryUnitAttemptId = 1;
+  required TaskAttemptState state = 2;
+  required float progress = 3;
+  required int64 startTime = 4;
+  required int64 finishTime = 5;
+  required TableStatsProto inputStats = 6;
+  optional TableStatsProto outputStats = 7;
+  optional string outputPath = 8;
+  optional string workingPath = 9;
+  optional int32 finishedFetchCount = 10;
+  optional int32 totalFetchCount = 11;
+  repeated FetcherHistoryProto fetcherHistories = 12;
+}
+
+message TaskRunnerHistoryProto {
+  required ExecutionBlockIdProto executionBlockId = 1;
+  required string state = 2;
+  required string containerId = 3;
+  optional int64 startTime = 4;
+  optional int64 finishTime = 5;
+  repeated TaskHistoryProto taskHistories = 6;
 }
\ No newline at end of file
diff --git a/tajo-core/src/main/resources/webapps/admin/query.jsp b/tajo-core/src/main/resources/webapps/admin/query.jsp
index 4e8d7b0..fecc806 100644
--- a/tajo-core/src/main/resources/webapps/admin/query.jsp
+++ b/tajo-core/src/main/resources/webapps/admin/query.jsp
@@ -60,9 +60,31 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
 <html>
 <head>
-  <link rel="stylesheet" type = "text/css" href = "/static/style.css" />
-  <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-  <title>Tajo</title>
+    <link rel="stylesheet" type = "text/css" href = "/static/style.css" />
+    <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+    <title>Tajo</title>
+    <script src="/static/js/jquery.js" type="text/javascript"></script>
+    <script type="text/javascript">
+
+    function killQuery(queryId) {
+        $.ajax({
+            type: "POST",
+            url: "query_exec",
+            data: { action: "killQuery", queryId: queryId }
+        })
+        .done(function(msg) {
+            var resultJson = $.parseJSON(msg);
+            if(resultJson.success == "false") {
+                alert(resultJson.errorMessage);
+            } else {
+                alert(resultJson.successMessage);
+                location.reload();
+            }
+        })
+    }
+
+
+  </script>
 </head>
 <body>
 <%@ include file="header.jsp"%>
@@ -76,7 +98,7 @@
   } else {
 %>
   <table width="100%" border="1" class='border_table'>
-    <tr></tr><th>QueryId</th><th>Query Master</th><th>Started</th><th>Progress</th><th>Time</th><th>Status</th></th><th>sql</th></tr>
+    <tr></tr><th>QueryId</th><th>Query Master</th><th>Started</th><th>Progress</th><th>Time</th><th>Status</th></th><th>sql</th><th>Kill Query</th></tr>
     <%
       for(QueryInProgress eachQuery: runningQueries) {
         long time = System.currentTimeMillis() - eachQuery.getQueryInfo().getStartTime();
@@ -91,6 +113,7 @@
       <td><%=StringUtils.formatTime(time)%></td>
       <td><%=eachQuery.getQueryInfo().getQueryState()%></td>
       <td><%=eachQuery.getQueryInfo().getSql()%></td>
+      <td><input id="btnSubmit" type="submit" value="Kill" onClick="javascript:killQuery('<%=eachQuery.getQueryId()%>');"></td>
     </tr>
     <%
       }
diff --git a/tajo-core/src/main/resources/webapps/worker/header.jsp b/tajo-core/src/main/resources/webapps/worker/header.jsp
index f20eaf0..93f7612 100644
--- a/tajo-core/src/main/resources/webapps/worker/header.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/header.jsp
@@ -18,6 +18,8 @@
   */
 %>
 <%@ page import="org.apache.tajo.util.JSPUtil" %>
+<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
+<%@ page import="org.apache.tajo.worker.TajoWorker" %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 <%
   TajoWorker tmpTajoWorker = (TajoWorker) StaticHttpServer.getInstance().getAttribute("tajo.info.server.object");
diff --git a/tajo-core/src/main/resources/webapps/worker/index.jsp b/tajo-core/src/main/resources/webapps/worker/index.jsp
index c30a72d..866d663 100644
--- a/tajo-core/src/main/resources/webapps/worker/index.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/index.jsp
@@ -19,12 +19,15 @@
 %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 
-<%@ page import="java.util.*" %>
-<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
-<%@ page import="org.apache.tajo.worker.*" %>
-<%@ page import="java.text.SimpleDateFormat" %>
-<%@ page import="org.apache.tajo.master.querymaster.QueryMasterTask" %>
 <%@ page import="org.apache.tajo.master.querymaster.Query" %>
+<%@ page import="org.apache.tajo.master.querymaster.QueryMasterTask" %>
+<%@ page import="org.apache.tajo.util.JSPUtil" %>
+<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
+<%@ page import="org.apache.tajo.worker.TajoWorker" %>
+<%@ page import="org.apache.tajo.worker.TaskRunner" %>
+<%@ page import="java.text.SimpleDateFormat" %>
+<%@ page import="java.util.ArrayList" %>
+<%@ page import="java.util.List" %>
 
 <%
   TajoWorker tajoWorker = (TajoWorker) StaticHttpServer.getInstance().getAttribute("tajo.info.server.object");
@@ -133,7 +136,7 @@
       for(TaskRunner eachTaskRunner: taskRunners) {
     %>
     <tr>
-      <td><a href="tasks.jsp?containerId=<%=eachTaskRunner.getId()%>"><%=eachTaskRunner.getId()%></a></td>
+      <td><a href="tasks.jsp?taskRunnerId=<%=eachTaskRunner.getId()%>"><%=eachTaskRunner.getId()%></a></td>
       <td><%=df.format(eachTaskRunner.getStartTime())%></td>
       <td><%=eachTaskRunner.getFinishTime() == 0 ? "-" : df.format(eachTaskRunner.getFinishTime())%></td>
       <td><%=JSPUtil.getElapsedTime(eachTaskRunner.getStartTime(), eachTaskRunner.getFinishTime())%></td>
diff --git a/tajo-core/src/main/resources/webapps/worker/querydetail.jsp b/tajo-core/src/main/resources/webapps/worker/querydetail.jsp
index 3de20fe..c0bee9b 100644
--- a/tajo-core/src/main/resources/webapps/worker/querydetail.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/querydetail.jsp
@@ -19,13 +19,16 @@
 %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 
-<%@ page import="org.apache.tajo.master.querymaster.*" %>
-<%@ page import="java.util.*" %>
-<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
-<%@ page import="org.apache.tajo.worker.*" %>
-<%@ page import="java.text.SimpleDateFormat" %>
 <%@ page import="org.apache.tajo.QueryId" %>
+<%@ page import="org.apache.tajo.master.querymaster.Query" %>
+<%@ page import="org.apache.tajo.master.querymaster.QueryMasterTask" %>
+<%@ page import="org.apache.tajo.master.querymaster.SubQuery" %>
+<%@ page import="org.apache.tajo.util.JSPUtil" %>
 <%@ page import="org.apache.tajo.util.TajoIdUtils" %>
+<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
+<%@ page import="org.apache.tajo.worker.TajoWorker" %>
+<%@ page import="java.text.SimpleDateFormat" %>
+<%@ page import="java.util.List" %>
 
 <%
   QueryId queryId = TajoIdUtils.parseQueryId(request.getParameter("queryId"));
diff --git a/tajo-core/src/main/resources/webapps/worker/querytasks.jsp b/tajo-core/src/main/resources/webapps/worker/querytasks.jsp
index ab6ff26..1a325da 100644
--- a/tajo-core/src/main/resources/webapps/worker/querytasks.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/querytasks.jsp
@@ -19,23 +19,24 @@
 %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 
-<%@ page import="org.apache.tajo.master.querymaster.*" %>
-<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
-<%@ page import="org.apache.tajo.worker.*" %>
-<%@ page import="java.text.SimpleDateFormat" %>
-<%@ page import="org.apache.tajo.QueryId" %>
-<%@ page import="org.apache.tajo.util.TajoIdUtils" %>
 <%@ page import="org.apache.tajo.ExecutionBlockId" %>
-<%@ page import="org.apache.tajo.ipc.TajoMasterProtocol" %>
-<%@ page import="java.util.List" %>
-<%@ page import="java.util.Map" %>
-<%@ page import="java.util.HashMap" %>
+<%@ page import="org.apache.tajo.QueryId" %>
 <%@ page import="org.apache.tajo.QueryUnitAttemptId" %>
 <%@ page import="org.apache.tajo.catalog.statistics.TableStats" %>
-<%@ page import="java.util.Locale" %>
-<%@ page import="java.text.NumberFormat" %>
 <%@ page import="org.apache.tajo.engine.planner.PlannerUtil" %>
+<%@ page import="org.apache.tajo.ipc.TajoMasterProtocol" %>
+<%@ page import="org.apache.tajo.master.querymaster.*" %>
 <%@ page import="org.apache.tajo.util.FileUtil" %>
+<%@ page import="org.apache.tajo.util.JSPUtil" %>
+<%@ page import="org.apache.tajo.util.TajoIdUtils" %>
+<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
+<%@ page import="org.apache.tajo.worker.TajoWorker" %>
+<%@ page import="java.text.NumberFormat" %>
+<%@ page import="java.text.SimpleDateFormat" %>
+<%@ page import="java.util.HashMap" %>
+<%@ page import="java.util.List" %>
+<%@ page import="java.util.Locale" %>
+<%@ page import="java.util.Map" %>
 
 <%
   String paramQueryId = request.getParameter("queryId");
@@ -157,7 +158,7 @@
     <tr><td align='right' width='180px'>Status:</td><td><%=subQuery.getState()%></td></tr>
     <tr><td align='right'>Started:</td><td><%=df.format(subQuery.getStartTime())%> ~ <%=subQuery.getFinishTime() == 0 ? "-" : df.format(subQuery.getFinishTime())%></td></tr>
     <tr><td align='right'># Tasks:</td><td><%=numTasks%> (Local Tasks: <%=subQuery.getTaskScheduler().getHostLocalAssigned()%>, Rack Local Tasks: <%=subQuery.getTaskScheduler().getRackLocalAssigned()%>)</td></tr>
-    <tr><td align='right'>Progress:</td><td><%=JSPUtil.percentFormat((float)(totalProgress/numTasks))%>%</td></tr>
+    <tr><td align='right'>Progress:</td><td><%=JSPUtil.percentFormat((float) (totalProgress / numTasks))%>%</td></tr>
     <tr><td align='right'># Shuffles:</td><td><%=numShuffles%></td></tr>
     <tr><td align='right'>Input Bytes:</td><td><%=FileUtil.humanReadableByteCount(totalInputBytes, false) + " (" + nf.format(totalInputBytes) + " B)"%></td></tr>
     <tr><td align='right'>Actual Processed Bytes:</td><td><%=totalReadBytes == 0 ? "-" : FileUtil.humanReadableByteCount(totalReadBytes, false) + " (" + nf.format(totalReadBytes) + " B)"%></td></tr>
diff --git a/tajo-core/src/main/resources/webapps/worker/queryunit.jsp b/tajo-core/src/main/resources/webapps/worker/queryunit.jsp
index 06dca00..18a67d8 100644
--- a/tajo-core/src/main/resources/webapps/worker/queryunit.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/queryunit.jsp
@@ -23,6 +23,7 @@
 <%@ page import="org.apache.tajo.QueryId" %>
 <%@ page import="org.apache.tajo.QueryUnitId" %>
 <%@ page import="org.apache.tajo.catalog.proto.CatalogProtos" %>
+<%@ page import="org.apache.tajo.catalog.statistics.TableStats" %>
 <%@ page import="org.apache.tajo.ipc.TajoWorkerProtocol" %>
 <%@ page import="org.apache.tajo.master.querymaster.Query" %>
 <%@ page import="org.apache.tajo.master.querymaster.QueryMasterTask" %>
@@ -31,16 +32,15 @@
 <%@ page import="org.apache.tajo.storage.DataLocation" %>
 <%@ page import="org.apache.tajo.storage.fragment.FileFragment" %>
 <%@ page import="org.apache.tajo.storage.fragment.FragmentConvertor" %>
+<%@ page import="org.apache.tajo.util.JSPUtil" %>
 <%@ page import="org.apache.tajo.util.TajoIdUtils" %>
 <%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
+<%@ page import="org.apache.tajo.worker.FetchImpl" %>
 <%@ page import="org.apache.tajo.worker.TajoWorker" %>
 <%@ page import="java.net.URI" %>
 <%@ page import="java.text.SimpleDateFormat" %>
 <%@ page import="java.util.Map" %>
 <%@ page import="java.util.Set" %>
-<%@ page import="org.apache.tajo.catalog.statistics.TableStats" %>
-<%@ page import="org.apache.tajo.worker.TaskHistory" %>
-<%@ page import="org.apache.tajo.worker.FetchImpl" %>
 
 <%
     String paramQueryId = request.getParameter("queryId");
@@ -165,8 +165,8 @@
         <tr><td align="right">Shuffles</td><td># Shuffle Outputs: <%=numShuffles%>, Shuffle Key: <%=shuffleKey%>, Shuffle file: <%=shuffleFileName%></td></tr>
         <tr><td align="right">Data Locations</td><td><%=dataLocationInfos%></td></tr>
         <tr><td align="right">Fragment</td><td><%=fragmentInfo%></td></tr>
-        <tr><td align="right">Input Statistics</td><td><%=TaskHistory.toInputStatsString(inputStat)%></td></tr>
-        <tr><td align="right">Output Statistics</td><td><%=TaskHistory.toOutputStatsString(outputStat)%></td></tr>
+        <tr><td align="right">Input Statistics</td><td><%=JSPUtil.tableStatToString(inputStat)%></td></tr>
+        <tr><td align="right">Output Statistics</td><td><%=JSPUtil.tableStatToString(outputStat)%></td></tr>
         <tr><td align="right">Fetches</td><td><%=fetchInfo%></td></tr>
     </table>
 </div>
diff --git a/tajo-core/src/main/resources/webapps/worker/taskcontainers.jsp b/tajo-core/src/main/resources/webapps/worker/taskcontainers.jsp
index be19a42..bb5e90d 100644
--- a/tajo-core/src/main/resources/webapps/worker/taskcontainers.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/taskcontainers.jsp
@@ -19,19 +19,24 @@
 %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 
-<%@ page import="java.util.*" %>
+<%@ page import="org.apache.tajo.util.JSPUtil" %>
 <%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
-<%@ page import="org.apache.tajo.worker.*" %>
+<%@ page import="org.apache.tajo.worker.TajoWorker" %>
+<%@ page import="org.apache.tajo.worker.TaskRunner" %>
 <%@ page import="java.text.SimpleDateFormat" %>
+<%@ page import="java.util.ArrayList" %>
+<%@ page import="java.util.List" %>
+<%@ page import="org.apache.tajo.worker.TaskRunnerHistory" %>
+<%@ page import="org.apache.tajo.worker.TaskRunnerHistory" %>
 
 <%
   TajoWorker tajoWorker = (TajoWorker) StaticHttpServer.getInstance().getAttribute("tajo.info.server.object");
 
   List<TaskRunner> taskRunners = new ArrayList<TaskRunner>(tajoWorker.getWorkerContext().getTaskRunnerManager().getTaskRunners());
-  List<TaskRunner> finishedTaskRunners = new ArrayList<TaskRunner>(tajoWorker.getWorkerContext().getTaskRunnerManager().getFinishedTaskRunners());
+  List<TaskRunnerHistory> histories = new ArrayList<TaskRunnerHistory>(tajoWorker.getWorkerContext().getTaskRunnerManager().getExecutionBlockHistories());
 
   JSPUtil.sortTaskRunner(taskRunners);
-  JSPUtil.sortTaskRunner(finishedTaskRunners);
+  JSPUtil.sortTaskRunnerHistory(histories);
 
   SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 %>
@@ -55,7 +60,7 @@
       for(TaskRunner eachTaskRunner: taskRunners) {
 %>
     <tr>
-      <td><a href="tasks.jsp?containerId=<%=eachTaskRunner.getId()%>"><%=eachTaskRunner.getId()%></a></td>
+      <td><a href="tasks.jsp?taskRunnerId=<%=eachTaskRunner.getId()%>"><%=eachTaskRunner.getId()%></a></td>
       <td><%=df.format(eachTaskRunner.getStartTime())%></td>
       <td><%=eachTaskRunner.getFinishTime() == 0 ? "-" : df.format(eachTaskRunner.getFinishTime())%></td>
       <td><%=JSPUtil.getElapsedTime(eachTaskRunner.getStartTime(), eachTaskRunner.getFinishTime())%></td>
@@ -70,14 +75,15 @@
   <table width="100%" border="1" class="border_table">
     <tr><th>ContainerId</th><th>StartTime</th><th>FinishTime</th><th>RunTime</th><th>Status</th></tr>
 <%
-      for(TaskRunner eachTaskRunner: finishedTaskRunners) {
+      for(TaskRunnerHistory history: histories) {
+          String taskRunnerId = TaskRunner.getId(history.getExecutionBlockId(), history.getContainerId());
 %>
     <tr>
-      <td><a href="tasks.jsp?containerId=<%=eachTaskRunner.getId()%>"><%=eachTaskRunner.getId()%></a></td>
-      <td><%=df.format(eachTaskRunner.getStartTime())%></td>
-      <td><%=eachTaskRunner.getFinishTime() == 0 ? "-" : df.format(eachTaskRunner.getFinishTime())%></td>
-      <td><%=JSPUtil.getElapsedTime(eachTaskRunner.getStartTime(), eachTaskRunner.getFinishTime())%></td>
-      <td><%=eachTaskRunner.getServiceState()%></td>
+        <td><a href="tasks.jsp?taskRunnerId=<%=taskRunnerId%>"><%=taskRunnerId%></a></td>
+      <td><%=df.format(history.getStartTime())%></td>
+      <td><%=history.getFinishTime() == 0 ? "-" : df.format(history.getFinishTime())%></td>
+      <td><%=JSPUtil.getElapsedTime(history.getStartTime(), history.getFinishTime())%></td>
+      <td><%=history.getState()%></td>
 <%
   }
 %>
diff --git a/tajo-core/src/main/resources/webapps/worker/taskdetail.jsp b/tajo-core/src/main/resources/webapps/worker/taskdetail.jsp
index b264081..e20ab03 100644
--- a/tajo-core/src/main/resources/webapps/worker/taskdetail.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/taskdetail.jsp
@@ -19,14 +19,13 @@
 %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 
+<%@ page import="org.apache.commons.lang.StringUtils" %>
 <%@ page import="org.apache.tajo.QueryUnitAttemptId" %>
+<%@ page import="org.apache.tajo.ipc.TajoWorkerProtocol" %>
 <%@ page import="org.apache.tajo.util.TajoIdUtils" %>
-<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
-<%@ page import="org.apache.tajo.worker.TajoWorker" %>
-<%@ page import="org.apache.tajo.worker.Task" %>
-<%@ page import="org.apache.tajo.worker.TaskHistory" %>
-<%@ page import="org.apache.tajo.worker.TaskRunner" %>
+<%@ page import="org.apache.tajo.worker.*" %>
 <%@ page import="java.text.SimpleDateFormat" %>
+<%@ page import="java.util.List" %>
 
 <%
     TajoWorker tajoWorker = (TajoWorker) StaticHttpServer.getInstance().getAttribute("tajo.info.server.object");
@@ -37,42 +36,49 @@
     Task task = null;
     TaskHistory taskHistory = null;
     if(containerId == null || containerId.isEmpty() || "null".equals(containerId)) {
-        task = tajoWorker.getWorkerContext().getTaskRunnerManager().findTaskByQueryUnitAttemptId(queryUnitAttemptId);
+        task = tajoWorker.getWorkerContext().getTaskRunnerManager().getTaskByQueryUnitAttemptId(queryUnitAttemptId);
         if (task != null) {
-            taskHistory = task.getTaskHistory();
+            taskHistory = task.createTaskHistory();
         } else {
-            taskHistory = tajoWorker.getWorkerContext().getTaskRunnerManager().findTaskHistoryByQueryUnitAttemptId(queryUnitAttemptId);
+            taskHistory = tajoWorker.getWorkerContext().getTaskRunnerManager().getTaskHistoryByQueryUnitAttemptId(queryUnitAttemptId);
         }
     } else {
-        TaskRunner taskRunner = tajoWorker.getWorkerContext().getTaskRunnerManager().findTaskRunner(containerId);
-        if(taskRunner != null) {
-            task = taskRunner.getContext().getTask(queryUnitAttemptId);
+        TaskRunner runner = tajoWorker.getWorkerContext().getTaskRunnerManager().getTaskRunner(containerId);
+        if(runner != null) {
+            task = runner.getContext().getTask(queryUnitAttemptId);
             if (task != null) {
-                taskHistory = task.getTaskHistory();
+                taskHistory = task.createTaskHistory();
             } else {
-                taskHistory = taskRunner.getContext().getTaskHistory(queryUnitAttemptId);
+                TaskRunnerHistory history = tajoWorker.getWorkerContext().getTaskRunnerManager().getExcutionBlockHistoryByTaskRunnerId(containerId);
+                if(history != null) {
+                    taskHistory = history.getTaskHistory(queryUnitAttemptId);
+                }
             }
         }
     }
-    if(taskHistory == null) {
-%>
-<script type="text/javascript">
-    alert("No Task Info for" + quAttemptId);
-    document.history.back();
-</script>
-<%
-        return;
-    }
-
     SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 %>
-
 <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
 <html>
 <head>
-    <link rel="stylesheet" type = "text/css" href = "/static/style.css" />
+    <link rel="stylesheet" type="text/css" href="/static/style.css"/>
     <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
     <title>tajo worker</title>
+    <%
+        if (taskHistory == null) {
+    %>
+    <script type="text/javascript">
+        alert("No Task Info for" + quAttemptId);
+        document.history.back();
+    </script>
+    </head>
+</div>
+</body>
+</html>
+    <%
+            return;
+        }
+    %>
 </head>
 <body>
 <%@ include file="header.jsp"%>
@@ -82,46 +88,127 @@
     <h3>Task Detail: <%=quAttemptId%></h3>
     <table border="1" width="100%" class="border_table">
         <tr><td width="200" align="right">ID</td><td><%=quAttemptId%></td></tr>
-        <tr><td align="right">State</td><td><%=taskHistory.getStatus()%></td></tr>
+        <tr><td align="right">State</td><td><%=taskHistory.getState()%></td></tr>
         <tr><td align="right">Start Time</td><td><%=taskHistory.getStartTime() == 0 ? "-" : df.format(taskHistory.getStartTime())%></td></tr>
         <tr><td align="right">Finish Time</td><td><%=taskHistory.getFinishTime() == 0 ? "-" : df.format(taskHistory.getFinishTime())%></td></tr>
         <tr><td align="right">Running Time</td><td><%=JSPUtil.getElapsedTime(taskHistory.getStartTime(), taskHistory.getFinishTime())%></td></tr>
         <tr><td align="right">Progress</td><td><%=JSPUtil.percentFormat(taskHistory.getProgress())%>%</td></tr>
         <tr><td align="right">Output Path</td><td><%=taskHistory.getOutputPath()%></td></tr>
         <tr><td align="right">Working Path</td><td><%=taskHistory.getWorkingPath()%></td></tr>
-        <tr><td align="right">Input Statistics</td><td><%=TaskHistory.toInputStatsString(taskHistory.getInputStats())%></td></tr>
-        <tr><td align="right">Output Statistics</td><td><%=TaskHistory.toOutputStatsString(taskHistory.getOutputStats())%></td></tr>
+        <tr><td align="right">Input Statistics</td><td><%=JSPUtil.tableStatToString(taskHistory.getInputStats())%></td></tr>
+        <tr><td align="right">Output Statistics</td><td><%=JSPUtil.tableStatToString(taskHistory.getOutputStats())%></td></tr>
     </table>
-
-<%
-    if(taskHistory.hasFetcher()) {
-%>
     <hr/>
-    <h3>Fetch Status</h3>
+    <%
+        if (taskHistory.hasFetcherHistories()) {
+    %>
+    <h3>Fetch Status &nbsp;
+        <span><%= taskHistory.getFinishedFetchCount() + "/" + taskHistory.getTotalFetchCount() %> (Finished/Total)</span>
+    </h3>
+
+    <%
+        int index = 1;
+        int pageSize = 1000; //TODO pagination
+
+        List<TajoWorkerProtocol.FetcherHistoryProto> fetcherHistories = taskHistory.getFetcherHistories();
+        if (fetcherHistories.size() > 0) {
+
+    %>
+
     <table border="1" width="100%" class="border_table">
-        <tr><th>No</th><th>StartTime</th><th>FinishTime</th><th>RunTime</th><th>Status</th><th>File Length</th><th># Messages</th><th>URI</th></tr>
-<%
-    int index = 1;
-    for(TaskHistory.FetcherHistory eachFetcher: taskHistory.getFetchers()) {
-%>
         <tr>
-            <td><%=index%></td>
-            <td><%=df.format(eachFetcher.getStartTime())%></td>
-            <td><%=eachFetcher.getFinishTime() == 0 ? "-" : df.format(eachFetcher.getFinishTime())%></td>
-            <td><%=JSPUtil.getElapsedTime(eachFetcher.getStartTime(), eachFetcher.getFinishTime())%></td>
-            <td><%=eachFetcher.getStatus()%></td>
-            <td align="right"><%=eachFetcher.getFileLen()%></td>
-            <td align="right"><%=eachFetcher.getMessageReceiveCount()%></td>
-            <td><a href="<%=eachFetcher.getUri()%>"><%=eachFetcher.getUri()%></a></td>
+            <th>No</th>
+            <th>StartTime</th>
+            <th>FinishTime</th>
+            <th>RunTime</th>
+            <th>Status</th>
+            <th>File Length</th>
+            <th># Messages</th>
         </tr>
-<%
-        index++;
-    }
-%>
+        <%
+            for (TajoWorkerProtocol.FetcherHistoryProto eachFetcher : fetcherHistories) {
+        %>
+        <tr>
+            <td><%=index%>
+            </td>
+            <td><%=df.format(eachFetcher.getStartTime())%>
+            </td>
+            <td><%=eachFetcher.getFinishTime() == 0 ? "-" : df.format(eachFetcher.getFinishTime())%>
+            </td>
+            <td><%=JSPUtil.getElapsedTime(eachFetcher.getStartTime(), eachFetcher.getFinishTime())%>
+            </td>
+            <td><%=eachFetcher.getState()%>
+            </td>
+            <td align="right"><%=eachFetcher.getFileLength()%>
+            </td>
+            <td align="right"><%=eachFetcher.getMessageReceivedCount()%>
+            </td>
+        </tr>
+        <%
+            index++;
+            if (pageSize < index) {
+        %>
+        <tr>
+            <td colspan="8">has more ...</td>
+        </tr>
+        <%
+                    break;
+                }
+            }
+        %>
     </table>
-<%
-    }
-%>
+    <%
+    } else if (task != null) {
+    %>
+    <table border="1" width="100%" class="border_table">
+        <tr>
+            <th>No</th>
+            <th>StartTime</th>
+            <th>FinishTime</th>
+            <th>RunTime</th>
+            <th>Status</th>
+            <th>File Length</th>
+            <th># Messages</th>
+            <th>URI</th>
+        </tr>
+        <%
+            for (Fetcher eachFetcher : task.getFetchers()) {
+        %>
+        <tr>
+            <td><%=index%>
+            </td>
+            <td><%=df.format(eachFetcher.getStartTime())%>
+            </td>
+            <td><%=eachFetcher.getFinishTime() == 0 ? "-" : df.format(eachFetcher.getFinishTime())%>
+            </td>
+            <td><%=JSPUtil.getElapsedTime(eachFetcher.getStartTime(), eachFetcher.getFinishTime())%>
+            </td>
+            <td><%=eachFetcher.getState()%>
+            </td>
+            <td align="right"><%=eachFetcher.getFileLen()%>
+            </td>
+            <td align="right"><%=eachFetcher.getMessageReceiveCount()%>
+            </td>
+            <td><a href="<%=eachFetcher.getURI()%>"><%=StringUtils.abbreviate(eachFetcher.getURI().toString(), 50)%>
+            </a></td>
+        </tr>
+        <%
+            index++;
+            if (pageSize < index) {
+        %>
+        <tr>
+            <td colspan="8">has more ...</td>
+        </tr>
+        <%
+                    break;
+                }
+            }
+        %>
+    </table>
+    <%
+            }
+        }
+    %>
 </div>
 </body>
 </html>
\ No newline at end of file
diff --git a/tajo-core/src/main/resources/webapps/worker/tasks.jsp b/tajo-core/src/main/resources/webapps/worker/tasks.jsp
index 7b65989..b5fb9d7 100644
--- a/tajo-core/src/main/resources/webapps/worker/tasks.jsp
+++ b/tajo-core/src/main/resources/webapps/worker/tasks.jsp
@@ -19,37 +19,41 @@
 %>
 <%@ page language="java" contentType="text/html; charset=UTF-8" pageEncoding="UTF-8"%>
 
-<%@ page import="java.util.*" %>
-<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
-<%@ page import="org.apache.tajo.worker.*" %>
-<%@ page import="java.text.SimpleDateFormat" %>
 <%@ page import="org.apache.tajo.QueryUnitAttemptId" %>
+<%@ page import="org.apache.tajo.util.JSPUtil" %>
+<%@ page import="org.apache.tajo.webapp.StaticHttpServer" %>
+<%@ page import="java.text.SimpleDateFormat" %>
+<%@ page import="java.util.Map" %>
+<%@ page import="org.apache.tajo.worker.*" %>
 
 <%
-    String containerId = request.getParameter("containerId");
+    String containerId = request.getParameter("taskRunnerId");
     TajoWorker tajoWorker = (TajoWorker) StaticHttpServer.getInstance().getAttribute("tajo.info.server.object");
 
-    TaskRunner taskRunner = tajoWorker.getWorkerContext().getTaskRunnerManager().findTaskRunner(containerId);
-    if(taskRunner == null) {
-%>
-<script type="text/javascript">
-    alert("No Task Container for" + containerId);
-    document.history.back();
-</script>
-<%
-        return;
-    }
-
-    TaskRunner.TaskRunnerContext taskRunnerContext = taskRunner.getContext();
+    TaskRunner taskRunner = tajoWorker.getWorkerContext().getTaskRunnerManager().getTaskRunner(containerId);
+    org.apache.tajo.worker.TaskRunnerHistory history = tajoWorker.getWorkerContext().getTaskRunnerManager().getExcutionBlockHistoryByTaskRunnerId(containerId);
     SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
 %>
-
 <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
 <html>
 <head>
-    <link rel="stylesheet" type = "text/css" href = "/static/style.css" />
+    <link rel="stylesheet" type="text/css" href="/static/style.css"/>
     <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
     <title>tajo worker</title>
+    <%
+        if (taskRunner == null && history == null) {
+    %>
+    <script type="text/javascript">
+        alert("No Task Container for" + containerId);
+        document.history.back();
+    </script>
+</head>
+</body>
+</html>
+<%
+        return;
+    }
+%>
 </head>
 <body>
 <%@ include file="header.jsp"%>
@@ -59,35 +63,44 @@
     <h3>Tasks</h3>
     <table width="100%" border="1" class="border_table">
         <tr><th>Id</th><th>StartTime</th><th>FinishTime</th><th>RunTime</th><th>Status</th></tr>
-<%
-    for(Map.Entry<QueryUnitAttemptId, Task> entry: taskRunnerContext.getTasks().entrySet()) {
-        QueryUnitAttemptId queryUnitId = entry.getKey();
-        TaskHistory eachTask = entry.getValue().getTaskHistory();
-%>
-        <tr>
-            <td><a href="taskdetail.jsp?containerId=<%=containerId%>&queryUnitAttemptId=<%=queryUnitId%>"><%=queryUnitId%></a></td>
-            <td><%=df.format(eachTask.getStartTime())%></td>
-            <td><%=eachTask.getFinishTime() == 0 ? "-" : df.format(eachTask.getFinishTime())%></td>
-            <td><%=JSPUtil.getElapsedTime(eachTask.getStartTime(), eachTask.getFinishTime())%></td>
-            <td><%=eachTask.getStatus()%></td>
-        </tr>
-<%
-    }
+        <%
+            if (taskRunner != null) {
+                TaskRunner.TaskRunnerContext taskRunnerContext = taskRunner.getContext();
 
-    for(Map.Entry<QueryUnitAttemptId, TaskHistory> entry: taskRunnerContext.getTaskHistories().entrySet()) {
-        QueryUnitAttemptId queryUnitId = entry.getKey();
-        TaskHistory eachTask = entry.getValue();
-%>
-        <tr>
-            <td><a href="taskdetail.jsp?containerId=<%=containerId%>&queryUnitAttemptId=<%=queryUnitId%>"><%=queryUnitId%></a></td>
-            <td><%=df.format(eachTask.getStartTime())%></td>
-            <td><%=eachTask.getFinishTime() == 0 ? "-" : df.format(eachTask.getFinishTime())%></td>
-            <td><%=JSPUtil.getElapsedTime(eachTask.getStartTime(), eachTask.getFinishTime())%></td>
-            <td><%=eachTask.getStatus()%></td>
-        </tr>
-<%
-    }
-%>
+                for (Map.Entry<QueryUnitAttemptId, Task> entry : taskRunnerContext.getTasks().entrySet()) {
+                    QueryUnitAttemptId queryUnitId = entry.getKey();
+                    TaskHistory eachTask = entry.getValue().createTaskHistory();
+        %>
+                    <tr>
+                        <td>
+                            <a href="taskdetail.jsp?containerId=<%=containerId%>&queryUnitAttemptId=<%=queryUnitId%>"><%=queryUnitId%></a></td>
+                        <td><%=df.format(eachTask.getStartTime())%></td>
+                        <td><%=eachTask.getFinishTime() == 0 ? "-" : df.format(eachTask.getFinishTime())%></td>
+                        <td><%=JSPUtil.getElapsedTime(eachTask.getStartTime(), eachTask.getFinishTime())%></td>
+                        <td><%=eachTask.getState()%></td>
+                    </tr>
+        <%
+                }
+            }
+
+            if (history != null) {
+
+
+                for (Map.Entry<QueryUnitAttemptId, TaskHistory> entry : history.getTaskHistoryMap().entrySet()) {
+                    QueryUnitAttemptId queryUnitId = entry.getKey();
+                    TaskHistory eachTask = entry.getValue();
+        %>
+                        <tr>
+                            <td><a href="taskdetail.jsp?containerId=<%=containerId%>&queryUnitAttemptId=<%=queryUnitId%>"><%=queryUnitId%></a></td>
+                            <td><%=df.format(eachTask.getStartTime())%></td>
+                            <td><%=eachTask.getFinishTime() == 0 ? "-" : df.format(eachTask.getFinishTime())%></td>
+                            <td><%=JSPUtil.getElapsedTime(eachTask.getStartTime(), eachTask.getFinishTime())%></td>
+                            <td><%=eachTask.getState()%></td>
+                        </tr>
+        <%
+                }
+            }
+        %>
     </table>
 </div>
 </body>
diff --git a/tajo-core/src/test/java/org/apache/tajo/QueryTestCaseBase.java b/tajo-core/src/test/java/org/apache/tajo/QueryTestCaseBase.java
index 1d6ee86..70c73f9 100644
--- a/tajo-core/src/test/java/org/apache/tajo/QueryTestCaseBase.java
+++ b/tajo-core/src/test/java/org/apache/tajo/QueryTestCaseBase.java
@@ -21,6 +21,7 @@
 import com.google.protobuf.ServiceException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.tajo.algebra.*;
@@ -40,6 +41,7 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.net.URL;
 import java.sql.ResultSet;
 import java.sql.ResultSetMetaData;
@@ -234,11 +236,20 @@
    * @return ResultSet of query execution.
    */
   public ResultSet executeQuery() throws Exception {
-    return executeFile(name.getMethodName() + ".sql");
+    return executeFile(getMethodName() + ".sql");
+  }
+
+  private String getMethodName() {
+    String methodName = name.getMethodName();
+    // In the case of parameter execution name's pattern is methodName[0]
+    if (methodName.endsWith("]")) {
+      methodName = methodName.substring(0, methodName.length() - 3);
+    }
+    return methodName;
   }
 
   public ResultSet executeJsonQuery() throws Exception {
-    return executeJsonFile(name.getMethodName() + ".json");
+    return executeJsonFile(getMethodName() + ".json");
   }
 
   /**
@@ -279,7 +290,7 @@
    * @param result Query result to be compared.
    */
   public final void assertResultSet(ResultSet result) throws IOException {
-    assertResultSet("Result Verification", result, name.getMethodName() + ".result");
+    assertResultSet("Result Verification", result, getMethodName() + ".result");
   }
 
   /**
@@ -312,7 +323,7 @@
   }
 
   public final void assertStrings(String actual) throws IOException {
-    assertStrings(actual, name.getMethodName() + ".result");
+    assertStrings(actual, getMethodName() + ".result");
   }
 
   public final void assertStrings(String actual, String resultFileName) throws IOException {
@@ -410,7 +421,10 @@
     while (resultSet.next()) {
       for (int i = 1; i <= numOfColumns; i++) {
         if (i > 1) sb.append(",");
-        String columnValue = resultSet.getObject(i).toString();
+        String columnValue = resultSet.getString(i);
+        if (resultSet.wasNull()) {
+          columnValue = "null";
+        }
         sb.append(columnValue);
       }
       sb.append("\n");
@@ -550,4 +564,61 @@
     }
     return result;
   }
+
+  /**
+   * Reads data file from Test Cluster's HDFS
+   * @param path data parent path
+   * @return data file's contents
+   * @throws Exception
+   */
+  public String getTableFileContents(Path path) throws Exception {
+    FileSystem fs = path.getFileSystem(conf);
+
+    FileStatus[] files = fs.listStatus(path);
+
+    if (files == null || files.length == 0) {
+      return null;
+    }
+
+    StringBuilder sb = new StringBuilder();
+    byte[] buf = new byte[1024];
+
+    for (FileStatus file: files) {
+      if (file.isDirectory()) {
+        continue;
+      }
+
+      InputStream in = fs.open(file.getPath());
+      try {
+        while (true) {
+          int readBytes = in.read(buf);
+          if (readBytes <= 0) {
+            break;
+          }
+
+          sb.append(new String(buf, 0, readBytes));
+        }
+      } finally {
+        in.close();
+      }
+    }
+
+    return sb.toString();
+  }
+
+  /**
+   * Reads data file from Test Cluster's HDFS
+   * @param tableName
+   * @return data file's contents
+   * @throws Exception
+   */
+  public String getTableFileContents(String tableName) throws Exception {
+    TableDesc tableDesc = testingCluster.getMaster().getCatalog().getTableDesc(getCurrentDatabase(), tableName);
+    if (tableDesc == null) {
+      return null;
+    }
+
+    Path path = tableDesc.getPath();
+    return getTableFileContents(path);
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/TajoTestingCluster.java b/tajo-core/src/test/java/org/apache/tajo/TajoTestingCluster.java
index 011ed07..38462bb 100644
--- a/tajo-core/src/test/java/org/apache/tajo/TajoTestingCluster.java
+++ b/tajo-core/src/test/java/org/apache/tajo/TajoTestingCluster.java
@@ -33,7 +33,6 @@
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.tajo.catalog.*;
-import org.apache.tajo.util.KeyValueSet;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.client.TajoClient;
 import org.apache.tajo.conf.TajoConf;
@@ -41,6 +40,7 @@
 import org.apache.tajo.master.TajoMaster;
 import org.apache.tajo.master.rm.TajoWorkerResourceManager;
 import org.apache.tajo.util.CommonTestingUtil;
+import org.apache.tajo.util.KeyValueSet;
 import org.apache.tajo.util.NetUtils;
 import org.apache.tajo.worker.TajoWorker;
 
@@ -560,7 +560,7 @@
 
   public static ResultSet run(String[] names,
                               Schema[] schemas,
-                              KeyValueSet option,
+                              KeyValueSet tableOption,
                               String[][] tables,
                               String query) throws Exception {
     TpchTestBase instance = TpchTestBase.getInstance();
@@ -573,26 +573,73 @@
     }
     TajoConf conf = util.getConfiguration();
     TajoClient client = new TajoClient(conf);
-
-    FileSystem fs = util.getDefaultFileSystem();
-    Path rootDir = util.getMaster().
-        getStorageManager().getWarehouseDir();
-    fs.mkdirs(rootDir);
-    for (int i = 0; i < names.length; i++) {
-      Path tablePath = new Path(rootDir, names[i]);
-      fs.mkdirs(tablePath);
-      Path dfsPath = new Path(tablePath, names[i] + ".tbl");
-      FSDataOutputStream out = fs.create(dfsPath);
-      for (int j = 0; j < tables[i].length; j++) {
-        out.write((tables[i][j]+"\n").getBytes());
+    try {
+      FileSystem fs = util.getDefaultFileSystem();
+      Path rootDir = util.getMaster().
+          getStorageManager().getWarehouseDir();
+      fs.mkdirs(rootDir);
+      for (int i = 0; i < names.length; i++) {
+        createTable(names[i], schemas[i], tableOption, tables[i]);
       }
-      out.close();
-      TableMeta meta = CatalogUtil.newTableMeta(CatalogProtos.StoreType.CSV, option);
-      client.createExternalTable(names[i], schemas[i], tablePath, meta);
+      Thread.sleep(1000);
+      ResultSet res = client.executeQueryAndGetResult(query);
+      return res;
+    } finally {
+      client.close();
     }
-    Thread.sleep(1000);
-    ResultSet res = client.executeQueryAndGetResult(query);
-    return res;
+  }
+
+  public static void createTable(String tableName, Schema schema,
+                                 KeyValueSet tableOption, String[] tableDatas) throws Exception {
+    createTable(tableName, schema, tableOption, tableDatas, 1);
+  }
+
+  public static void createTable(String tableName, Schema schema,
+                                 KeyValueSet tableOption, String[] tableDatas, int numDataFiles) throws Exception {
+    TpchTestBase instance = TpchTestBase.getInstance();
+    TajoTestingCluster util = instance.getTestingCluster();
+    while(true) {
+      if(util.getMaster().isMasterRunning()) {
+        break;
+      }
+      Thread.sleep(1000);
+    }
+    TajoConf conf = util.getConfiguration();
+    TajoClient client = new TajoClient(conf);
+    try {
+      FileSystem fs = util.getDefaultFileSystem();
+      Path rootDir = util.getMaster().
+          getStorageManager().getWarehouseDir();
+      if (!fs.exists(rootDir)) {
+        fs.mkdirs(rootDir);
+      }
+      Path tablePath = new Path(rootDir, tableName);
+      fs.mkdirs(tablePath);
+      if (tableDatas.length > 0) {
+        int recordPerFile = tableDatas.length / numDataFiles;
+        if (recordPerFile == 0) {
+          recordPerFile = 1;
+        }
+        FSDataOutputStream out = null;
+        for (int j = 0; j < tableDatas.length; j++) {
+          if (out == null || j % recordPerFile == 0) {
+            if (out != null) {
+              out.close();
+            }
+            Path dfsPath = new Path(tablePath, tableName + j + ".tbl");
+            out = fs.create(dfsPath);
+          }
+          out.write((tableDatas[j] + "\n").getBytes());
+        }
+        if (out != null) {
+          out.close();
+        }
+      }
+      TableMeta meta = CatalogUtil.newTableMeta(CatalogProtos.StoreType.CSV, tableOption);
+      client.createExternalTable(tableName, schema, tablePath, meta);
+    } finally {
+      client.close();
+    }
   }
 
     /**
@@ -617,6 +664,10 @@
 
   public void setAllTajoDaemonConfValue(String key, String value) {
     tajoMaster.getContext().getConf().set(key, value);
+    setAllWorkersConfValue(key, value);
+  }
+
+  public void setAllWorkersConfValue(String key, String value) {
     for (TajoWorker eachWorker: tajoWorkers) {
       eachWorker.getConfig().set(key, value);
     }
diff --git a/tajo-core/src/test/java/org/apache/tajo/cli/TestTajoCli.java b/tajo-core/src/test/java/org/apache/tajo/cli/TestTajoCli.java
index b3fbda4..f862cb1 100644
--- a/tajo-core/src/test/java/org/apache/tajo/cli/TestTajoCli.java
+++ b/tajo-core/src/test/java/org/apache/tajo/cli/TestTajoCli.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.tajo.TajoTestingCluster;
 import org.apache.tajo.TpchTestBase;
+import org.apache.tajo.client.QueryStatus;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.conf.TajoConf.ConfVars;
 import org.apache.tajo.storage.StorageUtil;
@@ -36,6 +37,7 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.PrintWriter;
 import java.net.URL;
 
 import static org.junit.Assert.assertEquals;
@@ -195,6 +197,58 @@
     }
   }
 
+  @Test
+  public void testSelectResultWithNullFalse() throws Exception {
+    String sql =
+        "select\n" +
+            "  c_custkey,\n" +
+            "  orders.o_orderkey,\n" +
+            "  orders.o_orderstatus \n" +
+            "from\n" +
+            "  orders full outer join customer on c_custkey = o_orderkey\n" +
+            "order by\n" +
+            "  c_custkey,\n" +
+            "  orders.o_orderkey;\n";
+
+    TajoConf tajoConf = TpchTestBase.getInstance().getTestingCluster().getConfiguration();
+    tajoConf.setVar(ConfVars.CLI_OUTPUT_FORMATTER_CLASS, TajoCliOutputTestFormatter.class.getName());
+
+    tajoConf.setVar(ConfVars.CLI_NULL_CHAR, "");
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    tajoCli = new TajoCli(tajoConf, new String[]{}, System.in, out);
+    tajoCli.executeScript(sql);
+
+    String consoleResult = new String(out.toByteArray());
+    assertOutputResult(consoleResult);
+  }
+
+  @Test
+  public void testSelectResultWithNullTrue() throws Exception {
+    String sql =
+        "select\n" +
+        "  c_custkey,\n" +
+        "  orders.o_orderkey,\n" +
+        "  orders.o_orderstatus \n" +
+        "from\n" +
+        "  orders full outer join customer on c_custkey = o_orderkey\n" +
+        "order by\n" +
+        "  c_custkey,\n" +
+        "  orders.o_orderkey;\n";
+
+    TajoConf tajoConf = TpchTestBase.getInstance().getTestingCluster().getConfiguration();
+    tajoConf.setVar(ConfVars.CLI_OUTPUT_FORMATTER_CLASS, TajoCliOutputTestFormatter.class.getName());
+
+    tajoConf.setVar(ConfVars.CLI_NULL_CHAR, "testnull");
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    tajoCli = new TajoCli(tajoConf, new String[]{}, System.in, out);
+    tajoCli.executeScript(sql);
+
+    String consoleResult = new String(out.toByteArray());
+    assertOutputResult(consoleResult);
+  }
+
   private void assertOutputResult(String actual) throws Exception {
     assertOutputResult(name.getMethodName() + ".result", actual);
   }
@@ -221,10 +275,31 @@
     assertEquals(expectedResult, actual);
   }
 
+  @Test
+  public void testStopWhenError() throws Exception {
+    TajoConf tajoConf = TpchTestBase.getInstance().getTestingCluster().getConfiguration();
+    tajoConf.setVar(ConfVars.CLI_OUTPUT_FORMATTER_CLASS, TajoCliOutputTestFormatter.class.getName());
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    tajoCli = new TajoCli(tajoConf, new String[]{}, System.in, out);
+    tajoCli.executeMetaCommand("\\set tajo.cli.error.stop true");
+
+    tajoCli.executeScript("select count(*) from lineitem; " +
+        "select count(*) from lineitem2; " +
+        "select count(*) from orders");
+
+    String consoleResult = new String(out.toByteArray());
+    assertOutputResult(consoleResult);
+  }
+
   public static class TajoCliOutputTestFormatter extends DefaultTajoCliOutputFormatter {
     @Override
     protected String getResponseTimeReadable(float responseTime) {
       return "";
     }
+    @Override
+    public void printProgress(PrintWriter sout, QueryStatus status) {
+      //nothing to do
+    }
   }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/client/TestTajoClient.java b/tajo-core/src/test/java/org/apache/tajo/client/TestTajoClient.java
index 49e6874..732f02c 100644
--- a/tajo-core/src/test/java/org/apache/tajo/client/TestTajoClient.java
+++ b/tajo-core/src/test/java/org/apache/tajo/client/TestTajoClient.java
@@ -24,6 +24,7 @@
 import com.google.protobuf.ServiceException;
 import com.sun.org.apache.commons.logging.Log;
 import com.sun.org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.tajo.*;
@@ -32,8 +33,10 @@
 import org.apache.tajo.catalog.TableDesc;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.conf.TajoConf.ConfVars;
 import org.apache.tajo.ipc.ClientProtos;
 import org.apache.tajo.jdbc.TajoResultSet;
+import org.apache.tajo.storage.StorageConstants;
 import org.apache.tajo.storage.StorageUtil;
 import org.apache.tajo.util.CommonTestingUtil;
 import org.junit.AfterClass;
@@ -42,12 +45,12 @@
 import org.junit.experimental.categories.Category;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.*;
 
 import static org.junit.Assert.*;
-import static org.junit.Assert.assertEquals;
 
 @Category(IntegrationTest.class)
 public class TestTajoClient {
@@ -162,6 +165,13 @@
   public final void testSessionVariables() throws IOException, ServiceException, InterruptedException {
     String prefixName = "key_";
     String prefixValue = "val_";
+
+    List<String> unsetList = new ArrayList<String>();
+    for(Map.Entry<String, String> entry: client.getAllSessionVariables().entrySet()) {
+      unsetList.add(entry.getKey());
+    }
+    client.unsetSessionVariables(unsetList);
+
     for (int i = 0; i < 10; i++) {
       String key = prefixName + i;
       String val = prefixValue + i;
@@ -663,4 +673,53 @@
       client.closeQuery(queryId);
     }
   }
+
+  @Test
+  public void testSetCvsNull() throws Exception {
+    String sql =
+        "select\n" +
+            "  c_custkey,\n" +
+            "  orders.o_orderkey,\n" +
+            "  orders.o_orderstatus \n" +
+            "from\n" +
+            "  orders full outer join customer on c_custkey = o_orderkey\n" +
+            "order by\n" +
+            "  c_custkey,\n" +
+            "  orders.o_orderkey;\n";
+
+    TajoConf tajoConf = TpchTestBase.getInstance().getTestingCluster().getConfiguration();
+
+    Map<String, String> variables = new HashMap<String, String>();
+    variables.put(ConfVars.CSVFILE_NULL.varname, "\\\\T");
+    client.updateSessionVariables(variables);
+
+    TajoResultSet res = (TajoResultSet)client.executeQueryAndGetResult(sql);
+
+    assertEquals(res.getTableDesc().getMeta().getOption(StorageConstants.CSVFILE_NULL), "\\\\T");
+
+    Path path = res.getTableDesc().getPath();
+    FileSystem fs = path.getFileSystem(tajoConf);
+
+    FileStatus[] files = fs.listStatus(path);
+    assertNotNull(files);
+    assertEquals(1, files.length);
+
+    InputStream in = fs.open(files[0].getPath());
+    byte[] buf = new byte[1024];
+
+
+    int readBytes = in.read(buf);
+    assertTrue(readBytes > 0);
+
+    // text type field's value is replaced with \T
+    String expected = "1|1|O\n" +
+        "2|2|O\n" +
+        "3|3|F\n" +
+        "4||\\T\n" +
+        "5||\\T\n";
+
+    String resultDatas = new String(buf, 0, readBytes);
+
+    assertEquals(expected, resultDatas);
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/eval/ExprTestBase.java b/tajo-core/src/test/java/org/apache/tajo/engine/eval/ExprTestBase.java
index bab6ec7..0742a80 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/eval/ExprTestBase.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/eval/ExprTestBase.java
@@ -26,8 +26,9 @@
 import org.apache.tajo.cli.InvalidStatementException;
 import org.apache.tajo.cli.ParsedResult;
 import org.apache.tajo.cli.SimpleParser;
-import org.apache.tajo.datum.NullDatum;
-import org.apache.tajo.datum.TextDatum;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.datum.*;
 import org.apache.tajo.engine.json.CoreGsonHelper;
 import org.apache.tajo.engine.parser.SQLAnalyzer;
 import org.apache.tajo.engine.planner.*;
@@ -39,6 +40,7 @@
 import org.apache.tajo.storage.VTuple;
 import org.apache.tajo.util.Bytes;
 import org.apache.tajo.util.CommonTestingUtil;
+import org.apache.tajo.util.datetime.DateTimeUtil;
 import org.apache.tajo.util.KeyValueSet;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -60,6 +62,10 @@
   private static LogicalOptimizer optimizer;
   private static LogicalPlanVerifier annotatedPlanVerifier;
 
+  public static String getUserTimeZoneDisplay() {
+    return DateTimeUtil.getTimeZoneDisplayTime(TajoConf.getCurrentTimeZone());
+  }
+
   @BeforeClass
   public static void setUp() throws Exception {
     util = new TajoTestingCluster();
@@ -190,7 +196,16 @@
       }
 
       for (int i = 0; i < expected.length; i++) {
-        assertEquals(query, expected[i], outTuple.get(i).asChars());
+        Datum datum = outTuple.get(i);
+        String outTupleAsChars;
+        if (datum.type() == Type.TIMESTAMP) {
+          outTupleAsChars = ((TimestampDatum) datum).asChars(TajoConf.getCurrentTimeZone(), true);
+        } else if (datum.type() == Type.TIME) {
+          outTupleAsChars = ((TimeDatum) datum).asChars(TajoConf.getCurrentTimeZone(), true);
+        } else {
+          outTupleAsChars = datum.asChars();
+        }
+        assertEquals(query, expected[i], outTupleAsChars);
       }
     } catch (InvalidStatementException e) {
       assertFalse(e.getMessage(), true);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestIntervalType.java b/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestIntervalType.java
index 4d18be3..c054fd1 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestIntervalType.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestIntervalType.java
@@ -28,24 +28,32 @@
 public class TestIntervalType extends ExprTestBase {
   @Test
   public void testIntervalPostgresqlCase() throws IOException {
+
     // http://www.postgresql.org/docs/8.2/static/functions-datetime.html
     testSimpleEval("select date '2001-09-28' + 7", new String[]{"2001-10-05"});
-    testSimpleEval("select date '2001-09-28' + interval '1 hour'", new String[]{"2001-09-28 01:00:00"});
+    testSimpleEval("select date '2001-09-28' + interval '1 hour'",
+        new String[]{"2001-09-28 01:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("select date '2001-09-28' + time '03:00'", new String[]{"2001-09-28 03:00:00"});
+    testSimpleEval("select date '2001-09-28' + time '03:00'",
+        new String[]{"2001-09-28 03:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select time '03:00' + date '2001-09-28'",
+        new String[]{"2001-09-28 03:00:00" + getUserTimeZoneDisplay()});
     testSimpleEval("select interval '1 day' + interval '1 hour'", new String[]{"1 day 01:00:00"});
 
-    testSimpleEval("select timestamp '2001-09-28 01:00' + interval '23 hours'", new String[]{"2001-09-29 00:00:00"});
+    testSimpleEval("select timestamp '2001-09-28 01:00' + interval '23 hours'",
+        new String[]{"2001-09-29 00:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("select time '01:00' + interval '3 hours'", new String[]{"04:00:00"});
+    testSimpleEval("select time '01:00' + interval '3 hours'", new String[]{"04:00:00" + getUserTimeZoneDisplay()});
 
     testSimpleEval("select date '2001-10-01' - date '2001-09-28'", new String[]{"3"});
     testSimpleEval("select date '2001-10-01' - 7", new String[]{"2001-09-24"});
-    testSimpleEval("select date '2001-09-28' - interval '1 hour'", new String[]{"2001-09-27 23:00:00"});
+    testSimpleEval("select date '2001-09-28' - interval '1 hour'",
+        new String[]{"2001-09-27 23:00:00" + getUserTimeZoneDisplay()});
 
     testSimpleEval("select time '05:00' - time '03:00'", new String[]{"02:00:00"});
-    testSimpleEval("select time '05:00' - interval '2 hours'", new String[]{"03:00:00"});
-    testSimpleEval("select timestamp '2001-09-28 23:00' - interval '23 hours'", new String[]{"2001-09-28 00:00:00"});
+    testSimpleEval("select time '05:00' - interval '2 hours'", new String[]{"03:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select timestamp '2001-09-28 23:00' - interval '23 hours'",
+        new String[]{"2001-09-28 00:00:00" + getUserTimeZoneDisplay()});
 
     testSimpleEval("select interval '1 day' - interval '1 hour'", new String[]{"23:00:00"});
 
@@ -58,21 +66,33 @@
 
   @Test
   public void testCaseByCase() throws Exception {
-    testSimpleEval("select date '2001-08-28' + interval '10 day 1 hour'", new String[]{"2001-09-07 01:00:00"});
-    testSimpleEval("select interval '10 day 01:00:00' + date '2001-08-28'", new String[]{"2001-09-07 01:00:00"});
-    testSimpleEval("select time '10:20:30' + interval '1 day 01:00:00'", new String[]{"11:20:30"});
-    testSimpleEval("select interval '1 day 01:00:00' + time '10:20:30'", new String[]{"11:20:30"});
-    testSimpleEval("select time '10:20:30' - interval '1 day 01:00:00'", new String[]{"09:20:30"});
+    testSimpleEval("select date '2001-08-28' + interval '10 day 1 hour'",
+        new String[]{"2001-09-07 01:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select interval '10 day 01:00:00' + date '2001-08-28'",
+        new String[]{"2001-09-07 01:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select time '10:20:30' + interval '1 day 01:00:00'",
+        new String[]{"11:20:30" + getUserTimeZoneDisplay()});
+    testSimpleEval("select interval '1 day 01:00:00' + time '10:20:30'",
+        new String[]{"11:20:30" + getUserTimeZoneDisplay()});
+    testSimpleEval("select time '10:20:30' - interval '1 day 01:00:00'",
+        new String[]{"09:20:30" + getUserTimeZoneDisplay()});
 
     testSimpleEval("select (interval '1 month 20 day' + interval '50 day')", new String[]{"1 month 70 days"});
-    testSimpleEval("select date '2013-01-01' + interval '1 month 70 day'", new String[]{"2013-04-12 00:00:00"});
-    testSimpleEval("select date '2013-01-01' + (interval '1 month 20 day' + interval '50 day')", new String[]{"2013-04-12 00:00:00"});
-    testSimpleEval("select interval '1 month 70 day' + date '2013-01-01'", new String[]{"2013-04-12 00:00:00"});
-    testSimpleEval("select date '2013-01-01' - interval '1 month 70 day'", new String[]{"2012-09-22 00:00:00"});
+    testSimpleEval("select date '2013-01-01' + interval '1 month 70 day'",
+        new String[]{"2013-04-12 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select date '2013-01-01' + (interval '1 month 20 day' + interval '50 day')",
+        new String[]{"2013-04-12 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select interval '1 month 70 day' + date '2013-01-01'",
+        new String[]{"2013-04-12 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select date '2013-01-01' - interval '1 month 70 day'",
+        new String[]{"2012-09-22 00:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("select timestamp '2001-09-28 23:00' - interval '1 month 2 day 10:20:30'", new String[]{"2001-08-26 12:39:30"});
-    testSimpleEval("select timestamp '2001-09-28 23:00' + interval '1 month 2 day 10:20:30'", new String[]{"2001-10-31 09:20:30"});
-    testSimpleEval("select interval '1 month 2 day 10:20:30' + timestamp '2001-09-28 23:00'", new String[]{"2001-10-31 09:20:30"});
+    testSimpleEval("select timestamp '2001-09-28 23:00' - interval '1 month 2 day 10:20:30'",
+        new String[]{"2001-08-26 12:39:30" + getUserTimeZoneDisplay()});
+    testSimpleEval("select timestamp '2001-09-28 23:00' + interval '1 month 2 day 10:20:30'",
+        new String[]{"2001-10-31 09:20:30" + getUserTimeZoneDisplay()});
+    testSimpleEval("select interval '1 month 2 day 10:20:30' + timestamp '2001-09-28 23:00'",
+        new String[]{"2001-10-31 09:20:30" + getUserTimeZoneDisplay()});
 
 
     testSimpleEval("select interval '5 month' / 3", new String[]{"1 month 20 days"});
@@ -82,6 +102,15 @@
 
     testSimpleEval("select interval '1 month' * 2.3", new String[]{"2 months 9 days"});
     testSimpleEval("select interval '3 year 5 month 1 hour' / 1.5", new String[]{"2 years 3 months 10 days 00:40:00"});
+
+    testSimpleEval("select date '2001-09-28' - time '03:00'",
+        new String[]{"2001-09-27 21:00:00" + getUserTimeZoneDisplay()});
+
+    testSimpleEval("select date '2014-03-20' + interval '1 day'",
+        new String[]{"2014-03-21 00:00:00" + getUserTimeZoneDisplay()});
+
+    testSimpleEval("select date '2014-03-20' - interval '1 day'",
+        new String[]{"2014-03-19 00:00:00" + getUserTimeZoneDisplay()});
   }
 
   @Test
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLDateTimeTypes.java b/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLDateTimeTypes.java
index c9c8dd4..60f7dcd 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLDateTimeTypes.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLDateTimeTypes.java
@@ -26,22 +26,26 @@
 
   @Test
   public void testTimestamp() throws IOException {
-    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37';", new String[]{"1970-01-17 10:09:37"});
-    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37.5';", new String[]{"1970-01-17 10:09:37.5"});
-    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37.01';", new String[]{"1970-01-17 10:09:37.01"});
-    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37.003';", new String[]{"1970-01-17 10:09:37.003"});
+    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37';",
+        new String[]{"1970-01-17 10:09:37" + getUserTimeZoneDisplay()});
+    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37.5';",
+        new String[]{"1970-01-17 10:09:37.5" + getUserTimeZoneDisplay()});
+    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37.01';",
+        new String[]{"1970-01-17 10:09:37.01" + getUserTimeZoneDisplay()});
+    testSimpleEval("select TIMESTAMP '1970-01-17 10:09:37.003';",
+        new String[]{"1970-01-17 10:09:37.003" + getUserTimeZoneDisplay()});
   }
 
   @Test
   public void testToTimestamp() throws IOException {
-    testSimpleEval("select to_char(TIMESTAMP '1970-01-17 10:09:37', 'yyyy-MM-dd HH:mm:ss');",
+    testSimpleEval("select to_char(TIMESTAMP '1970-01-17 10:09:37', 'YYYY-MM-DD HH24:MI:SS');",
         new String[]{"1970-01-17 10:09:37"});
   }
 
   @Test
   public void testTimeLiteral() throws IOException {
     testSimpleEval("select TIME '10:09:37';",
-        new String[]{"10:09:37"});
+        new String[]{"10:09:37" + getUserTimeZoneDisplay()});
   }
 
   @Test
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLExpression.java b/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLExpression.java
index 6e35d3b..af084d9 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLExpression.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/eval/TestSQLExpression.java
@@ -22,12 +22,17 @@
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.exception.NoSuchFunctionException;
 import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.datum.DatumFactory;
 import org.apache.tajo.datum.TimestampDatum;
+import org.apache.tajo.util.datetime.DateTimeUtil;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.util.TimeZone;
 
 import static org.apache.tajo.common.TajoDataTypes.Type.TEXT;
+import static org.junit.Assert.fail;
 
 public class TestSQLExpression extends ExprTestBase {
 
@@ -43,9 +48,16 @@
         new String[]{"3", "234.0"});
   }
 
-  @Test(expected = NoSuchFunctionException.class)
+  @Test
   public void testNoSuchFunction() throws IOException {
-    testSimpleEval("select test123('abc') col1 ", new String[]{"abc"});
+    try {
+      testSimpleEval("select test123('abc') col1 ", new String[]{"abc"});
+      fail("This test should throw NoSuchFunctionException");
+    } catch (NoSuchFunctionException e) {
+      //success
+    } catch (Exception e) {
+      fail("This test should throw NoSuchFunctionException: " + e);
+    }
   }
 
   @Test
@@ -841,29 +853,39 @@
 
   @Test
   public void testCastWithNestedFunction() throws IOException {
-    int timestamp = (int) (System.currentTimeMillis() / 1000);
-    TimestampDatum expected = new TimestampDatum(timestamp);
-    testSimpleEval(String.format("select to_timestamp(CAST(split_part('%d.999', '.', 1) as INT8));", timestamp),
-        new String[] {expected.asChars()});
+    int unixtime = 1389071574;//(int) (System.currentTimeMillis() / 1000);
+    TimestampDatum expected = DatumFactory.createTimestmpDatumWithUnixTime(unixtime);
+    testSimpleEval(String.format("select to_timestamp(CAST(split_part('%d.999', '.', 1) as INT8));", unixtime),
+        new String[] {expected.asChars(TajoConf.getCurrentTimeZone(), true)});
   }
 
   @Test
   public void testCastFromTable() throws IOException {
-    Schema schema = new Schema();
-    schema.addColumn("col1", TEXT);
-    schema.addColumn("col2", TEXT);
-    testEval(schema, "table1", "123,234", "select cast(col1 as float) as b, cast(col2 as float) as a from table1",
-        new String[]{"123.0", "234.0"});
-    testEval(schema, "table1", "123,234", "select col1::float, col2::float from table1",
-        new String[]{"123.0", "234.0"});
-    testEval(schema, "table1", "1980-04-01 01:50:01,234", "select col1::timestamp as t1, col2::float from table1 " +
-        "where t1 = '1980-04-01 01:50:01'::timestamp",
-        new String[]{"1980-04-01 01:50:01", "234.0"});
+    TimeZone originTimeZone = TajoConf.setCurrentTimeZone(TimeZone.getTimeZone("GMT-6"));
+    try {
+      Schema schema = new Schema();
+      schema.addColumn("col1", TEXT);
+      schema.addColumn("col2", TEXT);
+      testEval(schema, "table1", "123,234", "select cast(col1 as float) as b, cast(col2 as float) as a from table1",
+          new String[]{"123.0", "234.0"});
+      testEval(schema, "table1", "123,234", "select col1::float, col2::float from table1",
+          new String[]{"123.0", "234.0"});
 
-    testSimpleEval("select '1980-04-01 01:50:01'::timestamp;", new String [] {"1980-04-01 01:50:01"});
-    testSimpleEval("select '1980-04-01 01:50:01'::timestamp::text", new String [] {"1980-04-01 01:50:01"});
+      TimestampDatum timestamp = DatumFactory.createTimestamp("1980-04-01 01:50:01" +
+          DateTimeUtil.getTimeZoneDisplayTime(TajoConf.getCurrentTimeZone()));
 
-    testSimpleEval("select (cast ('99999'::int8 as text))::int4 + 1", new String [] {"100000"});
+      testEval(schema, "table1", "1980-04-01 01:50:01,234", "select col1::timestamp as t1, col2::float from table1 " +
+              "where t1 = '1980-04-01 01:50:01'::timestamp",
+          new String[]{timestamp.asChars(TajoConf.getCurrentTimeZone(), true), "234.0"}
+      );
+
+      testSimpleEval("select '1980-04-01 01:50:01'::timestamp;", new String[]{timestamp.asChars(TajoConf.getCurrentTimeZone(), true)});
+      testSimpleEval("select '1980-04-01 01:50:01'::timestamp::text", new String[]{"1980-04-01 01:50:01"});
+
+      testSimpleEval("select (cast ('99999'::int8 as text))::int4 + 1", new String[]{"100000"});
+    } finally {
+      TajoConf.setCurrentTimeZone(originTimeZone);
+    }
   }
 
   @Test
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java b/tajo-core/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
index 65612f7..4fefe07 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/function/TestBuiltinFunctions.java
@@ -26,8 +26,6 @@
 
 import java.sql.ResultSet;
 
-import static org.junit.Assert.assertTrue;
-
 @Category(IntegrationTest.class)
 public class TestBuiltinFunctions extends QueryTestCaseBase {
 
@@ -91,14 +89,14 @@
     cleanupQuery(res);
   }
 
-  @Test
-  public void testRandom() throws Exception {
-    ResultSet res = executeQuery();
-    while(res.next()) {
-      assertTrue(res.getInt(2) >= 0 && res.getInt(2) < 3);
-    }
-    cleanupQuery(res);
-  }
+//  @Test
+//  public void testRandom() throws Exception {
+//    ResultSet res = executeQuery();
+//    while(res.next()) {
+//      assertTrue(res.getInt(2) >= 0 && res.getInt(2) < 3);
+//    }
+//    cleanupQuery(res);
+//  }
 
   @Test
   public void testSplitPart() throws Exception {
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/function/TestConditionalExpressions.java b/tajo-core/src/test/java/org/apache/tajo/engine/function/TestConditionalExpressions.java
index af86387..ece34e7 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/function/TestConditionalExpressions.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/function/TestConditionalExpressions.java
@@ -79,4 +79,44 @@
       //success
     }
   }
+
+  @Test
+  public void testCoalesceBoolean() throws Exception {
+    testSimpleEval("select coalesce(null, false);", new String[]{"f"});
+    testSimpleEval("select coalesce(null, null, true);", new String[]{"t"});
+    testSimpleEval("select coalesce(true, null, false);", new String[]{"t"});
+    testSimpleEval("select coalesce(null, true, false);", new String[]{"t"});
+ }
+
+  @Test
+  public void testCoalesceTimestamp() throws Exception {
+    testSimpleEval("select coalesce(null, timestamp '2014-01-01 00:00:00');",
+        new String[]{"2014-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select coalesce(null, null, timestamp '2014-01-01 00:00:00');",
+        new String[]{"2014-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select coalesce(timestamp '2014-01-01 00:00:00', null, timestamp '2014-01-02 00:00:00');",
+        new String[]{"2014-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select coalesce(null, timestamp '2014-01-01 00:00:00', timestamp '2014-02-01 00:00:00');",
+        new String[]{"2014-01-01 00:00:00" + getUserTimeZoneDisplay()});
+  }
+
+  @Test
+  public void testCoalesceTime() throws Exception {
+    testSimpleEval("select coalesce(null, time '12:00:00');",
+        new String[]{"12:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select coalesce(null, null, time '12:00:00');",
+        new String[]{"12:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select coalesce(time '12:00:00', null, time '13:00:00');",
+        new String[]{"12:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select coalesce(null, time '12:00:00', time '13:00:00');",
+        new String[]{"12:00:00" + getUserTimeZoneDisplay()});
+  }
+
+  @Test
+  public void testCoalesceDate() throws Exception {
+    testSimpleEval("select coalesce(null, date '2014-01-01');", new String[]{"2014-01-01"});
+    testSimpleEval("select coalesce(null, null, date '2014-01-01');", new String[]{"2014-01-01"});
+    testSimpleEval("select coalesce(date '2014-01-01', null, date '2014-02-01');", new String[]{"2014-01-01"});
+    testSimpleEval("select coalesce(null, date '2014-01-01', date '2014-02-01');", new String[]{"2014-01-01"});
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/function/TestDateTimeFunctions.java b/tajo-core/src/test/java/org/apache/tajo/engine/function/TestDateTimeFunctions.java
index 507ef61..ab8772b 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/function/TestDateTimeFunctions.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/function/TestDateTimeFunctions.java
@@ -20,25 +20,80 @@
 
 
 import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.datum.DatumFactory;
 import org.apache.tajo.datum.TimestampDatum;
 import org.apache.tajo.engine.eval.ExprTestBase;
 import org.joda.time.DateTime;
 import org.junit.Test;
 
 import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.TimeZone;
 
 import static org.apache.tajo.common.TajoDataTypes.Type.*;
 
 public class TestDateTimeFunctions extends ExprTestBase {
-
   @Test
   public void testToTimestamp() throws IOException {
     long expectedTimestamp = System.currentTimeMillis();
-    DateTime expectedDateTime = new DateTime(expectedTimestamp);
+    TimestampDatum expected = DatumFactory.createTimestmpDatumWithUnixTime((int)(expectedTimestamp/ 1000));
 
     // (expectedTimestamp / 1000) means the translation from millis seconds to unix timestamp
     String q1 = String.format("select to_timestamp(%d);", (expectedTimestamp / 1000));
-    testSimpleEval(q1, new String[]{expectedDateTime.toString(TimestampDatum.DEFAULT_FORMAT_STRING)});
+    testSimpleEval(q1, new String[]{expected.toString(TajoConf.getCurrentTimeZone(), true)});
+
+    testSimpleEval("select to_timestamp('1997-12-30 11:40:50.345', 'YYYY-MM-DD HH24:MI:SS.MS');",
+        new String[]{"1997-12-30 11:40:50.345" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('1997-12-30 11:40:50.345 PM', 'YYYY-MM-DD HH24:MI:SS.MS PM');",
+        new String[]{"1997-12-30 23:40:50.345" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('0097/Feb/16 --> 08:14:30', 'YYYY/Mon/DD --> HH:MI:SS');",
+        new String[]{"0097-02-16 08:14:30" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('97/2/16 8:14:30', 'FMYYYY/FMMM/FMDD FMHH:FMMI:FMSS');",
+        new String[]{"0097-02-16 08:14:30" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('1985 September 12', 'YYYY FMMonth DD');",
+        new String[]{"1985-09-12 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('1,582nd VIII 21', 'Y,YYYth FMRM DD');",
+        new String[]{"1582-08-21 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('05121445482000', 'MMDDHH24MISSYYYY');",
+        new String[]{"2000-05-12 14:45:48" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('2000January09Sunday', 'YYYYFMMonthDDFMDay');",
+        new String[]{"2000-01-09 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('97/Feb/16', 'YY/Mon/DD');",
+        new String[]{"1997-02-16 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('19971116', 'YYYYMMDD');",
+        new String[]{"1997-11-16 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('20000-1116', 'YYYY-MMDD');",
+        new String[]{"20000-11-16 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('9-1116', 'Y-MMDD');",
+        new String[]{"2009-11-16 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('95-1116', 'YY-MMDD');",
+        new String[]{"1995-11-16 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('995-1116', 'YYY-MMDD');",
+        new String[]{"1995-11-16 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('2005426', 'YYYYWWD');",
+        new String[]{"2005-10-15 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('2005300', 'YYYYDDD');",
+        new String[]{"2005-10-27 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('2005527', 'IYYYIWID');",
+        new String[]{"2006-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('005527', 'IYYIWID');",
+        new String[]{"2006-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('05527', 'IYIWID');",
+        new String[]{"2006-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('5527', 'IIWID');",
+        new String[]{"2006-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('2005364', 'IYYYIDDD');",
+        new String[]{"2006-01-01 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('20050302', 'YYYYMMDD');",
+        new String[]{"2005-03-02 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('2005 03 02', 'YYYYMMDD');",
+        new String[]{"2005-03-02 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp(' 2005 03 02', 'YYYYMMDD');",
+        new String[]{"2005-03-02 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("select to_timestamp('  20050302', 'YYYYMMDD');",
+        new String[]{"2005-03-02 00:00:00" + getUserTimeZoneDisplay()});
   }
 
   @Test
@@ -56,14 +111,15 @@
     Schema schema2 = new Schema();
     schema2.addColumn("col1", TIMESTAMP);
     testEval(schema2, "table1",
-        "1970-01-17 10:09:37",
+        "1970-01-17 10:09:37" + getUserTimeZoneDisplay(),
         "select extract(year from col1), extract(month from col1), extract(day from col1) from table1;",
         new String[]{"1970.0", "1.0", "17.0"});
 
+    // Currently TIME type can be loaded with INT8 type.
     Schema schema3 = new Schema();
     schema3.addColumn("col1", TIME);
     testEval(schema3, "table1",
-        "10:09:37.5",
+        "10:09:37.5" + getUserTimeZoneDisplay(),
         "select extract(hour from col1), extract(minute from col1), extract(second from col1) from table1;",
         new String[]{"10.0", "9.0", "37.5"});
 
@@ -74,9 +130,9 @@
         "select extract(year from col1), extract(month from col1), extract(day from col1) from table1;",
         new String[]{"1970.0", "1.0", "17.0"});
 
-    testSimpleEval("select extract(century from TIMESTAMP '1970-01-17 10:09:37');", new String[]{"19.0"});
+    testSimpleEval("select extract(century from TIMESTAMP '1970-01-17 10:09:37');", new String[]{"20.0"});
 
-    testSimpleEval("select extract(century from DATE '1970-01-17');", new String[]{"19.0"});
+    testSimpleEval("select extract(century from DATE '1970-01-17');", new String[]{"20.0"});
 
     testSimpleEval("select extract(decade from TIMESTAMP '1970-01-17 10:09:37');", new String[]{"197.0"});
 
@@ -152,13 +208,13 @@
     Schema schema2 = new Schema();
     schema2.addColumn("col1", TIMESTAMP);
     testEval(schema2, "table1",
-        "1970-01-17 10:09:37",
+        "1970-01-17 22:09:37" + getUserTimeZoneDisplay(),
         "select date_part('year', col1), date_part('month', col1), date_part('day', col1) from table1;",
         new String[]{"1970.0", "1.0", "17.0"});
 
     Schema schema3 = new Schema();
     schema3.addColumn("col1", TIME);
-    testEval(schema3, "table1", "10:09:37.5",
+    testEval(schema3, "table1", "10:09:37.5" + getUserTimeZoneDisplay(),
         "select date_part('hour', col1), date_part('minute', col1), date_part('second', col1) from table1;",
         new String[]{"10.0", "9.0", "37.5"});
 
@@ -169,9 +225,9 @@
         "select date_part('year', col1), date_part('month', col1), date_part('day', col1) from table1;",
         new String[]{"1970.0", "1.0", "17.0"});
 
-    testSimpleEval("select date_part('century', TIMESTAMP '1970-01-17 10:09:37');", new String[]{"19.0"});
+    testSimpleEval("select date_part('century', TIMESTAMP '1970-01-17 10:09:37');", new String[]{"20.0"});
 
-    testSimpleEval("select date_part('century', DATE '1970-01-17');", new String[]{"19.0"});
+    testSimpleEval("select date_part('century', DATE '1970-01-17');", new String[]{"20.0"});
 
     testSimpleEval("select date_part('decade', TIMESTAMP '1970-01-17 10:09:37');", new String[]{"197.0"});
 
@@ -253,45 +309,121 @@
 
   @Test
   public void testToDate() throws IOException {
-    testSimpleEval("select to_date('2014-01-04', 'yyyy-MM-dd')", new String[]{"2014-01-04"});
-    testSimpleEval("select to_date('2014-01-04', 'yyyy-MM-dd') + interval '1 day'", new String[]{"2014-01-05 00:00:00"});
+    testSimpleEval("select to_date('2014-01-04', 'YYYY-MM-DD')", new String[]{"2014-01-04"});
+    testSimpleEval("select to_date('2014-01-04', 'YYYY-MM-DD') + interval '1 day'",
+        new String[]{"2014-01-05 00:00:00" + getUserTimeZoneDisplay()});
+
+    testSimpleEval("SELECT to_date('201404', 'yyyymm');", new String[]{"2014-04-01"});
   }
 
   @Test
   public void testAddMonths() throws Exception {
-    testSimpleEval("SELECT add_months(date '2013-12-17', 2::INT2);", new String[]{"2014-02-17 00:00:00"});
-    testSimpleEval("SELECT add_months(date '2013-12-17', 2::INT4);", new String[]{"2014-02-17 00:00:00"});
-    testSimpleEval("SELECT add_months(date '2013-12-17', 2::INT8);", new String[]{"2014-02-17 00:00:00"});
+    testSimpleEval("SELECT add_months(date '2013-12-17', 2::INT2);",
+        new String[]{"2014-02-17 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(date '2013-12-17', 2::INT4);",
+        new String[]{"2014-02-17 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(date '2013-12-17', 2::INT8);",
+        new String[]{"2014-02-17 00:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("SELECT add_months(timestamp '2013-12-17 12:10:20', 2::INT2);", new String[]{"2014-02-17 12:10:20"});
-    testSimpleEval("SELECT add_months(timestamp '2013-12-17 12:10:20', 2::INT4);", new String[]{"2014-02-17 12:10:20"});
-    testSimpleEval("SELECT add_months(timestamp '2013-12-17 12:10:20', 2::INT8);", new String[]{"2014-02-17 12:10:20"});
+    testSimpleEval("SELECT add_months(timestamp '2013-12-17 12:10:20', 2::INT2);",
+        new String[]{"2014-02-17 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(timestamp '2013-12-17 12:10:20', 2::INT4);",
+        new String[]{"2014-02-17 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(timestamp '2013-12-17 12:10:20', 2::INT8);",
+        new String[]{"2014-02-17 12:10:20" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("SELECT add_months(date '2014-02-05', -3::INT2);", new String[]{"2013-11-05 00:00:00"});
-    testSimpleEval("SELECT add_months(date '2014-02-05', -3::INT4);", new String[]{"2013-11-05 00:00:00"});
-    testSimpleEval("SELECT add_months(date '2014-02-05', -3::INT8);", new String[]{"2013-11-05 00:00:00"});
+    testSimpleEval("SELECT add_months(date '2014-02-05', -3::INT2);",
+        new String[]{"2013-11-05 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(date '2014-02-05', -3::INT4);",
+        new String[]{"2013-11-05 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(date '2014-02-05', -3::INT8);",
+        new String[]{"2013-11-05 00:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("SELECT add_months(timestamp '2014-02-05 12:10:20', -3::INT2);", new String[]{"2013-11-05 12:10:20"});
-    testSimpleEval("SELECT add_months(timestamp '2014-02-05 12:10:20', -3::INT4);", new String[]{"2013-11-05 12:10:20"});
-    testSimpleEval("SELECT add_months(timestamp '2014-02-05 12:10:20', -3::INT8);", new String[]{"2013-11-05 12:10:20"});
+    testSimpleEval("SELECT add_months(timestamp '2014-02-05 12:10:20', -3::INT2);",
+        new String[]{"2013-11-05 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(timestamp '2014-02-05 12:10:20', -3::INT4);",
+        new String[]{"2013-11-05 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_months(timestamp '2014-02-05 12:10:20', -3::INT8);",
+        new String[]{"2013-11-05 12:10:20" + getUserTimeZoneDisplay()});
   }
 
   @Test
   public void testAddDays() throws IOException {
-    testSimpleEval("SELECT add_days(date '2013-12-30', 5::INT2);", new String[]{"2014-01-04 00:00:00"});
-    testSimpleEval("SELECT add_days(date '2013-12-30', 5::INT4);", new String[]{"2014-01-04 00:00:00"});
-    testSimpleEval("SELECT add_days(date '2013-12-30', 5::INT8);", new String[]{"2014-01-04 00:00:00"});
+    testSimpleEval("SELECT add_days(date '2013-12-30', 5::INT2);",
+        new String[]{"2014-01-04 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(date '2013-12-30', 5::INT4);",
+        new String[]{"2014-01-04 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(date '2013-12-30', 5::INT8);",
+        new String[]{"2014-01-04 00:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("SELECT add_days(timestamp '2013-12-30 12:10:20', 5::INT2);", new String[]{"2014-01-04 12:10:20"});
-    testSimpleEval("SELECT add_days(timestamp '2013-12-30 12:10:20', 5::INT4);", new String[]{"2014-01-04 12:10:20"});
-    testSimpleEval("SELECT add_days(timestamp '2013-12-30 12:10:20', 5::INT8);", new String[]{"2014-01-04 12:10:20"});
+    testSimpleEval("SELECT add_days(timestamp '2013-12-30 12:10:20', 5::INT2);",
+        new String[]{"2014-01-04 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(timestamp '2013-12-30 12:10:20', 5::INT4);",
+        new String[]{"2014-01-04 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(timestamp '2013-12-30 12:10:20', 5::INT8);",
+        new String[]{"2014-01-04 12:10:20" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("SELECT add_days(date '2013-12-05', -7::INT2);", new String[]{"2013-11-28 00:00:00"});
-    testSimpleEval("SELECT add_days(date '2013-12-05', -7::INT4);", new String[]{"2013-11-28 00:00:00"});
-    testSimpleEval("SELECT add_days(date '2013-12-05', -7::INT8);", new String[]{"2013-11-28 00:00:00"});
+    testSimpleEval("SELECT add_days(date '2013-12-05', -7::INT2);",
+        new String[]{"2013-11-28 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(date '2013-12-05', -7::INT4);",
+        new String[]{"2013-11-28 00:00:00" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(date '2013-12-05', -7::INT8);",
+        new String[]{"2013-11-28 00:00:00" + getUserTimeZoneDisplay()});
 
-    testSimpleEval("SELECT add_days(timestamp '2013-12-05 12:10:20', -7::INT2);", new String[]{"2013-11-28 12:10:20"});
-    testSimpleEval("SELECT add_days(timestamp '2013-12-05 12:10:20', -7::INT4);", new String[]{"2013-11-28 12:10:20"});
-    testSimpleEval("SELECT add_days(timestamp '2013-12-05 12:10:20', -7::INT8);", new String[]{"2013-11-28 12:10:20"});
+    testSimpleEval("SELECT add_days(timestamp '2013-12-05 12:10:20', -7::INT2);",
+        new String[]{"2013-11-28 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(timestamp '2013-12-05 12:10:20', -7::INT4);",
+        new String[]{"2013-11-28 12:10:20" + getUserTimeZoneDisplay()});
+    testSimpleEval("SELECT add_days(timestamp '2013-12-05 12:10:20', -7::INT8);",
+        new String[]{"2013-11-28 12:10:20" + getUserTimeZoneDisplay()});
+  }
+
+  @Test
+  public void testDateTimeNow() throws IOException {
+    TimeZone originTimeZone = TajoConf.setCurrentTimeZone(TimeZone.getTimeZone("GMT-6"));
+    TimeZone systemOriginTimeZone = TimeZone.getDefault();
+    TimeZone.setDefault(TimeZone.getTimeZone("GMT-6"));
+    try {
+      Date expectedDate = new Date(System.currentTimeMillis());
+
+      testSimpleEval("select to_char(now(), 'yyyy-MM-dd');",
+          new String[]{dateFormat(expectedDate, "yyyy-MM-dd")});
+      testSimpleEval("select cast(extract(year from now()) as INT4);",
+          new String[]{dateFormat(expectedDate, "yyyy")});
+      testSimpleEval("select current_date();",
+          new String[]{dateFormat(expectedDate, "yyyy-MM-dd")});
+      testSimpleEval("select cast(extract(hour from current_time()) as INT4);",
+          new String[]{String.valueOf(Integer.parseInt(dateFormat(expectedDate, "HH")))});
+    } finally {
+      TajoConf.setCurrentTimeZone(originTimeZone);
+      TimeZone.setDefault(systemOriginTimeZone);
+    }
+  }
+
+  @Test
+  public void testTimeValueKeyword() throws IOException {
+    TimeZone originTimeZone = TajoConf.setCurrentTimeZone(TimeZone.getTimeZone("GMT-6"));
+    TimeZone systemOriginTimeZone = TimeZone.getDefault();
+    TimeZone.setDefault(TimeZone.getTimeZone("GMT-6"));
+    try {
+      Date expectedDate = new Date(System.currentTimeMillis());
+
+      testSimpleEval("select to_char(current_timestamp, 'yyyy-MM-dd');",
+          new String[]{dateFormat(expectedDate, "yyyy-MM-dd")});
+      testSimpleEval("select cast(extract(year from current_timestamp) as INT4);",
+          new String[]{dateFormat(expectedDate, "yyyy")});
+      testSimpleEval("select current_date;",
+          new String[]{dateFormat(expectedDate, "yyyy-MM-dd")});
+      testSimpleEval("select cast(extract(hour from current_time) as INT4);",
+          new String[]{String.valueOf(Integer.parseInt(dateFormat(expectedDate, "HH")))});
+    } finally {
+      TajoConf.setCurrentTimeZone(originTimeZone);
+      TimeZone.setDefault(systemOriginTimeZone);
+    }
+  }
+
+  private String dateFormat(Date date, String format) {
+    SimpleDateFormat df = new SimpleDateFormat(format);
+    return df.format(date);
   }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestHiveQLAnalyzer.java b/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestHiveQLAnalyzer.java
deleted file mode 100644
index ef21dc3..0000000
--- a/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestHiveQLAnalyzer.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.tajo.engine.parser;
-
-import com.google.common.base.Preconditions;
-import org.antlr.v4.runtime.ANTLRInputStream;
-import org.antlr.v4.runtime.CommonTokenStream;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.tajo.engine.parser.SQLParser.SqlContext;
-import org.apache.tajo.algebra.Expr;
-import org.apache.tajo.util.FileUtil;
-import org.junit.Test;
-
-import java.io.File;
-import java.io.IOException;
-
-import static org.junit.Assert.assertEquals;
-
-public class TestHiveQLAnalyzer {
-  private static final Log LOG = LogFactory.getLog(TestHiveQLAnalyzer.class.getName());
-  protected static final String BASE_PATH = "src/test/resources/queries/default/";
-
-  public static Expr parseQuery(String sql) {
-    ANTLRInputStream input = new ANTLRInputStream(sql);
-    SQLLexer lexer = new SQLLexer(input);
-    CommonTokenStream tokens = new CommonTokenStream(lexer);
-    SQLParser parser = new SQLParser(tokens);
-    parser.setBuildParseTree(true);
-    SQLAnalyzer visitor = new SQLAnalyzer();
-    SqlContext context = parser.sql();
-    return visitor.visitSql(context);
-  }
-
-  public static Expr parseHiveQL(String sql) {
-    HiveQLAnalyzer converter = new HiveQLAnalyzer();
-    return converter.parse(sql);
-  }
-
-  public static String getMethodName(int depth) {
-    final StackTraceElement[] ste = Thread.currentThread().getStackTrace();
-    return ste[depth].getMethodName();
-  }
-
-  public static void compareJsonResult(String sqlPath) throws IOException {
-      Preconditions.checkNotNull(sqlPath);
-      compareJsonResult(sqlPath, sqlPath);
-  }
-
-  public static void compareJsonResult(String sqlPath, String hiveqlPath) throws IOException {
-    Preconditions.checkNotNull(sqlPath, hiveqlPath);
-    String sql = FileUtil.readTextFile(new File(BASE_PATH + sqlPath));
-    String hiveQL = FileUtil.readTextFile(new File(BASE_PATH + hiveqlPath));
-    Expr expr = parseQuery(sql);
-    Expr hiveExpr = parseHiveQL(hiveQL);
-    assertEquals(expr.toJson(), hiveExpr.toJson());
-  }
-
-  @Test
-  public void testSelect1() throws IOException {
-    compareJsonResult("select_1.sql");
-  }
-
-  @Test
-  public void testSelect3() throws IOException {
-    compareJsonResult("select_3.sql");
-  }
-
-  @Test
-  public void testSelect4() throws IOException {
-    compareJsonResult("select_4.sql");
-  }
-
-  @Test
-  public void testSelect5() throws IOException {
-    compareJsonResult("select_5.sql");
-  }
-
-  @Test
-  public void testSelect7() throws IOException {
-    compareJsonResult("select_7.sql");
-  }
-
-  @Test
-  public void testSelect8() throws IOException {
-    compareJsonResult("select_8.sql");
-  }
-
-  @Test
-  public void testSelect9() throws IOException {
-    compareJsonResult("select_9.sql", "select_9.hiveql");
-  }
-
-  @Test
-  public void testSelect10() throws IOException {
-    compareJsonResult("select_10.sql", "select_10.hiveql");
-  }
-
-  //TODO: support beween condition
-  //@Test
-//  public void testSelect11() throws IOException {
-//    compareJsonResult("select_11.sql", "select_11.hiveql");
-//  }
-
-  @Test
-  public void testSelect12() throws IOException {
-    compareJsonResult("select_12.hiveql");
-  }
-
-  @Test
-  public void testSelect13() throws IOException {
-    compareJsonResult("select_13.sql", "select_13.hiveql");
-  }
-
-  @Test
-  public void testSelect14() throws IOException {
-    compareJsonResult("select_14.sql");
-  }
-
-  @Test
-  public void testSelect15() throws IOException {
-    compareJsonResult("select_15.sql", "select_15.hiveql");
-  }
-
-  @Test
-  public void testAsterisk1() throws IOException {
-    compareJsonResult("asterisk_1.sql");
-  }
-
-  @Test
-  public void testAsterisk2() throws IOException {
-    compareJsonResult("asterisk_2.sql");
-  }
-
-  @Test
-  public void testAsterisk3() throws IOException {
-    compareJsonResult("asterisk_3.sql");
-  }
-
-  @Test
-  public void testAsterisk4() throws IOException {
-    compareJsonResult("asterisk_4.sql");
-  }
-
-  @Test
-  public void testGroupby1() throws IOException {
-    compareJsonResult("groupby_1.sql");
-  }
-
-  @Test
-  public void testGroupby2() throws IOException {
-    compareJsonResult("groupby_2.sql");
-  }
-
-  @Test
-  public void testGroupby3() throws IOException {
-    compareJsonResult("groupby_3.sql");
-  }
-
-  @Test
-  public void testGroupby4() throws IOException {
-    compareJsonResult("groupby_4.sql");
-  }
-
-  @Test
-  public void testGroupby5() throws IOException {
-    compareJsonResult("groupby_5.sql");
-  }
-
-  @Test
-  public void testJoin2() throws IOException {
-    compareJsonResult("join_2.sql");
-  }
-
-  @Test
-  public void testJoin5() throws IOException {
-    compareJsonResult("join_5.sql");
-  }
-
-  @Test
-  public void testJoin6() throws IOException {
-    compareJsonResult("join_6.sql");
-  }
-
-  @Test
-  public void testJoin7() throws IOException {
-    compareJsonResult("join_7.sql");
-  }
-
-    //TODO: support complex join conditions
-    //@Test
-//  public void testJoin9() throws IOException {
-//    compareJsonResult("join_9.sql");
-//  }
-
-  @Test
-  public void testJoin12() throws IOException {
-    compareJsonResult("join_12.sql");
-  }
-
-  @Test
-  public void testJoin13() throws IOException {
-    compareJsonResult("join_13.sql");
-  }
-
-  @Test
-  public void testJoin14() throws IOException {
-    compareJsonResult("join_14.sql");
-  }
-
-  @Test
-  public void testJoin15() throws IOException {
-    compareJsonResult("join_15.sql", "join_15.hiveql");
-  }
-
-  @Test
-  public void testUnion1() throws IOException {
-    compareJsonResult("union_1.hiveql");
-  }
-
-  @Test
-  public void testInsert1() throws IOException {
-    compareJsonResult("insert_into_select_1.sql");
-  }
-
-  @Test
-  public void testInsert2() throws IOException {
-    compareJsonResult("insert_overwrite_into_select_2.sql", "insert_overwrite_into_select_2.hiveql");
-  }
-
-  @Test
-  public void testCreate1() throws IOException {
-    compareJsonResult("create_table_1.sql", "create_table_1.hiveql");
-  }
-
-  @Test
-  public void testCreate2() throws IOException {
-    compareJsonResult("create_table_2.sql", "create_table_2.hiveql");
-  }
-
-  @Test
-  public void testCreate11() throws IOException {
-    compareJsonResult("create_table_11.sql", "create_table_11.hiveql");
-  }
-
-  @Test
-  public void testCreate12() throws IOException {
-    compareJsonResult("create_table_12.sql", "create_table_12.hiveql");
-  }
-
-  @Test
-  public void testDrop() throws IOException {
-    compareJsonResult("drop_table.sql");
-  }
-}
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestSQLAnalyzer.java b/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestSQLAnalyzer.java
index f372ade..1dc54f6 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestSQLAnalyzer.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/parser/TestSQLAnalyzer.java
@@ -278,6 +278,15 @@
   }
 
   @Test
+  public void testCreateTableLike1() throws IOException {
+    String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_like_1.sql"));
+    Expr expr = parseQuery(sql);
+    assertEquals(OpType.CreateTable, expr.getType());
+    CreateTable createTable = (CreateTable) expr;
+    assertEquals("orig_name", createTable.getLikeParentTableName());
+  }
+
+  @Test
   public void testCreateTablePartitionByHash1() throws IOException {
     String sql = FileUtil.readTextFile(new File("src/test/resources/queries/default/create_table_partition_by_hash_1.sql"));
     Expr expr = parseQuery(sql);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/global/TestBroadcastJoinPlan.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/global/TestBroadcastJoinPlan.java
index b56ab47..0e223c4 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/global/TestBroadcastJoinPlan.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/global/TestBroadcastJoinPlan.java
@@ -348,8 +348,8 @@
   @Test
   public final void testBroadcastJoinSubquery() throws IOException, PlanningException {
     String query = "select count(*) from large1 " +
-        "join (select * from small1) a on large1_id = a.small1_id " +
-        "join small2 on large1_id = small2_id";
+        "join small2 on large1_id = small2_id " +
+        "join (select * from small1) a on large1_id = a.small1_id";
 
     LogicalPlanner planner = new LogicalPlanner(catalog);
     LogicalOptimizer optimizer = new LogicalOptimizer(conf);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBNLJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBNLJoinExec.java
index c79796b..d84796a 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBNLJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBNLJoinExec.java
@@ -34,6 +34,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -151,7 +152,7 @@
         Integer.MAX_VALUE);
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testBNLCrossJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -188,7 +189,8 @@
     enforcer.enforceJoinAlgorithm(joinNode.getPID(), JoinAlgorithm.BLOCK_NESTED_LOOP_JOIN);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testBNLInnerJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(),
         merged, workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBSTIndexExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBSTIndexExec.java
index a47bde3..bfc3522 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBSTIndexExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestBSTIndexExec.java
@@ -38,6 +38,7 @@
 import org.apache.tajo.engine.planner.PhysicalPlannerImpl;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.ScanNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.storage.fragment.FragmentConvertor;
@@ -168,7 +169,7 @@
     
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", meta, tablePath, Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testEqual");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), new FileFragment[] { frags[0] }, workDir);
     Expr expr = analyzer.parse(QUERY);
     LogicalPlan plan = planner.createPlan(LocalTajoTestingUtility.createDummySession(), expr);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestExternalSortExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestExternalSortExec.java
index ff3befe..1ce5b5b 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestExternalSortExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestExternalSortExec.java
@@ -33,6 +33,7 @@
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -120,7 +121,7 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
     Path workDir = new Path(testDir, TestExternalSortExec.class.getName());
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[0]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterHashJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterHashJoinExec.java
index b05688d..1a8a90e 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterHashJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterHashJoinExec.java
@@ -34,6 +34,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -269,7 +270,7 @@
     FileFragment[] merged = TUtil.concat(dep3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestFullOuterHashJoinExec0");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -306,7 +307,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestFullOuter_HashJoinExec1");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -342,7 +343,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, job3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestFullOuterHashJoinExec2");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -380,7 +381,8 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, phone3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestFullOuterHashJoinExec3");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(), merged,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(), merged,
         workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterMergeJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterMergeJoinExec.java
index 0386179..50e5906 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterMergeJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestFullOuterMergeJoinExec.java
@@ -35,6 +35,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -315,7 +316,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, dep3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testFullOuterMergeJoin0");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -352,7 +353,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testFullOuterMergeJoin1");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -389,7 +390,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testFullOuterMergeJoin2");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -427,7 +428,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, dep4Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testFullOuterMergeJoin3");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -468,7 +469,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, phone3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testFullOuterMergeJoin4");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -507,7 +508,7 @@
     FileFragment[] merged = TUtil.concat(phone3Frags,emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testFullOuterMergeJoin5");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashAntiJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashAntiJoinExec.java
index 1dbbcf0..794ca79 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashAntiJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashAntiJoinExec.java
@@ -32,6 +32,7 @@
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -153,7 +154,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testHashAntiJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[0]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashJoinExec.java
index 66222da..0e07536 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashJoinExec.java
@@ -35,6 +35,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -151,7 +152,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testHashInnerJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -194,7 +195,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testHashInnerJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashSemiJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashSemiJoinExec.java
index 4e5de98..835260f 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashSemiJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestHashSemiJoinExec.java
@@ -32,6 +32,7 @@
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -157,7 +158,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testHashSemiJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[0]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterHashJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterHashJoinExec.java
index de3d298..bb25875 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterHashJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterHashJoinExec.java
@@ -34,6 +34,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -271,7 +272,7 @@
     FileFragment[] merged = TUtil.concat(dep3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuterHashJoinExec0");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -302,7 +303,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_HashJoinExec1");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[1]);
@@ -343,7 +344,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, job3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_HashJoinExec2");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[2]);
@@ -385,7 +386,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, phone3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_HashJoinExec3");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[3]);
@@ -427,7 +428,7 @@
     FileFragment[] merged = TUtil.concat(phone3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_HashJoinExec4");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[4]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterNLJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterNLJoinExec.java
index e806e55..e935f57 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterNLJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestLeftOuterNLJoinExec.java
@@ -35,6 +35,7 @@
 import org.apache.tajo.engine.planner.PlanningException;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -260,7 +261,7 @@
     FileFragment[] merged = TUtil.concat(dep3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuterNLJoinExec0");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context =  analyzer.parse(QUERIES[0]);
@@ -302,7 +303,7 @@
 
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_NLJoinExec1");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context =  analyzer.parse(QUERIES[1]);
@@ -346,7 +347,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, job3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_NLJoinExec2");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context =  analyzer.parse(QUERIES[2]);
@@ -391,7 +392,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, phone3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_NLJoinExec3");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context =  analyzer.parse(QUERIES[3]);
@@ -435,7 +436,7 @@
     FileFragment[] merged = TUtil.concat(phone3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestLeftOuter_NLJoinExec4");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context =  analyzer.parse(QUERIES[4]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestMergeJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestMergeJoinExec.java
index 0e4fd9a..a4449fa 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestMergeJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestMergeJoinExec.java
@@ -36,6 +36,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -165,7 +166,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testMergeInnerJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestNLJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestNLJoinExec.java
index 120113f..019929d 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestNLJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestNLJoinExec.java
@@ -36,6 +36,7 @@
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.global.MasterPlan;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -151,7 +152,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testNLCrossJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[0]);
@@ -180,7 +181,7 @@
     FileFragment[] merged = TUtil.concat(empFrags, peopleFrags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testNLInnerJoin");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context =  analyzer.parse(QUERIES[1]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestPhysicalPlanner.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestPhysicalPlanner.java
index 50a0f44..a823d2b 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestPhysicalPlanner.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestPhysicalPlanner.java
@@ -44,6 +44,7 @@
 import org.apache.tajo.engine.planner.global.DataChannel;
 import org.apache.tajo.engine.planner.global.MasterPlan;
 import org.apache.tajo.engine.planner.logical.*;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.ipc.TajoWorkerProtocol;
 import org.apache.tajo.master.TajoMaster;
 import org.apache.tajo.master.session.Session;
@@ -200,7 +201,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employee.getMeta(),
         employee.getPath(), Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testCreateScanPlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[0]);
@@ -230,7 +232,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employee.getMeta(),
         employee.getPath(), Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testCreateScanWithFilterPlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[16]);
@@ -258,7 +261,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testGroupByPlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[7]);
@@ -289,7 +293,8 @@
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir(
         "target/test-data/testHashGroupByPlanWithALLField");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[15]);
@@ -317,7 +322,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testSortGroupByPlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[]{frags[0]}, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[7]);
@@ -376,7 +382,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testStorePlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     ctx.setOutputPath(new Path(workDir, "grouped1"));
@@ -417,7 +424,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testStorePlanWithRCFile");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     ctx.setOutputPath(new Path(workDir, "grouped2"));
@@ -457,7 +465,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testStorePlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     ctx.setOutputPath(new Path(workDir, "grouped3"));
@@ -483,7 +492,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testStorePlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(enforcer);
     ctx.setOutputPath(new Path(workDir, "grouped4"));
@@ -506,7 +516,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testStorePlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(enforcer);
     ctx.setOutputPath(new Path(workDir, "grouped5"));
@@ -522,7 +533,8 @@
         Integer.MAX_VALUE);
     QueryUnitAttemptId id = LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testPartitionedStorePlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, id, new FileFragment[] { frags[0] }, workDir);
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        id, new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[7]);
     LogicalPlan plan = planner.createPlan(session, context);
@@ -582,7 +594,8 @@
 
     Path workDir = CommonTestingUtil.getTestDir(
         "target/test-data/testPartitionedStorePlanWithEmptyGroupingSet");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, id, new FileFragment[] { frags[0] }, workDir);
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        id, new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[14]);
     LogicalPlan plan = planner.createPlan(session, expr);
@@ -635,7 +648,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testAggregationFunction");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[8]);
@@ -665,7 +679,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testCountFunction");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[9]);
@@ -692,7 +707,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.score", score.getMeta(), score.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testGroupByWithNullValue");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[11]);
@@ -716,7 +732,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testUnionPlan");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr  context = analyzer.parse(QUERIES[0]);
@@ -743,7 +760,8 @@
   @Test
   public final void testEvalExpr() throws IOException, PlanningException {
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testEvalExpr");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] { }, workDir);
     Expr expr = analyzer.parse(QUERIES[12]);
     LogicalPlan plan = planner.createPlan(session, expr);
@@ -779,7 +797,8 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testCreateIndex");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     Expr context = analyzer.parse(createIndexStmt[0]);
     LogicalPlan plan = planner.createPlan(session, context);
@@ -806,7 +825,8 @@
         Integer.MAX_VALUE);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testDuplicateEliminate");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(duplicateElimination[0]);
@@ -839,7 +859,8 @@
         employee.getPath(), Integer.MAX_VALUE);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testIndexedStoreExec");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(SORT_QUERY[0]);
@@ -940,7 +961,8 @@
 
     Enforcer enforcer = new Enforcer();
     enforcer.enforceSortAlgorithm(sortNode.getPID(), SortAlgorithm.IN_MEMORY_SORT);
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -961,7 +983,8 @@
 
     enforcer = new Enforcer();
     enforcer.enforceSortAlgorithm(sortNode.getPID(), SortAlgorithm.MERGE_SORT);
-    ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -988,7 +1011,8 @@
 
     Enforcer enforcer = new Enforcer();
     enforcer.enforceHashAggregation(groupByNode.getPID());
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -1009,7 +1033,8 @@
 
     enforcer = new Enforcer();
     enforcer.enforceSortAggregation(groupByNode.getPID(), null);
-    ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
+    ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(masterPlan),
         new FileFragment[] {frags[0]}, workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestProgressExternalSortExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestProgressExternalSortExec.java
index c60e05c..ed6cb4e 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestProgressExternalSortExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestProgressExternalSortExec.java
@@ -38,6 +38,7 @@
 import org.apache.tajo.engine.planner.PhysicalPlannerImpl;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -138,7 +139,7 @@
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employee.getMeta(), employee.getPath(),
         Integer.MAX_VALUE);
     Path workDir = new Path(testDir, TestExternalSortExec.class.getName());
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[0]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterHashJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterHashJoinExec.java
index a45e397..de90f70 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterHashJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterHashJoinExec.java
@@ -35,6 +35,7 @@
 import org.apache.tajo.engine.planner.PlanningException;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -236,7 +237,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, dep3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestRightOuter_HashJoinExec0");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[0]);
@@ -277,7 +278,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, job3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestRightOuter_HashJoinExec1");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[1]);
@@ -318,7 +319,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestRightOuter_HashJoinExec2");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(QUERIES[2]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterMergeJoinExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterMergeJoinExec.java
index 5b504b2..d971073 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterMergeJoinExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestRightOuterMergeJoinExec.java
@@ -34,6 +34,7 @@
 import org.apache.tajo.engine.planner.logical.JoinNode;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
 import org.apache.tajo.engine.planner.logical.NodeType;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.master.session.Session;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -314,7 +315,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, dep3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testRightOuterMergeJoin0");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -351,7 +352,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testRightOuterMergeJoin1");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -387,7 +388,7 @@
     FileFragment[] merged = TUtil.concat(job3Frags, emp3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testRightOuterMergeJoin2");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -423,7 +424,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, dep4Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testRightOuter_MergeJoin3");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -461,7 +462,7 @@
     FileFragment[] merged = TUtil.concat(emp3Frags, phone3Frags);
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testRightOuter_MergeJoin4");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
@@ -498,7 +499,7 @@
 
 
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/testRightOuterMergeJoin5");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf,
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
         LocalTajoTestingUtility.newQueryUnitAttemptId(), merged, workDir);
     ctx.setEnforcer(enforcer);
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestSortExec.java b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestSortExec.java
index 87262e8..da6fb34 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestSortExec.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/planner/physical/TestSortExec.java
@@ -34,6 +34,7 @@
 import org.apache.tajo.engine.planner.*;
 import org.apache.tajo.engine.planner.enforce.Enforcer;
 import org.apache.tajo.engine.planner.logical.LogicalNode;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -109,7 +110,8 @@
   public final void testNext() throws IOException, PlanningException {
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employeeMeta, tablePath, Integer.MAX_VALUE);
     Path workDir = CommonTestingUtil.getTestDir("target/test-data/TestSortExec");
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility
         .newQueryUnitAttemptId(), new FileFragment[] { frags[0] }, workDir);
     ctx.setEnforcer(new Enforcer());
     Expr context = analyzer.parse(QUERIES[0]);
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCaseByCases.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCaseByCases.java
index 9836a57..73df4e1 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCaseByCases.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCaseByCases.java
@@ -24,6 +24,8 @@
 
 import java.sql.ResultSet;
 
+import static org.junit.Assert.assertEquals;
+
 public class TestCaseByCases extends QueryTestCaseBase {
 
   public TestCaseByCases() {
@@ -67,4 +69,68 @@
     assertResultSet(res);
     cleanupQuery(res);
   }
+
+  @Test
+  public final void testTAJO880_1() throws Exception {
+    //TAJO-880: NULL in CASE clause occurs Exception.
+    ResultSet res = executeString(
+        "select case when l_returnflag != 'R' then l_orderkey else null end from lineitem"
+    );
+
+    String expected =
+        "?casewhen\n" +
+        "-------------------------------\n" +
+        "1\n" +
+        "1\n" +
+        "2\n" +
+        "null\n" +
+        "null\n";
+
+    assertEquals(expected, resultSetToString(res));
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testTAJO880_2() throws Exception {
+    //TAJO-880: NULL in CASE clause occurs Exception.
+    ResultSet res = executeString(
+        "select case when l_returnflag != 'R' then null else l_orderkey end from lineitem"
+    );
+
+    String expected =
+        "?casewhen\n" +
+        "-------------------------------\n" +
+        "null\n" +
+        "null\n" +
+        "null\n" +
+        "3\n" +
+        "3\n";
+
+    assertEquals(expected, resultSetToString(res));
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testTAJO880_3() throws Exception {
+    //TAJO-880: NULL in CASE clause occurs Exception.
+    ResultSet res = executeString(
+        "select case " +
+            "when l_orderkey = 1 then null " +
+            "when l_orderkey = 2 then l_orderkey " +
+            "else null end " +
+        "from lineitem"
+    );
+
+    String expected =
+        "?casewhen\n" +
+            "-------------------------------\n" +
+            "null\n" +
+            "null\n" +
+            "2\n" +
+            "null\n" +
+            "null\n";
+
+    assertEquals(expected, resultSetToString(res));
+    cleanupQuery(res);
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCreateTable.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCreateTable.java
index 2d289ba..3d90a79 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCreateTable.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestCreateTable.java
@@ -22,18 +22,18 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.tajo.IntegrationTest;
 import org.apache.tajo.QueryTestCaseBase;
-import org.apache.tajo.catalog.CatalogUtil;
-import org.apache.tajo.catalog.TableDesc;
+import org.apache.tajo.catalog.*;
+import org.apache.tajo.catalog.partition.PartitionMethodDesc;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.storage.StorageUtil;
+import org.apache.tajo.util.KeyValueSet;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.sql.ResultSet;
 import java.util.List;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 @Category(IntegrationTest.class)
 public class TestCreateTable extends QueryTestCaseBase {
@@ -53,6 +53,7 @@
   public final void testCreateTable1() throws Exception {
     List<String> createdNames = executeDDL("table1_ddl.sql", "table1", "table1");
     assertTableExists(createdNames.get(0));
+    executeString("DROP TABLE table1");
   }
 
   @Test
@@ -154,17 +155,19 @@
 
   @Test
   public final void testDropTableIfExists() throws Exception {
-    executeString("CREATE DATABASE D4;").close();
+    executeString("CREATE DATABASE D7;").close();
 
-    assertTableNotExists("d4.table1");
-    executeString("CREATE TABLE d4.table1 (age int);").close();
-    assertTableExists("d4.table1");
+    assertTableNotExists("d7.table1");
+    executeString("CREATE TABLE d7.table1 (age int);").close();
+    assertTableExists("d7.table1");
 
-    executeString("DROP TABLE d4.table1;").close();
-    assertTableNotExists("d4.table1");
+    executeString("DROP TABLE d7.table1;").close();
+    assertTableNotExists("d7.table1");
 
-    executeString("DROP TABLE IF EXISTS d4.table1");
-    assertTableNotExists("d4.table1");
+    executeString("DROP TABLE IF EXISTS d7.table1");
+    assertTableNotExists("d7.table1");
+
+    executeString("DROP DATABASE D7;").close();
   }
 
   @Test
@@ -357,4 +360,174 @@
     createdNames = executeDDL("table1_ddl.sql", "table1", "varchar");
     assertTableExists(createdNames.get(0));
   }
+
+  private boolean isClonedSchema(Schema origSchema, Schema newSchema)  {
+    // Check schema of tables
+    boolean schemaEqual =
+      (origSchema.size() == newSchema.size());
+    if(schemaEqual == false)  {
+      fail("Number of columns in schema not equal");
+      return false;
+    }
+
+    for(int col = 0; col < origSchema.size(); col++)  {
+      Column colA = origSchema.getColumn(col);
+      Column colB = newSchema.getColumn(col);
+      if(colA.getSimpleName().equals(colB.getSimpleName()) == false)  {
+        fail("Column names at index " + col + " do not match");
+        return false;
+      }
+      if(colA.getDataType().equals(colB.getDataType()) == false) {
+        fail("Column datatypes at index " + col + " do not match");
+        return false;
+      }
+    }
+    return true;
+  }
+
+  private boolean isClonedTable(String orignalTable, String newTable) throws Exception  {
+    assertTableExists(newTable);
+    TableDesc origTableDesc = client.getTableDesc(orignalTable);
+    TableDesc newTableDesc = client.getTableDesc(newTable);
+
+    if(isClonedSchema(origTableDesc.getSchema(), newTableDesc.getSchema()) == false) {
+      fail("Schema of input tables do not match");
+      return false;
+    }
+
+    // Check partition information
+    PartitionMethodDesc origPartMethod = origTableDesc.getPartitionMethod();
+    PartitionMethodDesc newPartMethod = newTableDesc.getPartitionMethod();
+    if(origPartMethod != null) {
+      if(newPartMethod == null)  {
+        fail("New table does not have partition info");
+        return false;
+      }
+      if(isClonedSchema(origPartMethod.getExpressionSchema(),
+                        newPartMethod.getExpressionSchema()) == false) {
+	fail("Partition columns of input tables do not match");
+        return false;
+      }
+
+      if(origPartMethod.getPartitionType().equals(newPartMethod.getPartitionType()) == false)  {
+        fail("Partition type of input tables do not match");
+        return false;
+      }
+    }
+
+    // Check external flag
+    if(origTableDesc.isExternal() != newTableDesc.isExternal()) {
+      fail("External table flag on input tables not equal");
+      return false;
+    }
+
+    if(origTableDesc.getMeta() != null) {
+      TableMeta origMeta = origTableDesc.getMeta();
+      TableMeta newMeta = newTableDesc.getMeta();
+      if(origMeta.getStoreType().equals(newMeta.getStoreType()) == false) {
+        fail("Store type of input tables not equal");
+        return false;
+      }
+
+      KeyValueSet origOptions = origMeta.getOptions();
+      KeyValueSet newOptions = newMeta.getOptions();
+      if(origOptions.equals(newOptions) == false)  {
+        fail("Meta options of input tables not equal");
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @Test
+  public final void testCreateTableLike1() throws Exception {
+    // Basic create table with default database
+    executeString("CREATE TABLE table1 (c1 int, c2 varchar);").close();
+    executeString("CREATE TABLE table2 LIKE table1");
+    String testMsg = "testCreateTableLike1: Basic create table with default db";
+    assertTrue(testMsg,isClonedTable("table1","table2"));
+    executeString("DROP TABLE table1");
+    executeString("DROP TABLE table2");
+
+    // Basic create table with database
+    executeString("CREATE DATABASE d1").close();
+    executeString("CREATE TABLE d1.table1 (c1 int, c2 varchar);").close();
+    executeString("CREATE TABLE d1.table2 LIKE d1.table1");
+    testMsg = "testCreateTableLike1: Basic create table with db test failed";
+    assertTrue(testMsg, isClonedTable("d1.table1","d1.table2"));
+    executeString("DROP TABLE d1.table1");
+    executeString("DROP TABLE d1.table2");
+
+    // Table with non-default store type
+    executeString("CREATE TABLE table1 (c1 int, c2 varchar) USING rcfile;").close();
+    executeString("CREATE TABLE table2 LIKE table1");
+    testMsg = "testCreateTableLike1: Table with non-default store type test failed";
+    assertTrue(testMsg, isClonedTable("table1","table2"));
+    executeString("DROP TABLE table1");
+    executeString("DROP TABLE table2");
+
+    // Table with non-default meta options
+    executeString("CREATE TABLE table1 (c1 int, c2 varchar) USING csv WITH ('csvfile.delimiter'='|','compression.codec'='org.apache.hadoop.io.compress.DeflateCodec');").close();
+    executeString("CREATE TABLE table2 LIKE table1");
+    testMsg = "testCreateTableLike1: Table with non-default meta options test failed";
+    assertTrue(testMsg, isClonedTable("table1","table2"));
+    executeString("DROP TABLE table1");
+    executeString("DROP TABLE table2");
+
+
+    // Table with partitions (default partition type)
+    executeString("CREATE TABLE table1 (c1 int, c2 varchar) PARTITION BY COLUMN (c3 int, c4 float, c5 text);").close();
+    executeString("CREATE TABLE table2 LIKE table1");
+    testMsg = "testCreateTableLike1: Table with partitions test failed";
+    assertTrue(testMsg, isClonedTable("table1","table2"));
+    executeString("DROP TABLE table1");
+    executeString("DROP TABLE table2");
+
+
+    // Table with external flag
+    // Use existing file as input for creating external table
+    String className = getClass().getSimpleName();
+    Path currentDatasetPath = new Path(datasetBasePath, className);
+    Path filePath = StorageUtil.concatPath(currentDatasetPath, "table1");
+    executeString("CREATE EXTERNAL TABLE table3 (c1 int, c2 varchar) USING rcfile LOCATION '" + filePath.toUri() + "'").close();
+    executeString("CREATE TABLE table2 LIKE table3");
+    testMsg = "testCreateTableLike1: Table with external table flag test failed";
+    assertTrue(testMsg, isClonedTable("table3","table2"));
+    executeString("DROP TABLE table3");
+    executeString("DROP TABLE table2");
+
+
+    // Table created using CTAS
+    executeString("CREATE TABLE table3 (c1 int, c2 varchar) PARTITION BY COLUMN (c3 int);").close();
+    executeString("CREATE TABLE table4 AS SELECT c1*c1, c2, c2 as c2_a,c3 from table3;").close();
+    executeString("CREATE TABLE table2 LIKE table4");
+    testMsg = "testCreateTableLike1: Table using CTAS test failed";
+    assertTrue(testMsg, isClonedTable("table4","table2"));
+    executeString("DROP TABLE table3");
+    executeString("DROP TABLE table4");
+    executeString("DROP TABLE table2");
+
+
+    /* Enable when view is supported
+    // View
+    executeString("CREATE TABLE table3 (c1 int, c2 varchar) PARTITION BY COLUMN (c3 int);").close();
+    executeString("CREATE VIEW table4(c1,c2,c3) AS SELECT c1*c1, c2, c2,c3 from table3;").close();
+    executeString("CREATE TABLE table2 LIKE table4");
+    testMsg = "testCreateTableLike1: Table using VIEW test failed";
+    assertTrue(testMsg, isClonedTable("table4","table2"));
+    executeString("DROP TABLE table3");
+    executeString("DROP TABLE table4");
+    executeString("DROP TABLE table2");
+    */
+
+    /*  Enable when partition type other than column is supported
+    // Table with partitions (range partition)
+    executeString("CREATE TABLE table1 (c1 int, c2 varchar) PARTITION BY RANGE (c1) (  PARTITION c1 VALUES LESS THAN (2),  PARTITION c1 VALUES LESS THAN (5),  PARTITION c1 VALUES LESS THAN (MAXVALUE) );").close();
+    executeString("CREATE TABLE table2 LIKE table1");
+    testMsg = "testCreateTableLike1: Table using non-default partition type failed";
+    assertTrue(testMsg, isClonedTable("table1","table2"));
+    executeString("DROP TABLE table1");
+    executeString("DROP TABLE table2");
+    */
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestGroupByQuery.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestGroupByQuery.java
index 91993a1..b5fd9f1 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestGroupByQuery.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestGroupByQuery.java
@@ -21,15 +21,22 @@
 import org.apache.tajo.IntegrationTest;
 import org.apache.tajo.QueryTestCaseBase;
 import org.apache.tajo.TajoConstants;
+import org.apache.tajo.TajoTestingCluster;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.storage.StorageConstants;
+import org.apache.tajo.util.KeyValueSet;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.sql.ResultSet;
 
+import static org.junit.Assert.assertEquals;
+
 @Category(IntegrationTest.class)
 public class TestGroupByQuery extends QueryTestCaseBase {
 
-  public TestGroupByQuery() {
+  public TestGroupByQuery() throws Exception {
     super(TajoConstants.DEFAULT_DATABASE_NAME);
   }
 
@@ -249,6 +256,38 @@
     res = executeFile("testDistinctAggregation_case7.sql");
     assertResultSet(res, "testDistinctAggregation_case7.result");
     res.close();
+
+    res = executeFile("testDistinctAggregation_case8.sql");
+    assertResultSet(res, "testDistinctAggregation_case8.result");
+    res.close();
+
+    // case9
+    KeyValueSet tableOptions = new KeyValueSet();
+    tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+    tableOptions.put(StorageConstants.CSVFILE_NULL, "\\\\N");
+
+    Schema schema = new Schema();
+    schema.addColumn("id", Type.TEXT);
+    schema.addColumn("code", Type.TEXT);
+    schema.addColumn("qty", Type.INT4);
+    schema.addColumn("qty2", Type.FLOAT8);
+    String[] data = new String[]{ "1|a|3|3.0", "1|a|4|4.0", "1|b|5|5.0", "2|a|1|6.0", "2|c|2|7.0", "2|d|3|8.0" };
+    TajoTestingCluster.createTable("table10", schema, tableOptions, data);
+
+    res = executeString("select id, count(distinct code), " +
+        "avg(qty), min(qty), max(qty), sum(qty), " +
+        "cast(avg(qty2) as INT8), cast(min(qty2) as INT8), cast(max(qty2) as INT8), cast(sum(qty2) as INT8) " +
+        "from table10 group by id");
+    String result = resultSetToString(res);
+
+    String expected = "id,?count,?avg_1,?min_2,?max_3,?sum_4,?cast_5,?cast_6,?cast_7,?cast_8\n" +
+        "-------------------------------\n" +
+        "1,2,4.0,0,5,12,4,0,5,12\n" +
+        "2,3,2.0,0,3,6,7,0,8,21\n";
+
+    assertEquals(expected, result);
+
+    executeString("DROP TABLE table10 PURGE").close();
   }
 
   @Test
@@ -300,4 +339,113 @@
     assertResultSet(res);
     cleanupQuery(res);
   }
+
+  @Test
+  public final void testGroupByWithNullData1() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testGroupByWithNullData2() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testGroupByWithNullData3() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testGroupByWithNullData4() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testGroupByWithNullData5() throws Exception {
+    executeString("CREATE TABLE table1 (age INT4, point FLOAT4);").close();
+    assertTableExists("table1");
+
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+
+    executeString("DROP TABLE table1");
+  }
+
+  @Test
+  public final void testGroupByWithNullData6() throws Exception {
+    executeString("CREATE TABLE table1 (age INT4, point FLOAT4);").close();
+    assertTableExists("table1");
+
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+
+    executeString("DROP TABLE table1");
+  }
+
+  @Test
+  public final void testGroupByWithNullData7() throws Exception {
+    executeString("CREATE TABLE table1 (age INT4, point FLOAT4);").close();
+    assertTableExists("table1");
+
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+
+    executeString("DROP TABLE table1");
+  }
+
+  @Test
+  public final void testGroupByWithNullData8() throws Exception {
+    executeString("CREATE TABLE table1 (age INT4, point FLOAT4);").close();
+    assertTableExists("table1");
+
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+
+    executeString("DROP TABLE table1");
+  }
+
+  @Test
+  public final void testGroupByWithNullData9() throws Exception {
+    executeString("CREATE TABLE table1 (age INT4, point FLOAT4);").close();
+    assertTableExists("table1");
+
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+
+    executeString("DROP TABLE table1");
+  }
+
+  @Test
+  public final void testGroupByWithNullData10() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testGroupByWithNullData11() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testGroupByWithNullData12() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestInsertQuery.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestInsertQuery.java
index 8453488..822bf51 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestInsertQuery.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestInsertQuery.java
@@ -149,6 +149,7 @@
     if (!testingCluster.isHCatalogStoreRunning()) {
       assertEquals(2, desc.getStats().getNumRows().intValue());
     }
+    executeString("DROP TABLE full_table_csv PURGE");
   }
 
   @Test
@@ -176,6 +177,7 @@
     if (!testingCluster.isHCatalogStoreRunning()) {
       assertEquals(2, orderKeys.getStats().getNumRows().intValue());
     }
+    executeString("DROP TABLE " + tableName + " PURGE");
   }
 
   @Test
@@ -193,6 +195,7 @@
     if (!testingCluster.isHCatalogStoreRunning()) {
       assertEquals(2, desc.getStats().getNumRows().intValue());
     }
+    executeString("DROP TABLE " + tableName + " PURGE");
   }
 
   @Test
@@ -228,6 +231,7 @@
       CompressionCodec codec = factory.getCodec(file.getPath());
       assertTrue(codec instanceof DeflateCodec);
     }
+    executeString("DROP TABLE " + tableName + " PURGE");
   }
 
   @Test
@@ -271,7 +275,7 @@
       res = executeString("select l_orderkey, l_partkey from full_table_parquet;");
       assertResultSet(res, "testInsertOverwriteWithAsteriskUsingParquet2.result");
 
-      executeString("DROP TABLE full_table_parquet_ddl PURGE");
+      executeString("DROP TABLE full_table_parquet PURGE");
     }
   }
 
@@ -290,6 +294,7 @@
     if (!testingCluster.isHCatalogStoreRunning()) {
       assertEquals(5, desc.getStats().getNumRows().intValue());
     }
+    executeString("DROP TABLE table1 PURGE");
   }
 
   @Test
@@ -319,6 +324,7 @@
     assertEquals("test", res.getString(3));
 
     res.close();
+    executeString("DROP TABLE " + tableName + " PURGE");
   }
 
   @Test
@@ -342,10 +348,12 @@
 
     assertEquals(3, res.getMetaData().getColumnCount());
     assertEquals(1, res.getInt(1));
-    assertEquals("", res.getString(2));
+    assertNull(res.getString(2));
+    assertEquals(0.0, res.getDouble(2), 10);
     assertEquals("test", res.getString(3));
 
     res.close();
+    executeString("DROP TABLE " + tableName + " PURGE");
   }
 
   @Test
@@ -383,4 +391,100 @@
       reader.close();
     }
   }
+
+  @Test
+  public final void testInsertOverwriteWithUnion() throws Exception {
+    ResultSet res = executeFile("table1_ddl.sql");
+    res.close();
+
+    CatalogService catalog = testingCluster.getMaster().getCatalog();
+    assertTrue(catalog.existsTable(getCurrentDatabase(), "table1"));
+
+    res = executeFile("testInsertOverwriteWithUnion.sql");
+    res.close();
+
+    String tableDatas = getTableFileContents("table1");
+
+    String expected = "1|1|17.0\n" +
+        "1|1|36.0\n" +
+        "2|2|38.0\n" +
+        "3|2|45.0\n" +
+        "3|3|49.0\n" +
+        "1|3|173665.47\n" +
+        "2|4|46929.18\n" +
+        "3|2|193846.25\n";
+
+    assertNotNull(tableDatas);
+    assertEquals(expected, tableDatas);
+
+    executeString("DROP TABLE table1 PURGE");
+  }
+
+  @Test
+  public final void testInsertOverwriteWithUnionDifferentAlias() throws Exception {
+    ResultSet res = executeFile("table1_ddl.sql");
+    res.close();
+
+    CatalogService catalog = testingCluster.getMaster().getCatalog();
+    assertTrue(catalog.existsTable(getCurrentDatabase(), "table1"));
+
+    res = executeFile("testInsertOverwriteWithUnionDifferentAlias.sql");
+    res.close();
+
+    String tableDatas = getTableFileContents("table1");
+
+    String expected = "1|1|17.0\n" +
+        "1|1|36.0\n" +
+        "2|2|38.0\n" +
+        "3|2|45.0\n" +
+        "3|3|49.0\n" +
+        "1|3|173665.47\n" +
+        "2|4|46929.18\n" +
+        "3|2|193846.25\n";
+
+    assertNotNull(tableDatas);
+    assertEquals(expected, tableDatas);
+
+    executeString("DROP TABLE table1 PURGE");
+  }
+
+  @Test
+  public final void testInsertOverwriteLocationWithUnion() throws Exception {
+    ResultSet res = executeFile("testInsertOverwriteLocationWithUnion.sql");
+    res.close();
+
+    String resultDatas= getTableFileContents(new Path("/tajo-data/testInsertOverwriteLocationWithUnion"));
+
+    String expected = "1|1|17.0\n" +
+        "1|1|36.0\n" +
+        "2|2|38.0\n" +
+        "3|2|45.0\n" +
+        "3|3|49.0\n" +
+        "1|3|173665.47\n" +
+        "2|4|46929.18\n" +
+        "3|2|193846.25\n";
+
+    assertNotNull(resultDatas);
+    assertEquals(expected, resultDatas);
+  }
+
+  @Test
+  public final void testInsertOverwriteLocationWithUnionDifferenceAlias() throws Exception {
+    ResultSet res = executeFile("testInsertOverwriteLocationWithUnionDifferenceAlias.sql");
+    res.close();
+
+    String resultDatas= getTableFileContents(new Path("/tajo-data/testInsertOverwriteLocationWithUnionDifferenceAlias"));
+
+    String expected = "1|1|17.0\n" +
+        "1|1|36.0\n" +
+        "2|2|38.0\n" +
+        "3|2|45.0\n" +
+        "3|3|49.0\n" +
+        "1|3|173665.47\n" +
+        "2|4|46929.18\n" +
+        "3|2|193846.25\n";
+
+    assertNotNull(resultDatas);
+    assertEquals(expected, resultDatas);
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinBroadcast.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinBroadcast.java
index f5f98a5..2a2b8c3 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinBroadcast.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinBroadcast.java
@@ -18,29 +18,39 @@
 
 package org.apache.tajo.engine.query;
 
-import org.apache.tajo.IntegrationTest;
-import org.apache.tajo.QueryId;
-import org.apache.tajo.QueryTestCaseBase;
-import org.apache.tajo.TajoConstants;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.Path;
+import org.apache.tajo.*;
+import org.apache.tajo.catalog.*;
 import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.datum.Datum;
+import org.apache.tajo.datum.Int4Datum;
+import org.apache.tajo.datum.TextDatum;
 import org.apache.tajo.engine.planner.global.ExecutionBlock;
 import org.apache.tajo.engine.planner.global.MasterPlan;
 import org.apache.tajo.engine.planner.logical.NodeType;
 import org.apache.tajo.jdbc.TajoResultSet;
 import org.apache.tajo.master.querymaster.QueryMasterTask;
+import org.apache.tajo.storage.Appender;
+import org.apache.tajo.storage.StorageManagerFactory;
+import org.apache.tajo.storage.Tuple;
+import org.apache.tajo.storage.VTuple;
+import org.apache.tajo.util.FileUtil;
 import org.apache.tajo.worker.TajoWorker;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.io.File;
 import java.sql.ResultSet;
 
-import static junit.framework.TestCase.assertEquals;
-import static junit.framework.TestCase.assertTrue;
-import static junit.framework.TestCase.fail;
+import static junit.framework.TestCase.*;
+import static org.apache.tajo.TajoConstants.DEFAULT_DATABASE_NAME;
 import static org.junit.Assert.assertNotNull;
 
 @Category(IntegrationTest.class)
 public class TestJoinBroadcast extends QueryTestCaseBase {
+  private static final Log LOG = LogFactory.getLog(TestJoinBroadcast.class);
   public TestJoinBroadcast() throws Exception {
     super(TajoConstants.DEFAULT_DATABASE_NAME);
     testingCluster.setAllTajoDaemonConfValue(TajoConf.ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO.varname, "true");
@@ -290,6 +300,9 @@
     ResultSet res = executeQuery();
     assertResultSet(res);
     cleanupQuery(res);
+    executeString("DROP TABLE JOINS.part_ PURGE");
+    executeString("DROP TABLE JOINS.supplier_ PURGE");
+    executeString("DROP DATABASE JOINS");
   }
 
   private MasterPlan getQueryPlan(QueryId queryId) {
@@ -372,4 +385,152 @@
     cleanupQuery(res);
   }
 
+  @Test
+  public final void testBroadcastPartitionTable() throws Exception {
+    // If all tables participate in the BROADCAST JOIN, there is some missing data.
+    executeDDL("customer_partition_ddl.sql", null);
+    ResultSet res = executeFile("insert_into_customer_partition.sql");
+    res.close();
+
+    createMultiFile("nation", 2, new TupleCreator() {
+      public Tuple createTuple(String[] columnDatas) {
+        return new VTuple(new Datum[]{
+            new Int4Datum(Integer.parseInt(columnDatas[0])),
+            new TextDatum(columnDatas[1]),
+            new Int4Datum(Integer.parseInt(columnDatas[2])),
+            new TextDatum(columnDatas[3])
+        });
+      }
+    });
+
+    createMultiFile("orders", 1, new TupleCreator() {
+      public Tuple createTuple(String[] columnDatas) {
+        return new VTuple(new Datum[]{
+            new Int4Datum(Integer.parseInt(columnDatas[0])),
+            new Int4Datum(Integer.parseInt(columnDatas[1])),
+            new TextDatum(columnDatas[2])
+        });
+      }
+    });
+
+    res = executeQuery();
+    assertResultSet(res);
+    res.close();
+
+    executeString("DROP TABLE customer_broad_parts PURGE");
+    executeString("DROP TABLE nation_multifile PURGE");
+    executeString("DROP TABLE orders_multifile PURGE");
+  }
+
+  @Test
+  public final void testBroadcastMultiColumnPartitionTable() throws Exception {
+    String tableName = CatalogUtil.normalizeIdentifier("testBroadcastMultiColumnPartitionTable");
+    ResultSet res = testBase.execute(
+        "create table " + tableName + " (col1 int4, col2 float4) partition by column(col3 text, col4 text) ");
+    res.close();
+    TajoTestingCluster cluster = testBase.getTestingCluster();
+    CatalogService catalog = cluster.getMaster().getCatalog();
+    assertTrue(catalog.existsTable(DEFAULT_DATABASE_NAME, tableName));
+
+    res = executeString("insert overwrite into " + tableName
+        + " select o_orderkey, o_totalprice, substr(o_orderdate, 6, 2), substr(o_orderdate, 1, 4) from orders");
+    res.close();
+
+    res = executeString(
+        "select distinct a.col3 from " + tableName + " as a " +
+            "left outer join lineitem_large b " +
+            "on a.col1 = b.l_orderkey"
+    );
+
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testCasebyCase1() throws Exception {
+    // Left outer join with a small table and a large partition table which not matched any partition path.
+    String tableName = CatalogUtil.normalizeIdentifier("largePartitionedTable");
+    testBase.execute(
+        "create table " + tableName + " (l_partkey int4, l_suppkey int4, l_linenumber int4, \n" +
+            "l_quantity float8, l_extendedprice float8, l_discount float8, l_tax float8, \n" +
+            "l_returnflag text, l_linestatus text, l_shipdate text, l_commitdate text, \n" +
+            "l_receiptdate text, l_shipinstruct text, l_shipmode text, l_comment text) \n" +
+            "partition by column(l_orderkey int4) ").close();
+    TajoTestingCluster cluster = testBase.getTestingCluster();
+    CatalogService catalog = cluster.getMaster().getCatalog();
+    assertTrue(catalog.existsTable(DEFAULT_DATABASE_NAME, tableName));
+
+    executeString("insert overwrite into " + tableName +
+        " select l_partkey, l_suppkey, l_linenumber, \n" +
+        " l_quantity, l_extendedprice, l_discount, l_tax, \n" +
+        " l_returnflag, l_linestatus, l_shipdate, l_commitdate, \n" +
+        " l_receiptdate, l_shipinstruct, l_shipmode, l_comment, l_orderkey from lineitem_large");
+
+    ResultSet res = executeString(
+        "select a.l_orderkey as key1, b.l_orderkey as key2 from lineitem as a " +
+            "left outer join " + tableName + " b " +
+            "on a.l_partkey = b.l_partkey and b.l_orderkey = 1000"
+    );
+
+    String expected = "key1,key2\n" +
+        "-------------------------------\n" +
+        "1,null\n" +
+        "1,null\n" +
+        "2,null\n" +
+        "3,null\n" +
+        "3,null\n";
+
+    try {
+      assertEquals(expected, resultSetToString(res));
+    } finally {
+      cleanupQuery(res);
+    }
+  }
+
+  static interface TupleCreator {
+    public Tuple createTuple(String[] columnDatas);
+  }
+
+  private void createMultiFile(String tableName, int numRowsEachFile, TupleCreator tupleCreator) throws Exception {
+    // make multiple small file
+    String multiTableName = tableName + "_multifile";
+    executeDDL(multiTableName + "_ddl.sql", null);
+
+    TableDesc table = client.getTableDesc(multiTableName);
+    assertNotNull(table);
+
+    TableMeta tableMeta = table.getMeta();
+    Schema schema = table.getLogicalSchema();
+
+    File file = new File("src/test/tpch/" + tableName + ".tbl");
+
+    if (!file.exists()) {
+      file = new File(System.getProperty("user.dir") + "/tajo-core/src/test/tpch/" + tableName + ".tbl");
+    }
+    String[] rows = FileUtil.readTextFile(file).split("\n");
+
+    assertTrue(rows.length > 0);
+
+    int fileIndex = 0;
+
+    Appender appender = null;
+    for (int i = 0; i < rows.length; i++) {
+      if (i % numRowsEachFile == 0) {
+        if (appender != null) {
+          appender.flush();
+          appender.close();
+        }
+        Path dataPath = new Path(table.getPath(), fileIndex + ".csv");
+        fileIndex++;
+        appender = StorageManagerFactory.getStorageManager(conf).getAppender(tableMeta, schema,
+            dataPath);
+        appender.init();
+      }
+      String[] columnDatas = rows[i].split("\\|");
+      Tuple tuple = tupleCreator.createTuple(columnDatas);
+      appender.addTuple(tuple);
+    }
+    appender.flush();
+    appender.close();
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinOnPartitionedTables.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinOnPartitionedTables.java
index 3e28f9e..34ead13 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinOnPartitionedTables.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinOnPartitionedTables.java
@@ -24,6 +24,8 @@
 
 import java.sql.ResultSet;
 
+import static org.junit.Assert.assertEquals;
+
 public class TestJoinOnPartitionedTables extends QueryTestCaseBase {
 
   public TestJoinOnPartitionedTables() {
@@ -32,7 +34,6 @@
 
   @Test
   public void testPartitionTableJoinSmallTable() throws Exception {
-
     executeDDL("customer_ddl.sql", null);
     ResultSet res = executeFile("insert_into_customer.sql");
     res.close();
@@ -52,5 +53,54 @@
     res = executeFile("testPartialFilterPushDown.sql");
     assertResultSet(res, "testPartialFilterPushDown.result");
     res.close();
+
+    res = executeFile("testPartialFilterPushDownOuterJoin.sql");
+    assertResultSet(res, "testPartialFilterPushDownOuterJoin.result");
+    res.close();
+
+    res = executeFile("testPartialFilterPushDownOuterJoin2.sql");
+    assertResultSet(res, "testPartialFilterPushDownOuterJoin2.result");
+    res.close();
+
+    executeString("DROP TABLE customer_parts PURGE").close();
+  }
+
+  @Test
+  public void testPartitionMultiplePartitionFilter() throws Exception {
+    executeDDL("customer_ddl.sql", null);
+    ResultSet res = executeFile("insert_into_customer.sql");
+    res.close();
+
+    res = executeString(
+        "select a.c_custkey, b.c_custkey from " +
+            "  (select c_custkey, c_nationkey from customer_parts where c_nationkey < 0 " +
+            "   union all " +
+            "   select c_custkey, c_nationkey from customer_parts where c_nationkey < 0 " +
+            ") a " +
+            "left outer join customer_parts b " +
+            "on a.c_custkey = b.c_custkey " +
+            "and a.c_nationkey > 0"
+    );
+
+    String expected =
+        "c_custkey,c_custkey\n" +
+            "-------------------------------\n";
+    assertEquals(expected, resultSetToString(res));
+    res.close();
+
+    executeString("DROP TABLE customer_parts PURGE").close();
+  }
+
+  @Test
+  public void testFilterPushDownPartitionColumnCaseWhen() throws Exception {
+    executeDDL("customer_ddl.sql", null);
+    ResultSet res = executeFile("insert_into_customer.sql");
+    res.close();
+
+    res = executeQuery();
+    assertResultSet(res);
+    res.close();
+
+    executeString("DROP TABLE customer_parts PURGE").close();
   }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinQuery.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinQuery.java
index 9bedc10..13a0b2b 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinQuery.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestJoinQuery.java
@@ -21,16 +21,94 @@
 import org.apache.tajo.IntegrationTest;
 import org.apache.tajo.QueryTestCaseBase;
 import org.apache.tajo.TajoConstants;
+import org.apache.tajo.TajoTestingCluster;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.conf.TajoConf.ConfVars;
+import org.apache.tajo.storage.StorageConstants;
+import org.apache.tajo.util.KeyValueSet;
+import org.junit.AfterClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
 
 import java.sql.ResultSet;
+import java.util.Arrays;
+import java.util.Collection;
+
+import static org.junit.Assert.assertEquals;
 
 @Category(IntegrationTest.class)
+@RunWith(Parameterized.class)
 public class TestJoinQuery extends QueryTestCaseBase {
 
-  public TestJoinQuery() {
+  public TestJoinQuery(String joinOption) {
     super(TajoConstants.DEFAULT_DATABASE_NAME);
+
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO.varname,
+        ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO.defaultVal);
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.DIST_QUERY_BROADCAST_JOIN_THRESHOLD.varname,
+        ConfVars.DIST_QUERY_BROADCAST_JOIN_THRESHOLD.defaultVal);
+
+    testingCluster.setAllTajoDaemonConfValue(
+        ConfVars.EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD.varname,
+        ConfVars.EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD.defaultVal);
+
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD.varname,
+        ConfVars.EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD.defaultVal);
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD.varname,
+        ConfVars.EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD.defaultVal);
+
+    if (joinOption.indexOf("NoBroadcast") >= 0) {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO.varname, "false");
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.DIST_QUERY_BROADCAST_JOIN_THRESHOLD.varname, "-1");
+    }
+
+    if (joinOption.indexOf("Hash") >= 0) {
+      testingCluster.setAllTajoDaemonConfValue(
+          ConfVars.EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD.varname, String.valueOf(256 * 1048576));
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD.varname,
+          String.valueOf(256 * 1048576));
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD.varname,
+          String.valueOf(256 * 1048576));
+    }
+    if (joinOption.indexOf("Sort") >= 0) {
+      testingCluster.setAllTajoDaemonConfValue(
+          ConfVars.EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD.varname, String.valueOf(1));
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD.varname,
+          String.valueOf(1));
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD.varname,
+          String.valueOf(1));
+    }
+  }
+
+  @Parameters
+  public static Collection<Object[]> generateParameters() {
+    return Arrays.asList(new Object[][]{
+        {"Hash_NoBroadcast"},
+        {"Sort_NoBroadcast"},
+        {"Hash"},
+        {"Sort"},
+    });
+  }
+
+  @AfterClass
+  public static void classTearDown() {
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO.varname,
+        ConfVars.DIST_QUERY_BROADCAST_JOIN_AUTO.defaultVal);
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.DIST_QUERY_BROADCAST_JOIN_THRESHOLD.varname,
+        ConfVars.DIST_QUERY_BROADCAST_JOIN_THRESHOLD.defaultVal);
+
+    testingCluster.setAllTajoDaemonConfValue(
+        ConfVars.EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD.varname,
+        ConfVars.EXECUTOR_INNER_JOIN_INMEMORY_HASH_THRESHOLD.defaultVal);
+
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD.varname,
+        ConfVars.EXECUTOR_OUTER_JOIN_INMEMORY_HASH_THRESHOLD.defaultVal);
+    testingCluster.setAllTajoDaemonConfValue(ConfVars.EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD.varname,
+        ConfVars.EXECUTOR_GROUPBY_INMEMORY_HASH_THRESHOLD.defaultVal);
   }
 
   @Test
@@ -205,9 +283,14 @@
   public void testOuterJoinAndCaseWhen1() throws Exception {
     executeDDL("oj_table1_ddl.sql", "table1");
     executeDDL("oj_table2_ddl.sql", "table2");
-    ResultSet res = executeQuery();
-    assertResultSet(res);
-    cleanupQuery(res);
+    try {
+      ResultSet res = executeQuery();
+      assertResultSet(res);
+      cleanupQuery(res);
+    } finally {
+      executeString("DROP TABLE table1");
+      executeString("DROP TABLE table2");
+    }
   }
 
   @Test
@@ -290,6 +373,13 @@
   }
 
   @Test
+  public final void testLeftOuterJoinWithEmptyTable5() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
   public final void testRightOuterJoinWithEmptyTable1() throws Exception {
     ResultSet res = executeQuery();
     assertResultSet(res);
@@ -297,6 +387,86 @@
   }
 
   @Test
+  public final void testLeftOuterJoinWithEmptySubquery1() throws Exception {
+    // Empty Null Supplying table
+    KeyValueSet tableOptions = new KeyValueSet();
+    tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+    tableOptions.put(StorageConstants.CSVFILE_NULL, "\\\\N");
+
+    Schema schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    String[] data = new String[]{ "1|table11-1", "2|table11-2", "3|table11-3", "4|table11-4", "5|table11-5" };
+    TajoTestingCluster.createTable("table11", schema, tableOptions, data, 2);
+
+    data = new String[]{ "1|table11-1", "2|table11-2" };
+    TajoTestingCluster.createTable("table12", schema, tableOptions, data, 2);
+
+    try {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.TESTCASE_MIN_TASK_NUM.varname, "2");
+
+      ResultSet res = executeString("select a.id, b.id from table11 a " +
+          "left outer join (" +
+          "select table12.id from table12 inner join lineitem on table12.id = lineitem.l_orderkey and table12.id > 10) b " +
+          "on a.id = b.id order by a.id");
+
+      String expected = "id,id\n" +
+          "-------------------------------\n" +
+          "1,null\n" +
+          "2,null\n" +
+          "3,null\n" +
+          "4,null\n" +
+          "5,null\n";
+
+      assertEquals(expected, resultSetToString(res));
+      cleanupQuery(res);
+    } finally {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.TESTCASE_MIN_TASK_NUM.varname,
+          ConfVars.TESTCASE_MIN_TASK_NUM.defaultVal);
+      executeString("DROP TABLE table11 PURGE").close();
+      executeString("DROP TABLE table12 PURGE").close();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinWithEmptySubquery2() throws Exception {
+    //Empty Preserved Row table
+    KeyValueSet tableOptions = new KeyValueSet();
+    tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+    tableOptions.put(StorageConstants.CSVFILE_NULL, "\\\\N");
+
+    Schema schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    String[] data = new String[]{ "1|table11-1", "2|table11-2", "3|table11-3", "4|table11-4", "5|table11-5" };
+    TajoTestingCluster.createTable("table11", schema, tableOptions, data, 2);
+
+    data = new String[]{ "1|table11-1", "2|table11-2" };
+    TajoTestingCluster.createTable("table12", schema, tableOptions, data, 2);
+
+    try {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.TESTCASE_MIN_TASK_NUM.varname, "2");
+
+      ResultSet res = executeString("select a.id, b.id from " +
+          "(select table12.id, table12.name, lineitem.l_shipdate " +
+          "from table12 inner join lineitem on table12.id = lineitem.l_orderkey and table12.id > 10) a " +
+          "left outer join table11 b " +
+          "on a.id = b.id");
+
+      String expected = "id,id\n" +
+          "-------------------------------\n";
+
+      assertEquals(expected, resultSetToString(res));
+      cleanupQuery(res);
+    } finally {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.TESTCASE_MIN_TASK_NUM.varname,
+          ConfVars.TESTCASE_MIN_TASK_NUM.defaultVal);
+      executeString("DROP TABLE table11 PURGE");
+      executeString("DROP TABLE table12 PURGE");
+    }
+  }
+  
+  @Test
   public final void testFullOuterJoinWithEmptyTable1() throws Exception {
     ResultSet res = executeQuery();
     assertResultSet(res);
@@ -321,6 +491,10 @@
     ResultSet res = executeQuery();
     assertResultSet(res);
     cleanupQuery(res);
+
+    executeString("DROP TABLE JOINS.part_ PURGE");
+    executeString("DROP TABLE JOINS.supplier_ PURGE");
+    executeString("DROP DATABASE JOINS");
   }
 
   @Test
@@ -360,5 +534,521 @@
     ResultSet res = executeJsonQuery();
     assertResultSet(res);
     cleanupQuery(res);
+
+    executeString("DROP TABLE JOINS.part_ PURGE");
+    executeString("DROP TABLE JOINS.supplier_ PURGE");
+    executeString("DROP DATABASE JOINS");
+  }
+
+  @Test
+  public final void testJoinAsterisk() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testLeftOuterJoinWithNull1() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testLeftOuterJoinWithNull2() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testLeftOuterJoinWithNull3() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase1() throws Exception {
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "left outer join table12 t2\n" +
+              "on t1.id = t2.id\n" +
+              "left outer join table13 t3\n" +
+              "on t1.id = t3.id and t2.id = t3.id");
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "1,table11-1,1,null\n" +
+              "2,table11-2,null,null\n" +
+              "3,table11-3,null,null\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase2() throws Exception {
+    // outer -> outer -> inner
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id, t4.id\n" +
+              "from table11 t1\n" +
+              "left outer join table12 t2\n" +
+              "on t1.id = t2.id\n" +
+              "left outer join table13 t3\n" +
+              "on t2.id = t3.id\n" +
+              "inner join table14 t4\n" +
+              "on t2.id = t4.id"
+      );
+
+      String expected =
+          "id,name,id,id,id\n" +
+              "-------------------------------\n" +
+              "1,table11-1,1,null,1\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase2_1() throws Exception {
+    // inner(on predication) -> outer(on predication) -> outer -> where
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id, t4.id\n" +
+              "from table11 t1\n" +
+              "inner join table14 t4\n" +
+              "on t1.id = t4.id and t4.id > 1\n" +
+              "left outer join table13 t3\n" +
+              "on t4.id = t3.id and t3.id = 2\n" +
+              "left outer join table12 t2\n" +
+              "on t1.id = t2.id \n" +
+              "where t1.id > 1"
+      );
+
+      String expected =
+          "id,name,id,id,id\n" +
+              "-------------------------------\n" +
+              "2,table11-2,null,2,2\n" +
+              "3,table11-3,null,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase3() throws Exception {
+    // https://cwiki.apache.org/confluence/display/Hive/OuterJoinBehavior
+    // Case J1: Join Predicate on Preserved Row Table
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "left outer join table12 t2 \n" +
+              "on t1.id = t2.id and (concat(t1.name, cast(t2.id as TEXT)) = 'table11-11' or concat(t1.name, cast(t2.id as TEXT)) = 'table11-33')\n" +
+              "left outer join table13 t3\n" +
+              "on t1.id = t3.id "
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "1,table11-1,1,null\n" +
+              "2,table11-2,null,2\n" +
+              "3,table11-3,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase4() throws Exception {
+    // https://cwiki.apache.org/confluence/display/Hive/OuterJoinBehavior
+    // Case J2: Join Predicate on Null Supplying Table
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "left outer join table12 t2\n" +
+              "on t1.id = t2.id and t2.id > 1 \n" +
+              "left outer join table13 t3\n" +
+              "on t1.id = t3.id"
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "1,table11-1,null,null\n" +
+              "2,table11-2,null,2\n" +
+              "3,table11-3,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase5() throws Exception {
+    // https://cwiki.apache.org/confluence/display/Hive/OuterJoinBehavior
+    // Case W1: Where Predicate on Preserved Row Table
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "left outer join table12 t2\n" +
+              "on t1.id = t2.id\n" +
+              "left outer join table13 t3\n" +
+              "on t1.id = t3.id\n" +
+              "where t1.name > 'table11-1'"
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "2,table11-2,null,2\n" +
+              "3,table11-3,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterJoinPredicationCaseByCase6() throws Exception {
+    // https://cwiki.apache.org/confluence/display/Hive/OuterJoinBehavior
+    // Case W2: Where Predicate on Null Supplying Table
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "left outer join table12 t2\n" +
+              "on t1.id = t2.id\n" +
+              "left outer join table13 t3\n" +
+              "on t1.id = t3.id\n" +
+              "where t3.id > 2"
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "3,table11-3,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testLeftOuterWithEmptyTable() throws Exception {
+    // https://cwiki.apache.org/confluence/display/Hive/OuterJoinBehavior
+    // Case W2: Where Predicate on Null Supplying Table
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id\n" +
+              "from table11 t1\n" +
+              "left outer join table15 t2\n" +
+              "on t1.id = t2.id"
+      );
+
+      String expected =
+          "id,name,id\n" +
+              "-------------------------------\n" +
+              "1,table11-1,null\n" +
+              "2,table11-2,null\n" +
+              "3,table11-3,null\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testRightOuterJoinPredicationCaseByCase1() throws Exception {
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "right outer join table12 t2\n" +
+              "on t1.id = t2.id\n" +
+              "right outer join table13 t3\n" +
+              "on t1.id = t3.id and t2.id = t3.id"
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "null,null,null,2\n" +
+              "null,null,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testRightOuterJoinPredicationCaseByCase2() throws Exception {
+    // inner -> right
+    // Notice: Join order should be preserved with origin order.
+    // JoinEdge: t1 -> t4, t3 -> t1,t4
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t3.id, t4.id\n" +
+              "from table11 t1\n" +
+              "inner join table14 t4\n" +
+              "on t1.id = t4.id and t4.id > 1\n" +
+              "right outer join table13 t3\n" +
+              "on t4.id = t3.id and t3.id = 2\n" +
+              "where t3.id > 1"
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "2,table11-2,2,2\n" +
+              "null,null,3,null\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testRightOuterJoinPredicationCaseByCase3() throws Exception {
+    createOuterJoinTestTable();
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t2.id, t3.id\n" +
+              "from table11 t1\n" +
+              "right outer join table12 t2 \n" +
+              "on t1.id = t2.id and (concat(t1.name, cast(t2.id as TEXT)) = 'table11-11' or concat(t1.name, cast(t2.id as TEXT)) = 'table11-33')\n" +
+              "right outer join table13 t3\n" +
+              "on t1.id = t3.id "
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "null,null,null,2\n" +
+              "null,null,null,3\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  @Test
+  public final void testFullOuterJoinPredicationCaseByCase1() throws Exception {
+    createOuterJoinTestTable();
+
+    try {
+      ResultSet res = executeString(
+          "select t1.id, t1.name, t3.id, t4.id\n" +
+              "from table11 t1\n" +
+              "full outer join table13 t3\n" +
+              "on t1.id = t3.id\n" +
+              "full outer join table14 t4\n" +
+              "on t3.id = t4.id \n" +
+              "order by t4.id"
+      );
+
+      String expected =
+          "id,name,id,id\n" +
+              "-------------------------------\n" +
+              "null,null,null,1\n" +
+              "2,table11-2,2,2\n" +
+              "3,table11-3,3,3\n" +
+              "null,null,null,4\n" +
+              "1,table11-1,null,null\n";
+
+      String result = resultSetToString(res);
+
+      assertEquals(expected, result);
+    } finally {
+      dropOuterJoinTestTable();
+    }
+  }
+
+  private void createOuterJoinTestTable() throws Exception {
+    KeyValueSet tableOptions = new KeyValueSet();
+    tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+    tableOptions.put(StorageConstants.CSVFILE_NULL, "\\\\N");
+
+    Schema schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    String[] data = new String[]{ "1|table11-1", "2|table11-2", "3|table11-3" };
+    TajoTestingCluster.createTable("table11", schema, tableOptions, data);
+
+    schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    data = new String[]{ "1|table12-1" };
+    TajoTestingCluster.createTable("table12", schema, tableOptions, data);
+
+    schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    data = new String[]{"2|table13-2", "3|table13-3" };
+    TajoTestingCluster.createTable("table13", schema, tableOptions, data);
+
+    schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    data = new String[]{"1|table14-1", "2|table14-2", "3|table14-3", "4|table14-4" };
+    TajoTestingCluster.createTable("table14", schema, tableOptions, data);
+
+    schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    data = new String[]{};
+    TajoTestingCluster.createTable("table15", schema, tableOptions, data);
+  }
+
+  private void dropOuterJoinTestTable() throws Exception {
+    executeString("DROP TABLE table11 PURGE;");
+    executeString("DROP TABLE table12 PURGE;");
+    executeString("DROP TABLE table13 PURGE;");
+    executeString("DROP TABLE table14 PURGE;");
+    executeString("DROP TABLE table15 PURGE;");
+  }
+
+  @Test
+  public void testDifferentTypesJoinCondition() throws Exception {
+    // select * from table20 t3 join table21 t4 on t3.id = t4.id;
+    executeDDL("table1_int8_ddl.sql", "table1", "table20");
+    executeDDL("table1_int4_ddl.sql", "table1", "table21");
+    try {
+      ResultSet res = executeQuery();
+      assertResultSet(res);
+      cleanupQuery(res);
+    } finally {
+      executeString("DROP TABLE table20");
+      executeString("DROP TABLE table21");
+    }
+  }
+
+  @Test
+  public void testComplexJoinCondition1() throws Exception {
+    // select n1.n_nationkey, n1.n_name, n2.n_name  from nation n1 join nation n2 on n1.n_name = upper(n2.n_name);
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testComplexJoinCondition2() throws Exception {
+    // select n1.n_nationkey, n1.n_name, upper(n2.n_name) name from nation n1 join nation n2
+    // on n1.n_name = upper(n2.n_name);
+
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testComplexJoinCondition3() throws Exception {
+    // select n1.n_nationkey, n1.n_name, n2.n_name from nation n1 join nation n2 on lower(n1.n_name) = lower(n2.n_name);
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testComplexJoinCondition4() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testComplexJoinCondition5() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testComplexJoinCondition6() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testComplexJoinCondition7() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testFullOuterJoinWithEmptyIntermediateData() throws Exception {
+    ResultSet res = executeString(
+        "select a.l_orderkey \n" +
+            "from (select * from lineitem where l_orderkey < 0) a\n" +
+            "full outer join (select * from lineitem where l_orderkey < 0) b\n" +
+            "on a.l_orderkey = b.l_orderkey"
+    );
+
+    try {
+      String expected =
+          "l_orderkey\n" +
+              "-------------------------------\n";
+
+      assertEquals(expected, resultSetToString(res));
+    } finally {
+      cleanupQuery(res);
+    }
   }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestNullValues.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestNullValues.java
index e288066..f41b0ab 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestNullValues.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestNullValues.java
@@ -20,14 +20,15 @@
 
 import org.apache.tajo.IntegrationTest;
 import org.apache.tajo.TajoTestingCluster;
-import org.apache.tajo.util.KeyValueSet;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.common.TajoDataTypes.Type;
 import org.apache.tajo.storage.StorageConstants;
+import org.apache.tajo.util.KeyValueSet;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.sql.ResultSet;
+import java.sql.SQLException;
 
 import static org.junit.Assert.*;
 
@@ -56,6 +57,7 @@
     ResultSet res = TajoTestingCluster
         .run(table, schemas, opts, new String[][]{data},
             "select * from nulltable1 where col3 is null");
+
     try {
       assertTrue(res.next());
       assertEquals(2, res.getInt(1));
@@ -159,4 +161,138 @@
       res.close();
     }
   }
+
+  @Test
+  public final void testResultSetNullSimpleQuery() throws Exception {
+    String tableName = "nulltable5";
+    ResultSet res = runNullTableQuery(tableName, "select col1, col2, col3, col4 from " + tableName);
+
+    try {
+      int numRows = 0;
+
+      String expected =
+              "null|a|1.0|true\n" +
+              "2|null|2.0|false\n" +
+              "3|c|null|true\n" +
+              "4|d|4.0|null";
+
+      String result = "";
+
+      String prefix = "";
+      while(res.next()) {
+        for (int i = 0; i < 4; i++) {
+          result += prefix + res.getObject(i + 1);
+          prefix = "|";
+        }
+        prefix = "\n";
+
+        assertResultSetNull(res, numRows, false, new int[]{1,2,3,4});
+        assertResultSetNull(res, numRows, true, new int[]{1,2,3,4});
+        numRows++;
+      }
+      assertEquals(4, numRows);
+      assertEquals(expected, result);
+    } finally {
+      res.close();
+    }
+  }
+
+  @Test
+  public final void testResultSetNull() throws Exception {
+    String tableName = "nulltable6";
+    String query = "select " +
+        "col1, coalesce(col1, 99999), " +
+        "col2, coalesce(col2, 'null_value'), " +
+        "col3, coalesce(col3, 99999.0)," +
+        "col4 " +
+        "from " + tableName;
+
+    ResultSet res = runNullTableQuery(tableName, query);
+
+    try {
+      int numRows = 0;
+      String expected =
+          "null|99999|a|a|1.0|1.0|true\n" +
+          "2|2|null|null_value|2.0|2.0|false\n" +
+          "3|3|c|c|null|99999.0|true\n" +
+          "4|4|d|d|4.0|4.0|null";
+
+      String result = "";
+
+      String prefix = "";
+      while(res.next()) {
+        for (int i = 0; i < 7; i++) {
+          result += prefix + res.getObject(i + 1);
+          prefix = "|";
+        }
+        prefix = "\n";
+
+        assertResultSetNull(res, numRows, false, new int[]{1,3,5,7});
+        assertResultSetNull(res, numRows, true, new int[]{1,3,5,7});
+        numRows++;
+      }
+      assertEquals(4, numRows);
+      assertEquals(expected, result);
+    } finally {
+      res.close();
+    }
+  }
+
+  private ResultSet runNullTableQuery(String tableName, String query) throws Exception {
+    String [] table = new String[] {tableName};
+    Schema schema = new Schema();
+    schema.addColumn("col1", Type.INT4);
+    schema.addColumn("col2", Type.TEXT);
+    schema.addColumn("col3", Type.FLOAT4);
+    schema.addColumn("col4", Type.BOOLEAN);
+    Schema [] schemas = new Schema[] {schema};
+    String [] data = {
+        "\\N|a|1.0|t",
+        "2|\\N|2.0|f",
+        "3|c|\\N|t",
+        "4|d|4.0|\\N"
+    };
+    KeyValueSet tableOptions = new KeyValueSet();
+    tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+    tableOptions.put(StorageConstants.CSVFILE_NULL, "\\\\N");
+
+    ResultSet res = TajoTestingCluster
+        .run(table, schemas, tableOptions, new String[][]{data}, query);
+
+    return res;
+  }
+
+  private void assertResultSetNull(ResultSet res, int numRows, boolean useName, int[] nullIndex) throws SQLException {
+    if (numRows == 0) {
+      if (useName) {
+        assertEquals(0, res.getInt(res.getMetaData().getColumnName(nullIndex[numRows])));
+      } else {
+        assertEquals(0, res.getInt(nullIndex[numRows]));
+      }
+    }
+
+    if (numRows == 1) {
+      if (useName) {
+        assertNull(res.getString(res.getMetaData().getColumnName(nullIndex[numRows])));
+      } else {
+        assertNull(res.getString(nullIndex[numRows]));
+      };
+    }
+
+    if (numRows == 2) {
+      if (useName) {
+        assertEquals(0.0, res.getDouble(res.getMetaData().getColumnName(nullIndex[numRows])), 10);
+      } else {
+        assertEquals(0.0, res.getDouble(nullIndex[numRows]), 10);
+      }
+    }
+
+    if (numRows == 3) {
+      if (useName) {
+        assertEquals(false, res.getBoolean(res.getMetaData().getColumnName(nullIndex[numRows])));
+      } else {
+        assertEquals(false, res.getBoolean(nullIndex[numRows]));
+      }
+    }
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSelectQuery.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSelectQuery.java
index 815994b..639c3ef 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSelectQuery.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSelectQuery.java
@@ -21,17 +21,25 @@
 import org.apache.tajo.IntegrationTest;
 import org.apache.tajo.QueryTestCaseBase;
 import org.apache.tajo.TajoConstants;
+import org.apache.tajo.TajoProtos.QueryState;
 import org.apache.tajo.TajoTestingCluster;
 import org.apache.tajo.catalog.CatalogService;
+import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.TableDesc;
+import org.apache.tajo.client.QueryStatus;
+import org.apache.tajo.common.TajoDataTypes.Type;
+import org.apache.tajo.conf.TajoConf.ConfVars;
+import org.apache.tajo.engine.utils.test.ErrorInjectionRewriter;
+import org.apache.tajo.jdbc.TajoResultSet;
+import org.apache.tajo.storage.StorageConstants;
+import org.apache.tajo.util.KeyValueSet;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.sql.ResultSet;
 
 import static org.apache.tajo.TajoConstants.DEFAULT_DATABASE_NAME;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 @Category(IntegrationTest.class)
 public class TestSelectQuery extends QueryTestCaseBase {
@@ -303,6 +311,7 @@
     cleanupQuery(res);
   }
 
+  @Test
   public final void testDatabaseRef() throws Exception {
     if (!testingCluster.isHCatalogStoreRunning()) {
       executeString("CREATE DATABASE \"TestSelectQuery\"").close();
@@ -323,4 +332,115 @@
       executeString("DROP DATABASE \"TestSelectQuery\"").close();
     }
   }
+
+  @Test
+  public final void testSumIntOverflow() throws Exception {
+    // Test data's min value is 17 and number of rows is 5.
+    // 25264513 = 2147483647/17/5
+    // result is 116,848,374,845 ==> int overflow
+    // select sum(cast(l_quantity * 25264513 as INT4)) from lineitem where l_quantity > 0;
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testSumFloatOverflow() throws Exception {
+    // Test data's min value is 21168.23 and number of rows is 5.
+    // 3.21506374375027E33 = 3.40282346638529E38/21168/ 5
+    // result is 6.838452478692677E38 ==> float4 overflow
+    // select sum(cast(L_EXTENDEDPRICE * 3.21506374375027E33 as FLOAT4)) from lineitem where l_quantity > 0;
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testQueryMasterTaskInitError() throws Exception {
+    // In this testcase we can check that a TajoClient receives QueryMasterTask's init error message.
+    testingCluster.setAllWorkersConfValue("tajo.plan.rewriter.classes",
+        ErrorInjectionRewriter.class.getCanonicalName());
+
+    try {
+      // If client can't receive error status, thread runs forever.
+      Thread t = new Thread() {
+        public void run() {
+          try {
+            TajoResultSet res = (TajoResultSet) client.executeQueryAndGetResult("select l_orderkey from lineitem");
+            QueryStatus status = client.getQueryStatus(res.getQueryId());
+            assertEquals(QueryState.QUERY_ERROR, status.getState());
+            assertEquals(NullPointerException.class.getName(), status.getErrorMessage());
+            cleanupQuery(res);
+          } catch (Exception e) {
+            fail(e.getMessage());
+          }
+        }
+      };
+
+      t.start();
+
+      for (int i = 0; i < 10; i++) {
+        Thread.sleep(1 * 1000);
+        if (!t.isAlive()) {
+          break;
+        }
+      }
+
+      // If query runs more than 10 secs, test is fail.
+      assertFalse(t.isAlive());
+    } finally {
+      testingCluster.setAllWorkersConfValue("tajo.plan.rewriter.classes", "");
+    }
+  }
+
+  @Test
+  public final void testNowInMultipleTasks() throws Exception {
+    KeyValueSet tableOptions = new KeyValueSet();
+    tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+    tableOptions.put(StorageConstants.CSVFILE_NULL, "\\\\N");
+
+    Schema schema = new Schema();
+    schema.addColumn("id", Type.INT4);
+    schema.addColumn("name", Type.TEXT);
+    String[] data = new String[]{ "1|table11-1", "2|table11-2", "3|table11-3", "4|table11-4", "5|table11-5" };
+    TajoTestingCluster.createTable("table11", schema, tableOptions, data, 2);
+
+    try {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.TESTCASE_MIN_TASK_NUM.varname, "2");
+
+      ResultSet res = executeString("select concat(substr(to_char(now(),'yyyymmddhh24miss'), 1, 14), 'aaa'), sleep(1) from table11");
+
+      String nowValue = null;
+      int numRecords = 0;
+      while (res.next()) {
+        String currentNowValue = res.getString(1);
+        if (nowValue != null) {
+          assertTrue(nowValue.equals(currentNowValue));
+        }
+        nowValue = currentNowValue;
+        numRecords++;
+      }
+      assertEquals(5, numRecords);
+
+      res.close();
+
+      res = executeString("select concat(substr(to_char(current_timestamp,'yyyymmddhh24miss'), 1, 14), 'aaa'), sleep(1) from table11");
+
+      nowValue = null;
+      numRecords = 0;
+      while (res.next()) {
+        String currentNowValue = res.getString(1);
+        if (nowValue != null) {
+          assertTrue(nowValue.equals(currentNowValue));
+        }
+        nowValue = currentNowValue;
+        numRecords++;
+      }
+      assertEquals(5, numRecords);
+    } finally {
+      testingCluster.setAllTajoDaemonConfValue(ConfVars.TESTCASE_MIN_TASK_NUM.varname,
+          ConfVars.TESTCASE_MIN_TASK_NUM.defaultVal);
+      executeString("DROP TABLE table11 PURGE");
+    }
+  }
 }
\ No newline at end of file
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSortQuery.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSortQuery.java
index 2f5a755..0b1831c 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSortQuery.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestSortQuery.java
@@ -21,10 +21,12 @@
 import org.apache.tajo.IntegrationTest;
 import org.apache.tajo.QueryTestCaseBase;
 import org.apache.tajo.TajoConstants;
+import org.apache.tajo.conf.TajoConf;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.sql.ResultSet;
+import java.util.TimeZone;
 
 @Category(IntegrationTest.class)
 public class TestSortQuery extends QueryTestCaseBase {
@@ -117,13 +119,21 @@
   public final void testSortWithDate() throws Exception {
     // skip this test if catalog uses HCatalogStore.
     // It is because HCatalogStore does not support Time data type.
-    if (!testingCluster.isHCatalogStoreRunning()) {
-      // create external table table1 (col1 timestamp, col2 date, col3 time) ...
-      executeDDL("create_table_with_date_ddl.sql", "table1");
+    TimeZone oldTimeZone = TajoConf.setCurrentTimeZone(TimeZone.getTimeZone("UTC"));
+    TimeZone systemOldTimeZone = TimeZone.getDefault();
+    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+    try {
+      if (!testingCluster.isHCatalogStoreRunning()) {
+        // create external table table1 (col1 timestamp, col2 date, col3 time) ...
+        executeDDL("create_table_with_date_ddl.sql", "table1");
 
-      ResultSet res = executeQuery();
-      assertResultSet(res);
-      cleanupQuery(res);
+        ResultSet res = executeQuery();
+        assertResultSet(res);
+        cleanupQuery(res);
+      }
+    } finally {
+      TajoConf.setCurrentTimeZone(oldTimeZone);
+      TimeZone.setDefault(systemOldTimeZone);
     }
   }
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTablePartitions.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTablePartitions.java
index 0ec7de0..8c989b5 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTablePartitions.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTablePartitions.java
@@ -31,6 +31,7 @@
 import org.apache.tajo.catalog.CatalogService;
 import org.apache.tajo.catalog.CatalogUtil;
 import org.apache.tajo.catalog.TableDesc;
+import org.apache.tajo.ipc.ClientProtos;
 import org.junit.Test;
 
 import java.io.IOException;
@@ -59,7 +60,8 @@
     assertEquals(3, catalog.getTableDesc(DEFAULT_DATABASE_NAME, tableName).getLogicalSchema().size());
 
     res = testBase.execute(
-        "insert overwrite into " + tableName + " select l_orderkey, l_partkey, l_quantity from lineitem");
+        "insert overwrite into " + tableName + " select l_orderkey, l_partkey, " +
+            "l_quantity from lineitem");
     res.close();
   }
 
@@ -486,4 +488,83 @@
     assertFalse(res.next());
     res.close();
   }
+
+  @Test
+  public final void testColumnPartitionedTableWithSmallerExpressions1() throws Exception {
+    String tableName = CatalogUtil.normalizeIdentifier("testColumnPartitionedTableWithSmallerExpressions1");
+    ResultSet res = executeString(
+        "create table " + tableName + " (col1 int4, col2 int4, null_col int4) partition by column(key float8) ");
+    res.close();
+
+    assertTrue(catalog.existsTable(DEFAULT_DATABASE_NAME, tableName));
+
+    ClientProtos.SubmitQueryResponse response = client.executeQuery("insert overwrite into " + tableName
+        + " select l_orderkey, l_partkey from lineitem");
+
+    assertTrue(response.hasErrorMessage());
+    assertEquals(response.getErrorMessage(), "INSERT has smaller expressions than target columns\n");
+
+    res = executeFile("case14.sql");
+    assertResultSet(res, "case14.result");
+    res.close();
+  }
+
+  @Test
+  public final void testColumnPartitionedTableWithSmallerExpressions2() throws Exception {
+    String tableName = CatalogUtil.normalizeIdentifier("testColumnPartitionedTableWithSmallerExpressions2");
+    ResultSet res = executeString(
+        "create table " + tableName + " (col1 int4, col2 int4, null_col int4) partition by column(key float8) ");
+    res.close();
+
+    assertTrue(catalog.existsTable(DEFAULT_DATABASE_NAME, tableName));
+
+    ClientProtos.SubmitQueryResponse response = client.executeQuery("insert overwrite into " + tableName
+        + " select l_returnflag , l_orderkey, l_partkey from lineitem");
+
+    assertTrue(response.hasErrorMessage());
+    assertEquals(response.getErrorMessage(), "INSERT has smaller expressions than target columns\n");
+
+    res = executeFile("case15.sql");
+    assertResultSet(res, "case15.result");
+    res.close();
+  }
+
+
+  @Test
+  public final void testColumnPartitionedTableWithSmallerExpressions3() throws Exception {
+    ResultSet res = executeString("create database testinsertquery1;");
+    res.close();
+    res = executeString("create database testinsertquery2;");
+    res.close();
+
+    res = executeString("create table testinsertquery1.table1 " +
+        "(col1 int4, col2 int4, col3 float8)");
+    res.close();
+
+    res = executeString("create table testinsertquery2.table1 " +
+        "(col1 int4, col2 int4, col3 float8)");
+    res.close();
+
+    CatalogService catalog = testingCluster.getMaster().getCatalog();
+    assertTrue(catalog.existsTable("testinsertquery1", "table1"));
+    assertTrue(catalog.existsTable("testinsertquery2", "table1"));
+
+    res = executeString("insert overwrite into testinsertquery1.table1 " +
+        "select l_orderkey, l_partkey, l_quantity from default.lineitem;");
+    res.close();
+
+    TableDesc desc = catalog.getTableDesc("testinsertquery1", "table1");
+    if (!testingCluster.isHCatalogStoreRunning()) {
+      assertEquals(5, desc.getStats().getNumRows().intValue());
+    }
+
+    res = executeString("insert overwrite into testinsertquery2.table1 " +
+        "select col1, col2, col3 from testinsertquery1.table1;");
+    res.close();
+
+    desc = catalog.getTableDesc("testinsertquery2", "table1");
+    if (!testingCluster.isHCatalogStoreRunning()) {
+      assertEquals(5, desc.getStats().getNumRows().intValue());
+    }
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTruncateTable.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTruncateTable.java
new file mode 100644
index 0000000..455213b
--- /dev/null
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestTruncateTable.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.engine.query;
+
+import org.apache.tajo.IntegrationTest;
+import org.apache.tajo.QueryTestCaseBase;
+import org.apache.tajo.TajoConstants;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+import java.sql.ResultSet;
+
+import static org.junit.Assert.assertEquals;
+
+@Category(IntegrationTest.class)
+public class TestTruncateTable extends QueryTestCaseBase {
+  public TestTruncateTable() {
+    super(TajoConstants.DEFAULT_DATABASE_NAME);
+  }
+  @Test
+  public final void testTruncateTable() throws Exception {
+    try {
+      ResultSet res = executeFile("table1_ddl.sql");
+      res.close();
+      assertTableExists("truncate_table1");
+
+      res = executeString("select * from truncate_table1");
+      int numRows = 0;
+      while (res.next()) {
+        numRows++;
+      }
+      assertEquals(5, numRows);
+      res.close();
+
+      executeString("truncate table truncate_table1");
+      assertTableExists("truncate_table1");
+
+      res = executeString("select * from truncate_table1");
+      numRows = 0;
+      while (res.next()) {
+        numRows++;
+      }
+      assertEquals(0, numRows);
+      res.close();
+    } finally {
+      executeString("DROP TABLE truncate_table1 PURGE");
+    }
+  }
+
+
+  /*
+  Currently TajoClient can't throw exception when plan error.
+  The following test cast should be uncommented after https://issues.apache.org/jira/browse/TAJO-762
+
+  @Test
+  public final void testTruncateExternalTable() throws Exception {
+    try {
+      List<String> createdNames = executeDDL("table2_ddl.sql", "truncate_table2", "truncate_table2");
+      assertTableExists(createdNames.get(0));
+
+      ResultSet res = executeString("select * from truncate_table2");
+      int numRows = 0;
+      while (res.next()) {
+        numRows++;
+      }
+      assertEquals(4, numRows);
+      res.close();
+
+      executeString("truncate table truncate_table2");
+
+      fail("Can't truncate external table");
+    } catch (Exception e) {
+      // succeeded
+      assertTableExists("truncate_table2");
+
+      ResultSet res = executeString("select * from truncate_table2");
+      int numRows = 0;
+      while (res.next()) {
+        numRows++;
+      }
+      assertEquals(4, numRows);
+      res.close();
+    } finally {
+      executeString("DROP TABLE truncate_table2 PURGE");
+    }
+  }
+  */
+}
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestUnionQuery.java b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestUnionQuery.java
index a54f670..3d292a4 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/query/TestUnionQuery.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/query/TestUnionQuery.java
@@ -26,6 +26,8 @@
 
 import java.sql.ResultSet;
 
+import static org.junit.Assert.assertEquals;
+
 /*
  * Notations
  * - S - select
@@ -133,9 +135,319 @@
   }
 
   @Test
+  public final void testUnion11() throws Exception {
+    // test filter pushdown
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnion12() throws Exception {
+    // test filter pushdown
+    // with subquery in union query
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnion13() throws Exception {
+    // test filter pushdown
+    // with subquery in union query
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnion14() throws Exception {
+    // test filter pushdown
+    // with group by subquery in union query
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnion15() throws Exception {
+    // test filter pushdown
+    // with group by out of union query and join in union query
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnion16() throws Exception {
+    // test filter pushdown
+    // with count distinct out of union query and join in union query
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
   public final void testUnionWithSameAliasNames() throws Exception {
     ResultSet res = executeQuery();
     assertResultSet(res);
     cleanupQuery(res);
   }
+
+  @Test
+  public final void testUnionWithDifferentAlias() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnionWithDifferentAliasAndFunction() throws Exception {
+    ResultSet res = executeQuery();
+    assertResultSet(res);
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testLeftUnionWithJoin() throws Exception {
+    // https://issues.apache.org/jira/browse/TAJO-881
+    ResultSet res = executeString(
+        "select * from ( " +
+        "  select a.id, b.c_name, a.code from ( " +
+        "    select l_orderkey as id, 'lineitem' as code from lineitem " +
+        "    union all " +
+        "    select o_orderkey as id, 'order' as code from orders " +
+         "  ) a " +
+         "  join customer b on a.id = b.c_custkey" +
+        ") c order by id, code"
+    );
+
+    String expected =
+        "id,c_name,code\n" +
+            "-------------------------------\n" +
+            "1,Customer#000000001,lineitem\n" +
+            "1,Customer#000000001,lineitem\n" +
+            "1,Customer#000000001,order\n" +
+            "2,Customer#000000002,lineitem\n" +
+            "2,Customer#000000002,order\n" +
+            "3,Customer#000000003,lineitem\n" +
+            "3,Customer#000000003,lineitem\n" +
+            "3,Customer#000000003,order\n";
+
+    assertEquals(expected, resultSetToString(res));
+
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testRightUnionWithJoin() throws Exception {
+    // https://issues.apache.org/jira/browse/TAJO-881
+    ResultSet res = executeString(
+            "select * from ( " +
+            "  select a.id, b.c_name, a.code from customer b " +
+            "  join ( " +
+            "    select l_orderkey as id, 'lineitem' as code from lineitem " +
+            "    union all " +
+            "    select o_orderkey as id, 'order' as code from orders " +
+            "  ) a on a.id = b.c_custkey" +
+            ") c order by id, code"
+    );
+
+    String expected =
+        "id,c_name,code\n" +
+            "-------------------------------\n" +
+            "1,Customer#000000001,lineitem\n" +
+            "1,Customer#000000001,lineitem\n" +
+            "1,Customer#000000001,order\n" +
+            "2,Customer#000000002,lineitem\n" +
+            "2,Customer#000000002,order\n" +
+            "3,Customer#000000003,lineitem\n" +
+            "3,Customer#000000003,lineitem\n" +
+            "3,Customer#000000003,order\n";
+
+    assertEquals(expected, resultSetToString(res));
+
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testAllUnionWithJoin() throws Exception {
+    // https://issues.apache.org/jira/browse/TAJO-881
+    ResultSet res = executeString(
+        "select * from ( " +
+        "  select a.id, a.code as code, b.name, b.code as code2 from ( " +
+        "    select l_orderkey as id, 'lineitem' as code from lineitem " +
+        "    union all " +
+        "    select o_orderkey as id, 'order' as code from orders " +
+        "  ) a " +
+        "  join ( " +
+        "    select c_custkey as id, c_name as name, 'customer' as code from customer " +
+        "    union all " +
+        "    select p_partkey as id, p_name as name, 'part' as code from part " +
+        "  ) b on a.id = b.id" +
+        ") c order by id, code, code2"
+    );
+
+    String expected =
+        "id,code,name,code2\n" +
+            "-------------------------------\n" +
+            "1,lineitem,Customer#000000001,customer\n" +
+            "1,lineitem,Customer#000000001,customer\n" +
+            "1,lineitem,goldenrod lavender spring chocolate lace,part\n" +
+            "1,lineitem,goldenrod lavender spring chocolate lace,part\n" +
+            "1,order,Customer#000000001,customer\n" +
+            "1,order,goldenrod lavender spring chocolate lace,part\n" +
+            "2,lineitem,Customer#000000002,customer\n" +
+            "2,lineitem,blush thistle blue yellow saddle,part\n" +
+            "2,order,Customer#000000002,customer\n" +
+            "2,order,blush thistle blue yellow saddle,part\n" +
+            "3,lineitem,Customer#000000003,customer\n" +
+            "3,lineitem,Customer#000000003,customer\n" +
+            "3,lineitem,spring green yellow purple cornsilk,part\n" +
+            "3,lineitem,spring green yellow purple cornsilk,part\n" +
+            "3,order,Customer#000000003,customer\n" +
+            "3,order,spring green yellow purple cornsilk,part\n";
+
+    assertEquals(expected, resultSetToString(res));
+
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testUnionWithCrossJoin() throws Exception {
+    // https://issues.apache.org/jira/browse/TAJO-881
+    ResultSet res = executeString(
+        "select * from ( " +
+        "  select a.id, b.c_name, a.code from ( " +
+            "    select l_orderkey as id, 'lineitem' as code from lineitem " +
+            "    union all " +
+            "    select o_orderkey as id, 'order' as code from orders " +
+            "  ) a, " +
+            "  customer b " +
+        ") c order by id, code, c_name"
+    );
+
+    String expected =
+        "id,c_name,code\n" +
+            "-------------------------------\n" +
+            "1,Customer#000000001,lineitem\n" +
+            "1,Customer#000000001,lineitem\n" +
+            "1,Customer#000000002,lineitem\n" +
+            "1,Customer#000000002,lineitem\n" +
+            "1,Customer#000000003,lineitem\n" +
+            "1,Customer#000000003,lineitem\n" +
+            "1,Customer#000000004,lineitem\n" +
+            "1,Customer#000000004,lineitem\n" +
+            "1,Customer#000000005,lineitem\n" +
+            "1,Customer#000000005,lineitem\n" +
+            "1,Customer#000000001,order\n" +
+            "1,Customer#000000002,order\n" +
+            "1,Customer#000000003,order\n" +
+            "1,Customer#000000004,order\n" +
+            "1,Customer#000000005,order\n" +
+            "2,Customer#000000001,lineitem\n" +
+            "2,Customer#000000002,lineitem\n" +
+            "2,Customer#000000003,lineitem\n" +
+            "2,Customer#000000004,lineitem\n" +
+            "2,Customer#000000005,lineitem\n" +
+            "2,Customer#000000001,order\n" +
+            "2,Customer#000000002,order\n" +
+            "2,Customer#000000003,order\n" +
+            "2,Customer#000000004,order\n" +
+            "2,Customer#000000005,order\n" +
+            "3,Customer#000000001,lineitem\n" +
+            "3,Customer#000000001,lineitem\n" +
+            "3,Customer#000000002,lineitem\n" +
+            "3,Customer#000000002,lineitem\n" +
+            "3,Customer#000000003,lineitem\n" +
+            "3,Customer#000000003,lineitem\n" +
+            "3,Customer#000000004,lineitem\n" +
+            "3,Customer#000000004,lineitem\n" +
+            "3,Customer#000000005,lineitem\n" +
+            "3,Customer#000000005,lineitem\n" +
+            "3,Customer#000000001,order\n" +
+            "3,Customer#000000002,order\n" +
+            "3,Customer#000000003,order\n" +
+            "3,Customer#000000004,order\n" +
+            "3,Customer#000000005,order\n";
+
+    assertEquals(expected, resultSetToString(res));
+
+    cleanupQuery(res);
+  }
+
+  @Test
+  public final void testThreeJoinInUnion() throws Exception {
+    // https://issues.apache.org/jira/browse/TAJO-881
+    ResultSet res = executeString(
+      "select orders.o_orderkey \n" +
+          "from orders\n" +
+          "join lineitem on orders.o_orderkey = lineitem.l_orderkey\n" +
+          "join customer on orders.o_custkey =  customer.c_custkey\n" +
+          "union all \n" +
+          "select nation.n_nationkey from nation"
+    );
+    String expected =
+        "o_orderkey\n" +
+            "-------------------------------\n" +
+            "1\n" +
+            "1\n" +
+            "2\n" +
+            "3\n" +
+            "3\n" +
+            "0\n" +
+            "1\n" +
+            "2\n" +
+            "3\n" +
+            "4\n" +
+            "5\n" +
+            "6\n" +
+            "7\n" +
+            "8\n" +
+            "9\n" +
+            "10\n" +
+            "11\n" +
+            "12\n" +
+            "13\n" +
+            "14\n" +
+            "15\n" +
+            "16\n" +
+            "17\n" +
+            "18\n" +
+            "19\n" +
+            "20\n" +
+            "21\n" +
+            "22\n" +
+            "23\n" +
+            "24\n";
+
+    assertEquals(expected, resultSetToString(res));
+
+    cleanupQuery(res);
+  }
+
+  @Test
+  public void testUnionCaseOfFirstEmptyAndJoin() throws Exception {
+    ResultSet res = executeString(
+        "select a.c_custkey, b.c_custkey from " +
+            "  (select c_custkey, c_nationkey from customer where c_nationkey < 0 " +
+            "   union all " +
+            "   select c_custkey, c_nationkey from customer where c_nationkey > 0 " +
+            ") a " +
+            "left outer join customer b " +
+            "on a.c_custkey = b.c_custkey "
+    );
+
+    String expected =
+        "c_custkey,c_custkey\n" +
+            "-------------------------------\n" +
+            "1,1\n" +
+            "2,2\n" +
+            "3,3\n" +
+            "4,4\n" +
+            "5,5\n";
+
+    assertEquals(expected, resultSetToString(res));
+    res.close();
+  }
 }
\ No newline at end of file
diff --git a/tajo-core/src/test/java/org/apache/tajo/engine/util/TestTupleCache.java b/tajo-core/src/test/java/org/apache/tajo/engine/util/TestTupleCache.java
index 1cbbdf9..3d2f307 100644
--- a/tajo-core/src/test/java/org/apache/tajo/engine/util/TestTupleCache.java
+++ b/tajo-core/src/test/java/org/apache/tajo/engine/util/TestTupleCache.java
@@ -53,7 +53,7 @@
     ExecutionBlockId ebId = QueryIdFactory.newExecutionBlockId(
         QueryIdFactory.newQueryId(System.currentTimeMillis(), 0));
 
-    TupleCacheKey cacheKey = new TupleCacheKey(ebId.toString(), "TestTable");
+    TupleCacheKey cacheKey = new TupleCacheKey(ebId.toString(), "TestTable", "test");
     TupleCache tupleCache = TupleCache.getInstance();
 
     assertFalse(tupleCache.isBroadcastCacheReady(cacheKey));
diff --git a/tajo-core/src/test/java/org/apache/tajo/jdbc/TestResultSet.java b/tajo-core/src/test/java/org/apache/tajo/jdbc/TestResultSet.java
index 4477fa5..fdbca61 100644
--- a/tajo-core/src/test/java/org/apache/tajo/jdbc/TestResultSet.java
+++ b/tajo-core/src/test/java/org/apache/tajo/jdbc/TestResultSet.java
@@ -36,14 +36,16 @@
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.datum.DatumFactory;
 import org.apache.tajo.storage.*;
+import org.apache.tajo.util.KeyValueSet;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.io.IOException;
-import java.sql.ResultSetMetaData;
-import java.sql.SQLException;
+import java.sql.*;
+import java.util.Calendar;
+import java.util.TimeZone;
 
 import static org.junit.Assert.*;
 
@@ -124,4 +126,90 @@
     assertEquals(10000, i);
     assertTrue(rs.isAfterLast());
   }
+
+  @Test
+  public void testDateTimeType() throws Exception {
+    TimeZone tajoCurrentTimeZone = TajoConf.getCurrentTimeZone();
+    TajoConf.setCurrentTimeZone(TimeZone.getTimeZone("UTC"));
+
+    TimeZone systemCurrentTimeZone = TimeZone.getDefault();
+    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+
+    ResultSet res = null;
+    try {
+      String tableName = "datetimetable";
+      String query = "select col1, col2, col3 from " + tableName;
+
+      String [] table = new String[] {tableName};
+      Schema schema = new Schema();
+      schema.addColumn("col1", Type.DATE);
+      schema.addColumn("col2", Type.TIME);
+      schema.addColumn("col3", Type.TIMESTAMP);
+      Schema [] schemas = new Schema[] {schema};
+      String [] data = {
+          "2014-01-01|01:00:00|2014-01-01 01:00:00"
+      };
+      KeyValueSet tableOptions = new KeyValueSet();
+      tableOptions.put(StorageConstants.CSVFILE_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
+
+      res = TajoTestingCluster
+          .run(table, schemas, tableOptions, new String[][]{data}, query);
+
+      assertTrue(res.next());
+
+      Date date = res.getDate(1);
+      assertNotNull(date);
+      assertEquals(Date.valueOf("2014-01-01"), date);
+
+      date = res.getDate("col1");
+      assertNotNull(date);
+      assertEquals(Date.valueOf("2014-01-01"), date);
+
+      Time time = res.getTime(2);
+      assertNotNull(time);
+      assertEquals(Time.valueOf("01:00:00"), time);
+
+      time = res.getTime("col2");
+      assertNotNull(time);
+      assertEquals(Time.valueOf("01:00:00"), time);
+
+      Timestamp timestamp = res.getTimestamp(3);
+      assertNotNull(timestamp);
+      assertEquals(Timestamp.valueOf("2014-01-01 01:00:00"), timestamp);
+
+      timestamp = res.getTimestamp("col3");
+      assertNotNull(timestamp);
+      assertEquals(Timestamp.valueOf("2014-01-01 01:00:00"), timestamp);
+
+      // assert with timezone
+      Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT+9"));
+      date = res.getDate(1, cal);
+      assertNotNull(date);
+      assertEquals("2014-01-01", date.toString());
+
+      date = res.getDate("col1", cal);
+      assertNotNull(date);
+      assertEquals("2014-01-01", date.toString());
+
+      time = res.getTime(2, cal);
+      assertNotNull(time);
+      assertEquals("10:00:00", time.toString());
+
+      time = res.getTime("col2", cal);
+      assertNotNull(time);
+      assertEquals("10:00:00", time.toString());
+
+      timestamp = res.getTimestamp(3, cal);
+      assertNotNull(timestamp);
+      assertEquals("2014-01-01 10:00:00.0", timestamp.toString());
+
+      timestamp = res.getTimestamp("col3", cal);
+      assertNotNull(timestamp);
+      assertEquals("2014-01-01 10:00:00.0", timestamp.toString());
+    } finally {
+      TajoConf.setCurrentTimeZone(tajoCurrentTimeZone);
+      TimeZone.setDefault(systemCurrentTimeZone);
+      res.close();
+    }
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/master/querymaster/TestQueryUnitStatusUpdate.java b/tajo-core/src/test/java/org/apache/tajo/master/querymaster/TestQueryUnitStatusUpdate.java
index fa89dc3..c52b277 100644
--- a/tajo-core/src/test/java/org/apache/tajo/master/querymaster/TestQueryUnitStatusUpdate.java
+++ b/tajo-core/src/test/java/org/apache/tajo/master/querymaster/TestQueryUnitStatusUpdate.java
@@ -86,9 +86,13 @@
 
       res = executeQuery();
 
-      long[] expectedNumRows = new long[]{7, 2, 2, 2, 7, 2, 2, 2};
-      long[] expectedNumBytes = new long[]{63, 34, 34, 18, 109, 34, 34, 18};
-      long[] expectedReadBytes = new long[]{63, 0, 34, 0, 109, 0, 34, 0};
+      String actualResult = resultSetToString(res);
+      System.out.println(actualResult);
+
+      // first stage's num rows = (left: 1 , right: 2 (filtered)) * 5 (tasks)
+      long[] expectedNumRows = new long[]{15, 2, 2, 2, 7, 2, 2, 2};
+      long[] expectedNumBytes = new long[]{45, 34, 34, 18, 109, 34, 34, 18};
+      long[] expectedReadBytes = new long[]{45, 0, 34, 0, 109, 0, 34, 0};
 
       assertStatus(2, expectedNumRows, expectedNumBytes, expectedReadBytes);
     } finally {
@@ -109,6 +113,7 @@
 
     res = testBase.execute(
         "insert overwrite into " + tableName + " select l_orderkey, l_partkey, l_quantity from lineitem");
+
     res.close();
   }
 
diff --git a/tajo-core/src/test/java/org/apache/tajo/master/rm/TestTajoResourceManager.java b/tajo-core/src/test/java/org/apache/tajo/master/rm/TestTajoResourceManager.java
index 34deb29..09d674a 100644
--- a/tajo-core/src/test/java/org/apache/tajo/master/rm/TestTajoResourceManager.java
+++ b/tajo-core/src/test/java/org/apache/tajo/master/rm/TestTajoResourceManager.java
@@ -34,8 +34,7 @@
 import java.util.concurrent.TimeUnit;
 
 import static org.apache.tajo.ipc.TajoResourceTrackerProtocol.NodeHeartbeat;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.*;
 
 public class TestTajoResourceManager {
   private final PrimitiveProtos.BoolProto BOOL_TRUE = PrimitiveProtos.BoolProto.newBuilder().setValue(true).build();
@@ -375,4 +374,80 @@
       }
     }
   }
+
+  @Test
+  public void testDiskResourceWithStoppedQuery() throws Exception {
+    TajoWorkerResourceManager tajoWorkerResourceManager = null;
+
+    try {
+      tajoWorkerResourceManager = initResourceManager(false);
+
+      final float minDiskSlots = 1.0f;
+      final float maxDiskSlots = 2.0f;
+      int memoryMB = 256;
+
+      QueryId queryId = QueryIdFactory.newQueryId(queryIdTime, 3);
+
+      WorkerResourceAllocationRequest request = WorkerResourceAllocationRequest.newBuilder()
+          .setResourceRequestPriority(ResourceRequestPriority.DISK)
+          .setNumContainers(60)
+          .setQueryId(queryId.getProto())
+          .setMaxDiskSlotPerContainer(maxDiskSlots)
+          .setMinDiskSlotPerContainer(minDiskSlots)
+          .setMinMemoryMBPerContainer(memoryMB)
+          .setMaxMemoryMBPerContainer(memoryMB)
+          .build();
+
+      final CountDownLatch barrier = new CountDownLatch(1);
+      final List<YarnProtos.ContainerIdProto> containerIds = new ArrayList<YarnProtos.ContainerIdProto>();
+
+
+      RpcCallback<WorkerResourceAllocationResponse> callBack = new RpcCallback<WorkerResourceAllocationResponse>() {
+
+        @Override
+        public void run(WorkerResourceAllocationResponse response) {
+          TestTajoResourceManager.this.response = response;
+          barrier.countDown();
+        }
+      };
+
+      tajoWorkerResourceManager.getRMContext().getStoppedQueryIds().add(queryId);
+      tajoWorkerResourceManager.allocateWorkerResources(request, callBack);
+      assertFalse(barrier.await(3, TimeUnit.SECONDS));
+
+      assertNull(response);
+
+      // assert after callback
+      int totalUsedDisks = 0;
+      for(Worker worker: tajoWorkerResourceManager.getWorkers().values()) {
+        WorkerResource resource = worker.getResource();
+        //each worker allocated 3 container (2 disk slot = 2, 1 disk slot = 1)
+        assertEquals(5.0f, resource.getAvailableDiskSlots(), 0);
+        assertEquals(0, resource.getUsedDiskSlots(), 0);
+        assertEquals(0, resource.getUsedMemoryMB());
+
+        totalUsedDisks += resource.getUsedDiskSlots();
+      }
+
+      assertEquals(0, totalUsedDisks, 0);
+
+      for(YarnProtos.ContainerIdProto eachContainerId: containerIds) {
+        tajoWorkerResourceManager.releaseWorkerResource(eachContainerId);
+      }
+
+      for(Worker worker: tajoWorkerResourceManager.getWorkers().values()) {
+        WorkerResource resource = worker.getResource();
+        assertEquals(workerMemoryMB, resource.getAvailableMemoryMB());
+        assertEquals(0, resource.getUsedMemoryMB());
+
+        assertEquals(workerDiskSlots, resource.getAvailableDiskSlots(), 0);
+        assertEquals(0.0f, resource.getUsedDiskSlots(), 0);
+      }
+    } finally {
+      if (tajoWorkerResourceManager != null) {
+        tajoWorkerResourceManager.stop();
+      }
+    }
+  }
+
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/worker/TestFetcher.java b/tajo-core/src/test/java/org/apache/tajo/worker/TestFetcher.java
index 0c47320..c933294 100644
--- a/tajo-core/src/test/java/org/apache/tajo/worker/TestFetcher.java
+++ b/tajo-core/src/test/java/org/apache/tajo/worker/TestFetcher.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.tajo.TajoProtos;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.rpc.RpcChannelFactory;
 import org.apache.tajo.util.CommonTestingUtil;
@@ -91,4 +92,33 @@
     assertEquals(0.45f, Task.adjustFetchProcess(10, 1), 0);
     assertEquals(0.5f, Task.adjustFetchProcess(10, 0), 0);
   }
+
+  @Test
+  public void testStatus() throws Exception {
+    Random rnd = new Random();
+    FileWriter writer = new FileWriter(INPUT_DIR + "data");
+    String data;
+    for (int i = 0; i < 100; i++) {
+      data = ""+rnd.nextInt();
+      writer.write(data);
+    }
+    writer.flush();
+    writer.close();
+
+    DataRetriever ret = new DirectoryRetriever(INPUT_DIR);
+    final HttpDataServer server = new HttpDataServer(
+        NetUtils.createSocketAddr("127.0.0.1:0"), ret);
+    server.start();
+    InetSocketAddress addr = server.getBindAddress();
+
+    URI uri = URI.create("http://127.0.0.1:"+addr.getPort() + "/data");
+    ClientSocketChannelFactory channelFactory = RpcChannelFactory.createClientChannelFactory("Fetcher", 1);
+
+    final Fetcher fetcher = new Fetcher(uri, new File(OUTPUT_DIR + "data"), channelFactory);
+    assertEquals(TajoProtos.FetcherState.FETCH_INIT, fetcher.getState());
+
+    fetcher.get();
+    assertEquals(TajoProtos.FetcherState.FETCH_FINISHED, fetcher.getState());
+    server.stop();
+  }
 }
diff --git a/tajo-core/src/test/java/org/apache/tajo/worker/TestHistory.java b/tajo-core/src/test/java/org/apache/tajo/worker/TestHistory.java
new file mode 100644
index 0000000..15ead84
--- /dev/null
+++ b/tajo-core/src/test/java/org/apache/tajo/worker/TestHistory.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.tajo.worker;
+
+import com.google.protobuf.ServiceException;
+import org.apache.hadoop.service.Service;
+import org.apache.tajo.QueryUnitAttemptId;
+import org.apache.tajo.TajoProtos;
+import org.apache.tajo.TajoTestingCluster;
+import org.apache.tajo.TpchTestBase;
+import org.apache.tajo.client.TajoClient;
+import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.master.TajoMaster;
+import org.apache.tajo.master.querymaster.QueryInProgress;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+
+import static org.junit.Assert.*;
+
+public class TestHistory {
+  private TajoTestingCluster cluster;
+  private TajoMaster master;
+  private TajoConf conf;
+  private TajoClient client;
+
+  @Before
+  public void setUp() throws Exception {
+    cluster = TpchTestBase.getInstance().getTestingCluster();
+    master = cluster.getMaster();
+    conf = cluster.getConfiguration();
+    client = new TajoClient(conf);
+  }
+
+  @After
+  public void tearDown() {
+    client.close();
+  }
+
+
+  @Test
+  public final void testTaskRunnerHistory() throws IOException, ServiceException, InterruptedException {
+    int beforeFinishedQueriesCount = master.getContext().getQueryJobManager().getFinishedQueries().size();
+    client.executeQueryAndGetResult("select sleep(1) from lineitem");
+
+    Collection<QueryInProgress> finishedQueries = master.getContext().getQueryJobManager().getFinishedQueries();
+    assertTrue(finishedQueries.size() > beforeFinishedQueriesCount);
+
+    TajoWorker worker = cluster.getTajoWorkers().get(0);
+    TaskRunnerManager taskRunnerManager = worker.getWorkerContext().getTaskRunnerManager();
+    assertNotNull(taskRunnerManager);
+
+
+    Collection<TaskRunnerHistory> histories = taskRunnerManager.getExecutionBlockHistories();
+    assertTrue(histories.size() > 0);
+
+    TaskRunnerHistory history = histories.iterator().next();
+    assertEquals(Service.STATE.STOPPED, history.getState());
+
+    assertEquals(history, new TaskRunnerHistory(history.getProto()));
+  }
+
+  @Test
+  public final void testTaskHistory() throws IOException, ServiceException, InterruptedException {
+    int beforeFinishedQueriesCount = master.getContext().getQueryJobManager().getFinishedQueries().size();
+    client.executeQueryAndGetResult("select sleep(1) from lineitem");
+
+    Collection<QueryInProgress> finishedQueries = master.getContext().getQueryJobManager().getFinishedQueries();
+    assertTrue(finishedQueries.size() > beforeFinishedQueriesCount);
+
+    TajoWorker worker = cluster.getTajoWorkers().get(0);
+    TaskRunnerManager taskRunnerManager = worker.getWorkerContext().getTaskRunnerManager();
+    assertNotNull(taskRunnerManager);
+
+
+    Collection<TaskRunnerHistory> histories = taskRunnerManager.getExecutionBlockHistories();
+    assertTrue(histories.size() > 0);
+
+    TaskRunnerHistory history = histories.iterator().next();
+
+    assertTrue(history.size() > 0);
+    assertEquals(Service.STATE.STOPPED, history.getState());
+
+    Map.Entry<QueryUnitAttemptId, TaskHistory> entry =
+        history.getTaskHistoryMap().entrySet().iterator().next();
+
+    QueryUnitAttemptId queryUnitAttemptId = entry.getKey();
+    TaskHistory taskHistory = entry.getValue();
+
+    assertEquals(TajoProtos.TaskAttemptState.TA_SUCCEEDED, taskHistory.getState());
+    assertEquals(queryUnitAttemptId, taskHistory.getQueryUnitAttemptId());
+  }
+}
diff --git a/tajo-core/src/test/java/org/apache/tajo/worker/TestRangeRetrieverHandler.java b/tajo-core/src/test/java/org/apache/tajo/worker/TestRangeRetrieverHandler.java
index b904bb1..5375749 100644
--- a/tajo-core/src/test/java/org/apache/tajo/worker/TestRangeRetrieverHandler.java
+++ b/tajo-core/src/test/java/org/apache/tajo/worker/TestRangeRetrieverHandler.java
@@ -41,6 +41,7 @@
 import org.apache.tajo.engine.planner.physical.PhysicalExec;
 import org.apache.tajo.engine.planner.physical.ProjectionExec;
 import org.apache.tajo.engine.planner.physical.RangeShuffleFileWriteExec;
+import org.apache.tajo.engine.query.QueryContext;
 import org.apache.tajo.storage.*;
 import org.apache.tajo.storage.RowStoreUtil.RowStoreEncoder;
 import org.apache.tajo.storage.fragment.FileFragment;
@@ -142,7 +143,8 @@
 
     FileFragment[] frags = StorageManager.splitNG(conf, "default.employee", employeeMeta, tableDir, Integer.MAX_VALUE);
 
-    TaskAttemptContext ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(),
+    TaskAttemptContext ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(),
         new FileFragment[] {frags[0]}, testDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(SORT_QUERY[0]);
@@ -265,7 +267,8 @@
     FileFragment[] frags = sm.splitNG(conf, "default.employee", meta, tablePath, Integer.MAX_VALUE);
 
     TaskAttemptContext
-        ctx = new TaskAttemptContext(conf, LocalTajoTestingUtility.newQueryUnitAttemptId(),
+        ctx = new TaskAttemptContext(conf, new QueryContext(),
+        LocalTajoTestingUtility.newQueryUnitAttemptId(),
         new FileFragment[] {frags[0]}, testDir);
     ctx.setEnforcer(new Enforcer());
     Expr expr = analyzer.parse(SORT_QUERY[1]);
diff --git a/tajo-core/src/test/resources/dataset/TestTruncateTable/truncate_table2/table2.tbl b/tajo-core/src/test/resources/dataset/TestTruncateTable/truncate_table2/table2.tbl
new file mode 100644
index 0000000..f99db9c
--- /dev/null
+++ b/tajo-core/src/test/resources/dataset/TestTruncateTable/truncate_table2/table2.tbl
@@ -0,0 +1,4 @@
+4|jkl1|7
+5|opq2|8
+6|stu3|9
+7|kkk4|10
diff --git a/tajo-core/src/test/resources/queries/TestCaseByCases/testTAJO418Case.sql b/tajo-core/src/test/resources/queries/TestCaseByCases/testTAJO418Case.sql
index b3ad48a..2e62923 100644
--- a/tajo-core/src/test/resources/queries/TestCaseByCases/testTAJO418Case.sql
+++ b/tajo-core/src/test/resources/queries/TestCaseByCases/testTAJO418Case.sql
@@ -8,7 +8,7 @@
   FROM
     lineitem
   WHERE
-    l_returnflag = 'K'
+    l_returnflag = 'N'
 
   UNION ALL
 
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testDistinctAggregation_case8.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testDistinctAggregation_case8.sql
new file mode 100644
index 0000000..ed8e363
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testDistinctAggregation_case8.sql
@@ -0,0 +1,10 @@
+select
+    lineitem.l_orderkey as l_orderkey,
+    lineitem.l_partkey as l_partkey,
+    count(distinct lineitem.l_partkey) as cnt1,
+    count(distinct lineitem.l_suppkey) as cnt2,
+    sum(lineitem.l_quantity) as sum1
+from
+    lineitem
+group by
+    lineitem.l_orderkey, lineitem.l_partkey
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData1.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData1.sql
new file mode 100644
index 0000000..af089df
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData1.sql
@@ -0,0 +1 @@
+select count(1) as unique_key from lineitem where l_orderkey = 1000;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData10.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData10.sql
new file mode 100644
index 0000000..98546db
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData10.sql
@@ -0,0 +1,3 @@
+select count(distinct l_linenumber) as unique_key, count(distinct l_returnflag || l_linestatus) flag
+from lineitem
+where l_orderkey = 1000
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData11.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData11.sql
new file mode 100644
index 0000000..3fae412
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData11.sql
@@ -0,0 +1,5 @@
+select l_orderkey, count(distinct l_linenumber) as unique_key
+, count(distinct l_returnflag || l_linestatus) flag
+from lineitem
+where l_orderkey = 1000
+group by l_orderkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData12.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData12.sql
new file mode 100644
index 0000000..3a94d82
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData12.sql
@@ -0,0 +1,29 @@
+select
+	s.s_acctbal,
+	s.s_name,
+	n.t_name,
+	p.p_partkey,
+	p.p_mfgr,
+	s.s_address,
+	s.s_phone,
+	s.s_comment
+from (
+   select n_name as t_name, n_nationkey as t_nationkey
+    , n_regionkey as t_regionkey
+    , count(distinct n_comment) as cnt
+    , count(distinct n_nationkey / n_regionkey) as diff
+   from nation
+   where n_nationkey > 10000
+   group by n_name, n_nationkey, n_regionkey, n_regionkey
+) n
+join region r on (n.t_regionkey = r.r_regionkey)
+join supplier s on (s.s_nationkey = n.t_nationkey)
+join partsupp ps on (s.s_suppkey = ps.ps_suppkey)
+join part p on (p.p_partkey = ps.ps_partkey)
+where n.t_regionkey = ps.ps_suppkey
+and n.cnt > 0
+order by
+  s.s_acctbal,
+  s.s_name,
+  n.t_name,
+  p.p_partkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData2.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData2.sql
new file mode 100644
index 0000000..84351af
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData2.sql
@@ -0,0 +1 @@
+select count(1) as unique_key, max(l_orderkey) as max_key from lineitem where l_orderkey = 1000;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData3.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData3.sql
new file mode 100644
index 0000000..c6c23c6
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData3.sql
@@ -0,0 +1,3 @@
+select max(l_orderkey) as maximum, count(l_linenumber) as unique_key
+from lineitem
+where l_orderkey = 1000
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData4.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData4.sql
new file mode 100644
index 0000000..ee345f9
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData4.sql
@@ -0,0 +1,3 @@
+select max(l_orderkey) as maximum, count(distinct l_linenumber) as unique_key
+from lineitem
+where l_orderkey = 1000
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData5.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData5.sql
new file mode 100644
index 0000000..2247784
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData5.sql
@@ -0,0 +1 @@
+select count(1) as unique_key from table1;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData6.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData6.sql
new file mode 100644
index 0000000..aea9c1c
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData6.sql
@@ -0,0 +1 @@
+select count(distinct age) as unique_key, max(point) as maximum from table1;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData7.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData7.sql
new file mode 100644
index 0000000..bab8e23
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData7.sql
@@ -0,0 +1 @@
+select max(point) as maximum, count(distinct age) as unique_key from table1 where age > 100;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData8.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData8.sql
new file mode 100644
index 0000000..0bd8b53
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData8.sql
@@ -0,0 +1 @@
+select max(point) as maximum, count(age) as unique_key from table1;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData9.sql b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData9.sql
new file mode 100644
index 0000000..56fb65c
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestGroupByQuery/testGroupByWithNullData9.sql
@@ -0,0 +1,4 @@
+select l_orderkey, count(distinct l_linenumber) as unique_key
+from lineitem
+where l_orderkey = 1000
+group by l_orderkey
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteLocationWithUnion.sql b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteLocationWithUnion.sql
new file mode 100644
index 0000000..65bab10
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteLocationWithUnion.sql
@@ -0,0 +1,4 @@
+insert overwrite into location '/tajo-data/testInsertOverwriteLocationWithUnion'
+select l_orderkey as col1, l_partkey as col2, l_quantity as col3 from default.lineitem
+union all
+select o_orderkey as col1, o_custkey as col2, o_totalprice as col3 from default.orders
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteLocationWithUnionDifferenceAlias.sql b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteLocationWithUnionDifferenceAlias.sql
new file mode 100644
index 0000000..cbd7a94
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteLocationWithUnionDifferenceAlias.sql
@@ -0,0 +1,4 @@
+insert overwrite into location '/tajo-data/testInsertOverwriteLocationWithUnionDifferenceAlias'
+select l_orderkey as col1, l_partkey as col2, l_quantity as col3 from default.lineitem
+union all
+select o_orderkey as col4, o_custkey as col5, o_totalprice as col6 from default.orders
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteWithUnion.sql b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteWithUnion.sql
new file mode 100644
index 0000000..843d24c
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteWithUnion.sql
@@ -0,0 +1,4 @@
+insert overwrite into table1
+select l_orderkey as col1, l_partkey as col2, l_quantity as col3 from default.lineitem
+union all
+select o_orderkey as col1, o_custkey as col2, o_totalprice as col3 from default.orders
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteWithUnionDifferentAlias.sql b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteWithUnionDifferentAlias.sql
new file mode 100644
index 0000000..1660eb5
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestInsertQuery/testInsertOverwriteWithUnionDifferentAlias.sql
@@ -0,0 +1,4 @@
+insert overwrite into table1
+select l_orderkey as col1, l_partkey as col2, l_quantity as col3 from default.lineitem
+union all
+select o_orderkey as col4, o_custkey as col5, o_totalprice as col6 from default.orders
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinBroadcast/customer_partition_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinBroadcast/customer_partition_ddl.sql
new file mode 100644
index 0000000..7d07474
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinBroadcast/customer_partition_ddl.sql
@@ -0,0 +1,9 @@
+CREATE TABLE customer_broad_parts (
+  c_nationkey INT4,
+  c_name    TEXT,
+  c_address    TEXT,
+  c_phone    TEXT,
+  c_acctbal    FLOAT8,
+  c_mktsegment    TEXT,
+  c_comment    TEXT
+) PARTITION BY COLUMN (c_custkey    INT4);
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinBroadcast/insert_into_customer_partition.sql b/tajo-core/src/test/resources/queries/TestJoinBroadcast/insert_into_customer_partition.sql
new file mode 100644
index 0000000..3a500a1
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinBroadcast/insert_into_customer_partition.sql
@@ -0,0 +1,11 @@
+INSERT OVERWRITE INTO customer_broad_parts
+  SELECT
+    c_nationkey,
+    c_name,
+    c_address,
+    c_phone,
+    c_acctbal,
+    c_mktsegment,
+    c_comment,
+    c_custkey
+  FROM customer;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinBroadcast/nation_multifile_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinBroadcast/nation_multifile_ddl.sql
new file mode 100644
index 0000000..c3f595a
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinBroadcast/nation_multifile_ddl.sql
@@ -0,0 +1,5 @@
+create table nation_multifile (
+    n_nationkey int,
+    n_name text,
+    n_regionkey int,
+    n_comment text);
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinBroadcast/orders_multifile_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinBroadcast/orders_multifile_ddl.sql
new file mode 100644
index 0000000..64f70f2
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinBroadcast/orders_multifile_ddl.sql
@@ -0,0 +1,5 @@
+create table orders_multifile (
+    o_orderkey int,
+    o_custkey int,
+    o_orderstatus text
+);
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinBroadcast/testBroadcastPartitionTable.sql b/tajo-core/src/test/resources/queries/TestJoinBroadcast/testBroadcastPartitionTable.sql
new file mode 100644
index 0000000..800375b
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinBroadcast/testBroadcastPartitionTable.sql
@@ -0,0 +1,16 @@
+select
+  c_custkey,
+  c_name,
+  c_nationkey,
+  n_nationkey,
+  o_orderkey
+from
+  customer_broad_parts,
+  nation_multifile,
+  orders_multifile
+where
+  c_nationkey = n_nationkey
+and
+  o_custkey = c_custkey
+order by
+  c_custkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr3.sql b/tajo-core/src/test/resources/queries/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr3.sql
index 90be13b..f79b18b 100644
--- a/tajo-core/src/test/resources/queries/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr3.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr3.sql
@@ -14,4 +14,4 @@
   b
 on a.c_custkey = b.c_custkey
 order by
-  c_custkey;
\ No newline at end of file
+  a.c_custkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testFilterPushDownPartitionColumnCaseWhen.sql b/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testFilterPushDownPartitionColumnCaseWhen.sql
new file mode 100644
index 0000000..951b831
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testFilterPushDownPartitionColumnCaseWhen.sql
@@ -0,0 +1,5 @@
+select c_custkey, c_nationkey, c_name, o_custkey, (case when a.c_nationkey > 3 then 4 else 3 end)
+from customer_parts a
+inner join orders b
+on a.c_custkey = b.o_custkey
+where a.c_custkey = (case when a.c_name like 'Customer%' and a.c_nationkey > 3 then 4 else 3 end)
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin.sql b/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin.sql
new file mode 100644
index 0000000..bbb2c45
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin.sql
@@ -0,0 +1,4 @@
+select a.n_nationkey, a.n_name, b.c_custkey, b.c_nationkey, b.c_name
+from nation a
+left outer join customer_parts b on a.n_nationkey = b.c_custkey
+and b.c_nationkey = 1
diff --git a/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin2.sql b/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin2.sql
new file mode 100644
index 0000000..b30b2cb
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin2.sql
@@ -0,0 +1,5 @@
+-- In the case of no partition directory
+select a.n_nationkey, a.n_name, b.c_custkey, b.c_nationkey, b.c_name
+from nation a
+left outer join customer_parts b on a.n_nationkey = b.c_custkey
+and b.c_nationkey = 100
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table1_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table1_ddl.sql
index ea2c954..a37403c 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table1_ddl.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table1_ddl.sql
@@ -1,6 +1,3 @@
--- Outer Join's Left Table
--- It is used in TestJoin::testOuterJoinAndCaseWhen
-
 create external table table1 (id int, name text, score float, type text) using csv
 with ('csvfile.delimiter'='|', 'csvfile.null'='NULL') location ${table.path};
 
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table2_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table2_ddl.sql
index ac6459a..d60b145 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table2_ddl.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/oj_table2_ddl.sql
@@ -1,6 +1,3 @@
--- Outer Join's Left Table
--- It is used in TestJoin::testOuterJoinAndCaseWhen
-
 create external table table2 (id int, name text, score float, type text) using csv
 with ('csvfile.delimiter'='|', 'csvfile.null'='NULL') location ${table.path};
 
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/table1_int4_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/table1_int4_ddl.sql
new file mode 100644
index 0000000..0d35cee
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/table1_int4_ddl.sql
@@ -0,0 +1,3 @@
+create external table ${0} (id int, name text, score float, type text) using csv
+with ('csvfile.delimiter'='|', 'csvfile.null'='NULL') location ${table.path};
+
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/table1_int8_ddl.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/table1_int8_ddl.sql
new file mode 100644
index 0000000..3a7a44a
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/table1_int8_ddl.sql
@@ -0,0 +1,3 @@
+create external table ${0} (id bigint, name text, score float, type text) using csv
+with ('csvfile.delimiter'='|', 'csvfile.null'='NULL') location ${table.path};
+
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition1.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition1.sql
new file mode 100644
index 0000000..b61ad38
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition1.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  n1.n_name,
+  n2.n_name
+from nation n1 join nation n2 on n1.n_name = upper(n2.n_name)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition2.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition2.sql
new file mode 100644
index 0000000..33effbb
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition2.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  n1.n_name,
+  upper(n2.n_name) as name
+from nation n1 join nation n2 on n1.n_name = upper(n2.n_name)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition3.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition3.sql
new file mode 100644
index 0000000..5674269
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition3.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  n1.n_name,
+  n2.n_name
+from nation n1 join nation n2 on lower(n1.n_name) = lower(n2.n_name)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition4.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition4.sql
new file mode 100644
index 0000000..45d8adf
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition4.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  substr(n1.n_name, 1, 4) name1,
+  substr(n2.n_name, 1, 4) name2
+from nation n1 join nation n2 on substr(n1.n_name, 1, 4) = substr(n2.n_name, 1, 4)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition5.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition5.sql
new file mode 100644
index 0000000..f604bc7
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition5.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  substr(n1.n_name, 1, 4) name1,
+  substr(n2.n_name, 1, 4) name2
+from nation n1 join (select * from nation) n2 on substr(n1.n_name, 1, 4) = substr(n2.n_name, 1, 4)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition6.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition6.sql
new file mode 100644
index 0000000..704b821
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition6.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  substr(n1.n_name, 1, 4) name1,
+  substr(n2.n_name, 1, 4) name2
+from nation n1 join (select * from nation union select * from nation) n2 on substr(n1.n_name, 1, 4) = substr(n2.n_name, 1, 4)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition7.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition7.sql
new file mode 100644
index 0000000..ddd669c
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testComplexJoinCondition7.sql
@@ -0,0 +1,6 @@
+select
+  n1.n_nationkey,
+  n1.n_name,
+  n2.n_name
+from nation n1 join (select * from nation union select * from nation) n2 on substr(n1.n_name, 1, 4) = substr(n2.n_name, 1, 4)
+order by n1.n_nationkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testDifferentTypesJoinCondition.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testDifferentTypesJoinCondition.sql
new file mode 100644
index 0000000..6bd0a4c
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testDifferentTypesJoinCondition.sql
@@ -0,0 +1 @@
+select * from table20 t3 join table21 t4 on t3.id = t4.id;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testJoinAsterisk.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testJoinAsterisk.sql
new file mode 100644
index 0000000..e3de03c
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testJoinAsterisk.sql
@@ -0,0 +1,3 @@
+select *
+from nation b
+join customer a on b.n_nationkey = a.c_nationkey
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testJoinCoReferredEvalsFilterPushdown.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testJoinCoReferredEvalsFilterPushdown.sql
new file mode 100644
index 0000000..680311d
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testJoinCoReferredEvalsFilterPushdown.sql
@@ -0,0 +1,13 @@
+select * from (
+select
+  r_regionkey,
+  n_regionkey,
+  (r_regionkey + n_regionkey) as plus
+from
+  region,
+  nation
+where
+  r_regionkey = n_regionkey
+order by
+  r_regionkey, n_regionkey
+) where plus > 10
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithConstantExpr3.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithConstantExpr3.sql
index 90be13b..f79b18b 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithConstantExpr3.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithConstantExpr3.sql
@@ -14,4 +14,4 @@
   b
 on a.c_custkey = b.c_custkey
 order by
-  c_custkey;
\ No newline at end of file
+  a.c_custkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithEmptyTable5.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithEmptyTable5.sql
new file mode 100644
index 0000000..d4ef649
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithEmptyTable5.sql
@@ -0,0 +1,11 @@
+select
+  l_linenumber,
+  sum(empty_orders.o_orderkey),
+  max(empty_orders.o_orderstatus),
+  max(empty_orders.o_orderdate),
+  avg(l_quantity),
+  sum(l_quantity)
+from
+  lineitem left outer join empty_orders on l_orderkey = o_orderkey
+  group by l_linenumber
+order by l_linenumber ;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull1.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull1.sql
new file mode 100644
index 0000000..5698a74
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull1.sql
@@ -0,0 +1,10 @@
+select
+  c_custkey,
+  orders.o_orderkey,
+  coalesce(orders.o_orderstatus, 'N/A'),
+  orders.o_orderdate
+from
+  customer left outer join orders on c_custkey = o_orderkey
+where o_orderkey is null
+order by
+  c_custkey, o_orderkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull2.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull2.sql
new file mode 100644
index 0000000..000ab68
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull2.sql
@@ -0,0 +1,11 @@
+select
+  c_custkey,
+  orders.o_orderkey,
+  coalesce(orders.o_orderstatus, 'N/A'),
+  orders.o_orderdate
+from
+  customer left outer join orders on c_custkey = o_orderkey
+where orders.o_orderdate is not null
+and orders.o_orderdate like '1996%'
+order by
+  c_custkey, o_orderkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull3.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull3.sql
new file mode 100644
index 0000000..32a7750
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testLeftOuterJoinWithNull3.sql
@@ -0,0 +1,10 @@
+select
+  c_custkey,
+  orders.o_orderkey,
+  coalesce(orders.o_orderstatus, 'N/A'),
+  orders.o_orderdate
+from
+  customer left outer join orders on c_custkey = o_orderkey
+where orders.o_orderkey = 100
+order by
+  c_custkey, o_orderkey;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin1.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin1.sql
index 069be09..575456f 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin1.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin1.sql
@@ -7,4 +7,5 @@
   nation,
   region
 where
-  n_regionkey = r_regionkey;
\ No newline at end of file
+  n_regionkey = r_regionkey
+order by n_name;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin2.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin2.sql
index 0c6539b..efc07b3 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin2.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin2.sql
@@ -5,4 +5,5 @@
   nation,
   region
 where
-  n_regionkey = r_regionkey;
\ No newline at end of file
+  n_regionkey = r_regionkey
+order by n_name;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin3.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin3.sql
index 6495958..04fd25b 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin3.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin3.sql
@@ -6,4 +6,5 @@
 from
   nation, region
 where
-  n_regionkey = r_regionkey;
\ No newline at end of file
+  n_regionkey = r_regionkey
+order by n_name;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin4.sql b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin4.sql
index d8ee615..777de8a 100644
--- a/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin4.sql
+++ b/tajo-core/src/test/resources/queries/TestJoinQuery/testWhereClauseJoin4.sql
@@ -5,4 +5,5 @@
 from
   nation, region
 where
-  n_regionkey = r_regionkey;
\ No newline at end of file
+  n_regionkey = r_regionkey
+order by n_name;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestQueryUnitStatusUpdate/case3.sql b/tajo-core/src/test/resources/queries/TestQueryUnitStatusUpdate/case3.sql
index 9c9362e..a0f9c78 100644
--- a/tajo-core/src/test/resources/queries/TestQueryUnitStatusUpdate/case3.sql
+++ b/tajo-core/src/test/resources/queries/TestQueryUnitStatusUpdate/case3.sql
@@ -7,5 +7,4 @@
     (a.key = 45.0 or a.key = 38.0)
 ) test
 order by
-  col1, col2
-;
\ No newline at end of file
+  col1, col2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestSelectQuery/testSumFloatOverflow.sql b/tajo-core/src/test/resources/queries/TestSelectQuery/testSumFloatOverflow.sql
new file mode 100644
index 0000000..9ec941a
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestSelectQuery/testSumFloatOverflow.sql
@@ -0,0 +1 @@
+select sum(cast(L_EXTENDEDPRICE * 3.21506374375027E33 as FLOAT8)) from lineitem where l_quantity > 0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestSelectQuery/testSumIntOverflow.sql b/tajo-core/src/test/resources/queries/TestSelectQuery/testSumIntOverflow.sql
new file mode 100644
index 0000000..96421eb
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestSelectQuery/testSumIntOverflow.sql
@@ -0,0 +1 @@
+select sum(cast(l_quantity * 25264513 as INT4)) from lineitem where l_quantity > 0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestSelectQuery/testWhereCond2.sql b/tajo-core/src/test/resources/queries/TestSelectQuery/testWhereCond2.sql
index ff50369..7fa0d18 100644
--- a/tajo-core/src/test/resources/queries/TestSelectQuery/testWhereCond2.sql
+++ b/tajo-core/src/test/resources/queries/TestSelectQuery/testWhereCond2.sql
@@ -1,8 +1,9 @@
 select *
 from (
- select a.l_orderkey, count(*) as cnt
+ select a.l_orderkey, count(*) as cnt, sum(l_extendedprice) as sum1
   from lineitem a
   group by a.l_orderkey
+  having sum1 > 70000
 ) t
-where t.cnt > 0
+where t.cnt > 1
 order by t.l_orderkey
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestTablePartitions/case14.sql b/tajo-core/src/test/resources/queries/TestTablePartitions/case14.sql
new file mode 100644
index 0000000..ac1ad20
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestTablePartitions/case14.sql
@@ -0,0 +1,3 @@
+select count(*) as cnt
+  from
+    testColumnPartitionedTableWithSmallerExpressions1
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestTablePartitions/case15.sql b/tajo-core/src/test/resources/queries/TestTablePartitions/case15.sql
new file mode 100644
index 0000000..03cf55b
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestTablePartitions/case15.sql
@@ -0,0 +1,3 @@
+select count(*) as cnt
+  from
+    testColumnPartitionedTableWithSmallerExpressions2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestTruncateTable/table1_ddl.sql b/tajo-core/src/test/resources/queries/TestTruncateTable/table1_ddl.sql
new file mode 100644
index 0000000..dc67e75
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestTruncateTable/table1_ddl.sql
@@ -0,0 +1 @@
+CREATE TABLE truncate_table1 AS SELECT * FROM lineitem;
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestTruncateTable/table2_ddl.sql b/tajo-core/src/test/resources/queries/TestTruncateTable/table2_ddl.sql
new file mode 100644
index 0000000..8d7fba0
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestTruncateTable/table2_ddl.sql
@@ -0,0 +1 @@
+CREATE EXTERNAL TABLE ${0} (id int, str text, num int) using csv location ${table.path};
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion11.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion11.sql
new file mode 100644
index 0000000..ec1a430
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion11.sql
@@ -0,0 +1,8 @@
+select col1, col2, col3
+from (
+    select L_RETURNFLAG as col1, L_EXTENDEDPRICE as col2, concat(L_RECEIPTDATE, L_LINESTATUS) as col3 from lineitem
+    union all
+    select P_TYPE as col1, P_RETAILPRICE col2, P_NAME col3 from part
+) a
+where col3 like '1993%' and col2 > 46796
+
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion12.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion12.sql
new file mode 100644
index 0000000..6b6a9ad
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion12.sql
@@ -0,0 +1,13 @@
+select col1, col2, col3
+from (
+    select
+        col1, col2, col3
+    from
+        (select
+            L_RETURNFLAG as col1, L_EXTENDEDPRICE as col2, concat(L_RECEIPTDATE, L_LINESTATUS) as col3
+        from
+            lineitem) b
+    union all
+    select P_TYPE as col1, P_RETAILPRICE * 100 col2, concat('1993', P_NAME) col3 from part
+) a
+where col3 like '1993%' and col2 > 46796
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion13.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion13.sql
new file mode 100644
index 0000000..70b0891
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion13.sql
@@ -0,0 +1,14 @@
+select col1, col2, col3
+from (
+    select
+        col1, col2, col3
+    from
+        (select
+            L_RETURNFLAG as col1, L_EXTENDEDPRICE as col2, concat(L_RECEIPTDATE, L_LINESTATUS) as col3
+        from
+            lineitem
+        where col2 > 46796) b
+    union all
+    select P_TYPE as col1, P_RETAILPRICE * 100 col2, concat('1993', P_NAME) col3 from part
+) a
+where col3 like '1993%'
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion14.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion14.sql
new file mode 100644
index 0000000..f47510e
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion14.sql
@@ -0,0 +1,8 @@
+select col1, cnt
+from (
+    select L_RETURNFLAG as col1, count(*) as cnt from lineitem group by col1
+    union all
+    select cast(n_regionkey as TEXT) as col1, count(*) as cnt from nation group by col1
+) a
+where a.cnt > 1
+order by a.col1
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion15.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion15.sql
new file mode 100644
index 0000000..2e382d0
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion15.sql
@@ -0,0 +1,15 @@
+select col1, sum(cnt)
+from (
+    select col1, cnt
+    from (  select l_returnflag col1, count(*) cnt from lineitem
+            join orders on l_orderkey = o_orderkey and o_custkey > 0
+            group by l_returnflag) b
+    where col1 = 'N'
+    union all
+    select cast(n_regionkey as TEXT) as col1, count(*) as cnt from nation
+    where n_regionkey > 2
+    group by col1
+) a
+where round(cast(a.cnt as FLOAT4)) > 1.0
+group by a.col1
+order by a.col1
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion16.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion16.sql
new file mode 100644
index 0000000..59e9c1f
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnion16.sql
@@ -0,0 +1,15 @@
+select col1, sum(cnt)
+from (
+    select col1, cnt
+    from (  select l_returnflag col1, count(distinct l_orderkey) cnt from lineitem
+            join orders on l_orderkey = o_orderkey and o_custkey > 0
+            group by l_returnflag) b
+    where col1 = 'N'
+    union all
+    select cast(n_regionkey as TEXT) as col1, count(*) as cnt from nation
+    where n_regionkey > 2
+    group by col1
+) a
+where round(cast(a.cnt as FLOAT4)) > 1.0
+group by a.col1
+order by a.col1
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnionWithDifferentAlias.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnionWithDifferentAlias.sql
new file mode 100644
index 0000000..f41ee32
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnionWithDifferentAlias.sql
@@ -0,0 +1,29 @@
+SELECT
+  col1,
+  col2
+
+FROM (
+  SELECT
+    l_returnflag col1, l_linestatus col2, l_orderkey col3
+  FROM
+    lineitem
+  WHERE
+    l_returnflag = 'N'
+
+  UNION ALL
+
+  SELECT
+    l_returnflag col2, l_linestatus col5, l_orderkey col6
+  FROM
+    lineitem
+  WHERE
+    l_returnflag = 'R'
+) T
+
+GROUP BY
+  col1,
+	col2
+
+ORDER BY
+  col1,
+	col2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/TestUnionQuery/testUnionWithDifferentAliasAndFunction.sql b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnionWithDifferentAliasAndFunction.sql
new file mode 100644
index 0000000..598f399
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/TestUnionQuery/testUnionWithDifferentAliasAndFunction.sql
@@ -0,0 +1,29 @@
+SELECT
+  col1,
+  col2
+
+FROM (
+  SELECT
+    l_returnflag col1, concat(l_linestatus, l_shipdate) col2, l_orderkey col3
+  FROM
+    lineitem
+  WHERE
+    l_returnflag = 'N'
+
+  UNION ALL
+
+  SELECT
+    concat(l_returnflag, l_shipdate) col3, l_linestatus col4, l_orderkey col5
+  FROM
+    lineitem
+  WHERE
+    l_returnflag = 'R'
+) T
+
+GROUP BY
+  col1,
+	col2
+
+ORDER BY
+  col1,
+	col2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/queries/default/create_table_like_1.sql b/tajo-core/src/test/resources/queries/default/create_table_like_1.sql
new file mode 100644
index 0000000..73f2f46
--- /dev/null
+++ b/tajo-core/src/test/resources/queries/default/create_table_like_1.sql
@@ -0,0 +1 @@
+create table new_table like orig_name;
diff --git a/tajo-core/src/test/resources/results/TestBuiltinFunctions/testSplitPartNested.result b/tajo-core/src/test/resources/results/TestBuiltinFunctions/testSplitPartNested.result
index 5dd4193..ec0f07b 100644
--- a/tajo-core/src/test/resources/results/TestBuiltinFunctions/testSplitPartNested.result
+++ b/tajo-core/src/test/resources/results/TestBuiltinFunctions/testSplitPartNested.result
@@ -1,7 +1,7 @@
 ?split_part
 -------------------------------
-
+null
 KE
 KE
-
+null
 KE
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO415Case.result b/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO415Case.result
index 675037b..6c527af 100644
--- a/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO415Case.result
+++ b/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO415Case.result
@@ -3,5 +3,5 @@
 1,1,1
 2,2,1
 3,3,1
-4,0,1
-5,0,1
\ No newline at end of file
+4,null,1
+5,null,1
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO418Case.result b/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO418Case.result
index ba35aa1..d78067e 100644
--- a/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO418Case.result
+++ b/tajo-core/src/test/resources/results/TestCaseByCases/testTAJO418Case.result
@@ -1,3 +1,4 @@
 l_returnflag,l_linestatus
 -------------------------------
+N,O
 R,F
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testDistinctAggregation_case8.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testDistinctAggregation_case8.result
new file mode 100644
index 0000000..e234896
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testDistinctAggregation_case8.result
@@ -0,0 +1,6 @@
+l_orderkey,l_partkey,cnt1,cnt2,sum1
+-------------------------------
+1,1,1,2,53.0
+2,2,1,1,38.0
+3,2,1,1,45.0
+3,3,1,1,49.0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData1.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData1.result
new file mode 100644
index 0000000..13335cb
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData1.result
@@ -0,0 +1,3 @@
+unique_key
+-------------------------------
+0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData10.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData10.result
new file mode 100644
index 0000000..c865378
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData10.result
@@ -0,0 +1,3 @@
+unique_key,flag
+-------------------------------
+0,0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData11.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData11.result
new file mode 100644
index 0000000..bd69ff2
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData11.result
@@ -0,0 +1,2 @@
+l_orderkey,unique_key,flag
+-------------------------------
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData12.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData12.result
new file mode 100644
index 0000000..2b8b687
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData12.result
@@ -0,0 +1,2 @@
+s_acctbal,s_name,t_name,p_partkey,p_mfgr,s_address,s_phone,s_comment
+-------------------------------
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData2.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData2.result
new file mode 100644
index 0000000..f4f9a5b
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData2.result
@@ -0,0 +1,3 @@
+unique_key,max_key
+-------------------------------
+0,0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData3.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData3.result
new file mode 100644
index 0000000..fef3d0c
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData3.result
@@ -0,0 +1,3 @@
+maximum,unique_key
+-------------------------------
+0,0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData4.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData4.result
new file mode 100644
index 0000000..fef3d0c
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData4.result
@@ -0,0 +1,3 @@
+maximum,unique_key
+-------------------------------
+0,0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData5.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData5.result
new file mode 100644
index 0000000..13335cb
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData5.result
@@ -0,0 +1,3 @@
+unique_key
+-------------------------------
+0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData6.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData6.result
new file mode 100644
index 0000000..58aaa20
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData6.result
@@ -0,0 +1,3 @@
+unique_key,maximum
+-------------------------------
+0,0.0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData7.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData7.result
new file mode 100644
index 0000000..e19a623
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData7.result
@@ -0,0 +1,3 @@
+maximum,unique_key
+-------------------------------
+0.0,0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData8.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData8.result
new file mode 100644
index 0000000..e19a623
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData8.result
@@ -0,0 +1,3 @@
+maximum,unique_key
+-------------------------------
+0.0,0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData9.result b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData9.result
new file mode 100644
index 0000000..1f6d988
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestGroupByQuery/testGroupByWithNullData9.result
@@ -0,0 +1,2 @@
+l_orderkey,unique_key
+-------------------------------
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testBroadcastMultiColumnPartitionTable.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testBroadcastMultiColumnPartitionTable.result
new file mode 100644
index 0000000..df3c7bc
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testBroadcastMultiColumnPartitionTable.result
@@ -0,0 +1,5 @@
+col3
+-------------------------------
+01
+10
+12
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testBroadcastPartitionTable.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testBroadcastPartitionTable.result
new file mode 100644
index 0000000..c03a275
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testBroadcastPartitionTable.result
@@ -0,0 +1,5 @@
+c_custkey,c_name,c_nationkey,n_nationkey,o_orderkey
+-------------------------------
+2,Customer#000000002,13,13,3
+3,Customer#000000003,1,1,1
+4,Customer#000000004,4,4,2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoin1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoin1.result
index 81dc055..695a414 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoin1.result
@@ -3,5 +3,5 @@
 1,1
 2,2
 3,3
-4,0
-5,0
\ No newline at end of file
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoinWithEmptyTable1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoinWithEmptyTable1.result
index 9124c6c..5b849fc 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoinWithEmptyTable1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testFullOuterJoinWithEmptyTable1.result
@@ -1,7 +1,7 @@
 c_custkey,o_orderkey
 -------------------------------
-1,0
-2,0
-3,0
-4,0
-5,0
\ No newline at end of file
+1,null
+2,null
+3,null
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoin1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoin1.result
index 8893e12..9e2a53a 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoin1.result
@@ -3,5 +3,5 @@
 1,1,O,1996-01-02
 2,2,O,1996-12-01
 3,3,F,1993-10-14
-4,0,,
-5,0,,
\ No newline at end of file
+4,null,null,null
+5,null,null,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr1.result
index ed28172..670a069 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr1.result
@@ -3,5 +3,5 @@
 1,1,val
 2,2,val
 3,3,val
-4,0,val
-5,0,val
\ No newline at end of file
+4,null,val
+5,null,val
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr2.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr2.result
index ed28172..670a069 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr2.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithConstantExpr2.result
@@ -3,5 +3,5 @@
 1,1,val
 2,2,val
 3,3,val
-4,0,val
-5,0,val
\ No newline at end of file
+4,null,val
+5,null,val
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithEmptyTable1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithEmptyTable1.result
index 5e85b28..e2f94f4 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithEmptyTable1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testLeftOuterJoinWithEmptyTable1.result
@@ -1,7 +1,7 @@
 c_custkey,o_orderkey,o_orderstatus,o_orderdate
 -------------------------------
-1,0,,
-2,0,,
-3,0,,
-4,0,,
-5,0,,
\ No newline at end of file
+1,null,null,null
+2,null,null,null
+3,null,null,null
+4,null,null,null
+5,null,null,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoin1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoin1.result
index 81dc055..695a414 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoin1.result
@@ -3,5 +3,5 @@
 1,1
 2,2
 3,3
-4,0
-5,0
\ No newline at end of file
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoinWithEmptyTable1.result b/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoinWithEmptyTable1.result
index 9124c6c..5b849fc 100644
--- a/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoinWithEmptyTable1.result
+++ b/tajo-core/src/test/resources/results/TestJoinBroadcast/testRightOuterJoinWithEmptyTable1.result
@@ -1,7 +1,7 @@
 c_custkey,o_orderkey
 -------------------------------
-1,0
-2,0
-3,0
-4,0
-5,0
\ No newline at end of file
+1,null
+2,null
+3,null
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testFilterPushDownPartitionColumnCaseWhen.result b/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testFilterPushDownPartitionColumnCaseWhen.result
new file mode 100644
index 0000000..4ba41a3
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testFilterPushDownPartitionColumnCaseWhen.result
@@ -0,0 +1,4 @@
+c_custkey,c_nationkey,c_name,o_custkey,?casewhen
+-------------------------------
+3,1,Customer#000000003,3,3
+4,4,Customer#000000004,4,4
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin.result b/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin.result
new file mode 100644
index 0000000..73150a0
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin.result
@@ -0,0 +1,27 @@
+n_nationkey,n_name,c_custkey,c_nationkey,c_name
+-------------------------------
+0,ALGERIA,null,null,null
+1,ARGENTINA,null,null,null
+2,BRAZIL,null,null,null
+3,CANADA,3,1,Customer#000000003
+4,EGYPT,null,null,null
+5,ETHIOPIA,null,null,null
+6,FRANCE,null,null,null
+7,GERMANY,null,null,null
+8,INDIA,null,null,null
+9,INDONESIA,null,null,null
+10,IRAN,null,null,null
+11,IRAQ,null,null,null
+12,JAPAN,null,null,null
+13,JORDAN,null,null,null
+14,KENYA,null,null,null
+15,MOROCCO,null,null,null
+16,MOZAMBIQUE,null,null,null
+17,PERU,null,null,null
+18,CHINA,null,null,null
+19,ROMANIA,null,null,null
+20,SAUDI ARABIA,null,null,null
+21,VIETNAM,null,null,null
+22,RUSSIA,null,null,null
+23,UNITED KINGDOM,null,null,null
+24,UNITED STATES,null,null,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin2.result b/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin2.result
new file mode 100644
index 0000000..442ac75
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinOnPartitionedTables/testPartialFilterPushDownOuterJoin2.result
@@ -0,0 +1,27 @@
+n_nationkey,n_name,c_custkey,c_nationkey,c_name
+-------------------------------
+0,ALGERIA,null,null,null
+1,ARGENTINA,null,null,null
+2,BRAZIL,null,null,null
+3,CANADA,null,null,null
+4,EGYPT,null,null,null
+5,ETHIOPIA,null,null,null
+6,FRANCE,null,null,null
+7,GERMANY,null,null,null
+8,INDIA,null,null,null
+9,INDONESIA,null,null,null
+10,IRAN,null,null,null
+11,IRAQ,null,null,null
+12,JAPAN,null,null,null
+13,JORDAN,null,null,null
+14,KENYA,null,null,null
+15,MOROCCO,null,null,null
+16,MOZAMBIQUE,null,null,null
+17,PERU,null,null,null
+18,CHINA,null,null,null
+19,ROMANIA,null,null,null
+20,SAUDI ARABIA,null,null,null
+21,VIETNAM,null,null,null
+22,RUSSIA,null,null,null
+23,UNITED KINGDOM,null,null,null
+24,UNITED STATES,null,null,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition1.result
new file mode 100644
index 0000000..e0691a7
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition1.result
@@ -0,0 +1,27 @@
+n_nationkey,n_name,n_name
+-------------------------------
+0,ALGERIA,ALGERIA
+1,ARGENTINA,ARGENTINA
+2,BRAZIL,BRAZIL
+3,CANADA,CANADA
+4,EGYPT,EGYPT
+5,ETHIOPIA,ETHIOPIA
+6,FRANCE,FRANCE
+7,GERMANY,GERMANY
+8,INDIA,INDIA
+9,INDONESIA,INDONESIA
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+12,JAPAN,JAPAN
+13,JORDAN,JORDAN
+14,KENYA,KENYA
+15,MOROCCO,MOROCCO
+16,MOZAMBIQUE,MOZAMBIQUE
+17,PERU,PERU
+18,CHINA,CHINA
+19,ROMANIA,ROMANIA
+20,SAUDI ARABIA,SAUDI ARABIA
+21,VIETNAM,VIETNAM
+22,RUSSIA,RUSSIA
+23,UNITED KINGDOM,UNITED KINGDOM
+24,UNITED STATES,UNITED STATES
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition2.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition2.result
new file mode 100644
index 0000000..63289e1
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition2.result
@@ -0,0 +1,27 @@
+n_nationkey,n_name,name
+-------------------------------
+0,ALGERIA,ALGERIA
+1,ARGENTINA,ARGENTINA
+2,BRAZIL,BRAZIL
+3,CANADA,CANADA
+4,EGYPT,EGYPT
+5,ETHIOPIA,ETHIOPIA
+6,FRANCE,FRANCE
+7,GERMANY,GERMANY
+8,INDIA,INDIA
+9,INDONESIA,INDONESIA
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+12,JAPAN,JAPAN
+13,JORDAN,JORDAN
+14,KENYA,KENYA
+15,MOROCCO,MOROCCO
+16,MOZAMBIQUE,MOZAMBIQUE
+17,PERU,PERU
+18,CHINA,CHINA
+19,ROMANIA,ROMANIA
+20,SAUDI ARABIA,SAUDI ARABIA
+21,VIETNAM,VIETNAM
+22,RUSSIA,RUSSIA
+23,UNITED KINGDOM,UNITED KINGDOM
+24,UNITED STATES,UNITED STATES
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition3.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition3.result
new file mode 100644
index 0000000..e0691a7
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition3.result
@@ -0,0 +1,27 @@
+n_nationkey,n_name,n_name
+-------------------------------
+0,ALGERIA,ALGERIA
+1,ARGENTINA,ARGENTINA
+2,BRAZIL,BRAZIL
+3,CANADA,CANADA
+4,EGYPT,EGYPT
+5,ETHIOPIA,ETHIOPIA
+6,FRANCE,FRANCE
+7,GERMANY,GERMANY
+8,INDIA,INDIA
+9,INDONESIA,INDONESIA
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+12,JAPAN,JAPAN
+13,JORDAN,JORDAN
+14,KENYA,KENYA
+15,MOROCCO,MOROCCO
+16,MOZAMBIQUE,MOZAMBIQUE
+17,PERU,PERU
+18,CHINA,CHINA
+19,ROMANIA,ROMANIA
+20,SAUDI ARABIA,SAUDI ARABIA
+21,VIETNAM,VIETNAM
+22,RUSSIA,RUSSIA
+23,UNITED KINGDOM,UNITED KINGDOM
+24,UNITED STATES,UNITED STATES
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition4.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition4.result
new file mode 100644
index 0000000..325375d
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition4.result
@@ -0,0 +1,29 @@
+n_nationkey,name1,name2
+-------------------------------
+0,ALGE,ALGE
+1,ARGE,ARGE
+2,BRAZ,BRAZ
+3,CANA,CANA
+4,EGYP,EGYP
+5,ETHI,ETHI
+6,FRAN,FRAN
+7,GERM,GERM
+8,INDI,INDI
+9,INDO,INDO
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+12,JAPA,JAPA
+13,JORD,JORD
+14,KENY,KENY
+15,MORO,MORO
+16,MOZA,MOZA
+17,PERU,PERU
+18,CHIN,CHIN
+19,ROMA,ROMA
+20,SAUD,SAUD
+21,VIET,VIET
+22,RUSS,RUSS
+23,UNIT,UNIT
+23,UNIT,UNIT
+24,UNIT,UNIT
+24,UNIT,UNIT
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition5.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition5.result
new file mode 100644
index 0000000..325375d
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition5.result
@@ -0,0 +1,29 @@
+n_nationkey,name1,name2
+-------------------------------
+0,ALGE,ALGE
+1,ARGE,ARGE
+2,BRAZ,BRAZ
+3,CANA,CANA
+4,EGYP,EGYP
+5,ETHI,ETHI
+6,FRAN,FRAN
+7,GERM,GERM
+8,INDI,INDI
+9,INDO,INDO
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+12,JAPA,JAPA
+13,JORD,JORD
+14,KENY,KENY
+15,MORO,MORO
+16,MOZA,MOZA
+17,PERU,PERU
+18,CHIN,CHIN
+19,ROMA,ROMA
+20,SAUD,SAUD
+21,VIET,VIET
+22,RUSS,RUSS
+23,UNIT,UNIT
+23,UNIT,UNIT
+24,UNIT,UNIT
+24,UNIT,UNIT
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition6.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition6.result
new file mode 100644
index 0000000..82158bc
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition6.result
@@ -0,0 +1,56 @@
+n_nationkey,name1,name2
+-------------------------------
+0,ALGE,ALGE
+0,ALGE,ALGE
+1,ARGE,ARGE
+1,ARGE,ARGE
+2,BRAZ,BRAZ
+2,BRAZ,BRAZ
+3,CANA,CANA
+3,CANA,CANA
+4,EGYP,EGYP
+4,EGYP,EGYP
+5,ETHI,ETHI
+5,ETHI,ETHI
+6,FRAN,FRAN
+6,FRAN,FRAN
+7,GERM,GERM
+7,GERM,GERM
+8,INDI,INDI
+8,INDI,INDI
+9,INDO,INDO
+9,INDO,INDO
+10,IRAN,IRAN
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+11,IRAQ,IRAQ
+12,JAPA,JAPA
+12,JAPA,JAPA
+13,JORD,JORD
+13,JORD,JORD
+14,KENY,KENY
+14,KENY,KENY
+15,MORO,MORO
+15,MORO,MORO
+16,MOZA,MOZA
+16,MOZA,MOZA
+17,PERU,PERU
+17,PERU,PERU
+18,CHIN,CHIN
+18,CHIN,CHIN
+19,ROMA,ROMA
+19,ROMA,ROMA
+20,SAUD,SAUD
+20,SAUD,SAUD
+21,VIET,VIET
+21,VIET,VIET
+22,RUSS,RUSS
+22,RUSS,RUSS
+23,UNIT,UNIT
+23,UNIT,UNIT
+23,UNIT,UNIT
+23,UNIT,UNIT
+24,UNIT,UNIT
+24,UNIT,UNIT
+24,UNIT,UNIT
+24,UNIT,UNIT
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition7.result b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition7.result
new file mode 100644
index 0000000..edd83cd
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testComplexJoinCondition7.result
@@ -0,0 +1,56 @@
+n_nationkey,n_name,n_name
+-------------------------------
+0,ALGERIA,ALGERIA
+0,ALGERIA,ALGERIA
+1,ARGENTINA,ARGENTINA
+1,ARGENTINA,ARGENTINA
+2,BRAZIL,BRAZIL
+2,BRAZIL,BRAZIL
+3,CANADA,CANADA
+3,CANADA,CANADA
+4,EGYPT,EGYPT
+4,EGYPT,EGYPT
+5,ETHIOPIA,ETHIOPIA
+5,ETHIOPIA,ETHIOPIA
+6,FRANCE,FRANCE
+6,FRANCE,FRANCE
+7,GERMANY,GERMANY
+7,GERMANY,GERMANY
+8,INDIA,INDIA
+8,INDIA,INDIA
+9,INDONESIA,INDONESIA
+9,INDONESIA,INDONESIA
+10,IRAN,IRAN
+10,IRAN,IRAN
+11,IRAQ,IRAQ
+11,IRAQ,IRAQ
+12,JAPAN,JAPAN
+12,JAPAN,JAPAN
+13,JORDAN,JORDAN
+13,JORDAN,JORDAN
+14,KENYA,KENYA
+14,KENYA,KENYA
+15,MOROCCO,MOROCCO
+15,MOROCCO,MOROCCO
+16,MOZAMBIQUE,MOZAMBIQUE
+16,MOZAMBIQUE,MOZAMBIQUE
+17,PERU,PERU
+17,PERU,PERU
+18,CHINA,CHINA
+18,CHINA,CHINA
+19,ROMANIA,ROMANIA
+19,ROMANIA,ROMANIA
+20,SAUDI ARABIA,SAUDI ARABIA
+20,SAUDI ARABIA,SAUDI ARABIA
+21,VIETNAM,VIETNAM
+21,VIETNAM,VIETNAM
+22,RUSSIA,RUSSIA
+22,RUSSIA,RUSSIA
+23,UNITED KINGDOM,UNITED KINGDOM
+23,UNITED KINGDOM,UNITED STATES
+23,UNITED KINGDOM,UNITED KINGDOM
+23,UNITED KINGDOM,UNITED STATES
+24,UNITED STATES,UNITED KINGDOM
+24,UNITED STATES,UNITED STATES
+24,UNITED STATES,UNITED KINGDOM
+24,UNITED STATES,UNITED STATES
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testDifferentTypesJoinCondition.result b/tajo-core/src/test/resources/results/TestJoinQuery/testDifferentTypesJoinCondition.result
new file mode 100644
index 0000000..d5b7510
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testDifferentTypesJoinCondition.result
@@ -0,0 +1,7 @@
+id,name,score,type,id,name,score,type
+-------------------------------
+1,ooo,1.1,a,1,ooo,1.1,a
+2,ppp,2.3,b,2,ppp,2.3,b
+3,qqq,3.4,c,3,qqq,3.4,c
+4,rrr,4.5,d,4,rrr,4.5,d
+5,xxx,5.6,e,5,xxx,5.6,e
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoin1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoin1.result
index 81dc055..695a414 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoin1.result
@@ -3,5 +3,5 @@
 1,1
 2,2
 3,3
-4,0
-5,0
\ No newline at end of file
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoinWithEmptyTable1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoinWithEmptyTable1.result
index 9124c6c..5b849fc 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoinWithEmptyTable1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testFullOuterJoinWithEmptyTable1.result
@@ -1,7 +1,7 @@
 c_custkey,o_orderkey
 -------------------------------
-1,0
-2,0
-3,0
-4,0
-5,0
\ No newline at end of file
+1,null
+2,null
+3,null
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testJoinAsterisk.result b/tajo-core/src/test/resources/results/TestJoinQuery/testJoinAsterisk.result
new file mode 100644
index 0000000..d01fa21
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testJoinAsterisk.result
@@ -0,0 +1,7 @@
+n_nationkey,n_name,n_regionkey,n_comment,c_custkey,c_name,c_address,c_nationkey,c_phone,c_acctbal,c_mktsegment,c_comment
+-------------------------------
+1,ARGENTINA,1,al foxes promise slyly according to the regular accounts. bold requests alon,3,Customer#000000003,MG9kdTD2WBHm,1,11-719-748-3364,7498.12,AUTOMOBILE, deposits eat slyly ironic, even instructions. express foxes detect slyly. blithely even accounts abov
+3,CANADA,1,eas hang ironic, silent packages. slyly regular packages are furiously over the tithes. fluffily bold,5,Customer#000000005,KvpyuHCplrB84WgAiGV6sYpZq7Tj,3,13-750-942-6364,794.47,HOUSEHOLD,n accounts will have to unwind. foxes cajole accor
+4,EGYPT,4,y above the carefully unusual theodolites. final dugouts are quickly across the furiously regular d,4,Customer#000000004,XxVSJsLAGtn,4,14-128-190-5944,2866.83,MACHINERY, requests. final, regular ideas sleep final accou
+13,JORDAN,4,ic deposits are blithely about the carefully regular pa,2,Customer#000000002,XSTf4,NCwDVaWNe6tEgvwfmRchLXak,13,23-768-687-3665,121.65,AUTOMOBILE,l accounts. blithely ironic theodolites integrate boldly: caref
+15,MOROCCO,0,rns. blithely bold courts among the closely regular packages use furiously bold platelets?,1,Customer#000000001,IVhzIApeRb ot,c,E,15,25-989-741-2988,711.56,BUILDING,to the even, regular platelets. regular, ironic epitaphs nag e
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoin1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoin1.result
index 8893e12..9e2a53a 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoin1.result
@@ -3,5 +3,5 @@
 1,1,O,1996-01-02
 2,2,O,1996-12-01
 3,3,F,1993-10-14
-4,0,,
-5,0,,
\ No newline at end of file
+4,null,null,null
+5,null,null,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr1.result
index ed28172..670a069 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr1.result
@@ -3,5 +3,5 @@
 1,1,val
 2,2,val
 3,3,val
-4,0,val
-5,0,val
\ No newline at end of file
+4,null,val
+5,null,val
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr2.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr2.result
index ed28172..670a069 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr2.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithConstantExpr2.result
@@ -3,5 +3,5 @@
 1,1,val
 2,2,val
 3,3,val
-4,0,val
-5,0,val
\ No newline at end of file
+4,null,val
+5,null,val
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable1.result
index 5e85b28..e2f94f4 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable1.result
@@ -1,7 +1,7 @@
 c_custkey,o_orderkey,o_orderstatus,o_orderdate
 -------------------------------
-1,0,,
-2,0,,
-3,0,,
-4,0,,
-5,0,,
\ No newline at end of file
+1,null,null,null
+2,null,null,null
+3,null,null,null
+4,null,null,null
+5,null,null,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable5.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable5.result
new file mode 100644
index 0000000..f1d80e4
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithEmptyTable5.result
@@ -0,0 +1,4 @@
+l_linenumber,?sum,?max_1,?max_2,?avg_3,?sum_4
+-------------------------------
+1,0,,,33.333333333333336,100.0
+2,0,,,42.5,85.0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull1.result
new file mode 100644
index 0000000..81b907d
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull1.result
@@ -0,0 +1,4 @@
+c_custkey,o_orderkey,?coalesce,o_orderdate
+-------------------------------
+4,null,N/A,null
+5,null,N/A,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull2.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull2.result
new file mode 100644
index 0000000..08f745c
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull2.result
@@ -0,0 +1,4 @@
+c_custkey,o_orderkey,?coalesce,o_orderdate
+-------------------------------
+1,1,O,1996-01-02
+2,2,O,1996-12-01
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull3.result b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull3.result
new file mode 100644
index 0000000..efd2e74
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testLeftOuterJoinWithNull3.result
@@ -0,0 +1,2 @@
+c_custkey,o_orderkey,?coalesce,o_orderdate
+-------------------------------
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testOuterJoinAndCaseWhen1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testOuterJoinAndCaseWhen1.result
index f032d32..8f204fb 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testOuterJoinAndCaseWhen1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testOuterJoinAndCaseWhen1.result
@@ -1,7 +1,7 @@
 id,name,id2,name2,c1,c2
 -------------------------------
-1,ooo,1,,9991231,ooo
-2,ppp,2,,9991231,ppp
-3,qqq,0,,9991231,9991231
-4,rrr,0,,9991231,9991231
-5,xxx,0,,9991231,9991231
\ No newline at end of file
+1,ooo,1,null,9991231,ooo
+2,ppp,2,null,9991231,ppp
+3,qqq,null,null,9991231,9991231
+4,rrr,null,null,9991231,9991231
+5,xxx,null,null,9991231,9991231
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoin1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoin1.result
index 81dc055..695a414 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoin1.result
@@ -3,5 +3,5 @@
 1,1
 2,2
 3,3
-4,0
-5,0
\ No newline at end of file
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoinWithEmptyTable1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoinWithEmptyTable1.result
index 9124c6c..5b849fc 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoinWithEmptyTable1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testRightOuterJoinWithEmptyTable1.result
@@ -1,7 +1,7 @@
 c_custkey,o_orderkey
 -------------------------------
-1,0
-2,0
-3,0
-4,0
-5,0
\ No newline at end of file
+1,null
+2,null
+3,null
+4,null
+5,null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin1.result b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin1.result
index 5c54325..5691b50 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin1.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin1.result
@@ -4,6 +4,7 @@
 ARGENTINA,AMERICA,1,1
 BRAZIL,AMERICA,1,1
 CANADA,AMERICA,1,1
+CHINA,ASIA,2,2
 EGYPT,MIDDLE EAST,4,4
 ETHIOPIA,AFRICA,0,0
 FRANCE,EUROPE,3,3
@@ -18,10 +19,9 @@
 MOROCCO,AFRICA,0,0
 MOZAMBIQUE,AFRICA,0,0
 PERU,AMERICA,1,1
-CHINA,ASIA,2,2
 ROMANIA,EUROPE,3,3
-SAUDI ARABIA,MIDDLE EAST,4,4
-VIETNAM,ASIA,2,2
 RUSSIA,EUROPE,3,3
+SAUDI ARABIA,MIDDLE EAST,4,4
 UNITED KINGDOM,EUROPE,3,3
-UNITED STATES,AMERICA,1,1
\ No newline at end of file
+UNITED STATES,AMERICA,1,1
+VIETNAM,ASIA,2,2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin2.result b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin2.result
index 178ddd6..c83d6d6 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin2.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin2.result
@@ -4,6 +4,7 @@
 ARGENTINA,AMERICA
 BRAZIL,AMERICA
 CANADA,AMERICA
+CHINA,ASIA
 EGYPT,MIDDLE EAST
 ETHIOPIA,AFRICA
 FRANCE,EUROPE
@@ -18,10 +19,9 @@
 MOROCCO,AFRICA
 MOZAMBIQUE,AFRICA
 PERU,AMERICA
-CHINA,ASIA
 ROMANIA,EUROPE
-SAUDI ARABIA,MIDDLE EAST
-VIETNAM,ASIA
 RUSSIA,EUROPE
+SAUDI ARABIA,MIDDLE EAST
 UNITED KINGDOM,EUROPE
-UNITED STATES,AMERICA
\ No newline at end of file
+UNITED STATES,AMERICA
+VIETNAM,ASIA
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin3.result b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin3.result
index 9f3123a..e559818 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin3.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin3.result
@@ -4,6 +4,7 @@
 ARGENTINA,AMERICA,2,2
 BRAZIL,AMERICA,3,2
 CANADA,AMERICA,4,2
+CHINA,ASIA,19,3
 EGYPT,MIDDLE EAST,5,5
 ETHIOPIA,AFRICA,6,1
 FRANCE,EUROPE,7,4
@@ -18,10 +19,9 @@
 MOROCCO,AFRICA,16,1
 MOZAMBIQUE,AFRICA,17,1
 PERU,AMERICA,18,2
-CHINA,ASIA,19,3
 ROMANIA,EUROPE,20,4
-SAUDI ARABIA,MIDDLE EAST,21,5
-VIETNAM,ASIA,22,3
 RUSSIA,EUROPE,23,4
+SAUDI ARABIA,MIDDLE EAST,21,5
 UNITED KINGDOM,EUROPE,24,4
-UNITED STATES,AMERICA,25,2
\ No newline at end of file
+UNITED STATES,AMERICA,25,2
+VIETNAM,ASIA,22,3
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin4.result b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin4.result
index b7f95a8..90df873 100644
--- a/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin4.result
+++ b/tajo-core/src/test/resources/results/TestJoinQuery/testWhereClauseJoin4.result
@@ -4,6 +4,7 @@
 ARGENTINA,AMERICA,2
 BRAZIL,AMERICA,3
 CANADA,AMERICA,4
+CHINA,ASIA,20
 EGYPT,MIDDLE EAST,8
 ETHIOPIA,AFRICA,5
 FRANCE,EUROPE,9
@@ -18,10 +19,9 @@
 MOROCCO,AFRICA,15
 MOZAMBIQUE,AFRICA,16
 PERU,AMERICA,18
-CHINA,ASIA,20
 ROMANIA,EUROPE,22
-SAUDI ARABIA,MIDDLE EAST,24
-VIETNAM,ASIA,23
 RUSSIA,EUROPE,25
+SAUDI ARABIA,MIDDLE EAST,24
 UNITED KINGDOM,EUROPE,26
-UNITED STATES,AMERICA,25
\ No newline at end of file
+UNITED STATES,AMERICA,25
+VIETNAM,ASIA,23
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestNetTypes/testJoin.result b/tajo-core/src/test/resources/results/TestNetTypes/testJoin.result
index ea979ac..568b488 100644
--- a/tajo-core/src/test/resources/results/TestNetTypes/testJoin.result
+++ b/tajo-core/src/test/resources/results/TestNetTypes/testJoin.result
@@ -1,6 +1,6 @@
 id,name,score,type,addr,id,name,score,type,addr
 -------------------------------
-1,ooo,1.1,a,127.0.0.1,0,,20.0,d,127.0.0.1
-3,qqq,3.4,c,127.0.0.8,1,,0.0,a,127.0.0.8
-3,qqq,3.4,c,127.0.0.8,2,,0.0,b,127.0.0.8
-4,rrr,4.5,d,127.0.0.1,0,,20.0,d,127.0.0.1
\ No newline at end of file
+1,ooo,1.1,a,127.0.0.1,null,null,20.0,d,127.0.0.1
+3,qqq,3.4,c,127.0.0.8,1,null,null,a,127.0.0.8
+3,qqq,3.4,c,127.0.0.8,2,null,null,b,127.0.0.8
+4,rrr,4.5,d,127.0.0.1,null,null,20.0,d,127.0.0.1
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestNetTypes/testSort2.result b/tajo-core/src/test/resources/results/TestNetTypes/testSort2.result
index debbb98..9bd5038 100644
--- a/tajo-core/src/test/resources/results/TestNetTypes/testSort2.result
+++ b/tajo-core/src/test/resources/results/TestNetTypes/testSort2.result
@@ -3,4 +3,5 @@
 127.0.0.1
 127.0.0.8
 127.0.0.8
-255.255.255.255
\ No newline at end of file
+255.255.255.255
+null
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestQueryUnitStatusUpdate/case3.result b/tajo-core/src/test/resources/results/TestQueryUnitStatusUpdate/case3.result
new file mode 100644
index 0000000..025d0b4
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestQueryUnitStatusUpdate/case3.result
@@ -0,0 +1,4 @@
+col1,col2,key
+-------------------------------
+2,2,38.0
+3,2,45.0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestSelectQuery/testCaseWhenWithoutElse.result b/tajo-core/src/test/resources/results/TestSelectQuery/testCaseWhenWithoutElse.result
index 843b4e8..bde7df8 100644
--- a/tajo-core/src/test/resources/results/TestSelectQuery/testCaseWhenWithoutElse.result
+++ b/tajo-core/src/test/resources/results/TestSelectQuery/testCaseWhenWithoutElse.result
@@ -1,6 +1,6 @@
 r_regionkey,cond
 -------------------------------
-0,0
+0,null
 1,11
 2,12
 3,13
diff --git a/tajo-core/src/test/resources/results/TestSelectQuery/testSumFloatOverflow.result b/tajo-core/src/test/resources/results/TestSelectQuery/testSumFloatOverflow.result
new file mode 100644
index 0000000..13b9ef4
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestSelectQuery/testSumFloatOverflow.result
@@ -0,0 +1,3 @@
+?sum
+-------------------------------
+6.838452478692677E38
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestSelectQuery/testSumIntOverflow.result b/tajo-core/src/test/resources/results/TestSelectQuery/testSumIntOverflow.result
new file mode 100644
index 0000000..cf2e0a8
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestSelectQuery/testSumIntOverflow.result
@@ -0,0 +1,3 @@
+?sum
+-------------------------------
+4673934905
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestSelectQuery/testWhereCond2.result b/tajo-core/src/test/resources/results/TestSelectQuery/testWhereCond2.result
index 32c93ed..c2bee17 100644
--- a/tajo-core/src/test/resources/results/TestSelectQuery/testWhereCond2.result
+++ b/tajo-core/src/test/resources/results/TestSelectQuery/testWhereCond2.result
@@ -1,5 +1,3 @@
-l_orderkey,cnt
+l_orderkey,cnt,sum1
 -------------------------------
-1,2
-2,1
-3,2
\ No newline at end of file
+3,2,100854.52
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestTablePartitions/case1.result b/tajo-core/src/test/resources/results/TestTablePartitions/case1.result
index 4bfe8a2..aece52e 100644
--- a/tajo-core/src/test/resources/results/TestTablePartitions/case1.result
+++ b/tajo-core/src/test/resources/results/TestTablePartitions/case1.result
@@ -1,4 +1,4 @@
 col1,col2,null_col,key
 -------------------------------
-2,2,0,38.0
-3,2,0,45.0
\ No newline at end of file
+2,2,null,38.0
+3,2,null,45.0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestTablePartitions/case14.result b/tajo-core/src/test/resources/results/TestTablePartitions/case14.result
new file mode 100644
index 0000000..785d264
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestTablePartitions/case14.result
@@ -0,0 +1,3 @@
+cnt
+-------------------------------
+0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestTablePartitions/case15.result b/tajo-core/src/test/resources/results/TestTablePartitions/case15.result
new file mode 100644
index 0000000..785d264
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestTablePartitions/case15.result
@@ -0,0 +1,3 @@
+cnt
+-------------------------------
+0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestTablePartitions/case2.result b/tajo-core/src/test/resources/results/TestTablePartitions/case2.result
index 45b1a44..af56618 100644
--- a/tajo-core/src/test/resources/results/TestTablePartitions/case2.result
+++ b/tajo-core/src/test/resources/results/TestTablePartitions/case2.result
@@ -1,6 +1,6 @@
 col1,col2,null_col,key
 -------------------------------
-2,2,0,38.0
-2,2,0,38.0
-3,2,0,45.0
-3,2,0,45.0
\ No newline at end of file
+2,2,null,38.0
+2,2,null,38.0
+3,2,null,45.0
+3,2,null,45.0
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestTajoCli/testSelectResultWithNullFalse.result b/tajo-core/src/test/resources/results/TestTajoCli/testSelectResultWithNullFalse.result
new file mode 100644
index 0000000..a5a6039
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestTajoCli/testSelectResultWithNullFalse.result
@@ -0,0 +1,8 @@
+c_custkey,  o_orderkey,  o_orderstatus
+-------------------------------
+1,  1,  O
+2,  2,  O
+3,  3,  F
+4,  ,  
+5,  ,  
+(5 rows, , 30 B selected)
diff --git a/tajo-core/src/test/resources/results/TestTajoCli/testSelectResultWithNullTrue.result b/tajo-core/src/test/resources/results/TestTajoCli/testSelectResultWithNullTrue.result
new file mode 100644
index 0000000..566262e
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestTajoCli/testSelectResultWithNullTrue.result
@@ -0,0 +1,8 @@
+c_custkey,  o_orderkey,  o_orderstatus
+-------------------------------
+1,  1,  O
+2,  2,  O
+3,  3,  F
+4,  testnull,  testnull
+5,  testnull,  testnull
+(5 rows, , 30 B selected)
diff --git a/tajo-core/src/test/resources/results/TestTajoCli/testStopWhenError.result b/tajo-core/src/test/resources/results/TestTajoCli/testStopWhenError.result
new file mode 100644
index 0000000..183e6c5
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestTajoCli/testStopWhenError.result
@@ -0,0 +1,5 @@
+?count
+-------------------------------
+5
+(1 rows, , 2 B selected)
+ERROR: relation "default.lineitem2" does not exist
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnion11.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion11.result
new file mode 100644
index 0000000..6e8d2cd
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion11.result
@@ -0,0 +1,3 @@
+col1,col2,col3
+-------------------------------
+R,46796.47,1993-11-24F
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnion12.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion12.result
new file mode 100644
index 0000000..c130afa
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion12.result
@@ -0,0 +1,6 @@
+col1,col2,col3
+-------------------------------
+R,46796.47,1993-11-24F
+PROMO BURNISHED COPPER,90100.0,1993goldenrod lavender spring chocolate lace
+LARGE BRUSHED BRASS,90200.0,1993blush thistle blue yellow saddle
+STANDARD POLISHED BRASS,90300.0,1993spring green yellow purple cornsilk
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnion13.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion13.result
new file mode 100644
index 0000000..c130afa
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion13.result
@@ -0,0 +1,6 @@
+col1,col2,col3
+-------------------------------
+R,46796.47,1993-11-24F
+PROMO BURNISHED COPPER,90100.0,1993goldenrod lavender spring chocolate lace
+LARGE BRUSHED BRASS,90200.0,1993blush thistle blue yellow saddle
+STANDARD POLISHED BRASS,90300.0,1993spring green yellow purple cornsilk
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnion14.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion14.result
new file mode 100644
index 0000000..3838ab4
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion14.result
@@ -0,0 +1,9 @@
+col1,cnt
+-------------------------------
+0,5
+1,5
+2,5
+3,5
+4,5
+N,3
+R,2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnion15.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion15.result
new file mode 100644
index 0000000..4e4d9e9
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion15.result
@@ -0,0 +1,5 @@
+col1,?sum
+-------------------------------
+3,5
+4,5
+N,3
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnion16.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion16.result
new file mode 100644
index 0000000..38be978
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnion16.result
@@ -0,0 +1,5 @@
+col1,?sum
+-------------------------------
+3,5
+4,5
+N,2
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnionWithDifferentAlias.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnionWithDifferentAlias.result
new file mode 100644
index 0000000..7415ec4
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnionWithDifferentAlias.result
@@ -0,0 +1,4 @@
+col1,col2
+-------------------------------
+N,O
+R,F
\ No newline at end of file
diff --git a/tajo-core/src/test/resources/results/TestUnionQuery/testUnionWithDifferentAliasAndFunction.result b/tajo-core/src/test/resources/results/TestUnionQuery/testUnionWithDifferentAliasAndFunction.result
new file mode 100644
index 0000000..7e6f6cd
--- /dev/null
+++ b/tajo-core/src/test/resources/results/TestUnionQuery/testUnionWithDifferentAliasAndFunction.result
@@ -0,0 +1,7 @@
+col1,col2
+-------------------------------
+N,O1996-03-13
+N,O1996-04-12
+N,O1997-01-28
+R1993-11-09,F
+R1994-02-02,F
\ No newline at end of file
diff --git a/tajo-dist/pom.xml b/tajo-dist/pom.xml
index c826c57..d893010 100644
--- a/tajo-dist/pom.xml
+++ b/tajo-dist/pom.xml
@@ -120,6 +120,14 @@
                       run cp -r $ROOT/tajo-jdbc/target/tajo-jdbc-${project.version}.jar ./share/jdbc-dist
                       run cp -r $ROOT/tajo-jdbc/target/lib/* ./share/jdbc-dist
 
+                      if [ -f $ROOT/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/target/lib/parquet-hive-bundle-*.jar ]
+                      then
+                      run cp -r $ROOT/tajo-catalog/tajo-catalog-drivers/tajo-hcatalog/target/lib/parquet-hive-bundle-*.jar lib/
+                      echo
+                      echo "Tajo installed parquet-hive-bundle library at: ${project.build.directory}/tajo-${project.version}"
+                      echo
+                      fi
+
                       echo
                       echo "Tajo dist layout available at: ${project.build.directory}/tajo-${project.version}"
                       echo
@@ -127,6 +135,7 @@
                     <exec executable="sh" dir="${project.build.directory}" failonerror="true">
                       <arg line="./dist-layout-stitching.sh"/>
                     </exec>
+                    <chmod dir="${project.build.directory}/tajo-${project.version}/bin" perm="ugo+rx" includes="**/*.sh,tajo,tajo-dump,tsql"/>
                   </target>
                 </configuration>
               </execution>
diff --git a/tajo-dist/src/main/bin/tajo-dump b/tajo-dist/src/main/bin/tajo-dump
old mode 100644
new mode 100755
diff --git a/tajo-docs/src/main/sphinx/configuration/catalog_configuration.rst b/tajo-docs/src/main/sphinx/configuration/catalog_configuration.rst
index d6e8186..b8a77d2 100644
--- a/tajo-docs/src/main/sphinx/configuration/catalog_configuration.rst
+++ b/tajo-docs/src/main/sphinx/configuration/catalog_configuration.rst
@@ -13,6 +13,8 @@
 | tajo.catalog.store.DerbyStore     | this storage class uses Apache Derby.          |
 +-----------------------------------+------------------------------------------------+
 | tajo.catalog.store.MySQLStore     | this storage class uses MySQL.                 |
+++-----------------------------------+------------------------------------------------+
+| tajo.catalog.store.MariaDBStore   | this storage class uses MariaDB.               |
 +-----------------------------------+------------------------------------------------+
 | tajo.catalog.store.MemStore       | this is the in-memory storage. It is only used |
 |                                   | in unit tests to shorten the duration of unit  |
@@ -63,12 +65,34 @@
     <name>tajo.catalog.jdbc.connection.password</name>
     <value><mysql user password></value>
   </property>
-    <property>
+  <property>
     <name>tajo.catalog.jdbc.uri</name>
     <value>jdbc:mysql://<mysql host name>:<mysql port>/<database name for tajo>?createDatabaseIfNotExist=true</value>
   </property>
 
 
+===========================
+MariaDBStore Configuration
+===========================
+
+All configurations for using MariaDBStore is compatible with MySQLStore except following:
+
+.. code-block:: sh
+
+  export TAJO_CLASSPATH=/usr/local/mariadb/lib/mariadb-java-client-x.x.x.jar
+
+.. code-block:: xml
+
+  <property>
+    <name>tajo.catalog.store.class</name>
+    <value>org.apache.tajo.catalog.store.MariaDBStore</value>
+  </property>
+  <property>
+    <name>tajo.catalog.jdbc.uri</name>
+    <value>jdbc:mariadb://<mariadb host name>:<mariadb port>/<database name for tajo>?createDatabaseIfNotExist=true</value>
+  </property>
+
+
 ----------------------------------
   HCatalogStore Configuration
 ----------------------------------
@@ -105,4 +129,4 @@
   <property>
     <name>tajo.catalog.store.class</name>
     <value>org.apache.tajo.catalog.store.HCatalogStore</value>
-  </property>
\ No newline at end of file
+  </property>
diff --git a/tajo-project/pom.xml b/tajo-project/pom.xml
index 2167bd1..7c0da53 100644
--- a/tajo-project/pom.xml
+++ b/tajo-project/pom.xml
@@ -65,6 +65,16 @@
       <timezone></timezone>
     </developer>
     <developer>
+      <id>ahenrick</id>
+      <name>Alvin Derek Henrick</name>
+      <email>ahenrick@apache.org</email>
+      <organization></organization>
+      <roles>
+        <role>Committer</role>
+      </roles>
+      <timezone></timezone>
+    </developer>
+    <developer>
       <id>mattmann</id>
       <name>Chris Mattmann</name>
       <email>chris.a.mattmann@jpl.nasa.gov</email>
@@ -207,10 +217,6 @@
 
   <contributors>
     <contributor>
-      <name>Alvin Henrick</name>
-      <roles><role>Contributor</role></roles>
-    </contributor>
-    <contributor>
       <name>Camelia</name>
       <roles><role>Contributor</role></roles>
     </contributor>
@@ -245,6 +251,16 @@
       <roles><role>Contributor</role></roles>
     </contributor>
     <contributor>
+      <name>Jinhang Choi</name>
+      <organization>Korea University</organization>
+      <roles><role>Contributor</role></roles>
+    </contributor>
+    <contributor>
+      <name>Prafulla T.</name>
+      <organization></organization>
+      <roles><role>Contributor</role></roles>
+    </contributor>
+    <contributor>
       <name>Tae-kyeong Goh</name>
       <organization>Gruter</organization>
       <roles><role>Logo Designer</role></roles>
diff --git a/tajo-project/src/site/apt/index.apt b/tajo-project/src/site/apt/index.apt
index 75eea31..35f7a64 100644
--- a/tajo-project/src/site/apt/index.apt
+++ b/tajo-project/src/site/apt/index.apt
@@ -67,6 +67,12 @@
 
 News
 
+  * <<[2014-06-14]>> Hyunsik Choi had a talk at {{{https://www.campsite.org/bigdatacampla/event/531}Big Data Camp LA 2014}}. ({{{http://www.slideshare.net/gruter/tajo-la-bigdatacamp2014}Slide}})
+
+  * <<[2014-06-02]>> Hyunsik Choi had a talk at {{{http://hadoopsummit.org/san-jose/}Hadoop Summit North America 2014}}. ({{{http://www.slideshare.net/gruter/hadoop-summit-2014-query-optimization-and-jitbased-vectorized-execution-in-apache-tajo}Slide}})
+
+  * <<[2014-05-24]>> Alvin Derek Henrick was invited to a new committer.
+
   * <<[2014-04-01]>> Keuntae Park presented the real usecases of Apache Tajo in ApacheCon North America 2014. ({{{http://www.youtube.com/watch?v=m8THYeA8R7Q}Video}}, {{{http://events.linuxfoundation.org/sites/events/files/slides/ApacheCon_Keuntae_Park_0.pdf}Slide}})
 
   * <<[2014-04-01]>> Min Zhou was invited to a new committer.
diff --git a/tajo-project/src/site/markdown/irc.md b/tajo-project/src/site/markdown/irc.md
new file mode 100644
index 0000000..d1cbffe
--- /dev/null
+++ b/tajo-project/src/site/markdown/irc.md
@@ -0,0 +1,28 @@
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+
+## IRC (Internet Relay Chat) Communication
+
+IRC, or Internet Relay Chat, is often used as a real-time communication capability with open source projects. Apache Tajo team also uses IRC to immediate communication. You can ask questions or talk about all things Tajo to the channel. Tajo channel is at:
+
+ * tajo in [freenode.net] (http://freenode.net/)
+
+IRC software can be found for all operating systems. The [IRC clients comparison chart on Wikipedia] (http://en.wikipedia.org/wiki/Comparison_of_Internet_Relay_Chat_clients) can help you pick one for your operating system.
+
+You don't have to have a complex setup to use IRC. You can use the web client for Freenode, which doesn't require any download or setup. Just pick a nickname and join #tajo via http://webchat.freenode.net/?channels=tajo.
\ No newline at end of file
diff --git a/tajo-project/src/site/site.xml b/tajo-project/src/site/site.xml
index df08267..3ffd57e 100644
--- a/tajo-project/src/site/site.xml
+++ b/tajo-project/src/site/site.xml
@@ -97,6 +97,7 @@
       <item name="Wiki" href="http://cwiki.apache.org/confluence/display/TAJO/" />
       <item name="Team" href="team-list.html" />
       <item name="Mailing Lists" href="mailing-lists.html" />
+      <item name="IRC" href="irc.html" />
       <item name="Issue Tracker" href="https://issues.apache.org/jira/browse/TAJO" />
       <item name="Powered By" href="https://cwiki.apache.org/confluence/display/TAJO/Powered+By" />
       <item name="Presentations" href="https://cwiki.apache.org/confluence/display/TAJO/Presentations" />
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java b/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
index 652a8e9..17b9229 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/CSVFile.java
@@ -28,7 +28,6 @@
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.compress.*;
-import org.apache.tajo.catalog.CatalogConstants;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.TableMeta;
 import org.apache.tajo.catalog.proto.CatalogProtos;
@@ -287,14 +286,18 @@
     private boolean eof = false;
     private final byte[] nullChars;
     private SplitLineReader reader;
-    private ArrayList<Long> fileOffsets = new ArrayList<Long>();
-    private ArrayList<Integer> rowLengthList = new ArrayList<Integer>();
-    private ArrayList<Integer> startOffsets = new ArrayList<Integer>();
-    private NonSyncByteArrayOutputStream buffer = new NonSyncByteArrayOutputStream(DEFAULT_PAGE_SIZE);
+    private ArrayList<Long> fileOffsets;
+    private ArrayList<Integer> rowLengthList;
+    private ArrayList<Integer> startOffsets;
+    private NonSyncByteArrayOutputStream buffer;
     private SerializerDeserializer serde;
 
     @Override
     public void init() throws IOException {
+      fileOffsets = new ArrayList<Long>();
+      rowLengthList = new ArrayList<Integer>();
+      startOffsets = new ArrayList<Integer>();
+      buffer = new NonSyncByteArrayOutputStream(DEFAULT_PAGE_SIZE);
 
       // FileFragment information
       if(fs == null) {
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/MergeScanner.java b/tajo-storage/src/main/java/org/apache/tajo/storage/MergeScanner.java
index 0235ce9..8917f21 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/MergeScanner.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/MergeScanner.java
@@ -135,6 +135,7 @@
   public void close() throws IOException {
     if(currentScanner != null) {
       currentScanner.close();
+      currentScanner = null;
     }
     iterator = null;
     progress = 1.0f;
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/NullScanner.java b/tajo-storage/src/main/java/org/apache/tajo/storage/NullScanner.java
new file mode 100644
index 0000000..4cec67d
--- /dev/null
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/NullScanner.java
@@ -0,0 +1,62 @@
+package org.apache.tajo.storage; /**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.tajo.catalog.Schema;
+import org.apache.tajo.catalog.TableMeta;
+import org.apache.tajo.storage.fragment.FileFragment;
+
+import java.io.IOException;
+
+public class NullScanner extends FileScanner {
+  public NullScanner(Configuration conf, Schema schema, TableMeta meta, FileFragment fragment) {
+    super(conf, schema, meta, fragment);
+  }
+
+  @Override
+  public Tuple next() throws IOException {
+    progress = 1.0f;
+
+    return null;
+  }
+
+  @Override
+  public void reset() throws IOException {
+    progress = 0.0f;
+  }
+
+  @Override
+  public void close() throws IOException {
+    progress = 0.0f;
+  }
+
+  @Override
+  public boolean isProjectable() {
+    return false;
+  }
+
+  @Override
+  public boolean isSelectable() {
+    return true;
+  }
+
+  @Override
+  public boolean isSplittable() {
+    return true;
+  }
+}
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/RawFile.java b/tajo-storage/src/main/java/org/apache/tajo/storage/RawFile.java
index 22757b5..9677bca 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/RawFile.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/RawFile.java
@@ -62,7 +62,6 @@
     public RawFileScanner(Configuration conf, Schema schema, TableMeta meta, Path path) throws IOException {
       super(conf, schema, meta, null);
       this.path = path;
-      init();
     }
 
     @SuppressWarnings("unused")
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageManager.java b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageManager.java
index 1b852d4..4b23f4d 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/StorageManager.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/StorageManager.java
@@ -22,6 +22,7 @@
 import org.apache.tajo.catalog.TableMeta;
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.conf.TajoConf;
+import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.storage.fragment.Fragment;
 
 import java.io.IOException;
@@ -54,6 +55,16 @@
 
   @Override
   public Scanner getScanner(TableMeta meta, Schema schema, Fragment fragment, Schema target) throws IOException {
+    if (fragment instanceof FileFragment) {
+      FileFragment fileFragment = (FileFragment)fragment;
+      if (fileFragment.getEndKey() == 0) {
+        Scanner scanner = new NullScanner(conf, schema, meta, fileFragment);
+        scanner.setTarget(target.toArray());
+
+        return scanner;
+      }
+    }
+
     Scanner scanner;
 
     Class<? extends Scanner> scannerClass = getScannerClass(meta.getStoreType());
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/TableStatistics.java b/tajo-storage/src/main/java/org/apache/tajo/storage/TableStatistics.java
index cbee95e..ac9bd8a 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/TableStatistics.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/TableStatistics.java
@@ -18,6 +18,8 @@
 
 package org.apache.tajo.storage;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.tajo.catalog.Schema;
 import org.apache.tajo.catalog.statistics.ColumnStats;
 import org.apache.tajo.catalog.statistics.TableStats;
@@ -30,6 +32,7 @@
  * This class is not thread-safe.
  */
 public class TableStatistics {
+  private static final Log LOG = LogFactory.getLog(TableStatistics.class);
   private Schema schema;
   private Tuple minValues;
   private Tuple maxValues;
@@ -104,8 +107,18 @@
     for (int i = 0; i < schema.size(); i++) {
       columnStats = new ColumnStats(schema.getColumn(i));
       columnStats.setNumNulls(numNulls[i]);
-      columnStats.setMinValue(minValues.get(i));
-      columnStats.setMaxValue(maxValues.get(i));
+      if (minValues.get(i) == null || schema.getColumn(i).getDataType().getType() == minValues.get(i).type()) {
+        columnStats.setMinValue(minValues.get(i));
+      } else {
+        LOG.warn("Wrong statistics column type (" + minValues.get(i).type() +
+            ", expected=" + schema.getColumn(i).getDataType().getType() + ")");
+      }
+      if (minValues.get(i) == null || schema.getColumn(i).getDataType().getType() == minValues.get(i).type()) {
+        columnStats.setMaxValue(maxValues.get(i));
+      } else {
+        LOG.warn("Wrong statistics column type (" + minValues.get(i).type() +
+            ", expected=" + schema.getColumn(i).getDataType().getType() + ")");
+      }
       stat.addColumnStat(columnStats);
     }
 
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/TextSerializerDeserializer.java b/tajo-storage/src/main/java/org/apache/tajo/storage/TextSerializerDeserializer.java
index 41ee720..ad732c7 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/TextSerializerDeserializer.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/TextSerializerDeserializer.java
@@ -22,6 +22,7 @@
 import org.apache.commons.codec.binary.Base64;
 import org.apache.tajo.catalog.Column;
 import org.apache.tajo.common.TajoDataTypes;
+import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.datum.*;
 import org.apache.tajo.datum.protobuf.ProtobufJsonFormat;
 import org.apache.tajo.util.Bytes;
@@ -77,13 +78,21 @@
       case FLOAT8:
       case INET4:
       case DATE:
-      case TIME:
-      case TIMESTAMP:
       case INTERVAL:
         bytes = datum.asTextBytes();
         length = bytes.length;
         out.write(bytes);
         break;
+      case TIME:
+        bytes = ((TimeDatum)datum).asChars(TajoConf.getCurrentTimeZone(), true).getBytes();
+        length = bytes.length;
+        out.write(bytes);
+        break;
+      case TIMESTAMP:
+        bytes = ((TimestampDatum)datum).asChars(TajoConf.getCurrentTimeZone(), true).getBytes();
+        length = bytes.length;
+        out.write(bytes);
+        break;
       case INET6:
       case BLOB:
         bytes = Base64.encodeBase64(datum.asByteArray(), false);
@@ -158,7 +167,7 @@
         break;
       case TIMESTAMP:
         datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get()
-            : DatumFactory.createTimeStamp(new String(bytes, offset, length));
+            : DatumFactory.createTimestamp(new String(bytes, offset, length));
         break;
       case INTERVAL:
         datum = isNull(bytes, offset, length, nullCharacters) ? NullDatum.get()
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/TupleComparator.java b/tajo-storage/src/main/java/org/apache/tajo/storage/TupleComparator.java
index 30f2810..51388a4 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/TupleComparator.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/TupleComparator.java
@@ -58,7 +58,11 @@
     this.asc = new boolean[sortKeys.length];
     this.nullFirsts = new boolean[sortKeys.length];
     for (int i = 0; i < sortKeys.length; i++) {
-      this.sortKeyIds[i] = schema.getColumnId(sortKeys[i].getSortKey().getQualifiedName());
+      if (sortKeys[i].getSortKey().hasQualifier()) {
+        this.sortKeyIds[i] = schema.getColumnId(sortKeys[i].getSortKey().getQualifiedName());
+      } else {
+        this.sortKeyIds[i] = schema.getColumnIdByName(sortKeys[i].getSortKey().getSimpleName());
+      }
           
       this.asc[i] = sortKeys[i].isAscending();
       this.nullFirsts[i]= sortKeys[i].isNullFirst();
@@ -160,4 +164,18 @@
 
     return builder.build();
   }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+
+    String prefix = "";
+    for (int i = 0; i < sortKeyIds.length; i++) {
+      sb.append(prefix).append("SortKeyId=").append(sortKeyIds[i])
+        .append(",Asc=").append(asc[i])
+        .append(",NullFirst=").append(nullFirsts[i]);
+      prefix = " ,";
+    }
+    return sb.toString();
+  }
 }
\ No newline at end of file
diff --git a/tajo-storage/src/main/java/org/apache/tajo/storage/v2/StorageManagerV2.java b/tajo-storage/src/main/java/org/apache/tajo/storage/v2/StorageManagerV2.java
index cffff00..2fd4a99 100644
--- a/tajo-storage/src/main/java/org/apache/tajo/storage/v2/StorageManagerV2.java
+++ b/tajo-storage/src/main/java/org/apache/tajo/storage/v2/StorageManagerV2.java
@@ -25,7 +25,9 @@
 import org.apache.tajo.catalog.proto.CatalogProtos;
 import org.apache.tajo.conf.TajoConf;
 import org.apache.tajo.storage.AbstractStorageManager;
+import org.apache.tajo.storage.NullScanner;
 import org.apache.tajo.storage.Scanner;
+import org.apache.tajo.storage.fragment.FileFragment;
 import org.apache.tajo.storage.fragment.Fragment;
 
 import java.io.IOException;
@@ -72,6 +74,16 @@
 
   @Override
   public Scanner getScanner(TableMeta meta, Schema schema, Fragment fragment, Schema target) throws IOException {
+    if (fragment instanceof FileFragment) {
+      FileFragment fileFragment = (FileFragment)fragment;
+      if (fileFragment.getEndKey() == 0) {
+        Scanner scanner = new NullScanner(conf, schema, meta, fileFragment);
+        scanner.setTarget(target.toArray());
+
+        return scanner;
+      }
+    }
+
     Scanner scanner;
 
     Class<? extends Scanner> scannerClass = getScannerClass(meta.getStoreType());
diff --git a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
index 0b764cc..944aa5c 100644
--- a/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
+++ b/tajo-storage/src/test/java/org/apache/tajo/storage/TestStorages.java
@@ -683,7 +683,7 @@
       tuple.put(new Datum[]{
           DatumFactory.createDate("1980-04-01"),
           DatumFactory.createTime("12:34:56"),
-          DatumFactory.createTimeStamp((int) System.currentTimeMillis() / 1000)
+          DatumFactory.createTimestmpDatumWithUnixTime((int)(System.currentTimeMillis() / 1000))
       });
       appender.addTuple(tuple);
       appender.flush();